merge with stable
Matt Mackall, r22207:8dda6f6f (merge, branch: default)
@@ -1,400 +1,403 @@
1 # bundlerepo.py - repository class for viewing uncompressed bundles
1 # bundlerepo.py - repository class for viewing uncompressed bundles
2 #
2 #
3 # Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Repository class for viewing uncompressed bundles.
8 """Repository class for viewing uncompressed bundles.
9
9
10 This provides a read-only repository interface to bundles as if they
10 This provides a read-only repository interface to bundles as if they
11 were part of the actual repository.
11 were part of the actual repository.
12 """
12 """
13
13
14 from node import nullid
14 from node import nullid
15 from i18n import _
15 from i18n import _
16 import os, tempfile, shutil
16 import os, tempfile, shutil
17 import changegroup, util, mdiff, discovery, cmdutil, scmutil, exchange
17 import changegroup, util, mdiff, discovery, cmdutil, scmutil, exchange
18 import localrepo, changelog, manifest, filelog, revlog, error
18 import localrepo, changelog, manifest, filelog, revlog, error
19
19
20 class bundlerevlog(revlog.revlog):
20 class bundlerevlog(revlog.revlog):
21 def __init__(self, opener, indexfile, bundle, linkmapper):
21 def __init__(self, opener, indexfile, bundle, linkmapper):
22 # How it works:
22 # How it works:
23 # To retrieve a revision, we need to know the offset of the revision in
23 # To retrieve a revision, we need to know the offset of the revision in
24 # the bundle (an unbundle object). We store this offset in the index
24 # the bundle (an unbundle object). We store this offset in the index
25 # (start). The base of the delta is stored in the base field.
25 # (start). The base of the delta is stored in the base field.
26 #
26 #
27 # To differentiate a rev in the bundle from a rev in the revlog, we
27 # To differentiate a rev in the bundle from a rev in the revlog, we
28 # check revision against repotiprev.
28 # check revision against repotiprev.
29 opener = scmutil.readonlyvfs(opener)
29 opener = scmutil.readonlyvfs(opener)
30 revlog.revlog.__init__(self, opener, indexfile)
30 revlog.revlog.__init__(self, opener, indexfile)
31 self.bundle = bundle
31 self.bundle = bundle
32 n = len(self)
32 n = len(self)
33 self.repotiprev = n - 1
33 self.repotiprev = n - 1
34 chain = None
34 chain = None
35 self.bundlerevs = set() # used by 'bundle()' revset expression
35 self.bundlerevs = set() # used by 'bundle()' revset expression
36 while True:
36 while True:
37 chunkdata = bundle.deltachunk(chain)
37 chunkdata = bundle.deltachunk(chain)
38 if not chunkdata:
38 if not chunkdata:
39 break
39 break
40 node = chunkdata['node']
40 node = chunkdata['node']
41 p1 = chunkdata['p1']
41 p1 = chunkdata['p1']
42 p2 = chunkdata['p2']
42 p2 = chunkdata['p2']
43 cs = chunkdata['cs']
43 cs = chunkdata['cs']
44 deltabase = chunkdata['deltabase']
44 deltabase = chunkdata['deltabase']
45 delta = chunkdata['delta']
45 delta = chunkdata['delta']
46
46
47 size = len(delta)
47 size = len(delta)
48 start = bundle.tell() - size
48 start = bundle.tell() - size
49
49
50 link = linkmapper(cs)
50 link = linkmapper(cs)
51 if node in self.nodemap:
51 if node in self.nodemap:
52 # this can happen if two branches make the same change
52 # this can happen if two branches make the same change
53 chain = node
53 chain = node
54 self.bundlerevs.add(self.nodemap[node])
54 self.bundlerevs.add(self.nodemap[node])
55 continue
55 continue
56
56
57 for p in (p1, p2):
57 for p in (p1, p2):
58 if p not in self.nodemap:
58 if p not in self.nodemap:
59 raise error.LookupError(p, self.indexfile,
59 raise error.LookupError(p, self.indexfile,
60 _("unknown parent"))
60 _("unknown parent"))
61
61
62 if deltabase not in self.nodemap:
62 if deltabase not in self.nodemap:
63 raise LookupError(deltabase, self.indexfile,
63 raise LookupError(deltabase, self.indexfile,
64 _('unknown delta base'))
64 _('unknown delta base'))
65
65
66 baserev = self.rev(deltabase)
66 baserev = self.rev(deltabase)
67 # start, size, full unc. size, base (unused), link, p1, p2, node
67 # start, size, full unc. size, base (unused), link, p1, p2, node
68 e = (revlog.offset_type(start, 0), size, -1, baserev, link,
68 e = (revlog.offset_type(start, 0), size, -1, baserev, link,
69 self.rev(p1), self.rev(p2), node)
69 self.rev(p1), self.rev(p2), node)
70 self.index.insert(-1, e)
70 self.index.insert(-1, e)
71 self.nodemap[node] = n
71 self.nodemap[node] = n
72 self.bundlerevs.add(n)
72 self.bundlerevs.add(n)
73 chain = node
73 chain = node
74 n += 1
74 n += 1
75
75
76 def _chunk(self, rev):
76 def _chunk(self, rev):
77 # Warning: in case of bundle, the diff is against what we stored as
77 # Warning: in case of bundle, the diff is against what we stored as
78 # delta base, not against rev - 1
78 # delta base, not against rev - 1
79 # XXX: could use some caching
79 # XXX: could use some caching
80 if rev <= self.repotiprev:
80 if rev <= self.repotiprev:
81 return revlog.revlog._chunk(self, rev)
81 return revlog.revlog._chunk(self, rev)
82 self.bundle.seek(self.start(rev))
82 self.bundle.seek(self.start(rev))
83 return self.bundle.read(self.length(rev))
83 return self.bundle.read(self.length(rev))
84
84
85 def revdiff(self, rev1, rev2):
85 def revdiff(self, rev1, rev2):
86 """return or calculate a delta between two revisions"""
86 """return or calculate a delta between two revisions"""
87 if rev1 > self.repotiprev and rev2 > self.repotiprev:
87 if rev1 > self.repotiprev and rev2 > self.repotiprev:
88 # hot path for bundle
88 # hot path for bundle
89 revb = self.index[rev2][3]
89 revb = self.index[rev2][3]
90 if revb == rev1:
90 if revb == rev1:
91 return self._chunk(rev2)
91 return self._chunk(rev2)
92 elif rev1 <= self.repotiprev and rev2 <= self.repotiprev:
92 elif rev1 <= self.repotiprev and rev2 <= self.repotiprev:
93 return revlog.revlog.revdiff(self, rev1, rev2)
93 return revlog.revlog.revdiff(self, rev1, rev2)
94
94
95 return mdiff.textdiff(self.revision(self.node(rev1)),
95 return mdiff.textdiff(self.revision(self.node(rev1)),
96 self.revision(self.node(rev2)))
96 self.revision(self.node(rev2)))
97
97
98 def revision(self, nodeorrev):
98 def revision(self, nodeorrev):
99 """return an uncompressed revision of a given node or revision
99 """return an uncompressed revision of a given node or revision
100 number.
100 number.
101 """
101 """
102 if isinstance(nodeorrev, int):
102 if isinstance(nodeorrev, int):
103 rev = nodeorrev
103 rev = nodeorrev
104 node = self.node(rev)
104 node = self.node(rev)
105 else:
105 else:
106 node = nodeorrev
106 node = nodeorrev
107 rev = self.rev(node)
107 rev = self.rev(node)
108
108
109 if node == nullid:
109 if node == nullid:
110 return ""
110 return ""
111
111
112 text = None
112 text = None
113 chain = []
113 chain = []
114 iterrev = rev
114 iterrev = rev
115 # reconstruct the revision if it is from a changegroup
115 # reconstruct the revision if it is from a changegroup
116 while iterrev > self.repotiprev:
116 while iterrev > self.repotiprev:
117 if self._cache and self._cache[1] == iterrev:
117 if self._cache and self._cache[1] == iterrev:
118 text = self._cache[2]
118 text = self._cache[2]
119 break
119 break
120 chain.append(iterrev)
120 chain.append(iterrev)
121 iterrev = self.index[iterrev][3]
121 iterrev = self.index[iterrev][3]
122 if text is None:
122 if text is None:
123 text = self.baserevision(iterrev)
123 text = self.baserevision(iterrev)
124
124
125 while chain:
125 while chain:
126 delta = self._chunk(chain.pop())
126 delta = self._chunk(chain.pop())
127 text = mdiff.patches(text, [delta])
127 text = mdiff.patches(text, [delta])
128
128
129 self._checkhash(text, node, rev)
129 self._checkhash(text, node, rev)
130 self._cache = (node, rev, text)
130 self._cache = (node, rev, text)
131 return text
131 return text
132
132
133 def baserevision(self, nodeorrev):
133 def baserevision(self, nodeorrev):
134 # Revlog subclasses may override 'revision' method to modify format of
134 # Revlog subclasses may override 'revision' method to modify format of
135 # content retrieved from revlog. To use bundlerevlog with such class one
135 # content retrieved from revlog. To use bundlerevlog with such class one
136 # needs to override 'baserevision' and make more specific call here.
136 # needs to override 'baserevision' and make more specific call here.
137 return revlog.revlog.revision(self, nodeorrev)
137 return revlog.revlog.revision(self, nodeorrev)
138
138
139 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
139 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
140 raise NotImplementedError
140 raise NotImplementedError
141 def addgroup(self, revs, linkmapper, transaction):
141 def addgroup(self, revs, linkmapper, transaction):
142 raise NotImplementedError
142 raise NotImplementedError
143 def strip(self, rev, minlink):
143 def strip(self, rev, minlink):
144 raise NotImplementedError
144 raise NotImplementedError
145 def checksize(self):
145 def checksize(self):
146 raise NotImplementedError
146 raise NotImplementedError
147
147
148 class bundlechangelog(bundlerevlog, changelog.changelog):
148 class bundlechangelog(bundlerevlog, changelog.changelog):
149 def __init__(self, opener, bundle):
149 def __init__(self, opener, bundle):
150 changelog.changelog.__init__(self, opener)
150 changelog.changelog.__init__(self, opener)
151 linkmapper = lambda x: x
151 linkmapper = lambda x: x
152 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
152 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
153 linkmapper)
153 linkmapper)
154
154
155 def baserevision(self, nodeorrev):
155 def baserevision(self, nodeorrev):
156 # Although changelog doesn't override 'revision' method, some extensions
156 # Although changelog doesn't override 'revision' method, some extensions
157 # may replace this class with another that does. Same story with
157 # may replace this class with another that does. Same story with
158 # manifest and filelog classes.
158 # manifest and filelog classes.
159 return changelog.changelog.revision(self, nodeorrev)
159 return changelog.changelog.revision(self, nodeorrev)
160
160
161 class bundlemanifest(bundlerevlog, manifest.manifest):
161 class bundlemanifest(bundlerevlog, manifest.manifest):
162 def __init__(self, opener, bundle, linkmapper):
162 def __init__(self, opener, bundle, linkmapper):
163 manifest.manifest.__init__(self, opener)
163 manifest.manifest.__init__(self, opener)
164 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
164 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
165 linkmapper)
165 linkmapper)
166
166
167 def baserevision(self, nodeorrev):
167 def baserevision(self, nodeorrev):
168 return manifest.manifest.revision(self, nodeorrev)
168 return manifest.manifest.revision(self, nodeorrev)
169
169
170 class bundlefilelog(bundlerevlog, filelog.filelog):
170 class bundlefilelog(bundlerevlog, filelog.filelog):
171 def __init__(self, opener, path, bundle, linkmapper, repo):
171 def __init__(self, opener, path, bundle, linkmapper, repo):
172 filelog.filelog.__init__(self, opener, path)
172 filelog.filelog.__init__(self, opener, path)
173 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
173 bundlerevlog.__init__(self, opener, self.indexfile, bundle,
174 linkmapper)
174 linkmapper)
175 self._repo = repo
175 self._repo = repo
176
176
177 def baserevision(self, nodeorrev):
177 def baserevision(self, nodeorrev):
178 return filelog.filelog.revision(self, nodeorrev)
178 return filelog.filelog.revision(self, nodeorrev)
179
179
180 def _file(self, f):
180 def _file(self, f):
181 self._repo.file(f)
181 self._repo.file(f)
182
182
183 class bundlepeer(localrepo.localpeer):
183 class bundlepeer(localrepo.localpeer):
184 def canpush(self):
184 def canpush(self):
185 return False
185 return False
186
186
187 class bundlerepository(localrepo.localrepository):
187 class bundlerepository(localrepo.localrepository):
188 def __init__(self, ui, path, bundlename):
188 def __init__(self, ui, path, bundlename):
189 self._tempparent = None
189 self._tempparent = None
190 try:
190 try:
191 localrepo.localrepository.__init__(self, ui, path)
191 localrepo.localrepository.__init__(self, ui, path)
192 except error.RepoError:
192 except error.RepoError:
193 self._tempparent = tempfile.mkdtemp()
193 self._tempparent = tempfile.mkdtemp()
194 localrepo.instance(ui, self._tempparent, 1)
194 localrepo.instance(ui, self._tempparent, 1)
195 localrepo.localrepository.__init__(self, ui, self._tempparent)
195 localrepo.localrepository.__init__(self, ui, self._tempparent)
196 self.ui.setconfig('phases', 'publish', False, 'bundlerepo')
196 self.ui.setconfig('phases', 'publish', False, 'bundlerepo')
197
197
198 if path:
198 if path:
199 self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename
199 self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename
200 else:
200 else:
201 self._url = 'bundle:' + bundlename
201 self._url = 'bundle:' + bundlename
202
202
203 self.tempfile = None
203 self.tempfile = None
204 f = util.posixfile(bundlename, "rb")
204 f = util.posixfile(bundlename, "rb")
205 self.bundle = exchange.readbundle(ui, f, bundlename)
205 self.bundle = exchange.readbundle(ui, f, bundlename)
206 if self.bundle.compressed():
206 if self.bundle.compressed():
207 fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
207 fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
208 suffix=".hg10un")
208 suffix=".hg10un")
209 self.tempfile = temp
209 self.tempfile = temp
210 fptemp = os.fdopen(fdtemp, 'wb')
210 fptemp = os.fdopen(fdtemp, 'wb')
211
211
212 try:
212 try:
213 fptemp.write("HG10UN")
213 fptemp.write("HG10UN")
214 while True:
214 while True:
215 chunk = self.bundle.read(2**18)
215 chunk = self.bundle.read(2**18)
216 if not chunk:
216 if not chunk:
217 break
217 break
218 fptemp.write(chunk)
218 fptemp.write(chunk)
219 finally:
219 finally:
220 fptemp.close()
220 fptemp.close()
221
221
222 f = self.vfs.open(self.tempfile, mode="rb")
222 f = self.vfs.open(self.tempfile, mode="rb")
223 self.bundle = exchange.readbundle(ui, f, bundlename, self.vfs)
223 self.bundle = exchange.readbundle(ui, f, bundlename, self.vfs)
224
224
225 # dict with the mapping 'filename' -> position in the bundle
225 # dict with the mapping 'filename' -> position in the bundle
226 self.bundlefilespos = {}
226 self.bundlefilespos = {}
227
227
228 @localrepo.unfilteredpropertycache
228 @localrepo.unfilteredpropertycache
229 def changelog(self):
229 def changelog(self):
230 # consume the header if it exists
230 # consume the header if it exists
231 self.bundle.changelogheader()
231 self.bundle.changelogheader()
232 c = bundlechangelog(self.sopener, self.bundle)
232 c = bundlechangelog(self.sopener, self.bundle)
233 self.manstart = self.bundle.tell()
233 self.manstart = self.bundle.tell()
234 return c
234 return c
235
235
236 @localrepo.unfilteredpropertycache
236 @localrepo.unfilteredpropertycache
237 def manifest(self):
237 def manifest(self):
238 self.bundle.seek(self.manstart)
238 self.bundle.seek(self.manstart)
239 # consume the header if it exists
239 # consume the header if it exists
240 self.bundle.manifestheader()
240 self.bundle.manifestheader()
241 m = bundlemanifest(self.sopener, self.bundle, self.changelog.rev)
241 m = bundlemanifest(self.sopener, self.bundle, self.changelog.rev)
242 self.filestart = self.bundle.tell()
242 self.filestart = self.bundle.tell()
243 return m
243 return m
244
244
245 @localrepo.unfilteredpropertycache
245 @localrepo.unfilteredpropertycache
246 def manstart(self):
246 def manstart(self):
247 self.changelog
247 self.changelog
248 return self.manstart
248 return self.manstart
249
249
250 @localrepo.unfilteredpropertycache
250 @localrepo.unfilteredpropertycache
251 def filestart(self):
251 def filestart(self):
252 self.manifest
252 self.manifest
253 return self.filestart
253 return self.filestart
254
254
255 def url(self):
255 def url(self):
256 return self._url
256 return self._url
257
257
258 def file(self, f):
258 def file(self, f):
259 if not self.bundlefilespos:
259 if not self.bundlefilespos:
260 self.bundle.seek(self.filestart)
260 self.bundle.seek(self.filestart)
261 while True:
261 while True:
262 chunkdata = self.bundle.filelogheader()
262 chunkdata = self.bundle.filelogheader()
263 if not chunkdata:
263 if not chunkdata:
264 break
264 break
265 fname = chunkdata['filename']
265 fname = chunkdata['filename']
266 self.bundlefilespos[fname] = self.bundle.tell()
266 self.bundlefilespos[fname] = self.bundle.tell()
267 while True:
267 while True:
268 c = self.bundle.deltachunk(None)
268 c = self.bundle.deltachunk(None)
269 if not c:
269 if not c:
270 break
270 break
271
271
272 if f in self.bundlefilespos:
272 if f in self.bundlefilespos:
273 self.bundle.seek(self.bundlefilespos[f])
273 self.bundle.seek(self.bundlefilespos[f])
274 return bundlefilelog(self.sopener, f, self.bundle,
274 return bundlefilelog(self.sopener, f, self.bundle,
275 self.changelog.rev, self)
275 self.changelog.rev, self)
276 else:
276 else:
277 return filelog.filelog(self.sopener, f)
277 return filelog.filelog(self.sopener, f)
278
278
279 def close(self):
279 def close(self):
280 """Close assigned bundle file immediately."""
280 """Close assigned bundle file immediately."""
281 self.bundle.close()
281 self.bundle.close()
282 if self.tempfile is not None:
282 if self.tempfile is not None:
283 self.vfs.unlink(self.tempfile)
283 self.vfs.unlink(self.tempfile)
284 if self._tempparent:
284 if self._tempparent:
285 shutil.rmtree(self._tempparent, True)
285 shutil.rmtree(self._tempparent, True)
286
286
287 def cancopy(self):
287 def cancopy(self):
288 return False
288 return False
289
289
290 def peer(self):
290 def peer(self):
291 return bundlepeer(self)
291 return bundlepeer(self)
292
292
293 def getcwd(self):
293 def getcwd(self):
294 return os.getcwd() # always outside the repo
294 return os.getcwd() # always outside the repo
295
295
296
296
297 def instance(ui, path, create):
297 def instance(ui, path, create):
298 if create:
298 if create:
299 raise util.Abort(_('cannot create new bundle repository'))
299 raise util.Abort(_('cannot create new bundle repository'))
300 parentpath = ui.config("bundle", "mainreporoot", "")
300 parentpath = ui.config("bundle", "mainreporoot", "")
301 if not parentpath:
301 if not parentpath:
302 # try to find the correct path to the working directory repo
302 # try to find the correct path to the working directory repo
303 parentpath = cmdutil.findrepo(os.getcwd())
303 parentpath = cmdutil.findrepo(os.getcwd())
304 if parentpath is None:
304 if parentpath is None:
305 parentpath = ''
305 parentpath = ''
306 if parentpath:
306 if parentpath:
307 # Try to make the full path relative so we get a nice, short URL.
307 # Try to make the full path relative so we get a nice, short URL.
308 # In particular, we don't want temp dir names in test outputs.
308 # In particular, we don't want temp dir names in test outputs.
309 cwd = os.getcwd()
309 cwd = os.getcwd()
310 if parentpath == cwd:
310 if parentpath == cwd:
311 parentpath = ''
311 parentpath = ''
312 else:
312 else:
313 cwd = os.path.join(cwd,'')
313 cwd = os.path.join(cwd,'')
314 if parentpath.startswith(cwd):
314 if parentpath.startswith(cwd):
315 parentpath = parentpath[len(cwd):]
315 parentpath = parentpath[len(cwd):]
316 u = util.url(path)
316 u = util.url(path)
317 path = u.localpath()
317 path = u.localpath()
318 if u.scheme == 'bundle':
318 if u.scheme == 'bundle':
319 s = path.split("+", 1)
319 s = path.split("+", 1)
320 if len(s) == 1:
320 if len(s) == 1:
321 repopath, bundlename = parentpath, s[0]
321 repopath, bundlename = parentpath, s[0]
322 else:
322 else:
323 repopath, bundlename = s
323 repopath, bundlename = s
324 else:
324 else:
325 repopath, bundlename = parentpath, path
325 repopath, bundlename = parentpath, path
326 return bundlerepository(ui, repopath, bundlename)
326 return bundlerepository(ui, repopath, bundlename)
327
327
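# A minimal sketch, with hypothetical paths, of how the local part of a
# 'bundle:' URL handled by instance() above splits into (repopath, bundlename);
# the parentpath/findrepo fallback is omitted here for brevity.
parsed = []
for path in ('/home/user/repo+/tmp/incoming.hg', '/tmp/incoming.hg'):
    s = path.split("+", 1)
    if len(s) == 1:
        # no parent repo in the URL itself; instance() falls back to parentpath
        parsed.append(('', s[0]))
    else:
        parsed.append(tuple(s))
assert parsed == [('/home/user/repo', '/tmp/incoming.hg'),
                  ('', '/tmp/incoming.hg')]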
328 def getremotechanges(ui, repo, other, onlyheads=None, bundlename=None,
328 def getremotechanges(ui, repo, other, onlyheads=None, bundlename=None,
329 force=False):
329 force=False):
330 '''obtains a bundle of changes incoming from other
330 '''obtains a bundle of changes incoming from other
331
331
332 "onlyheads" restricts the returned changes to those reachable from the
332 "onlyheads" restricts the returned changes to those reachable from the
333 specified heads.
333 specified heads.
334 "bundlename", if given, stores the bundle to this file path permanently;
334 "bundlename", if given, stores the bundle to this file path permanently;
335 otherwise it's stored to a temp file and gets deleted again when you call
335 otherwise it's stored to a temp file and gets deleted again when you call
336 the returned "cleanupfn".
336 the returned "cleanupfn".
337 "force" indicates whether to proceed on unrelated repos.
337 "force" indicates whether to proceed on unrelated repos.
338
338
339 Returns a tuple (local, csets, cleanupfn):
339 Returns a tuple (local, csets, cleanupfn):
340
340
341 "local" is a local repo from which to obtain the actual incoming
341 "local" is a local repo from which to obtain the actual incoming
342 changesets; it is a bundlerepo for the obtained bundle when the
342 changesets; it is a bundlerepo for the obtained bundle when the
343 original "other" is remote.
343 original "other" is remote.
344 "csets" lists the incoming changeset node ids.
344 "csets" lists the incoming changeset node ids.
345 "cleanupfn" must be called without arguments when you're done processing
345 "cleanupfn" must be called without arguments when you're done processing
346 the changes; it closes both the original "other" and the one returned
346 the changes; it closes both the original "other" and the one returned
347 here.
347 here.
348 '''
348 '''
349 tmp = discovery.findcommonincoming(repo, other, heads=onlyheads,
349 tmp = discovery.findcommonincoming(repo, other, heads=onlyheads,
350 force=force)
350 force=force)
351 common, incoming, rheads = tmp
351 common, incoming, rheads = tmp
352 if not incoming:
352 if not incoming:
353 try:
353 try:
354 if bundlename:
354 if bundlename:
355 os.unlink(bundlename)
355 os.unlink(bundlename)
356 except OSError:
356 except OSError:
357 pass
357 pass
358 return repo, [], other.close
358 return repo, [], other.close
359
359
360 commonset = set(common)
361 rheads = [x for x in rheads if x not in commonset]
362
360 bundle = None
363 bundle = None
361 bundlerepo = None
364 bundlerepo = None
362 localrepo = other.local()
365 localrepo = other.local()
363 if bundlename or not localrepo:
366 if bundlename or not localrepo:
364 # create a bundle (uncompressed if other repo is not local)
367 # create a bundle (uncompressed if other repo is not local)
365
368
366 if other.capable('getbundle'):
369 if other.capable('getbundle'):
367 cg = other.getbundle('incoming', common=common, heads=rheads)
370 cg = other.getbundle('incoming', common=common, heads=rheads)
368 elif onlyheads is None and not other.capable('changegroupsubset'):
371 elif onlyheads is None and not other.capable('changegroupsubset'):
369 # compat with older servers when pulling all remote heads
372 # compat with older servers when pulling all remote heads
370 cg = other.changegroup(incoming, "incoming")
373 cg = other.changegroup(incoming, "incoming")
371 rheads = None
374 rheads = None
372 else:
375 else:
373 cg = other.changegroupsubset(incoming, rheads, 'incoming')
376 cg = other.changegroupsubset(incoming, rheads, 'incoming')
374 bundletype = localrepo and "HG10BZ" or "HG10UN"
377 bundletype = localrepo and "HG10BZ" or "HG10UN"
375 fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
378 fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
376 # keep written bundle?
379 # keep written bundle?
377 if bundlename:
380 if bundlename:
378 bundle = None
381 bundle = None
379 if not localrepo:
382 if not localrepo:
380 # use the created uncompressed bundlerepo
383 # use the created uncompressed bundlerepo
381 localrepo = bundlerepo = bundlerepository(repo.baseui, repo.root,
384 localrepo = bundlerepo = bundlerepository(repo.baseui, repo.root,
382 fname)
385 fname)
383 # this repo contains local and other now, so filter out local again
386 # this repo contains local and other now, so filter out local again
384 common = repo.heads()
387 common = repo.heads()
385 if localrepo:
388 if localrepo:
386 # Part of common may be remotely filtered
389 # Part of common may be remotely filtered
387 # So use an unfiltered version
390 # So use an unfiltered version
388 # The discovery process probably need cleanup to avoid that
391 # The discovery process probably need cleanup to avoid that
389 localrepo = localrepo.unfiltered()
392 localrepo = localrepo.unfiltered()
390
393
391 csets = localrepo.changelog.findmissing(common, rheads)
394 csets = localrepo.changelog.findmissing(common, rheads)
392
395
393 def cleanup():
396 def cleanup():
394 if bundlerepo:
397 if bundlerepo:
395 bundlerepo.close()
398 bundlerepo.close()
396 if bundle:
399 if bundle:
397 os.unlink(bundle)
400 os.unlink(bundle)
398 other.close()
401 other.close()
399
402
400 return (localrepo, csets, cleanup)
403 return (localrepo, csets, cleanup)
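The only change to bundlerepo.py in this merge is the new commonset filter in
getremotechanges(): remote heads that discovery already reports as common are
dropped before a changegroup is requested. A minimal, self-contained sketch of
that filtering step, using hypothetical short ids in place of real binary node
hashes::

    # 'common' stands in for the nodes both repositories already share,
    # 'rheads' for the remote heads returned by findcommonincoming().
    common = ['a1', 'b2', 'c3']
    rheads = ['b2', 'd4', 'c3', 'e5']

    # A set gives O(1) membership tests, so build it once ...
    commonset = set(common)
    # ... and keep only the heads that still need to be pulled.
    rheads = [x for x in rheads if x not in commonset]

    assert rheads == ['d4', 'e5']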
@@ -1,6060 +1,6063 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _
10 from i18n import _
11 import os, re, difflib, time, tempfile, errno, shlex
11 import os, re, difflib, time, tempfile, errno, shlex
12 import sys
12 import sys
13 import hg, scmutil, util, revlog, copies, error, bookmarks
13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 import patch, help, encoding, templatekw, discovery
14 import patch, help, encoding, templatekw, discovery
15 import archival, changegroup, cmdutil, hbisect
15 import archival, changegroup, cmdutil, hbisect
16 import sshserver, hgweb, commandserver
16 import sshserver, hgweb, commandserver
17 import extensions
17 import extensions
18 from hgweb import server as hgweb_server
18 from hgweb import server as hgweb_server
19 import merge as mergemod
19 import merge as mergemod
20 import minirst, revset, fileset
20 import minirst, revset, fileset
21 import dagparser, context, simplemerge, graphmod
21 import dagparser, context, simplemerge, graphmod
22 import random
22 import random
23 import setdiscovery, treediscovery, dagutil, pvec, localrepo
23 import setdiscovery, treediscovery, dagutil, pvec, localrepo
24 import phases, obsolete, exchange
24 import phases, obsolete, exchange
25
25
26 table = {}
26 table = {}
27
27
28 command = cmdutil.command(table)
28 command = cmdutil.command(table)
29
29
30 # Space delimited list of commands that don't require local repositories.
30 # Space delimited list of commands that don't require local repositories.
31 # This should be populated by passing norepo=True into the @command decorator.
31 # This should be populated by passing norepo=True into the @command decorator.
32 norepo = ''
32 norepo = ''
33 # Space delimited list of commands that optionally require local repositories.
33 # Space delimited list of commands that optionally require local repositories.
34 # This should be populated by passing optionalrepo=True into the @command
34 # This should be populated by passing optionalrepo=True into the @command
35 # decorator.
35 # decorator.
36 optionalrepo = ''
36 optionalrepo = ''
37 # Space delimited list of commands that will examine arguments looking for
37 # Space delimited list of commands that will examine arguments looking for
38 # a repository. This should be populated by passing inferrepo=True into the
38 # a repository. This should be populated by passing inferrepo=True into the
39 # @command decorator.
39 # @command decorator.
40 inferrepo = ''
40 inferrepo = ''
41
41
42 # common command options
42 # common command options
43
43
44 globalopts = [
44 globalopts = [
45 ('R', 'repository', '',
45 ('R', 'repository', '',
46 _('repository root directory or name of overlay bundle file'),
46 _('repository root directory or name of overlay bundle file'),
47 _('REPO')),
47 _('REPO')),
48 ('', 'cwd', '',
48 ('', 'cwd', '',
49 _('change working directory'), _('DIR')),
49 _('change working directory'), _('DIR')),
50 ('y', 'noninteractive', None,
50 ('y', 'noninteractive', None,
51 _('do not prompt, automatically pick the first choice for all prompts')),
51 _('do not prompt, automatically pick the first choice for all prompts')),
52 ('q', 'quiet', None, _('suppress output')),
52 ('q', 'quiet', None, _('suppress output')),
53 ('v', 'verbose', None, _('enable additional output')),
53 ('v', 'verbose', None, _('enable additional output')),
54 ('', 'config', [],
54 ('', 'config', [],
55 _('set/override config option (use \'section.name=value\')'),
55 _('set/override config option (use \'section.name=value\')'),
56 _('CONFIG')),
56 _('CONFIG')),
57 ('', 'debug', None, _('enable debugging output')),
57 ('', 'debug', None, _('enable debugging output')),
58 ('', 'debugger', None, _('start debugger')),
58 ('', 'debugger', None, _('start debugger')),
59 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
59 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
60 _('ENCODE')),
60 _('ENCODE')),
61 ('', 'encodingmode', encoding.encodingmode,
61 ('', 'encodingmode', encoding.encodingmode,
62 _('set the charset encoding mode'), _('MODE')),
62 _('set the charset encoding mode'), _('MODE')),
63 ('', 'traceback', None, _('always print a traceback on exception')),
63 ('', 'traceback', None, _('always print a traceback on exception')),
64 ('', 'time', None, _('time how long the command takes')),
64 ('', 'time', None, _('time how long the command takes')),
65 ('', 'profile', None, _('print command execution profile')),
65 ('', 'profile', None, _('print command execution profile')),
66 ('', 'version', None, _('output version information and exit')),
66 ('', 'version', None, _('output version information and exit')),
67 ('h', 'help', None, _('display help and exit')),
67 ('h', 'help', None, _('display help and exit')),
68 ('', 'hidden', False, _('consider hidden changesets')),
68 ('', 'hidden', False, _('consider hidden changesets')),
69 ]
69 ]
70
70
71 dryrunopts = [('n', 'dry-run', None,
71 dryrunopts = [('n', 'dry-run', None,
72 _('do not perform actions, just print output'))]
72 _('do not perform actions, just print output'))]
73
73
74 remoteopts = [
74 remoteopts = [
75 ('e', 'ssh', '',
75 ('e', 'ssh', '',
76 _('specify ssh command to use'), _('CMD')),
76 _('specify ssh command to use'), _('CMD')),
77 ('', 'remotecmd', '',
77 ('', 'remotecmd', '',
78 _('specify hg command to run on the remote side'), _('CMD')),
78 _('specify hg command to run on the remote side'), _('CMD')),
79 ('', 'insecure', None,
79 ('', 'insecure', None,
80 _('do not verify server certificate (ignoring web.cacerts config)')),
80 _('do not verify server certificate (ignoring web.cacerts config)')),
81 ]
81 ]
82
82
83 walkopts = [
83 walkopts = [
84 ('I', 'include', [],
84 ('I', 'include', [],
85 _('include names matching the given patterns'), _('PATTERN')),
85 _('include names matching the given patterns'), _('PATTERN')),
86 ('X', 'exclude', [],
86 ('X', 'exclude', [],
87 _('exclude names matching the given patterns'), _('PATTERN')),
87 _('exclude names matching the given patterns'), _('PATTERN')),
88 ]
88 ]
89
89
90 commitopts = [
90 commitopts = [
91 ('m', 'message', '',
91 ('m', 'message', '',
92 _('use text as commit message'), _('TEXT')),
92 _('use text as commit message'), _('TEXT')),
93 ('l', 'logfile', '',
93 ('l', 'logfile', '',
94 _('read commit message from file'), _('FILE')),
94 _('read commit message from file'), _('FILE')),
95 ]
95 ]
96
96
97 commitopts2 = [
97 commitopts2 = [
98 ('d', 'date', '',
98 ('d', 'date', '',
99 _('record the specified date as commit date'), _('DATE')),
99 _('record the specified date as commit date'), _('DATE')),
100 ('u', 'user', '',
100 ('u', 'user', '',
101 _('record the specified user as committer'), _('USER')),
101 _('record the specified user as committer'), _('USER')),
102 ]
102 ]
103
103
104 templateopts = [
104 templateopts = [
105 ('', 'style', '',
105 ('', 'style', '',
106 _('display using template map file (DEPRECATED)'), _('STYLE')),
106 _('display using template map file (DEPRECATED)'), _('STYLE')),
107 ('T', 'template', '',
107 ('T', 'template', '',
108 _('display with template'), _('TEMPLATE')),
108 _('display with template'), _('TEMPLATE')),
109 ]
109 ]
110
110
111 logopts = [
111 logopts = [
112 ('p', 'patch', None, _('show patch')),
112 ('p', 'patch', None, _('show patch')),
113 ('g', 'git', None, _('use git extended diff format')),
113 ('g', 'git', None, _('use git extended diff format')),
114 ('l', 'limit', '',
114 ('l', 'limit', '',
115 _('limit number of changes displayed'), _('NUM')),
115 _('limit number of changes displayed'), _('NUM')),
116 ('M', 'no-merges', None, _('do not show merges')),
116 ('M', 'no-merges', None, _('do not show merges')),
117 ('', 'stat', None, _('output diffstat-style summary of changes')),
117 ('', 'stat', None, _('output diffstat-style summary of changes')),
118 ('G', 'graph', None, _("show the revision DAG")),
118 ('G', 'graph', None, _("show the revision DAG")),
119 ] + templateopts
119 ] + templateopts
120
120
121 diffopts = [
121 diffopts = [
122 ('a', 'text', None, _('treat all files as text')),
122 ('a', 'text', None, _('treat all files as text')),
123 ('g', 'git', None, _('use git extended diff format')),
123 ('g', 'git', None, _('use git extended diff format')),
124 ('', 'nodates', None, _('omit dates from diff headers'))
124 ('', 'nodates', None, _('omit dates from diff headers'))
125 ]
125 ]
126
126
127 diffwsopts = [
127 diffwsopts = [
128 ('w', 'ignore-all-space', None,
128 ('w', 'ignore-all-space', None,
129 _('ignore white space when comparing lines')),
129 _('ignore white space when comparing lines')),
130 ('b', 'ignore-space-change', None,
130 ('b', 'ignore-space-change', None,
131 _('ignore changes in the amount of white space')),
131 _('ignore changes in the amount of white space')),
132 ('B', 'ignore-blank-lines', None,
132 ('B', 'ignore-blank-lines', None,
133 _('ignore changes whose lines are all blank')),
133 _('ignore changes whose lines are all blank')),
134 ]
134 ]
135
135
136 diffopts2 = [
136 diffopts2 = [
137 ('p', 'show-function', None, _('show which function each change is in')),
137 ('p', 'show-function', None, _('show which function each change is in')),
138 ('', 'reverse', None, _('produce a diff that undoes the changes')),
138 ('', 'reverse', None, _('produce a diff that undoes the changes')),
139 ] + diffwsopts + [
139 ] + diffwsopts + [
140 ('U', 'unified', '',
140 ('U', 'unified', '',
141 _('number of lines of context to show'), _('NUM')),
141 _('number of lines of context to show'), _('NUM')),
142 ('', 'stat', None, _('output diffstat-style summary of changes')),
142 ('', 'stat', None, _('output diffstat-style summary of changes')),
143 ]
143 ]
144
144
145 mergetoolopts = [
145 mergetoolopts = [
146 ('t', 'tool', '', _('specify merge tool')),
146 ('t', 'tool', '', _('specify merge tool')),
147 ]
147 ]
148
148
149 similarityopts = [
149 similarityopts = [
150 ('s', 'similarity', '',
150 ('s', 'similarity', '',
151 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
151 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
152 ]
152 ]
153
153
154 subrepoopts = [
154 subrepoopts = [
155 ('S', 'subrepos', None,
155 ('S', 'subrepos', None,
156 _('recurse into subrepositories'))
156 _('recurse into subrepositories'))
157 ]
157 ]
158
158
159 # Commands start here, listed alphabetically
159 # Commands start here, listed alphabetically
160
160
161 @command('^add',
161 @command('^add',
162 walkopts + subrepoopts + dryrunopts,
162 walkopts + subrepoopts + dryrunopts,
163 _('[OPTION]... [FILE]...'),
163 _('[OPTION]... [FILE]...'),
164 inferrepo=True)
164 inferrepo=True)
165 def add(ui, repo, *pats, **opts):
165 def add(ui, repo, *pats, **opts):
166 """add the specified files on the next commit
166 """add the specified files on the next commit
167
167
168 Schedule files to be version controlled and added to the
168 Schedule files to be version controlled and added to the
169 repository.
169 repository.
170
170
171 The files will be added to the repository at the next commit. To
171 The files will be added to the repository at the next commit. To
172 undo an add before that, see :hg:`forget`.
172 undo an add before that, see :hg:`forget`.
173
173
174 If no names are given, add all files to the repository.
174 If no names are given, add all files to the repository.
175
175
176 .. container:: verbose
176 .. container:: verbose
177
177
178 An example showing how new (unknown) files are added
178 An example showing how new (unknown) files are added
179 automatically by :hg:`add`::
179 automatically by :hg:`add`::
180
180
181 $ ls
181 $ ls
182 foo.c
182 foo.c
183 $ hg status
183 $ hg status
184 ? foo.c
184 ? foo.c
185 $ hg add
185 $ hg add
186 adding foo.c
186 adding foo.c
187 $ hg status
187 $ hg status
188 A foo.c
188 A foo.c
189
189
190 Returns 0 if all files are successfully added.
190 Returns 0 if all files are successfully added.
191 """
191 """
192
192
193 m = scmutil.match(repo[None], pats, opts)
193 m = scmutil.match(repo[None], pats, opts)
194 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
194 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
195 opts.get('subrepos'), prefix="", explicitonly=False)
195 opts.get('subrepos'), prefix="", explicitonly=False)
196 return rejected and 1 or 0
196 return rejected and 1 or 0
197
197
198 @command('addremove',
198 @command('addremove',
199 similarityopts + walkopts + dryrunopts,
199 similarityopts + walkopts + dryrunopts,
200 _('[OPTION]... [FILE]...'),
200 _('[OPTION]... [FILE]...'),
201 inferrepo=True)
201 inferrepo=True)
202 def addremove(ui, repo, *pats, **opts):
202 def addremove(ui, repo, *pats, **opts):
203 """add all new files, delete all missing files
203 """add all new files, delete all missing files
204
204
205 Add all new files and remove all missing files from the
205 Add all new files and remove all missing files from the
206 repository.
206 repository.
207
207
208 New files are ignored if they match any of the patterns in
208 New files are ignored if they match any of the patterns in
209 ``.hgignore``. As with add, these changes take effect at the next
209 ``.hgignore``. As with add, these changes take effect at the next
210 commit.
210 commit.
211
211
212 Use the -s/--similarity option to detect renamed files. This
212 Use the -s/--similarity option to detect renamed files. This
213 option takes a percentage between 0 (disabled) and 100 (files must
213 option takes a percentage between 0 (disabled) and 100 (files must
214 be identical) as its parameter. With a parameter greater than 0,
214 be identical) as its parameter. With a parameter greater than 0,
215 this compares every removed file with every added file and records
215 this compares every removed file with every added file and records
216 those similar enough as renames. Detecting renamed files this way
216 those similar enough as renames. Detecting renamed files this way
217 can be expensive. After using this option, :hg:`status -C` can be
217 can be expensive. After using this option, :hg:`status -C` can be
218 used to check which files were identified as moved or renamed. If
218 used to check which files were identified as moved or renamed. If
219 not specified, -s/--similarity defaults to 100 and only renames of
219 not specified, -s/--similarity defaults to 100 and only renames of
220 identical files are detected.
220 identical files are detected.
221
221
222 Returns 0 if all files are successfully added.
222 Returns 0 if all files are successfully added.
223 """
223 """
224 try:
224 try:
225 sim = float(opts.get('similarity') or 100)
225 sim = float(opts.get('similarity') or 100)
226 except ValueError:
226 except ValueError:
227 raise util.Abort(_('similarity must be a number'))
227 raise util.Abort(_('similarity must be a number'))
228 if sim < 0 or sim > 100:
228 if sim < 0 or sim > 100:
229 raise util.Abort(_('similarity must be between 0 and 100'))
229 raise util.Abort(_('similarity must be between 0 and 100'))
230 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
230 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
231
231
232 @command('^annotate|blame',
232 @command('^annotate|blame',
233 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
233 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
234 ('', 'follow', None,
234 ('', 'follow', None,
235 _('follow copies/renames and list the filename (DEPRECATED)')),
235 _('follow copies/renames and list the filename (DEPRECATED)')),
236 ('', 'no-follow', None, _("don't follow copies and renames")),
236 ('', 'no-follow', None, _("don't follow copies and renames")),
237 ('a', 'text', None, _('treat all files as text')),
237 ('a', 'text', None, _('treat all files as text')),
238 ('u', 'user', None, _('list the author (long with -v)')),
238 ('u', 'user', None, _('list the author (long with -v)')),
239 ('f', 'file', None, _('list the filename')),
239 ('f', 'file', None, _('list the filename')),
240 ('d', 'date', None, _('list the date (short with -q)')),
240 ('d', 'date', None, _('list the date (short with -q)')),
241 ('n', 'number', None, _('list the revision number (default)')),
241 ('n', 'number', None, _('list the revision number (default)')),
242 ('c', 'changeset', None, _('list the changeset')),
242 ('c', 'changeset', None, _('list the changeset')),
243 ('l', 'line-number', None, _('show line number at the first appearance'))
243 ('l', 'line-number', None, _('show line number at the first appearance'))
244 ] + diffwsopts + walkopts,
244 ] + diffwsopts + walkopts,
245 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
245 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
246 inferrepo=True)
246 inferrepo=True)
247 def annotate(ui, repo, *pats, **opts):
247 def annotate(ui, repo, *pats, **opts):
248 """show changeset information by line for each file
248 """show changeset information by line for each file
249
249
250 List changes in files, showing the revision id responsible for
250 List changes in files, showing the revision id responsible for
251 each line
251 each line
252
252
253 This command is useful for discovering when a change was made and
253 This command is useful for discovering when a change was made and
254 by whom.
254 by whom.
255
255
256 Without the -a/--text option, annotate will avoid processing files
256 Without the -a/--text option, annotate will avoid processing files
257 it detects as binary. With -a, annotate will annotate the file
257 it detects as binary. With -a, annotate will annotate the file
258 anyway, although the results will probably be neither useful
258 anyway, although the results will probably be neither useful
259 nor desirable.
259 nor desirable.
260
260
261 Returns 0 on success.
261 Returns 0 on success.
262 """
262 """
263 if opts.get('follow'):
263 if opts.get('follow'):
264 # --follow is deprecated and now just an alias for -f/--file
264 # --follow is deprecated and now just an alias for -f/--file
265 # to mimic the behavior of Mercurial before version 1.5
265 # to mimic the behavior of Mercurial before version 1.5
266 opts['file'] = True
266 opts['file'] = True
267
267
268 datefunc = ui.quiet and util.shortdate or util.datestr
268 datefunc = ui.quiet and util.shortdate or util.datestr
269 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
269 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
270
270
271 if not pats:
271 if not pats:
272 raise util.Abort(_('at least one filename or pattern is required'))
272 raise util.Abort(_('at least one filename or pattern is required'))
273
273
274 hexfn = ui.debugflag and hex or short
274 hexfn = ui.debugflag and hex or short
275
275
276 opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
276 opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
277 ('number', ' ', lambda x: str(x[0].rev())),
277 ('number', ' ', lambda x: str(x[0].rev())),
278 ('changeset', ' ', lambda x: hexfn(x[0].node())),
278 ('changeset', ' ', lambda x: hexfn(x[0].node())),
279 ('date', ' ', getdate),
279 ('date', ' ', getdate),
280 ('file', ' ', lambda x: x[0].path()),
280 ('file', ' ', lambda x: x[0].path()),
281 ('line_number', ':', lambda x: str(x[1])),
281 ('line_number', ':', lambda x: str(x[1])),
282 ]
282 ]
283
283
284 if (not opts.get('user') and not opts.get('changeset')
284 if (not opts.get('user') and not opts.get('changeset')
285 and not opts.get('date') and not opts.get('file')):
285 and not opts.get('date') and not opts.get('file')):
286 opts['number'] = True
286 opts['number'] = True
287
287
288 linenumber = opts.get('line_number') is not None
288 linenumber = opts.get('line_number') is not None
289 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
289 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
290 raise util.Abort(_('at least one of -n/-c is required for -l'))
290 raise util.Abort(_('at least one of -n/-c is required for -l'))
291
291
292 funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
292 funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
293 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
293 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
294
294
295 def bad(x, y):
295 def bad(x, y):
296 raise util.Abort("%s: %s" % (x, y))
296 raise util.Abort("%s: %s" % (x, y))
297
297
298 ctx = scmutil.revsingle(repo, opts.get('rev'))
298 ctx = scmutil.revsingle(repo, opts.get('rev'))
299 m = scmutil.match(ctx, pats, opts)
299 m = scmutil.match(ctx, pats, opts)
300 m.bad = bad
300 m.bad = bad
301 follow = not opts.get('no_follow')
301 follow = not opts.get('no_follow')
302 diffopts = patch.diffopts(ui, opts, section='annotate')
302 diffopts = patch.diffopts(ui, opts, section='annotate')
303 for abs in ctx.walk(m):
303 for abs in ctx.walk(m):
304 fctx = ctx[abs]
304 fctx = ctx[abs]
305 if not opts.get('text') and util.binary(fctx.data()):
305 if not opts.get('text') and util.binary(fctx.data()):
306 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
306 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
307 continue
307 continue
308
308
309 lines = fctx.annotate(follow=follow, linenumber=linenumber,
309 lines = fctx.annotate(follow=follow, linenumber=linenumber,
310 diffopts=diffopts)
310 diffopts=diffopts)
311 pieces = []
311 pieces = []
312
312
313 for f, sep in funcmap:
313 for f, sep in funcmap:
314 l = [f(n) for n, dummy in lines]
314 l = [f(n) for n, dummy in lines]
315 if l:
315 if l:
316 sized = [(x, encoding.colwidth(x)) for x in l]
316 sized = [(x, encoding.colwidth(x)) for x in l]
317 ml = max([w for x, w in sized])
317 ml = max([w for x, w in sized])
318 pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
318 pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
319 for x, w in sized])
319 for x, w in sized])
320
320
321 if pieces:
321 if pieces:
322 for p, l in zip(zip(*pieces), lines):
322 for p, l in zip(zip(*pieces), lines):
323 ui.write("%s: %s" % ("".join(p), l[1]))
323 ui.write("%s: %s" % ("".join(p), l[1]))
324
324
325 if lines and not lines[-1][1].endswith('\n'):
325 if lines and not lines[-1][1].endswith('\n'):
326 ui.write('\n')
326 ui.write('\n')
327
327
328 @command('archive',
328 @command('archive',
329 [('', 'no-decode', None, _('do not pass files through decoders')),
329 [('', 'no-decode', None, _('do not pass files through decoders')),
330 ('p', 'prefix', '', _('directory prefix for files in archive'),
330 ('p', 'prefix', '', _('directory prefix for files in archive'),
331 _('PREFIX')),
331 _('PREFIX')),
332 ('r', 'rev', '', _('revision to distribute'), _('REV')),
332 ('r', 'rev', '', _('revision to distribute'), _('REV')),
333 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
333 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
334 ] + subrepoopts + walkopts,
334 ] + subrepoopts + walkopts,
335 _('[OPTION]... DEST'))
335 _('[OPTION]... DEST'))
336 def archive(ui, repo, dest, **opts):
336 def archive(ui, repo, dest, **opts):
337 '''create an unversioned archive of a repository revision
337 '''create an unversioned archive of a repository revision
338
338
339 By default, the revision used is the parent of the working
339 By default, the revision used is the parent of the working
340 directory; use -r/--rev to specify a different revision.
340 directory; use -r/--rev to specify a different revision.
341
341
342 The archive type is automatically detected based on file
342 The archive type is automatically detected based on file
343 extension (or override using -t/--type).
343 extension (or override using -t/--type).
344
344
345 .. container:: verbose
345 .. container:: verbose
346
346
347 Examples:
347 Examples:
348
348
349 - create a zip file containing the 1.0 release::
349 - create a zip file containing the 1.0 release::
350
350
351 hg archive -r 1.0 project-1.0.zip
351 hg archive -r 1.0 project-1.0.zip
352
352
353 - create a tarball excluding .hg files::
353 - create a tarball excluding .hg files::
354
354
355 hg archive project.tar.gz -X ".hg*"
355 hg archive project.tar.gz -X ".hg*"
356
356
357 Valid types are:
357 Valid types are:
358
358
359 :``files``: a directory full of files (default)
359 :``files``: a directory full of files (default)
360 :``tar``: tar archive, uncompressed
360 :``tar``: tar archive, uncompressed
361 :``tbz2``: tar archive, compressed using bzip2
361 :``tbz2``: tar archive, compressed using bzip2
362 :``tgz``: tar archive, compressed using gzip
362 :``tgz``: tar archive, compressed using gzip
363 :``uzip``: zip archive, uncompressed
363 :``uzip``: zip archive, uncompressed
364 :``zip``: zip archive, compressed using deflate
364 :``zip``: zip archive, compressed using deflate
365
365
366 The exact name of the destination archive or directory is given
366 The exact name of the destination archive or directory is given
367 using a format string; see :hg:`help export` for details.
367 using a format string; see :hg:`help export` for details.
368
368
369 Each member added to an archive file has a directory prefix
369 Each member added to an archive file has a directory prefix
370 prepended. Use -p/--prefix to specify a format string for the
370 prepended. Use -p/--prefix to specify a format string for the
371 prefix. The default is the basename of the archive, with suffixes
371 prefix. The default is the basename of the archive, with suffixes
372 removed.
372 removed.
373
373
374 Returns 0 on success.
374 Returns 0 on success.
375 '''
375 '''
376
376
377 ctx = scmutil.revsingle(repo, opts.get('rev'))
377 ctx = scmutil.revsingle(repo, opts.get('rev'))
378 if not ctx:
378 if not ctx:
379 raise util.Abort(_('no working directory: please specify a revision'))
379 raise util.Abort(_('no working directory: please specify a revision'))
380 node = ctx.node()
380 node = ctx.node()
381 dest = cmdutil.makefilename(repo, dest, node)
381 dest = cmdutil.makefilename(repo, dest, node)
382 if os.path.realpath(dest) == repo.root:
382 if os.path.realpath(dest) == repo.root:
383 raise util.Abort(_('repository root cannot be destination'))
383 raise util.Abort(_('repository root cannot be destination'))
384
384
385 kind = opts.get('type') or archival.guesskind(dest) or 'files'
385 kind = opts.get('type') or archival.guesskind(dest) or 'files'
386 prefix = opts.get('prefix')
386 prefix = opts.get('prefix')
387
387
388 if dest == '-':
388 if dest == '-':
389 if kind == 'files':
389 if kind == 'files':
390 raise util.Abort(_('cannot archive plain files to stdout'))
390 raise util.Abort(_('cannot archive plain files to stdout'))
391 dest = cmdutil.makefileobj(repo, dest)
391 dest = cmdutil.makefileobj(repo, dest)
392 if not prefix:
392 if not prefix:
393 prefix = os.path.basename(repo.root) + '-%h'
393 prefix = os.path.basename(repo.root) + '-%h'
394
394
395 prefix = cmdutil.makefilename(repo, prefix, node)
395 prefix = cmdutil.makefilename(repo, prefix, node)
396 matchfn = scmutil.match(ctx, [], opts)
396 matchfn = scmutil.match(ctx, [], opts)
397 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
397 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
398 matchfn, prefix, subrepos=opts.get('subrepos'))
398 matchfn, prefix, subrepos=opts.get('subrepos'))
399
399
400 @command('backout',
400 @command('backout',
401 [('', 'merge', None, _('merge with old dirstate parent after backout')),
401 [('', 'merge', None, _('merge with old dirstate parent after backout')),
402 ('', 'parent', '',
402 ('', 'parent', '',
403 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
403 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
404 ('r', 'rev', '', _('revision to backout'), _('REV')),
404 ('r', 'rev', '', _('revision to backout'), _('REV')),
405 ('e', 'edit', False, _('invoke editor on commit messages')),
405 ('e', 'edit', False, _('invoke editor on commit messages')),
406 ] + mergetoolopts + walkopts + commitopts + commitopts2,
406 ] + mergetoolopts + walkopts + commitopts + commitopts2,
407 _('[OPTION]... [-r] REV'))
407 _('[OPTION]... [-r] REV'))
408 def backout(ui, repo, node=None, rev=None, **opts):
408 def backout(ui, repo, node=None, rev=None, **opts):
409 '''reverse effect of earlier changeset
409 '''reverse effect of earlier changeset
410
410
411 Prepare a new changeset with the effect of REV undone in the
411 Prepare a new changeset with the effect of REV undone in the
412 current working directory.
412 current working directory.
413
413
414 If REV is the parent of the working directory, then this new changeset
414 If REV is the parent of the working directory, then this new changeset
415 is committed automatically. Otherwise, hg needs to merge the
415 is committed automatically. Otherwise, hg needs to merge the
416 changes and the merged result is left uncommitted.
416 changes and the merged result is left uncommitted.
417
417
418 .. note::
418 .. note::
419
419
420 backout cannot be used to fix either an unwanted or
420 backout cannot be used to fix either an unwanted or
421 incorrect merge.
421 incorrect merge.
422
422
423 .. container:: verbose
423 .. container:: verbose
424
424
425 By default, the pending changeset will have one parent,
425 By default, the pending changeset will have one parent,
426 maintaining a linear history. With --merge, the pending
426 maintaining a linear history. With --merge, the pending
427 changeset will instead have two parents: the old parent of the
427 changeset will instead have two parents: the old parent of the
428 working directory and a new child of REV that simply undoes REV.
428 working directory and a new child of REV that simply undoes REV.
429
429
430 Before version 1.7, the behavior without --merge was equivalent
430 Before version 1.7, the behavior without --merge was equivalent
431 to specifying --merge followed by :hg:`update --clean .` to
431 to specifying --merge followed by :hg:`update --clean .` to
432 cancel the merge and leave the child of REV as a head to be
432 cancel the merge and leave the child of REV as a head to be
433 merged separately.
433 merged separately.
434
434
435 See :hg:`help dates` for a list of formats valid for -d/--date.
435 See :hg:`help dates` for a list of formats valid for -d/--date.
436
436
437 Returns 0 on success, 1 if nothing to backout or there are unresolved
437 Returns 0 on success, 1 if nothing to backout or there are unresolved
438 files.
438 files.
439 '''
439 '''
440 if rev and node:
440 if rev and node:
441 raise util.Abort(_("please specify just one revision"))
441 raise util.Abort(_("please specify just one revision"))
442
442
443 if not rev:
443 if not rev:
444 rev = node
444 rev = node
445
445
446 if not rev:
446 if not rev:
447 raise util.Abort(_("please specify a revision to backout"))
447 raise util.Abort(_("please specify a revision to backout"))
448
448
449 date = opts.get('date')
449 date = opts.get('date')
450 if date:
450 if date:
451 opts['date'] = util.parsedate(date)
451 opts['date'] = util.parsedate(date)
452
452
453 cmdutil.checkunfinished(repo)
453 cmdutil.checkunfinished(repo)
454 cmdutil.bailifchanged(repo)
454 cmdutil.bailifchanged(repo)
455 node = scmutil.revsingle(repo, rev).node()
455 node = scmutil.revsingle(repo, rev).node()
456
456
457 op1, op2 = repo.dirstate.parents()
457 op1, op2 = repo.dirstate.parents()
458 if node not in repo.changelog.commonancestorsheads(op1, node):
458 if node not in repo.changelog.commonancestorsheads(op1, node):
459 raise util.Abort(_('cannot backout change that is not an ancestor'))
459 raise util.Abort(_('cannot backout change that is not an ancestor'))
460
460
461 p1, p2 = repo.changelog.parents(node)
461 p1, p2 = repo.changelog.parents(node)
462 if p1 == nullid:
462 if p1 == nullid:
463 raise util.Abort(_('cannot backout a change with no parents'))
463 raise util.Abort(_('cannot backout a change with no parents'))
464 if p2 != nullid:
464 if p2 != nullid:
465 if not opts.get('parent'):
465 if not opts.get('parent'):
466 raise util.Abort(_('cannot backout a merge changeset'))
466 raise util.Abort(_('cannot backout a merge changeset'))
467 p = repo.lookup(opts['parent'])
467 p = repo.lookup(opts['parent'])
468 if p not in (p1, p2):
468 if p not in (p1, p2):
469 raise util.Abort(_('%s is not a parent of %s') %
469 raise util.Abort(_('%s is not a parent of %s') %
470 (short(p), short(node)))
470 (short(p), short(node)))
471 parent = p
471 parent = p
472 else:
472 else:
473 if opts.get('parent'):
473 if opts.get('parent'):
474 raise util.Abort(_('cannot use --parent on non-merge changeset'))
474 raise util.Abort(_('cannot use --parent on non-merge changeset'))
475 parent = p1
475 parent = p1
476
476
477 # the backout should appear on the same branch
477 # the backout should appear on the same branch
478 wlock = repo.wlock()
478 wlock = repo.wlock()
479 try:
479 try:
480 branch = repo.dirstate.branch()
480 branch = repo.dirstate.branch()
481 bheads = repo.branchheads(branch)
481 bheads = repo.branchheads(branch)
482 rctx = scmutil.revsingle(repo, hex(parent))
482 rctx = scmutil.revsingle(repo, hex(parent))
483 if not opts.get('merge') and op1 != node:
483 if not opts.get('merge') and op1 != node:
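# (editorial note, not in the original file) this branch merges the reverse
# of 'node' into the working directory, using 'node' itself as the merge
# ancestor, then restores the original dirstate parents below so the pending
# commit remains a single child of the old working-directory parent.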
484 try:
484 try:
485 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
485 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
486 'backout')
486 'backout')
487 stats = mergemod.update(repo, parent, True, True, False,
487 stats = mergemod.update(repo, parent, True, True, False,
488 node, False)
488 node, False)
489 repo.setparents(op1, op2)
489 repo.setparents(op1, op2)
490 hg._showstats(repo, stats)
490 hg._showstats(repo, stats)
491 if stats[3]:
491 if stats[3]:
492 repo.ui.status(_("use 'hg resolve' to retry unresolved "
492 repo.ui.status(_("use 'hg resolve' to retry unresolved "
493 "file merges\n"))
493 "file merges\n"))
494 else:
494 else:
495 msg = _("changeset %s backed out, "
495 msg = _("changeset %s backed out, "
496 "don't forget to commit.\n")
496 "don't forget to commit.\n")
497 ui.status(msg % short(node))
497 ui.status(msg % short(node))
498 return stats[3] > 0
498 return stats[3] > 0
499 finally:
499 finally:
500 ui.setconfig('ui', 'forcemerge', '', '')
500 ui.setconfig('ui', 'forcemerge', '', '')
501 else:
501 else:
502 hg.clean(repo, node, show_stats=False)
502 hg.clean(repo, node, show_stats=False)
503 repo.dirstate.setbranch(branch)
503 repo.dirstate.setbranch(branch)
504 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
504 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
505
505
506
506
507 def commitfunc(ui, repo, message, match, opts):
507 def commitfunc(ui, repo, message, match, opts):
508 editform = 'backout'
508 editform = 'backout'
509 e = cmdutil.getcommiteditor(editform=editform, **opts)
509 e = cmdutil.getcommiteditor(editform=editform, **opts)
510 if not message:
510 if not message:
511 # we don't translate commit messages
511 # we don't translate commit messages
512 message = "Backed out changeset %s" % short(node)
512 message = "Backed out changeset %s" % short(node)
513 e = cmdutil.getcommiteditor(edit=True, editform=editform)
513 e = cmdutil.getcommiteditor(edit=True, editform=editform)
514 return repo.commit(message, opts.get('user'), opts.get('date'),
514 return repo.commit(message, opts.get('user'), opts.get('date'),
515 match, editor=e)
515 match, editor=e)
516 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
516 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
517 if not newnode:
517 if not newnode:
518 ui.status(_("nothing changed\n"))
518 ui.status(_("nothing changed\n"))
519 return 1
519 return 1
520 cmdutil.commitstatus(repo, newnode, branch, bheads)
520 cmdutil.commitstatus(repo, newnode, branch, bheads)
521
521
522 def nice(node):
522 def nice(node):
523 return '%d:%s' % (repo.changelog.rev(node), short(node))
523 return '%d:%s' % (repo.changelog.rev(node), short(node))
524 ui.status(_('changeset %s backs out changeset %s\n') %
524 ui.status(_('changeset %s backs out changeset %s\n') %
525 (nice(repo.changelog.tip()), nice(node)))
525 (nice(repo.changelog.tip()), nice(node)))
526 if opts.get('merge') and op1 != node:
526 if opts.get('merge') and op1 != node:
527 hg.clean(repo, op1, show_stats=False)
527 hg.clean(repo, op1, show_stats=False)
528 ui.status(_('merging with changeset %s\n')
528 ui.status(_('merging with changeset %s\n')
529 % nice(repo.changelog.tip()))
529 % nice(repo.changelog.tip()))
530 try:
530 try:
531 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
531 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
532 'backout')
532 'backout')
533 return hg.merge(repo, hex(repo.changelog.tip()))
533 return hg.merge(repo, hex(repo.changelog.tip()))
534 finally:
534 finally:
535 ui.setconfig('ui', 'forcemerge', '', '')
535 ui.setconfig('ui', 'forcemerge', '', '')
536 finally:
536 finally:
537 wlock.release()
537 wlock.release()
538 return 0
538 return 0
539
539
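# Editorial illustration of the two backout styles documented above, using a
# hypothetical revision 42:
#
#   hg backout -r 42             # pending changeset keeps a single parent
#   hg backout --merge -r 42     # pending changeset merges the old working
#                                # directory parent with the new child of 42
#
# Per the docstring, the command returns 0 on success and 1 if there was
# nothing to back out or unresolved files remain.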
540 @command('bisect',
540 @command('bisect',
541 [('r', 'reset', False, _('reset bisect state')),
541 [('r', 'reset', False, _('reset bisect state')),
542 ('g', 'good', False, _('mark changeset good')),
542 ('g', 'good', False, _('mark changeset good')),
543 ('b', 'bad', False, _('mark changeset bad')),
543 ('b', 'bad', False, _('mark changeset bad')),
544 ('s', 'skip', False, _('skip testing changeset')),
544 ('s', 'skip', False, _('skip testing changeset')),
545 ('e', 'extend', False, _('extend the bisect range')),
545 ('e', 'extend', False, _('extend the bisect range')),
546 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
546 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
547 ('U', 'noupdate', False, _('do not update to target'))],
547 ('U', 'noupdate', False, _('do not update to target'))],
548 _("[-gbsr] [-U] [-c CMD] [REV]"))
548 _("[-gbsr] [-U] [-c CMD] [REV]"))
549 def bisect(ui, repo, rev=None, extra=None, command=None,
549 def bisect(ui, repo, rev=None, extra=None, command=None,
550 reset=None, good=None, bad=None, skip=None, extend=None,
550 reset=None, good=None, bad=None, skip=None, extend=None,
551 noupdate=None):
551 noupdate=None):
552 """subdivision search of changesets
552 """subdivision search of changesets
553
553
554 This command helps to find changesets which introduce problems. To
554 This command helps to find changesets which introduce problems. To
555 use, mark the earliest changeset you know exhibits the problem as
555 use, mark the earliest changeset you know exhibits the problem as
556 bad, then mark the latest changeset which is free from the problem
556 bad, then mark the latest changeset which is free from the problem
557 as good. Bisect will update your working directory to a revision
557 as good. Bisect will update your working directory to a revision
558 for testing (unless the -U/--noupdate option is specified). Once
558 for testing (unless the -U/--noupdate option is specified). Once
559 you have performed tests, mark the working directory as good or
559 you have performed tests, mark the working directory as good or
560 bad, and bisect will either update to another candidate changeset
560 bad, and bisect will either update to another candidate changeset
561 or announce that it has found the bad revision.
561 or announce that it has found the bad revision.
562
562
563 As a shortcut, you can also use the revision argument to mark a
563 As a shortcut, you can also use the revision argument to mark a
564 revision as good or bad without checking it out first.
564 revision as good or bad without checking it out first.
565
565
566 If you supply a command, it will be used for automatic bisection.
566 If you supply a command, it will be used for automatic bisection.
567 The environment variable HG_NODE will contain the ID of the
567 The environment variable HG_NODE will contain the ID of the
568 changeset being tested. The exit status of the command will be
568 changeset being tested. The exit status of the command will be
569 used to mark revisions as good or bad: status 0 means good, 125
569 used to mark revisions as good or bad: status 0 means good, 125
570 means to skip the revision, 127 (command not found) will abort the
570 means to skip the revision, 127 (command not found) will abort the
571 bisection, and any other non-zero exit status means the revision
571 bisection, and any other non-zero exit status means the revision
572 is bad.
572 is bad.
573
573
574 .. container:: verbose
574 .. container:: verbose
575
575
576 Some examples:
576 Some examples:
577
577
578 - start a bisection with known bad revision 34, and good revision 12::
578 - start a bisection with known bad revision 34, and good revision 12::
579
579
580 hg bisect --bad 34
580 hg bisect --bad 34
581 hg bisect --good 12
581 hg bisect --good 12
582
582
583 - advance the current bisection by marking current revision as good or
583 - advance the current bisection by marking current revision as good or
584 bad::
584 bad::
585
585
586 hg bisect --good
586 hg bisect --good
587 hg bisect --bad
587 hg bisect --bad
588
588
589 - mark the current revision, or a known revision, to be skipped (e.g. if
589 - mark the current revision, or a known revision, to be skipped (e.g. if
590 that revision is not usable because of another issue)::
590 that revision is not usable because of another issue)::
591
591
592 hg bisect --skip
592 hg bisect --skip
593 hg bisect --skip 23
593 hg bisect --skip 23
594
594
595 - skip all revisions that do not touch directories ``foo`` or ``bar``::
595 - skip all revisions that do not touch directories ``foo`` or ``bar``::
596
596
597 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
597 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
598
598
599 - forget the current bisection::
599 - forget the current bisection::
600
600
601 hg bisect --reset
601 hg bisect --reset
602
602
603 - use 'make && make tests' to automatically find the first broken
603 - use 'make && make tests' to automatically find the first broken
604 revision::
604 revision::
605
605
606 hg bisect --reset
606 hg bisect --reset
607 hg bisect --bad 34
607 hg bisect --bad 34
608 hg bisect --good 12
608 hg bisect --good 12
609 hg bisect --command "make && make tests"
609 hg bisect --command "make && make tests"
610
610
611 - see all changesets whose states are already known in the current
611 - see all changesets whose states are already known in the current
612 bisection::
612 bisection::
613
613
614 hg log -r "bisect(pruned)"
614 hg log -r "bisect(pruned)"
615
615
616 - see the changeset currently being bisected (especially useful
616 - see the changeset currently being bisected (especially useful
617 if running with -U/--noupdate)::
617 if running with -U/--noupdate)::
618
618
619 hg log -r "bisect(current)"
619 hg log -r "bisect(current)"
620
620
621 - see all changesets that took part in the current bisection::
621 - see all changesets that took part in the current bisection::
622
622
623 hg log -r "bisect(range)"
623 hg log -r "bisect(range)"
624
624
625 - you can even get a nice graph::
625 - you can even get a nice graph::
626
626
627 hg log --graph -r "bisect(range)"
627 hg log --graph -r "bisect(range)"
628
628
629 See :hg:`help revsets` for more about the `bisect()` keyword.
629 See :hg:`help revsets` for more about the `bisect()` keyword.
630
630
631 Returns 0 on success.
631 Returns 0 on success.
632 """
632 """
633 def extendbisectrange(nodes, good):
633 def extendbisectrange(nodes, good):
634 # bisect is incomplete when it ends on a merge node and
634 # bisect is incomplete when it ends on a merge node and
635 # one of the parents was not checked.
635 # one of the parents was not checked.
636 parents = repo[nodes[0]].parents()
636 parents = repo[nodes[0]].parents()
637 if len(parents) > 1:
637 if len(parents) > 1:
638 side = good and state['bad'] or state['good']
638 side = good and state['bad'] or state['good']
639 num = len(set(i.node() for i in parents) & set(side))
639 num = len(set(i.node() for i in parents) & set(side))
640 if num == 1:
640 if num == 1:
641 return parents[0].ancestor(parents[1])
641 return parents[0].ancestor(parents[1])
642 return None
642 return None
643
643
644 def print_result(nodes, good):
644 def print_result(nodes, good):
645 displayer = cmdutil.show_changeset(ui, repo, {})
645 displayer = cmdutil.show_changeset(ui, repo, {})
646 if len(nodes) == 1:
646 if len(nodes) == 1:
647 # narrowed it down to a single revision
647 # narrowed it down to a single revision
648 if good:
648 if good:
649 ui.write(_("The first good revision is:\n"))
649 ui.write(_("The first good revision is:\n"))
650 else:
650 else:
651 ui.write(_("The first bad revision is:\n"))
651 ui.write(_("The first bad revision is:\n"))
652 displayer.show(repo[nodes[0]])
652 displayer.show(repo[nodes[0]])
653 extendnode = extendbisectrange(nodes, good)
653 extendnode = extendbisectrange(nodes, good)
654 if extendnode is not None:
654 if extendnode is not None:
655 ui.write(_('Not all ancestors of this changeset have been'
655 ui.write(_('Not all ancestors of this changeset have been'
656 ' checked.\nUse bisect --extend to continue the '
656 ' checked.\nUse bisect --extend to continue the '
657 'bisection from\nthe common ancestor, %s.\n')
657 'bisection from\nthe common ancestor, %s.\n')
658 % extendnode)
658 % extendnode)
659 else:
659 else:
660 # multiple possible revisions
660 # multiple possible revisions
661 if good:
661 if good:
662 ui.write(_("Due to skipped revisions, the first "
662 ui.write(_("Due to skipped revisions, the first "
663 "good revision could be any of:\n"))
663 "good revision could be any of:\n"))
664 else:
664 else:
665 ui.write(_("Due to skipped revisions, the first "
665 ui.write(_("Due to skipped revisions, the first "
666 "bad revision could be any of:\n"))
666 "bad revision could be any of:\n"))
667 for n in nodes:
667 for n in nodes:
668 displayer.show(repo[n])
668 displayer.show(repo[n])
669 displayer.close()
669 displayer.close()
670
670
671 def check_state(state, interactive=True):
671 def check_state(state, interactive=True):
672 if not state['good'] or not state['bad']:
672 if not state['good'] or not state['bad']:
673 if (good or bad or skip or reset) and interactive:
673 if (good or bad or skip or reset) and interactive:
674 return
674 return
675 if not state['good']:
675 if not state['good']:
676 raise util.Abort(_('cannot bisect (no known good revisions)'))
676 raise util.Abort(_('cannot bisect (no known good revisions)'))
677 else:
677 else:
678 raise util.Abort(_('cannot bisect (no known bad revisions)'))
678 raise util.Abort(_('cannot bisect (no known bad revisions)'))
679 return True
679 return True
680
680
681 # backward compatibility
681 # backward compatibility
682 if rev in "good bad reset init".split():
682 if rev in "good bad reset init".split():
683 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
683 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
684 cmd, rev, extra = rev, extra, None
684 cmd, rev, extra = rev, extra, None
685 if cmd == "good":
685 if cmd == "good":
686 good = True
686 good = True
687 elif cmd == "bad":
687 elif cmd == "bad":
688 bad = True
688 bad = True
689 else:
689 else:
690 reset = True
690 reset = True
691 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
691 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
692 raise util.Abort(_('incompatible arguments'))
692 raise util.Abort(_('incompatible arguments'))
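# (editorial note) the deprecated positional form handled above maps onto the
# flags: 'hg bisect good' -> --good, 'hg bisect bad' -> --bad, and
# 'hg bisect reset' or 'hg bisect init' -> --reset.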
693
693
694 cmdutil.checkunfinished(repo)
694 cmdutil.checkunfinished(repo)
695
695
696 if reset:
696 if reset:
697 p = repo.join("bisect.state")
697 p = repo.join("bisect.state")
698 if os.path.exists(p):
698 if os.path.exists(p):
699 os.unlink(p)
699 os.unlink(p)
700 return
700 return
701
701
702 state = hbisect.load_state(repo)
702 state = hbisect.load_state(repo)
703
703
704 if command:
704 if command:
705 changesets = 1
705 changesets = 1
706 if noupdate:
706 if noupdate:
707 try:
707 try:
708 node = state['current'][0]
708 node = state['current'][0]
709 except LookupError:
709 except LookupError:
710 raise util.Abort(_('current bisect revision is unknown - '
710 raise util.Abort(_('current bisect revision is unknown - '
711 'start a new bisect to fix'))
711 'start a new bisect to fix'))
712 else:
712 else:
713 node, p2 = repo.dirstate.parents()
713 node, p2 = repo.dirstate.parents()
714 if p2 != nullid:
714 if p2 != nullid:
715 raise util.Abort(_('current bisect revision is a merge'))
715 raise util.Abort(_('current bisect revision is a merge'))
716 try:
716 try:
717 while changesets:
717 while changesets:
718 # update state
718 # update state
719 state['current'] = [node]
719 state['current'] = [node]
720 hbisect.save_state(repo, state)
720 hbisect.save_state(repo, state)
721 status = util.system(command,
721 status = util.system(command,
722 environ={'HG_NODE': hex(node)},
722 environ={'HG_NODE': hex(node)},
723 out=ui.fout)
723 out=ui.fout)
724 if status == 125:
724 if status == 125:
725 transition = "skip"
725 transition = "skip"
726 elif status == 0:
726 elif status == 0:
727 transition = "good"
727 transition = "good"
728 # status < 0 means process was killed
728 # status < 0 means process was killed
729 elif status == 127:
729 elif status == 127:
730 raise util.Abort(_("failed to execute %s") % command)
730 raise util.Abort(_("failed to execute %s") % command)
731 elif status < 0:
731 elif status < 0:
732 raise util.Abort(_("%s killed") % command)
732 raise util.Abort(_("%s killed") % command)
733 else:
733 else:
734 transition = "bad"
734 transition = "bad"
735 ctx = scmutil.revsingle(repo, rev, node)
735 ctx = scmutil.revsingle(repo, rev, node)
736 rev = None # clear for future iterations
736 rev = None # clear for future iterations
737 state[transition].append(ctx.node())
737 state[transition].append(ctx.node())
738 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
738 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
739 check_state(state, interactive=False)
739 check_state(state, interactive=False)
740 # bisect
740 # bisect
741 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
741 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
742 # update to next check
742 # update to next check
743 node = nodes[0]
743 node = nodes[0]
744 if not noupdate:
744 if not noupdate:
745 cmdutil.bailifchanged(repo)
745 cmdutil.bailifchanged(repo)
746 hg.clean(repo, node, show_stats=False)
746 hg.clean(repo, node, show_stats=False)
747 finally:
747 finally:
748 state['current'] = [node]
748 state['current'] = [node]
749 hbisect.save_state(repo, state)
749 hbisect.save_state(repo, state)
750 print_result(nodes, bgood)
750 print_result(nodes, bgood)
751 return
751 return
752
752
753 # update state
753 # update state
754
754
755 if rev:
755 if rev:
756 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
756 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
757 else:
757 else:
758 nodes = [repo.lookup('.')]
758 nodes = [repo.lookup('.')]
759
759
760 if good or bad or skip:
760 if good or bad or skip:
761 if good:
761 if good:
762 state['good'] += nodes
762 state['good'] += nodes
763 elif bad:
763 elif bad:
764 state['bad'] += nodes
764 state['bad'] += nodes
765 elif skip:
765 elif skip:
766 state['skip'] += nodes
766 state['skip'] += nodes
767 hbisect.save_state(repo, state)
767 hbisect.save_state(repo, state)
768
768
769 if not check_state(state):
769 if not check_state(state):
770 return
770 return
771
771
772 # actually bisect
772 # actually bisect
773 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
773 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
774 if extend:
774 if extend:
775 if not changesets:
775 if not changesets:
776 extendnode = extendbisectrange(nodes, good)
776 extendnode = extendbisectrange(nodes, good)
777 if extendnode is not None:
777 if extendnode is not None:
778 ui.write(_("Extending search to changeset %d:%s\n")
778 ui.write(_("Extending search to changeset %d:%s\n")
779 % (extendnode.rev(), extendnode))
779 % (extendnode.rev(), extendnode))
780 state['current'] = [extendnode.node()]
780 state['current'] = [extendnode.node()]
781 hbisect.save_state(repo, state)
781 hbisect.save_state(repo, state)
782 if noupdate:
782 if noupdate:
783 return
783 return
784 cmdutil.bailifchanged(repo)
784 cmdutil.bailifchanged(repo)
785 return hg.clean(repo, extendnode.node())
785 return hg.clean(repo, extendnode.node())
786 raise util.Abort(_("nothing to extend"))
786 raise util.Abort(_("nothing to extend"))
787
787
788 if changesets == 0:
788 if changesets == 0:
789 print_result(nodes, good)
789 print_result(nodes, good)
790 else:
790 else:
791 assert len(nodes) == 1 # only a single node can be tested next
791 assert len(nodes) == 1 # only a single node can be tested next
792 node = nodes[0]
792 node = nodes[0]
793 # compute the approximate number of remaining tests
793 # compute the approximate number of remaining tests
794 tests, size = 0, 2
794 tests, size = 0, 2
795 while size <= changesets:
795 while size <= changesets:
796 tests, size = tests + 1, size * 2
796 tests, size = tests + 1, size * 2
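# (editorial note) the loop above computes floor(log2(changesets)), i.e. the
# approximate number of halvings, and therefore further tests, still needed.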
797 rev = repo.changelog.rev(node)
797 rev = repo.changelog.rev(node)
798 ui.write(_("Testing changeset %d:%s "
798 ui.write(_("Testing changeset %d:%s "
799 "(%d changesets remaining, ~%d tests)\n")
799 "(%d changesets remaining, ~%d tests)\n")
800 % (rev, short(node), changesets, tests))
800 % (rev, short(node), changesets, tests))
801 state['current'] = [node]
801 state['current'] = [node]
802 hbisect.save_state(repo, state)
802 hbisect.save_state(repo, state)
803 if not noupdate:
803 if not noupdate:
804 cmdutil.bailifchanged(repo)
804 cmdutil.bailifchanged(repo)
805 return hg.clean(repo, node)
805 return hg.clean(repo, node)
806
806
807 @command('bookmarks|bookmark',
807 @command('bookmarks|bookmark',
808 [('f', 'force', False, _('force')),
808 [('f', 'force', False, _('force')),
809 ('r', 'rev', '', _('revision'), _('REV')),
809 ('r', 'rev', '', _('revision'), _('REV')),
810 ('d', 'delete', False, _('delete a given bookmark')),
810 ('d', 'delete', False, _('delete a given bookmark')),
811 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
811 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
812 ('i', 'inactive', False, _('mark a bookmark inactive'))],
812 ('i', 'inactive', False, _('mark a bookmark inactive'))],
813 _('hg bookmarks [OPTIONS]... [NAME]...'))
813 _('hg bookmarks [OPTIONS]... [NAME]...'))
814 def bookmark(ui, repo, *names, **opts):
814 def bookmark(ui, repo, *names, **opts):
815 '''create a new bookmark or list existing bookmarks
815 '''create a new bookmark or list existing bookmarks
816
816
817 Bookmarks are labels on changesets to help track lines of development.
817 Bookmarks are labels on changesets to help track lines of development.
818 Bookmarks are unversioned and can be moved, renamed and deleted.
818 Bookmarks are unversioned and can be moved, renamed and deleted.
819 Deleting or moving a bookmark has no effect on the associated changesets.
819 Deleting or moving a bookmark has no effect on the associated changesets.
820
820
821 Creating or updating to a bookmark causes it to be marked as 'active'.
821 Creating or updating to a bookmark causes it to be marked as 'active'.
822 Active bookmarks are indicated with a '*'.
822 Active bookmarks are indicated with a '*'.
823 When a commit is made, an active bookmark will advance to the new commit.
823 When a commit is made, an active bookmark will advance to the new commit.
824 A plain :hg:`update` will also advance an active bookmark, if possible.
824 A plain :hg:`update` will also advance an active bookmark, if possible.
825 Updating away from a bookmark will cause it to be deactivated.
825 Updating away from a bookmark will cause it to be deactivated.
826
826
827 Bookmarks can be pushed and pulled between repositories (see
827 Bookmarks can be pushed and pulled between repositories (see
828 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
828 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
829 diverged, a new 'divergent bookmark' of the form 'name@path' will
829 diverged, a new 'divergent bookmark' of the form 'name@path' will
830 be created. Using :hg:`merge` will resolve the divergence.
830 be created. Using :hg:`merge` will resolve the divergence.
831
831
832 A bookmark named '@' has the special property that :hg:`clone` will
832 A bookmark named '@' has the special property that :hg:`clone` will
833 check it out by default if it exists.
833 check it out by default if it exists.
834
834
835 .. container:: verbose
835 .. container:: verbose
836
836
837 Examples:
837 Examples:
838
838
839 - create an active bookmark for a new line of development::
839 - create an active bookmark for a new line of development::
840
840
841 hg book new-feature
841 hg book new-feature
842
842
843 - create an inactive bookmark as a place marker::
843 - create an inactive bookmark as a place marker::
844
844
845 hg book -i reviewed
845 hg book -i reviewed
846
846
847 - create an inactive bookmark on another changeset::
847 - create an inactive bookmark on another changeset::
848
848
849 hg book -r .^ tested
849 hg book -r .^ tested
850
850
851 - move the '@' bookmark from another branch::
851 - move the '@' bookmark from another branch::
852
852
853 hg book -f @
853 hg book -f @
854 '''
854 '''
855 force = opts.get('force')
855 force = opts.get('force')
856 rev = opts.get('rev')
856 rev = opts.get('rev')
857 delete = opts.get('delete')
857 delete = opts.get('delete')
858 rename = opts.get('rename')
858 rename = opts.get('rename')
859 inactive = opts.get('inactive')
859 inactive = opts.get('inactive')
860
860
861 def checkformat(mark):
861 def checkformat(mark):
862 mark = mark.strip()
862 mark = mark.strip()
863 if not mark:
863 if not mark:
864 raise util.Abort(_("bookmark names cannot consist entirely of "
864 raise util.Abort(_("bookmark names cannot consist entirely of "
865 "whitespace"))
865 "whitespace"))
866 scmutil.checknewlabel(repo, mark, 'bookmark')
866 scmutil.checknewlabel(repo, mark, 'bookmark')
867 return mark
867 return mark
868
868
869 def checkconflict(repo, mark, cur, force=False, target=None):
869 def checkconflict(repo, mark, cur, force=False, target=None):
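# (editorial summary) unless --force is given, moving an existing bookmark is
# only allowed when it is a no-op re-activation, a 'forward' move to a valid
# descendant (or successor) of its current changeset, or a move that resolves
# a divergent 'name@path' bookmark contained in the target; anything else
# aborts with "bookmark already exists".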
870 if mark in marks and not force:
870 if mark in marks and not force:
871 if target:
871 if target:
872 if marks[mark] == target and target == cur:
872 if marks[mark] == target and target == cur:
873 # re-activating a bookmark
873 # re-activating a bookmark
874 return
874 return
875 anc = repo.changelog.ancestors([repo[target].rev()])
875 anc = repo.changelog.ancestors([repo[target].rev()])
876 bmctx = repo[marks[mark]]
876 bmctx = repo[marks[mark]]
877 divs = [repo[b].node() for b in marks
877 divs = [repo[b].node() for b in marks
878 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
878 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
879
879
880 # allow resolving a single divergent bookmark even if moving
880 # allow resolving a single divergent bookmark even if moving
881 # the bookmark across branches when a revision is specified
881 # the bookmark across branches when a revision is specified
882 # that contains a divergent bookmark
882 # that contains a divergent bookmark
883 if bmctx.rev() not in anc and target in divs:
883 if bmctx.rev() not in anc and target in divs:
884 bookmarks.deletedivergent(repo, [target], mark)
884 bookmarks.deletedivergent(repo, [target], mark)
885 return
885 return
886
886
887 deletefrom = [b for b in divs
887 deletefrom = [b for b in divs
888 if repo[b].rev() in anc or b == target]
888 if repo[b].rev() in anc or b == target]
889 bookmarks.deletedivergent(repo, deletefrom, mark)
889 bookmarks.deletedivergent(repo, deletefrom, mark)
890 if bookmarks.validdest(repo, bmctx, repo[target]):
890 if bookmarks.validdest(repo, bmctx, repo[target]):
891 ui.status(_("moving bookmark '%s' forward from %s\n") %
891 ui.status(_("moving bookmark '%s' forward from %s\n") %
892 (mark, short(bmctx.node())))
892 (mark, short(bmctx.node())))
893 return
893 return
894 raise util.Abort(_("bookmark '%s' already exists "
894 raise util.Abort(_("bookmark '%s' already exists "
895 "(use -f to force)") % mark)
895 "(use -f to force)") % mark)
896 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
896 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
897 and not force):
897 and not force):
898 raise util.Abort(
898 raise util.Abort(
899 _("a bookmark cannot have the name of an existing branch"))
899 _("a bookmark cannot have the name of an existing branch"))
900
900
901 if delete and rename:
901 if delete and rename:
902 raise util.Abort(_("--delete and --rename are incompatible"))
902 raise util.Abort(_("--delete and --rename are incompatible"))
903 if delete and rev:
903 if delete and rev:
904 raise util.Abort(_("--rev is incompatible with --delete"))
904 raise util.Abort(_("--rev is incompatible with --delete"))
905 if rename and rev:
905 if rename and rev:
906 raise util.Abort(_("--rev is incompatible with --rename"))
906 raise util.Abort(_("--rev is incompatible with --rename"))
907 if not names and (delete or rev):
907 if not names and (delete or rev):
908 raise util.Abort(_("bookmark name required"))
908 raise util.Abort(_("bookmark name required"))
909
909
910 if delete or rename or names or inactive:
910 if delete or rename or names or inactive:
911 wlock = repo.wlock()
911 wlock = repo.wlock()
912 try:
912 try:
913 cur = repo.changectx('.').node()
913 cur = repo.changectx('.').node()
914 marks = repo._bookmarks
914 marks = repo._bookmarks
915 if delete:
915 if delete:
916 for mark in names:
916 for mark in names:
917 if mark not in marks:
917 if mark not in marks:
918 raise util.Abort(_("bookmark '%s' does not exist") %
918 raise util.Abort(_("bookmark '%s' does not exist") %
919 mark)
919 mark)
920 if mark == repo._bookmarkcurrent:
920 if mark == repo._bookmarkcurrent:
921 bookmarks.unsetcurrent(repo)
921 bookmarks.unsetcurrent(repo)
922 del marks[mark]
922 del marks[mark]
923 marks.write()
923 marks.write()
924
924
925 elif rename:
925 elif rename:
926 if not names:
926 if not names:
927 raise util.Abort(_("new bookmark name required"))
927 raise util.Abort(_("new bookmark name required"))
928 elif len(names) > 1:
928 elif len(names) > 1:
929 raise util.Abort(_("only one new bookmark name allowed"))
929 raise util.Abort(_("only one new bookmark name allowed"))
930 mark = checkformat(names[0])
930 mark = checkformat(names[0])
931 if rename not in marks:
931 if rename not in marks:
932 raise util.Abort(_("bookmark '%s' does not exist") % rename)
932 raise util.Abort(_("bookmark '%s' does not exist") % rename)
933 checkconflict(repo, mark, cur, force)
933 checkconflict(repo, mark, cur, force)
934 marks[mark] = marks[rename]
934 marks[mark] = marks[rename]
935 if repo._bookmarkcurrent == rename and not inactive:
935 if repo._bookmarkcurrent == rename and not inactive:
936 bookmarks.setcurrent(repo, mark)
936 bookmarks.setcurrent(repo, mark)
937 del marks[rename]
937 del marks[rename]
938 marks.write()
938 marks.write()
939
939
940 elif names:
940 elif names:
941 newact = None
941 newact = None
942 for mark in names:
942 for mark in names:
943 mark = checkformat(mark)
943 mark = checkformat(mark)
944 if newact is None:
944 if newact is None:
945 newact = mark
945 newact = mark
946 if inactive and mark == repo._bookmarkcurrent:
946 if inactive and mark == repo._bookmarkcurrent:
947 bookmarks.unsetcurrent(repo)
947 bookmarks.unsetcurrent(repo)
948 return
948 return
949 tgt = cur
949 tgt = cur
950 if rev:
950 if rev:
951 tgt = scmutil.revsingle(repo, rev).node()
951 tgt = scmutil.revsingle(repo, rev).node()
952 checkconflict(repo, mark, cur, force, tgt)
952 checkconflict(repo, mark, cur, force, tgt)
953 marks[mark] = tgt
953 marks[mark] = tgt
954 if not inactive and cur == marks[newact] and not rev:
954 if not inactive and cur == marks[newact] and not rev:
955 bookmarks.setcurrent(repo, newact)
955 bookmarks.setcurrent(repo, newact)
956 elif cur != tgt and newact == repo._bookmarkcurrent:
956 elif cur != tgt and newact == repo._bookmarkcurrent:
957 bookmarks.unsetcurrent(repo)
957 bookmarks.unsetcurrent(repo)
958 marks.write()
958 marks.write()
959
959
960 elif inactive:
960 elif inactive:
961 if len(marks) == 0:
961 if len(marks) == 0:
962 ui.status(_("no bookmarks set\n"))
962 ui.status(_("no bookmarks set\n"))
963 elif not repo._bookmarkcurrent:
963 elif not repo._bookmarkcurrent:
964 ui.status(_("no active bookmark\n"))
964 ui.status(_("no active bookmark\n"))
965 else:
965 else:
966 bookmarks.unsetcurrent(repo)
966 bookmarks.unsetcurrent(repo)
967 finally:
967 finally:
968 wlock.release()
968 wlock.release()
969 else: # show bookmarks
969 else: # show bookmarks
970 hexfn = ui.debugflag and hex or short
970 hexfn = ui.debugflag and hex or short
971 marks = repo._bookmarks
971 marks = repo._bookmarks
972 if len(marks) == 0:
972 if len(marks) == 0:
973 ui.status(_("no bookmarks set\n"))
973 ui.status(_("no bookmarks set\n"))
974 else:
974 else:
975 for bmark, n in sorted(marks.iteritems()):
975 for bmark, n in sorted(marks.iteritems()):
976 current = repo._bookmarkcurrent
976 current = repo._bookmarkcurrent
977 if bmark == current:
977 if bmark == current:
978 prefix, label = '*', 'bookmarks.current'
978 prefix, label = '*', 'bookmarks.current'
979 else:
979 else:
980 prefix, label = ' ', ''
980 prefix, label = ' ', ''
981
981
982 if ui.quiet:
982 if ui.quiet:
983 ui.write("%s\n" % bmark, label=label)
983 ui.write("%s\n" % bmark, label=label)
984 else:
984 else:
985 pad = " " * (25 - encoding.colwidth(bmark))
985 pad = " " * (25 - encoding.colwidth(bmark))
986 ui.write(" %s %s%s %d:%s\n" % (
986 ui.write(" %s %s%s %d:%s\n" % (
987 prefix, bmark, pad, repo.changelog.rev(n), hexfn(n)),
987 prefix, bmark, pad, repo.changelog.rev(n), hexfn(n)),
988 label=label)
988 label=label)
989
989
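# Editorial illustration: without --quiet the listing branch above prints one
# line per bookmark, the active one flagged with '*', e.g. (made-up values):
#
#    * feature-x                 123:0a1b2c3d4e5f
#      reviewed                  120:9f8e7d6c5b4a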
990 @command('branch',
990 @command('branch',
991 [('f', 'force', None,
991 [('f', 'force', None,
992 _('set branch name even if it shadows an existing branch')),
992 _('set branch name even if it shadows an existing branch')),
993 ('C', 'clean', None, _('reset branch name to parent branch name'))],
993 ('C', 'clean', None, _('reset branch name to parent branch name'))],
994 _('[-fC] [NAME]'))
994 _('[-fC] [NAME]'))
995 def branch(ui, repo, label=None, **opts):
995 def branch(ui, repo, label=None, **opts):
996 """set or show the current branch name
996 """set or show the current branch name
997
997
998 .. note::
998 .. note::
999
999
1000 Branch names are permanent and global. Use :hg:`bookmark` to create a
1000 Branch names are permanent and global. Use :hg:`bookmark` to create a
1001 light-weight bookmark instead. See :hg:`help glossary` for more
1001 light-weight bookmark instead. See :hg:`help glossary` for more
1002 information about named branches and bookmarks.
1002 information about named branches and bookmarks.
1003
1003
1004 With no argument, show the current branch name. With one argument,
1004 With no argument, show the current branch name. With one argument,
1005 set the working directory branch name (the branch will not exist
1005 set the working directory branch name (the branch will not exist
1006 in the repository until the next commit). Standard practice
1006 in the repository until the next commit). Standard practice
1007 recommends that primary development take place on the 'default'
1007 recommends that primary development take place on the 'default'
1008 branch.
1008 branch.
1009
1009
1010 Unless -f/--force is specified, branch will not let you set a
1010 Unless -f/--force is specified, branch will not let you set a
1011 branch name that already exists, even if it's inactive.
1011 branch name that already exists, even if it's inactive.
1012
1012
1013 Use -C/--clean to reset the working directory branch to that of
1013 Use -C/--clean to reset the working directory branch to that of
1014 the parent of the working directory, negating a previous branch
1014 the parent of the working directory, negating a previous branch
1015 change.
1015 change.
1016
1016
1017 Use the command :hg:`update` to switch to an existing branch. Use
1017 Use the command :hg:`update` to switch to an existing branch. Use
1018 :hg:`commit --close-branch` to mark this branch as closed.
1018 :hg:`commit --close-branch` to mark this branch as closed.
1019
1019
1020 Returns 0 on success.
1020 Returns 0 on success.
1021 """
1021 """
1022 if label:
1022 if label:
1023 label = label.strip()
1023 label = label.strip()
1024
1024
1025 if not opts.get('clean') and not label:
1025 if not opts.get('clean') and not label:
1026 ui.write("%s\n" % repo.dirstate.branch())
1026 ui.write("%s\n" % repo.dirstate.branch())
1027 return
1027 return
1028
1028
1029 wlock = repo.wlock()
1029 wlock = repo.wlock()
1030 try:
1030 try:
1031 if opts.get('clean'):
1031 if opts.get('clean'):
1032 label = repo[None].p1().branch()
1032 label = repo[None].p1().branch()
1033 repo.dirstate.setbranch(label)
1033 repo.dirstate.setbranch(label)
1034 ui.status(_('reset working directory to branch %s\n') % label)
1034 ui.status(_('reset working directory to branch %s\n') % label)
1035 elif label:
1035 elif label:
1036 if not opts.get('force') and label in repo.branchmap():
1036 if not opts.get('force') and label in repo.branchmap():
1037 if label not in [p.branch() for p in repo.parents()]:
1037 if label not in [p.branch() for p in repo.parents()]:
1038 raise util.Abort(_('a branch of the same name already'
1038 raise util.Abort(_('a branch of the same name already'
1039 ' exists'),
1039 ' exists'),
1040 # i18n: "it" refers to an existing branch
1040 # i18n: "it" refers to an existing branch
1041 hint=_("use 'hg update' to switch to it"))
1041 hint=_("use 'hg update' to switch to it"))
1042 scmutil.checknewlabel(repo, label, 'branch')
1042 scmutil.checknewlabel(repo, label, 'branch')
1043 repo.dirstate.setbranch(label)
1043 repo.dirstate.setbranch(label)
1044 ui.status(_('marked working directory as branch %s\n') % label)
1044 ui.status(_('marked working directory as branch %s\n') % label)
1045 ui.status(_('(branches are permanent and global, '
1045 ui.status(_('(branches are permanent and global, '
1046 'did you want a bookmark?)\n'))
1046 'did you want a bookmark?)\n'))
1047 finally:
1047 finally:
1048 wlock.release()
1048 wlock.release()
1049
1049
1050 @command('branches',
1050 @command('branches',
1051 [('a', 'active', False, _('show only branches that have unmerged heads')),
1051 [('a', 'active', False, _('show only branches that have unmerged heads')),
1052 ('c', 'closed', False, _('show normal and closed branches'))],
1052 ('c', 'closed', False, _('show normal and closed branches'))],
1053 _('[-ac]'))
1053 _('[-ac]'))
1054 def branches(ui, repo, active=False, closed=False):
1054 def branches(ui, repo, active=False, closed=False):
1055 """list repository named branches
1055 """list repository named branches
1056
1056
1057 List the repository's named branches, indicating which ones are
1057 List the repository's named branches, indicating which ones are
1058 inactive. If -c/--closed is specified, also list branches which have
1058 inactive. If -c/--closed is specified, also list branches which have
1059 been marked closed (see :hg:`commit --close-branch`).
1059 been marked closed (see :hg:`commit --close-branch`).
1060
1060
1061 If -a/--active is specified, only show active branches. A branch
1061 If -a/--active is specified, only show active branches. A branch
1062 is considered active if it contains repository heads.
1062 is considered active if it contains repository heads.
1063
1063
1064 Use the command :hg:`update` to switch to an existing branch.
1064 Use the command :hg:`update` to switch to an existing branch.
1065
1065
1066 Returns 0.
1066 Returns 0.
1067 """
1067 """
1068
1068
1069 hexfunc = ui.debugflag and hex or short
1069 hexfunc = ui.debugflag and hex or short
1070
1070
1071 allheads = set(repo.heads())
1071 allheads = set(repo.heads())
1072 branches = []
1072 branches = []
1073 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1073 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1074 isactive = not isclosed and bool(set(heads) & allheads)
1074 isactive = not isclosed and bool(set(heads) & allheads)
1075 branches.append((tag, repo[tip], isactive, not isclosed))
1075 branches.append((tag, repo[tip], isactive, not isclosed))
1076 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1076 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1077 reverse=True)
1077 reverse=True)
1078
1078
1079 for tag, ctx, isactive, isopen in branches:
1079 for tag, ctx, isactive, isopen in branches:
1080 if (not active) or isactive:
1080 if (not active) or isactive:
1081 if isactive:
1081 if isactive:
1082 label = 'branches.active'
1082 label = 'branches.active'
1083 notice = ''
1083 notice = ''
1084 elif not isopen:
1084 elif not isopen:
1085 if not closed:
1085 if not closed:
1086 continue
1086 continue
1087 label = 'branches.closed'
1087 label = 'branches.closed'
1088 notice = _(' (closed)')
1088 notice = _(' (closed)')
1089 else:
1089 else:
1090 label = 'branches.inactive'
1090 label = 'branches.inactive'
1091 notice = _(' (inactive)')
1091 notice = _(' (inactive)')
1092 if tag == repo.dirstate.branch():
1092 if tag == repo.dirstate.branch():
1093 label = 'branches.current'
1093 label = 'branches.current'
1094 rev = str(ctx.rev()).rjust(31 - encoding.colwidth(tag))
1094 rev = str(ctx.rev()).rjust(31 - encoding.colwidth(tag))
1095 rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
1095 rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
1096 'log.changeset changeset.%s' % ctx.phasestr())
1096 'log.changeset changeset.%s' % ctx.phasestr())
1097 labeledtag = ui.label(tag, label)
1097 labeledtag = ui.label(tag, label)
1098 if ui.quiet:
1098 if ui.quiet:
1099 ui.write("%s\n" % labeledtag)
1099 ui.write("%s\n" % labeledtag)
1100 else:
1100 else:
1101 ui.write("%s %s%s\n" % (labeledtag, rev, notice))
1101 ui.write("%s %s%s\n" % (labeledtag, rev, notice))
1102
1102
1103 @command('bundle',
1103 @command('bundle',
1104 [('f', 'force', None, _('run even when the destination is unrelated')),
1104 [('f', 'force', None, _('run even when the destination is unrelated')),
1105 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1105 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1106 _('REV')),
1106 _('REV')),
1107 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1107 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1108 _('BRANCH')),
1108 _('BRANCH')),
1109 ('', 'base', [],
1109 ('', 'base', [],
1110 _('a base changeset assumed to be available at the destination'),
1110 _('a base changeset assumed to be available at the destination'),
1111 _('REV')),
1111 _('REV')),
1112 ('a', 'all', None, _('bundle all changesets in the repository')),
1112 ('a', 'all', None, _('bundle all changesets in the repository')),
1113 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1113 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1114 ] + remoteopts,
1114 ] + remoteopts,
1115 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1115 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1116 def bundle(ui, repo, fname, dest=None, **opts):
1116 def bundle(ui, repo, fname, dest=None, **opts):
1117 """create a changegroup file
1117 """create a changegroup file
1118
1118
1119 Generate a compressed changegroup file collecting changesets not
1119 Generate a compressed changegroup file collecting changesets not
1120 known to be in another repository.
1120 known to be in another repository.
1121
1121
1122 If you omit the destination repository, then hg assumes the
1122 If you omit the destination repository, then hg assumes the
1123 destination will have all the nodes you specify with --base
1123 destination will have all the nodes you specify with --base
1124 parameters. To create a bundle containing all changesets, use
1124 parameters. To create a bundle containing all changesets, use
1125 -a/--all (or --base null).
1125 -a/--all (or --base null).
1126
1126
1127 You can change compression method with the -t/--type option.
1127 You can change compression method with the -t/--type option.
1128 The available compression methods are: none, bzip2, and
1128 The available compression methods are: none, bzip2, and
1129 gzip (by default, bundles are compressed using bzip2).
1129 gzip (by default, bundles are compressed using bzip2).
1130
1130
1131 The bundle file can then be transferred using conventional means
1131 The bundle file can then be transferred using conventional means
1132 and applied to another repository with the unbundle or pull
1132 and applied to another repository with the unbundle or pull
1133 command. This is useful when direct push and pull are not
1133 command. This is useful when direct push and pull are not
1134 available or when exporting an entire repository is undesirable.
1134 available or when exporting an entire repository is undesirable.
1135
1135
1136 Applying bundles preserves all changeset contents including
1136 Applying bundles preserves all changeset contents including
1137 permissions, copy/rename information, and revision history.
1137 permissions, copy/rename information, and revision history.
1138
1138
1139 Returns 0 on success, 1 if no changes found.
1139 Returns 0 on success, 1 if no changes found.
1140 """
1140 """
1141 revs = None
1141 revs = None
1142 if 'rev' in opts:
1142 if 'rev' in opts:
1143 revs = scmutil.revrange(repo, opts['rev'])
1143 revs = scmutil.revrange(repo, opts['rev'])
1144
1144
1145 bundletype = opts.get('type', 'bzip2').lower()
1145 bundletype = opts.get('type', 'bzip2').lower()
1146 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1146 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1147 bundletype = btypes.get(bundletype)
1147 bundletype = btypes.get(bundletype)
1148 if bundletype not in changegroup.bundletypes:
1148 if bundletype not in changegroup.bundletypes:
1149 raise util.Abort(_('unknown bundle type specified with --type'))
1149 raise util.Abort(_('unknown bundle type specified with --type'))
1150
1150
1151 if opts.get('all'):
1151 if opts.get('all'):
1152 base = ['null']
1152 base = ['null']
1153 else:
1153 else:
1154 base = scmutil.revrange(repo, opts.get('base'))
1154 base = scmutil.revrange(repo, opts.get('base'))
1155 # TODO: get desired bundlecaps from command line.
1155 # TODO: get desired bundlecaps from command line.
1156 bundlecaps = None
1156 bundlecaps = None
1157 if base:
1157 if base:
1158 if dest:
1158 if dest:
1159 raise util.Abort(_("--base is incompatible with specifying "
1159 raise util.Abort(_("--base is incompatible with specifying "
1160 "a destination"))
1160 "a destination"))
1161 common = [repo.lookup(rev) for rev in base]
1161 common = [repo.lookup(rev) for rev in base]
1162 heads = revs and map(repo.lookup, revs) or revs
1162 heads = revs and map(repo.lookup, revs) or revs
1163 cg = changegroup.getbundle(repo, 'bundle', heads=heads, common=common,
1163 cg = changegroup.getbundle(repo, 'bundle', heads=heads, common=common,
1164 bundlecaps=bundlecaps)
1164 bundlecaps=bundlecaps)
1165 outgoing = None
1165 outgoing = None
1166 else:
1166 else:
1167 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1167 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1168 dest, branches = hg.parseurl(dest, opts.get('branch'))
1168 dest, branches = hg.parseurl(dest, opts.get('branch'))
1169 other = hg.peer(repo, opts, dest)
1169 other = hg.peer(repo, opts, dest)
1170 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1170 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1171 heads = revs and map(repo.lookup, revs) or revs
1171 heads = revs and map(repo.lookup, revs) or revs
1172 outgoing = discovery.findcommonoutgoing(repo, other,
1172 outgoing = discovery.findcommonoutgoing(repo, other,
1173 onlyheads=heads,
1173 onlyheads=heads,
1174 force=opts.get('force'),
1174 force=opts.get('force'),
1175 portable=True)
1175 portable=True)
1176 cg = changegroup.getlocalbundle(repo, 'bundle', outgoing, bundlecaps)
1176 cg = changegroup.getlocalbundle(repo, 'bundle', outgoing, bundlecaps)
1177 if not cg:
1177 if not cg:
1178 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1178 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1179 return 1
1179 return 1
1180
1180
1181 changegroup.writebundle(cg, fname, bundletype)
1181 changegroup.writebundle(cg, fname, bundletype)
1182
1182
1183 @command('cat',
1183 @command('cat',
1184 [('o', 'output', '',
1184 [('o', 'output', '',
1185 _('print output to file with formatted name'), _('FORMAT')),
1185 _('print output to file with formatted name'), _('FORMAT')),
1186 ('r', 'rev', '', _('print the given revision'), _('REV')),
1186 ('r', 'rev', '', _('print the given revision'), _('REV')),
1187 ('', 'decode', None, _('apply any matching decode filter')),
1187 ('', 'decode', None, _('apply any matching decode filter')),
1188 ] + walkopts,
1188 ] + walkopts,
1189 _('[OPTION]... FILE...'),
1189 _('[OPTION]... FILE...'),
1190 inferrepo=True)
1190 inferrepo=True)
1191 def cat(ui, repo, file1, *pats, **opts):
1191 def cat(ui, repo, file1, *pats, **opts):
1192 """output the current or given revision of files
1192 """output the current or given revision of files
1193
1193
1194 Print the specified files as they were at the given revision. If
1194 Print the specified files as they were at the given revision. If
1195 no revision is given, the parent of the working directory is used.
1195 no revision is given, the parent of the working directory is used.
1196
1196
1197 Output may be to a file, in which case the name of the file is
1197 Output may be to a file, in which case the name of the file is
1198 given using a format string. The formatting rules are as follows:
1198 given using a format string. The formatting rules are as follows:
1199
1199
1200 :``%%``: literal "%" character
1200 :``%%``: literal "%" character
1201 :``%s``: basename of file being printed
1201 :``%s``: basename of file being printed
1202 :``%d``: dirname of file being printed, or '.' if in repository root
1202 :``%d``: dirname of file being printed, or '.' if in repository root
1203 :``%p``: root-relative path name of file being printed
1203 :``%p``: root-relative path name of file being printed
1204 :``%H``: changeset hash (40 hexadecimal digits)
1204 :``%H``: changeset hash (40 hexadecimal digits)
1205 :``%R``: changeset revision number
1205 :``%R``: changeset revision number
1206 :``%h``: short-form changeset hash (12 hexadecimal digits)
1206 :``%h``: short-form changeset hash (12 hexadecimal digits)
1207 :``%r``: zero-padded changeset revision number
1207 :``%r``: zero-padded changeset revision number
1208 :``%b``: basename of the exporting repository
1208 :``%b``: basename of the exporting repository
1209
1209
1210 Returns 0 on success.
1210 Returns 0 on success.
1211 """
1211 """
1212 ctx = scmutil.revsingle(repo, opts.get('rev'))
1212 ctx = scmutil.revsingle(repo, opts.get('rev'))
1213 m = scmutil.match(ctx, (file1,) + pats, opts)
1213 m = scmutil.match(ctx, (file1,) + pats, opts)
1214
1214
1215 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1215 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1216
1216
1217 @command('^clone',
1217 @command('^clone',
1218 [('U', 'noupdate', None,
1218 [('U', 'noupdate', None,
1219 _('the clone will include an empty working copy (only a repository)')),
1219 _('the clone will include an empty working copy (only a repository)')),
1220 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1220 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1221 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1221 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1222 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1222 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1223 ('', 'pull', None, _('use pull protocol to copy metadata')),
1223 ('', 'pull', None, _('use pull protocol to copy metadata')),
1224 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1224 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1225 ] + remoteopts,
1225 ] + remoteopts,
1226 _('[OPTION]... SOURCE [DEST]'),
1226 _('[OPTION]... SOURCE [DEST]'),
1227 norepo=True)
1227 norepo=True)
1228 def clone(ui, source, dest=None, **opts):
1228 def clone(ui, source, dest=None, **opts):
1229 """make a copy of an existing repository
1229 """make a copy of an existing repository
1230
1230
1231 Create a copy of an existing repository in a new directory.
1231 Create a copy of an existing repository in a new directory.
1232
1232
1233 If no destination directory name is specified, it defaults to the
1233 If no destination directory name is specified, it defaults to the
1234 basename of the source.
1234 basename of the source.
1235
1235
1236 The location of the source is added to the new repository's
1236 The location of the source is added to the new repository's
1237 ``.hg/hgrc`` file, as the default to be used for future pulls.
1237 ``.hg/hgrc`` file, as the default to be used for future pulls.
1238
1238
1239 Only local paths and ``ssh://`` URLs are supported as
1239 Only local paths and ``ssh://`` URLs are supported as
1240 destinations. For ``ssh://`` destinations, no working directory or
1240 destinations. For ``ssh://`` destinations, no working directory or
1241 ``.hg/hgrc`` will be created on the remote side.
1241 ``.hg/hgrc`` will be created on the remote side.
1242
1242
1243 To pull only a subset of changesets, specify one or more revision
1243 To pull only a subset of changesets, specify one or more revision
1244 identifiers with -r/--rev or branches with -b/--branch. The
1244 identifiers with -r/--rev or branches with -b/--branch. The
1245 resulting clone will contain only the specified changesets and
1245 resulting clone will contain only the specified changesets and
1246 their ancestors. These options (or 'clone src#rev dest') imply
1246 their ancestors. These options (or 'clone src#rev dest') imply
1247 --pull, even for local source repositories. Note that specifying a
1247 --pull, even for local source repositories. Note that specifying a
1248 tag will include the tagged changeset but not the changeset
1248 tag will include the tagged changeset but not the changeset
1249 containing the tag.
1249 containing the tag.
1250
1250
1251 If the source repository has a bookmark called '@' set, that
1251 If the source repository has a bookmark called '@' set, that
1252 revision will be checked out in the new repository by default.
1252 revision will be checked out in the new repository by default.
1253
1253
1254 To check out a particular version, use -u/--update, or
1254 To check out a particular version, use -u/--update, or
1255 -U/--noupdate to create a clone with no working directory.
1255 -U/--noupdate to create a clone with no working directory.
1256
1256
1257 .. container:: verbose
1257 .. container:: verbose
1258
1258
1259 For efficiency, hardlinks are used for cloning whenever the
1259 For efficiency, hardlinks are used for cloning whenever the
1260 source and destination are on the same filesystem (note this
1260 source and destination are on the same filesystem (note this
1261 applies only to the repository data, not to the working
1261 applies only to the repository data, not to the working
1262 directory). Some filesystems, such as AFS, implement hardlinking
1262 directory). Some filesystems, such as AFS, implement hardlinking
1263 incorrectly, but do not report errors. In these cases, use the
1263 incorrectly, but do not report errors. In these cases, use the
1264 --pull option to avoid hardlinking.
1264 --pull option to avoid hardlinking.
1265
1265
1266 In some cases, you can clone repositories and the working
1266 In some cases, you can clone repositories and the working
1267 directory using full hardlinks with ::
1267 directory using full hardlinks with ::
1268
1268
1269 $ cp -al REPO REPOCLONE
1269 $ cp -al REPO REPOCLONE
1270
1270
1271 This is the fastest way to clone, but it is not always safe. The
1271 This is the fastest way to clone, but it is not always safe. The
1272 operation is not atomic (making sure REPO is not modified during
1272 operation is not atomic (making sure REPO is not modified during
1273 the operation is up to you) and you have to make sure your
1273 the operation is up to you) and you have to make sure your
1274 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1274 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1275 so). Also, this is not compatible with certain extensions that
1275 so). Also, this is not compatible with certain extensions that
1276 place their metadata under the .hg directory, such as mq.
1276 place their metadata under the .hg directory, such as mq.
1277
1277
1278 Mercurial will update the working directory to the first applicable
1278 Mercurial will update the working directory to the first applicable
1279 revision from this list:
1279 revision from this list:
1280
1280
1281 a) null if -U or the source repository has no changesets
1281 a) null if -U or the source repository has no changesets
1282 b) if -u . and the source repository is local, the first parent of
1282 b) if -u . and the source repository is local, the first parent of
1283 the source repository's working directory
1283 the source repository's working directory
1284 c) the changeset specified with -u (if a branch name, this means the
1284 c) the changeset specified with -u (if a branch name, this means the
1285 latest head of that branch)
1285 latest head of that branch)
1286 d) the changeset specified with -r
1286 d) the changeset specified with -r
1287 e) the tipmost head specified with -b
1287 e) the tipmost head specified with -b
1288 f) the tipmost head specified with the url#branch source syntax
1288 f) the tipmost head specified with the url#branch source syntax
1289 g) the revision marked with the '@' bookmark, if present
1289 g) the revision marked with the '@' bookmark, if present
1290 h) the tipmost head of the default branch
1290 h) the tipmost head of the default branch
1291 i) tip
1291 i) tip
1292
1292
1293 Examples:
1293 Examples:
1294
1294
1295 - clone a remote repository to a new directory named hg/::
1295 - clone a remote repository to a new directory named hg/::
1296
1296
1297 hg clone http://selenic.com/hg
1297 hg clone http://selenic.com/hg
1298
1298
1299 - create a lightweight local clone::
1299 - create a lightweight local clone::
1300
1300
1301 hg clone project/ project-feature/
1301 hg clone project/ project-feature/
1302
1302
1303 - clone from an absolute path on an ssh server (note double-slash)::
1303 - clone from an absolute path on an ssh server (note double-slash)::
1304
1304
1305 hg clone ssh://user@server//home/projects/alpha/
1305 hg clone ssh://user@server//home/projects/alpha/
1306
1306
1307 - do a high-speed clone over a LAN while checking out a
1307 - do a high-speed clone over a LAN while checking out a
1308 specified version::
1308 specified version::
1309
1309
1310 hg clone --uncompressed http://server/repo -u 1.5
1310 hg clone --uncompressed http://server/repo -u 1.5
1311
1311
1312 - create a repository without changesets after a particular revision::
1312 - create a repository without changesets after a particular revision::
1313
1313
1314 hg clone -r 04e544 experimental/ good/
1314 hg clone -r 04e544 experimental/ good/
1315
1315
1316 - clone (and track) a particular named branch::
1316 - clone (and track) a particular named branch::
1317
1317
1318 hg clone http://selenic.com/hg#stable
1318 hg clone http://selenic.com/hg#stable
1319
1319
1320 See :hg:`help urls` for details on specifying URLs.
1320 See :hg:`help urls` for details on specifying URLs.
1321
1321
1322 Returns 0 on success.
1322 Returns 0 on success.
1323 """
1323 """
1324 if opts.get('noupdate') and opts.get('updaterev'):
1324 if opts.get('noupdate') and opts.get('updaterev'):
1325 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1325 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1326
1326
1327 r = hg.clone(ui, opts, source, dest,
1327 r = hg.clone(ui, opts, source, dest,
1328 pull=opts.get('pull'),
1328 pull=opts.get('pull'),
1329 stream=opts.get('uncompressed'),
1329 stream=opts.get('uncompressed'),
1330 rev=opts.get('rev'),
1330 rev=opts.get('rev'),
1331 update=opts.get('updaterev') or not opts.get('noupdate'),
1331 update=opts.get('updaterev') or not opts.get('noupdate'),
1332 branch=opts.get('branch'))
1332 branch=opts.get('branch'))
1333
1333
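    # dispatch maps a false-y command return value to exit status 0, so
    # this yields 1 only when hg.clone() returned None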
    return r is None

@command('^commit|ci',
    [('A', 'addremove', None,
     _('mark new/missing files as added/removed before committing')),
    ('', 'close-branch', None,
     _('mark a branch as closed, hiding it from the branch list')),
    ('', 'amend', None, _('amend the parent of the working dir')),
    ('s', 'secret', None, _('use the secret phase for committing')),
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.
    """
    if opts.get('subrepos'):
        if opts.get('amend'):
            raise util.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1

        if not bheads:
            raise util.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            if repo.parents()[0].p1().branch() != branch and \
               repo.parents()[0].p2().branch() != branch:
                raise util.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        old = repo['.']
        if old.phase() == phases.public:
            raise util.Abort(_('cannot amend public changesets'))
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot amend while merging'))
        if (not obsolete._enabled) and old.children():
            raise util.Abort(_('cannot amend changeset with children'))

        # commitfunc is used only for temporary amend commit by cmdutil.amend
        def commitfunc(ui, repo, message, match, opts):
            return repo.commit(message,
                               opts.get('user') or old.user(),
                               opts.get('date') or old.date(),
                               match,
                               extra=extra)

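        # remember the active bookmark and the bookmarks on the amended
        # changeset so they can be moved onto the replacement node below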
        current = repo._bookmarkcurrent
        marks = old.bookmarks()
        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
        elif marks:
            ui.debug('moving bookmarks %r from %s to %s\n' %
                     (marks, old.hex(), hex(node)))
            newmarks = repo._bookmarks
            for bm in marks:
                newmarks[bm] = node
                if bm == current:
                    bookmarks.setcurrent(repo, bm)
            newmarks.write()
    else:
        def commitfunc(ui, repo, message, match, opts):
            backup = ui.backupconfig('phases', 'new-commit')
            baseui = repo.baseui
            basebackup = baseui.backupconfig('phases', 'new-commit')
            try:
                if opts.get('secret'):
                    ui.setconfig('phases', 'new-commit', 'secret', 'commit')
                    # Propagate to subrepos
                    baseui.setconfig('phases', 'new-commit', 'secret', 'commit')

                editform = 'commit.normal'
                editor = cmdutil.getcommiteditor(editform=editform, **opts)
                return repo.commit(message, opts.get('user'), opts.get('date'),
                                   match,
                                   editor=editor,
                                   extra=extra)
            finally:
                ui.restoreconfig(backup)
                repo.baseui.restoreconfig(basebackup)


    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

    if not node:
        stat = repo.status(match=scmutil.match(repo[None], pats, opts))
        if stat[3]:
            ui.status(_("nothing changed (%d missing files, see "
                        "'hg status')\n") % len(stat[3]))
        else:
            ui.status(_("nothing changed\n"))
        return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)

@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))],
    _('[-u] [NAME]...'),
    optionalrepo=True)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success.

    """

    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise util.Abort(_("can't use --local and --global together"))

        if opts.get('local'):
            if not repo:
                raise util.Abort(_("can't use --local outside a repository"))
            paths = [repo.join('hgrc')]
        elif opts.get('global'):
            paths = scmutil.systemrcpath()
        else:
            paths = scmutil.userrcpath()

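        # edit the first config file that already exists; the for/else
        # falls through only when none was found, in which case a
        # commented template is seeded into the first candidate path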
        for f in paths:
            if os.path.exists(f):
                break
        else:
            f = paths[0]
            fp = open(f, "w")
            fp.write(
                '# example config (see "hg help config" for more info)\n'
                '\n'
                '[ui]\n'
                '# name and email, e.g.\n'
                '# username = Jane Doe <jdoe@example.com>\n'
                'username =\n'
                '\n'
                '[extensions]\n'
                '# uncomment these lines to enable some popular extensions\n'
                '# (see "hg help extensions" for more info)\n'
                '# pager =\n'
                '# progress =\n'
                '# color =\n')
            fp.close()

        editor = ui.geteditor()
        util.system("%s \"%s\"" % (editor, f),
                    onerr=util.Abort, errprefix=_("edit failed"),
                    out=ui.fout)
        return

    for f in scmutil.rcpath():
        ui.debug('read config from: %s\n' % f)
    untrusted = bool(opts.get('untrusted'))
    if values:
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        value = str(value).replace('\n', '\\n')
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write(value, '\n')
        else:
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))

@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        wlock.release()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise util.Abort(_("there is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise util.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string-valued elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

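    # For illustration only (not part of the original source): the text
    #     +3 :mytag $ +2 /mytag
    # describes three linear changesets (the third tagged 'mytag'), a
    # fresh two-changeset branch started from the null revision, and a
    # merge of that branch's tip with the 'mytag' node.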
    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise util.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    lock = tr = None
    try:
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.opener.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock)

@command('debugbundle',
        [('a', 'all', None, _('show all details'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, **opts):
    """lists the contents of a bundle"""
    f = hg.openpath(ui, bundlepath)
    try:
        gen = exchange.readbundle(ui, f, bundlepath)
        if all:
            ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))

            def showchunks(named):
                ui.write("\n%s\n" % named)
                chain = None
                while True:
                    chunkdata = gen.deltachunk(chain)
                    if not chunkdata:
                        break
                    node = chunkdata['node']
                    p1 = chunkdata['p1']
                    p2 = chunkdata['p2']
                    cs = chunkdata['cs']
                    deltabase = chunkdata['deltabase']
                    delta = chunkdata['delta']
                    ui.write("%s %s %s %s %s %s\n" %
                             (hex(node), hex(p1), hex(p2),
                              hex(cs), hex(deltabase), len(delta)))
                    chain = node

            chunkdata = gen.changelogheader()
            showchunks("changelog")
            chunkdata = gen.manifestheader()
            showchunks("manifest")
            while True:
                chunkdata = gen.filelogheader()
                if not chunkdata:
                    break
                fname = chunkdata['filename']
                showchunks(fname)
        else:
            chunkdata = gen.changelogheader()
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s\n" % hex(node))
                chain = node
    finally:
        f.close()

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
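    # dirstate states: 'n' normal, 'a' added, 'r' removed, 'm' merged;
    # each entry is cross-checked against the parent manifests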
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)

@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for cmd, vals in sorted(table.iteritems()):
        cmd = cmd.split('|')[0].strip('^')
        opts = ', '.join([i[1] for i in vals[1]])
        ui.write('%s: %s\n' % (cmd, opts))

@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            otables.append(entry[1])
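        # collect every short and long option name, skipping entries
        # marked (DEPRECATED), for use by shell completion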
        for t in otables:
            for o in t:
                if "(DEPRECATED)" in o[3]:
                    continue
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    cmdlist = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
                                        if p != -1)))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(set(p for p in cl.parentrevs(r)
                                        if p != -1)))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest'))],
    _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest'):
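        # with -c/-m the sole positional argument is really the revision,
        # so shift it over and let openrevlog pick changelog or manifest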
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

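    # run one discovery round, either with the legacy tree-walking
    # protocol (--old) or the current set-based sampling, and report how
    # the common heads relate to the local and remote heads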
    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style '
                                 'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)[0]
        ui.note(tree, "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
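    # probe the filesystem by creating a scratch file in the current
    # directory; it is cleaned up again after the checks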
    util.writefile('.debugfsinfo', '')
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
                                         and 'yes' or 'no'))
    os.unlink('.debugfsinfo')

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
2100 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
2100 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
2101 bundletype = btypes.get(bundletype)
2101 bundletype = btypes.get(bundletype)
2102 if bundletype not in changegroup.bundletypes:
2102 if bundletype not in changegroup.bundletypes:
2103 raise util.Abort(_('unknown bundle type specified with --type'))
2103 raise util.Abort(_('unknown bundle type specified with --type'))
2104 changegroup.writebundle(bundle, bundlepath, bundletype)
2104 changegroup.writebundle(bundle, bundlepath, bundletype)
2105
2105
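A small sketch of the --type handling above: the name-to-header mapping and the validation mirror the command, while the known_headers tuple and the sample input are assumptions for illustration only.

def resolve_bundletype(name, known_headers=('HG10UN', 'HG10BZ', 'HG10GZ')):
    # Map the user-facing compression name to a bundle header and make
    # sure it is one we actually know how to write.
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    header = btypes.get(name.lower())
    if header not in known_headers:
        raise ValueError('unknown bundle type specified with --type')
    return header

print(resolve_bundletype('GZIP'))  # HG10GZ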
2106 @command('debugignore', [], '')
2106 @command('debugignore', [], '')
2107 def debugignore(ui, repo, *values, **opts):
2107 def debugignore(ui, repo, *values, **opts):
2108 """display the combined ignore pattern"""
2108 """display the combined ignore pattern"""
2109 ignore = repo.dirstate._ignore
2109 ignore = repo.dirstate._ignore
2110 includepat = getattr(ignore, 'includepat', None)
2110 includepat = getattr(ignore, 'includepat', None)
2111 if includepat is not None:
2111 if includepat is not None:
2112 ui.write("%s\n" % includepat)
2112 ui.write("%s\n" % includepat)
2113 else:
2113 else:
2114 raise util.Abort(_("no ignore patterns found"))
2114 raise util.Abort(_("no ignore patterns found"))
2115
2115
2116 @command('debugindex',
2116 @command('debugindex',
2117 [('c', 'changelog', False, _('open changelog')),
2117 [('c', 'changelog', False, _('open changelog')),
2118 ('m', 'manifest', False, _('open manifest')),
2118 ('m', 'manifest', False, _('open manifest')),
2119 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2119 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2120 _('[-f FORMAT] -c|-m|FILE'),
2120 _('[-f FORMAT] -c|-m|FILE'),
2121 optionalrepo=True)
2121 optionalrepo=True)
2122 def debugindex(ui, repo, file_=None, **opts):
2122 def debugindex(ui, repo, file_=None, **opts):
2123 """dump the contents of an index file"""
2123 """dump the contents of an index file"""
2124 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2124 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2125 format = opts.get('format', 0)
2125 format = opts.get('format', 0)
2126 if format not in (0, 1):
2126 if format not in (0, 1):
2127 raise util.Abort(_("unknown format %d") % format)
2127 raise util.Abort(_("unknown format %d") % format)
2128
2128
2129 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2129 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2130 if generaldelta:
2130 if generaldelta:
2131 basehdr = ' delta'
2131 basehdr = ' delta'
2132 else:
2132 else:
2133 basehdr = ' base'
2133 basehdr = ' base'
2134
2134
2135 if format == 0:
2135 if format == 0:
2136 ui.write(" rev offset length " + basehdr + " linkrev"
2136 ui.write(" rev offset length " + basehdr + " linkrev"
2137 " nodeid p1 p2\n")
2137 " nodeid p1 p2\n")
2138 elif format == 1:
2138 elif format == 1:
2139 ui.write(" rev flag offset length"
2139 ui.write(" rev flag offset length"
2140 " size " + basehdr + " link p1 p2"
2140 " size " + basehdr + " link p1 p2"
2141 " nodeid\n")
2141 " nodeid\n")
2142
2142
2143 for i in r:
2143 for i in r:
2144 node = r.node(i)
2144 node = r.node(i)
2145 if generaldelta:
2145 if generaldelta:
2146 base = r.deltaparent(i)
2146 base = r.deltaparent(i)
2147 else:
2147 else:
2148 base = r.chainbase(i)
2148 base = r.chainbase(i)
2149 if format == 0:
2149 if format == 0:
2150 try:
2150 try:
2151 pp = r.parents(node)
2151 pp = r.parents(node)
2152 except Exception:
2152 except Exception:
2153 pp = [nullid, nullid]
2153 pp = [nullid, nullid]
2154 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2154 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2155 i, r.start(i), r.length(i), base, r.linkrev(i),
2155 i, r.start(i), r.length(i), base, r.linkrev(i),
2156 short(node), short(pp[0]), short(pp[1])))
2156 short(node), short(pp[0]), short(pp[1])))
2157 elif format == 1:
2157 elif format == 1:
2158 pr = r.parentrevs(i)
2158 pr = r.parentrevs(i)
2159 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2159 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2160 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2160 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2161 base, r.linkrev(i), pr[0], pr[1], short(node)))
2161 base, r.linkrev(i), pr[0], pr[1], short(node)))
2162
2162
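For orientation, the format-0 row layout printed above can be reproduced outside a revlog; the values below are made up, the format string is the one the command uses.

# Hypothetical revision data: rev, offset, length, base, linkrev, then
# nodeid/p1/p2 as 12-digit short hashes.
row = (3, 1024, 87, 2, 3, '1f0dee641bb7', '2d1a3c5e7f90', '000000000000')
print("% 6d % 9d % 7d % 6d % 7d %s %s %s" % row)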
2163 @command('debugindexdot', [], _('FILE'), optionalrepo=True)
2163 @command('debugindexdot', [], _('FILE'), optionalrepo=True)
2164 def debugindexdot(ui, repo, file_):
2164 def debugindexdot(ui, repo, file_):
2165 """dump an index DAG as a graphviz dot file"""
2165 """dump an index DAG as a graphviz dot file"""
2166 r = None
2166 r = None
2167 if repo:
2167 if repo:
2168 filelog = repo.file(file_)
2168 filelog = repo.file(file_)
2169 if len(filelog):
2169 if len(filelog):
2170 r = filelog
2170 r = filelog
2171 if not r:
2171 if not r:
2172 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2172 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2173 ui.write(("digraph G {\n"))
2173 ui.write(("digraph G {\n"))
2174 for i in r:
2174 for i in r:
2175 node = r.node(i)
2175 node = r.node(i)
2176 pp = r.parents(node)
2176 pp = r.parents(node)
2177 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2177 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2178 if pp[1] != nullid:
2178 if pp[1] != nullid:
2179 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2179 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2180 ui.write("}\n")
2180 ui.write("}\n")
2181
2181
2182 @command('debuginstall', [], '', norepo=True)
2182 @command('debuginstall', [], '', norepo=True)
2183 def debuginstall(ui):
2183 def debuginstall(ui):
2184 '''test Mercurial installation
2184 '''test Mercurial installation
2185
2185
2186 Returns 0 on success.
2186 Returns 0 on success.
2187 '''
2187 '''
2188
2188
2189 def writetemp(contents):
2189 def writetemp(contents):
2190 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2190 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2191 f = os.fdopen(fd, "wb")
2191 f = os.fdopen(fd, "wb")
2192 f.write(contents)
2192 f.write(contents)
2193 f.close()
2193 f.close()
2194 return name
2194 return name
2195
2195
2196 problems = 0
2196 problems = 0
2197
2197
2198 # encoding
2198 # encoding
2199 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2199 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2200 try:
2200 try:
2201 encoding.fromlocal("test")
2201 encoding.fromlocal("test")
2202 except util.Abort, inst:
2202 except util.Abort, inst:
2203 ui.write(" %s\n" % inst)
2203 ui.write(" %s\n" % inst)
2204 ui.write(_(" (check that your locale is properly set)\n"))
2204 ui.write(_(" (check that your locale is properly set)\n"))
2205 problems += 1
2205 problems += 1
2206
2206
2207 # Python
2207 # Python
2208 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2208 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2209 ui.status(_("checking Python version (%s)\n")
2209 ui.status(_("checking Python version (%s)\n")
2210 % ("%s.%s.%s" % sys.version_info[:3]))
2210 % ("%s.%s.%s" % sys.version_info[:3]))
2211 ui.status(_("checking Python lib (%s)...\n")
2211 ui.status(_("checking Python lib (%s)...\n")
2212 % os.path.dirname(os.__file__))
2212 % os.path.dirname(os.__file__))
2213
2213
2214 # compiled modules
2214 # compiled modules
2215 ui.status(_("checking installed modules (%s)...\n")
2215 ui.status(_("checking installed modules (%s)...\n")
2216 % os.path.dirname(__file__))
2216 % os.path.dirname(__file__))
2217 try:
2217 try:
2218 import bdiff, mpatch, base85, osutil
2218 import bdiff, mpatch, base85, osutil
2219 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2219 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2220 except Exception, inst:
2220 except Exception, inst:
2221 ui.write(" %s\n" % inst)
2221 ui.write(" %s\n" % inst)
2222 ui.write(_(" One or more extensions could not be found"))
2222 ui.write(_(" One or more extensions could not be found"))
2223 ui.write(_(" (check that you compiled the extensions)\n"))
2223 ui.write(_(" (check that you compiled the extensions)\n"))
2224 problems += 1
2224 problems += 1
2225
2225
2226 # templates
2226 # templates
2227 import templater
2227 import templater
2228 p = templater.templatepath()
2228 p = templater.templatepath()
2229 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2229 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2230 if p:
2230 if p:
2231 m = templater.templatepath("map-cmdline.default")
2231 m = templater.templatepath("map-cmdline.default")
2232 if m:
2232 if m:
2233 # template found, check if it is working
2233 # template found, check if it is working
2234 try:
2234 try:
2235 templater.templater(m)
2235 templater.templater(m)
2236 except Exception, inst:
2236 except Exception, inst:
2237 ui.write(" %s\n" % inst)
2237 ui.write(" %s\n" % inst)
2238 p = None
2238 p = None
2239 else:
2239 else:
2240 ui.write(_(" template 'default' not found\n"))
2240 ui.write(_(" template 'default' not found\n"))
2241 p = None
2241 p = None
2242 else:
2242 else:
2243 ui.write(_(" no template directories found\n"))
2243 ui.write(_(" no template directories found\n"))
2244 if not p:
2244 if not p:
2245 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2245 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2246 problems += 1
2246 problems += 1
2247
2247
2248 # editor
2248 # editor
2249 ui.status(_("checking commit editor...\n"))
2249 ui.status(_("checking commit editor...\n"))
2250 editor = ui.geteditor()
2250 editor = ui.geteditor()
2251 cmdpath = util.findexe(shlex.split(editor)[0])
2251 cmdpath = util.findexe(shlex.split(editor)[0])
2252 if not cmdpath:
2252 if not cmdpath:
2253 if editor == 'vi':
2253 if editor == 'vi':
2254 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2254 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2255 ui.write(_(" (specify a commit editor in your configuration"
2255 ui.write(_(" (specify a commit editor in your configuration"
2256 " file)\n"))
2256 " file)\n"))
2257 else:
2257 else:
2258 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2258 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2259 ui.write(_(" (specify a commit editor in your configuration"
2259 ui.write(_(" (specify a commit editor in your configuration"
2260 " file)\n"))
2260 " file)\n"))
2261 problems += 1
2261 problems += 1
2262
2262
2263 # check username
2263 # check username
2264 ui.status(_("checking username...\n"))
2264 ui.status(_("checking username...\n"))
2265 try:
2265 try:
2266 ui.username()
2266 ui.username()
2267 except util.Abort, e:
2267 except util.Abort, e:
2268 ui.write(" %s\n" % e)
2268 ui.write(" %s\n" % e)
2269 ui.write(_(" (specify a username in your configuration file)\n"))
2269 ui.write(_(" (specify a username in your configuration file)\n"))
2270 problems += 1
2270 problems += 1
2271
2271
2272 if not problems:
2272 if not problems:
2273 ui.status(_("no problems detected\n"))
2273 ui.status(_("no problems detected\n"))
2274 else:
2274 else:
2275 ui.write(_("%s problems detected,"
2275 ui.write(_("%s problems detected,"
2276 " please check your install!\n") % problems)
2276 " please check your install!\n") % problems)
2277
2277
2278 return problems
2278 return problems
2279
2279
2280 @command('debugknown', [], _('REPO ID...'), norepo=True)
2280 @command('debugknown', [], _('REPO ID...'), norepo=True)
2281 def debugknown(ui, repopath, *ids, **opts):
2281 def debugknown(ui, repopath, *ids, **opts):
2282 """test whether node ids are known to a repo
2282 """test whether node ids are known to a repo
2283
2283
2284 Every ID must be a full-length hex node id string. Returns a list of 0s
2284 Every ID must be a full-length hex node id string. Returns a list of 0s
2285 and 1s indicating unknown/known.
2285 and 1s indicating unknown/known.
2286 """
2286 """
2287 repo = hg.peer(ui, opts, repopath)
2287 repo = hg.peer(ui, opts, repopath)
2288 if not repo.capable('known'):
2288 if not repo.capable('known'):
2289 raise util.Abort("known() not supported by target repository")
2289 raise util.Abort("known() not supported by target repository")
2290 flags = repo.known([bin(s) for s in ids])
2290 flags = repo.known([bin(s) for s in ids])
2291 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2291 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2292
2292
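The command's output is simply one character per queried node, in input order; a standalone sketch of the same encoding over a hypothetical flags list:

def encode_known(flags):
    # '1' if the peer knows the node, '0' otherwise.
    return "".join(f and "1" or "0" for f in flags)

print(encode_known([True, False, True]))  # 101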
2293 @command('debuglabelcomplete', [], _('LABEL...'))
2293 @command('debuglabelcomplete', [], _('LABEL...'))
2294 def debuglabelcomplete(ui, repo, *args):
2294 def debuglabelcomplete(ui, repo, *args):
2295 '''complete "labels" - tags, open branch names, bookmark names'''
2295 '''complete "labels" - tags, open branch names, bookmark names'''
2296
2296
2297 labels = set()
2297 labels = set()
2298 labels.update(t[0] for t in repo.tagslist())
2298 labels.update(t[0] for t in repo.tagslist())
2299 labels.update(repo._bookmarks.keys())
2299 labels.update(repo._bookmarks.keys())
2300 labels.update(tag for (tag, heads, tip, closed)
2300 labels.update(tag for (tag, heads, tip, closed)
2301 in repo.branchmap().iterbranches() if not closed)
2301 in repo.branchmap().iterbranches() if not closed)
2302 completions = set()
2302 completions = set()
2303 if not args:
2303 if not args:
2304 args = ['']
2304 args = ['']
2305 for a in args:
2305 for a in args:
2306 completions.update(l for l in labels if l.startswith(a))
2306 completions.update(l for l in labels if l.startswith(a))
2307 ui.write('\n'.join(sorted(completions)))
2307 ui.write('\n'.join(sorted(completions)))
2308 ui.write('\n')
2308 ui.write('\n')
2309
2309
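Completion here is plain prefix matching over the collected label set; a repo-free sketch with invented labels:

def complete_labels(labels, args):
    # No arguments means every label matches, mirroring the [''] fallback.
    if not args:
        args = ['']
    completions = set()
    for a in args:
        completions.update(l for l in labels if l.startswith(a))
    return sorted(completions)

labels = {'default', 'stable', 'tip', 'feature-x'}  # hypothetical labels
print('\n'.join(complete_labels(labels, ['s', 'fe'])))  # feature-x, stable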
2310 @command('debugobsolete',
2310 @command('debugobsolete',
2311 [('', 'flags', 0, _('markers flag')),
2311 [('', 'flags', 0, _('markers flag')),
2312 ] + commitopts2,
2312 ] + commitopts2,
2313 _('[OBSOLETED [REPLACEMENT] [REPL... ]]'))
2313 _('[OBSOLETED [REPLACEMENT] [REPL... ]]'))
2314 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2314 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2315 """create arbitrary obsolete marker
2315 """create arbitrary obsolete marker
2316
2316
2317 With no arguments, displays the list of obsolescence markers."""
2317 With no arguments, displays the list of obsolescence markers."""
2318
2318
2319 def parsenodeid(s):
2319 def parsenodeid(s):
2320 try:
2320 try:
2321 # We do not use revsingle/revrange functions here to accept
2321 # We do not use revsingle/revrange functions here to accept
2322 # arbitrary node identifiers, possibly not present in the
2322 # arbitrary node identifiers, possibly not present in the
2323 # local repository.
2323 # local repository.
2324 n = bin(s)
2324 n = bin(s)
2325 if len(n) != len(nullid):
2325 if len(n) != len(nullid):
2326 raise TypeError()
2326 raise TypeError()
2327 return n
2327 return n
2328 except TypeError:
2328 except TypeError:
2329 raise util.Abort('changeset references must be full hexadecimal '
2329 raise util.Abort('changeset references must be full hexadecimal '
2330 'node identifiers')
2330 'node identifiers')
2331
2331
2332 if precursor is not None:
2332 if precursor is not None:
2333 metadata = {}
2333 metadata = {}
2334 if 'date' in opts:
2334 if 'date' in opts:
2335 metadata['date'] = opts['date']
2335 metadata['date'] = opts['date']
2336 metadata['user'] = opts['user'] or ui.username()
2336 metadata['user'] = opts['user'] or ui.username()
2337 succs = tuple(parsenodeid(succ) for succ in successors)
2337 succs = tuple(parsenodeid(succ) for succ in successors)
2338 l = repo.lock()
2338 l = repo.lock()
2339 try:
2339 try:
2340 tr = repo.transaction('debugobsolete')
2340 tr = repo.transaction('debugobsolete')
2341 try:
2341 try:
2342 repo.obsstore.create(tr, parsenodeid(precursor), succs,
2343 opts['flags'], metadata)
2344 tr.close()
2342 try:
2343 repo.obsstore.create(tr, parsenodeid(precursor), succs,
2344 opts['flags'], metadata)
2345 tr.close()
2346 except ValueError, exc:
2347 raise util.Abort(_('bad obsmarker input: %s') % exc)
2345 finally:
2348 finally:
2346 tr.release()
2349 tr.release()
2347 finally:
2350 finally:
2348 l.release()
2351 l.release()
2349 else:
2352 else:
2350 for m in obsolete.allmarkers(repo):
2353 for m in obsolete.allmarkers(repo):
2351 cmdutil.showmarker(ui, m)
2354 cmdutil.showmarker(ui, m)
2352
2355
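parsenodeid above only accepts full 40-character hex identifiers. A standalone sketch of the same check using binascii instead of Mercurial's bin()/nullid:

import binascii

def parse_full_node(s, nodelen=20):
    # Reject anything that is not exactly 40 hex digits (20 bytes).
    try:
        n = binascii.unhexlify(s)
    except (TypeError, ValueError):
        raise ValueError('changeset references must be full hexadecimal '
                         'node identifiers')
    if len(n) != nodelen:
        raise ValueError('changeset references must be full hexadecimal '
                         'node identifiers')
    return n

print(len(parse_full_node('aa' * 20)))  # 20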
2353 @command('debugpathcomplete',
2356 @command('debugpathcomplete',
2354 [('f', 'full', None, _('complete an entire path')),
2357 [('f', 'full', None, _('complete an entire path')),
2355 ('n', 'normal', None, _('show only normal files')),
2358 ('n', 'normal', None, _('show only normal files')),
2356 ('a', 'added', None, _('show only added files')),
2359 ('a', 'added', None, _('show only added files')),
2357 ('r', 'removed', None, _('show only removed files'))],
2360 ('r', 'removed', None, _('show only removed files'))],
2358 _('FILESPEC...'))
2361 _('FILESPEC...'))
2359 def debugpathcomplete(ui, repo, *specs, **opts):
2362 def debugpathcomplete(ui, repo, *specs, **opts):
2360 '''complete part or all of a tracked path
2363 '''complete part or all of a tracked path
2361
2364
2362 This command supports shells that offer path name completion. It
2365 This command supports shells that offer path name completion. It
2363 currently completes only files already known to the dirstate.
2366 currently completes only files already known to the dirstate.
2364
2367
2365 Completion extends only to the next path segment unless
2368 Completion extends only to the next path segment unless
2366 --full is specified, in which case entire paths are used.'''
2369 --full is specified, in which case entire paths are used.'''
2367
2370
2368 def complete(path, acceptable):
2371 def complete(path, acceptable):
2369 dirstate = repo.dirstate
2372 dirstate = repo.dirstate
2370 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2373 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2371 rootdir = repo.root + os.sep
2374 rootdir = repo.root + os.sep
2372 if spec != repo.root and not spec.startswith(rootdir):
2375 if spec != repo.root and not spec.startswith(rootdir):
2373 return [], []
2376 return [], []
2374 if os.path.isdir(spec):
2377 if os.path.isdir(spec):
2375 spec += '/'
2378 spec += '/'
2376 spec = spec[len(rootdir):]
2379 spec = spec[len(rootdir):]
2377 fixpaths = os.sep != '/'
2380 fixpaths = os.sep != '/'
2378 if fixpaths:
2381 if fixpaths:
2379 spec = spec.replace(os.sep, '/')
2382 spec = spec.replace(os.sep, '/')
2380 speclen = len(spec)
2383 speclen = len(spec)
2381 fullpaths = opts['full']
2384 fullpaths = opts['full']
2382 files, dirs = set(), set()
2385 files, dirs = set(), set()
2383 adddir, addfile = dirs.add, files.add
2386 adddir, addfile = dirs.add, files.add
2384 for f, st in dirstate.iteritems():
2387 for f, st in dirstate.iteritems():
2385 if f.startswith(spec) and st[0] in acceptable:
2388 if f.startswith(spec) and st[0] in acceptable:
2386 if fixpaths:
2389 if fixpaths:
2387 f = f.replace('/', os.sep)
2390 f = f.replace('/', os.sep)
2388 if fullpaths:
2391 if fullpaths:
2389 addfile(f)
2392 addfile(f)
2390 continue
2393 continue
2391 s = f.find(os.sep, speclen)
2394 s = f.find(os.sep, speclen)
2392 if s >= 0:
2395 if s >= 0:
2393 adddir(f[:s])
2396 adddir(f[:s])
2394 else:
2397 else:
2395 addfile(f)
2398 addfile(f)
2396 return files, dirs
2399 return files, dirs
2397
2400
2398 acceptable = ''
2401 acceptable = ''
2399 if opts['normal']:
2402 if opts['normal']:
2400 acceptable += 'nm'
2403 acceptable += 'nm'
2401 if opts['added']:
2404 if opts['added']:
2402 acceptable += 'a'
2405 acceptable += 'a'
2403 if opts['removed']:
2406 if opts['removed']:
2404 acceptable += 'r'
2407 acceptable += 'r'
2405 cwd = repo.getcwd()
2408 cwd = repo.getcwd()
2406 if not specs:
2409 if not specs:
2407 specs = ['.']
2410 specs = ['.']
2408
2411
2409 files, dirs = set(), set()
2412 files, dirs = set(), set()
2410 for spec in specs:
2413 for spec in specs:
2411 f, d = complete(spec, acceptable or 'nmar')
2414 f, d = complete(spec, acceptable or 'nmar')
2412 files.update(f)
2415 files.update(f)
2413 dirs.update(d)
2416 dirs.update(d)
2414 files.update(dirs)
2417 files.update(dirs)
2415 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2418 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2416 ui.write('\n')
2419 ui.write('\n')
2417
2420
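A simplified, repo-free sketch of the completion rule described in the docstring: match tracked paths against a prefix and, unless full paths were requested, stop at the next path separator. The tracked list is invented.

def complete_paths(tracked, spec, full=False, sep='/'):
    files, dirs = set(), set()
    for f in tracked:
        if not f.startswith(spec):
            continue
        if full:
            files.add(f)
            continue
        s = f.find(sep, len(spec))
        if s >= 0:
            dirs.add(f[:s])   # completion stops at the next segment
        else:
            files.add(f)
    return sorted(files | dirs)

tracked = ['lib/a.py', 'lib/sub/b.py', 'README']  # hypothetical dirstate
print(complete_paths(tracked, 'lib/'))            # ['lib/a.py', 'lib/sub']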
2418 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2421 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2419 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2422 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2420 '''access the pushkey key/value protocol
2423 '''access the pushkey key/value protocol
2421
2424
2422 With two args, list the keys in the given namespace.
2425 With two args, list the keys in the given namespace.
2423
2426
2424 With five args, set a key to new if it currently is set to old.
2427 With five args, set a key to new if it currently is set to old.
2425 Reports success or failure.
2428 Reports success or failure.
2426 '''
2429 '''
2427
2430
2428 target = hg.peer(ui, {}, repopath)
2431 target = hg.peer(ui, {}, repopath)
2429 if keyinfo:
2432 if keyinfo:
2430 key, old, new = keyinfo
2433 key, old, new = keyinfo
2431 r = target.pushkey(namespace, key, old, new)
2434 r = target.pushkey(namespace, key, old, new)
2432 ui.status(str(r) + '\n')
2435 ui.status(str(r) + '\n')
2433 return not r
2436 return not r
2434 else:
2437 else:
2435 for k, v in sorted(target.listkeys(namespace).iteritems()):
2438 for k, v in sorted(target.listkeys(namespace).iteritems()):
2436 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2439 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2437 v.encode('string-escape')))
2440 v.encode('string-escape')))
2438
2441
2439 @command('debugpvec', [], _('A B'))
2442 @command('debugpvec', [], _('A B'))
2440 def debugpvec(ui, repo, a, b=None):
2443 def debugpvec(ui, repo, a, b=None):
2441 ca = scmutil.revsingle(repo, a)
2444 ca = scmutil.revsingle(repo, a)
2442 cb = scmutil.revsingle(repo, b)
2445 cb = scmutil.revsingle(repo, b)
2443 pa = pvec.ctxpvec(ca)
2446 pa = pvec.ctxpvec(ca)
2444 pb = pvec.ctxpvec(cb)
2447 pb = pvec.ctxpvec(cb)
2445 if pa == pb:
2448 if pa == pb:
2446 rel = "="
2449 rel = "="
2447 elif pa > pb:
2450 elif pa > pb:
2448 rel = ">"
2451 rel = ">"
2449 elif pa < pb:
2452 elif pa < pb:
2450 rel = "<"
2453 rel = "<"
2451 elif pa | pb:
2454 elif pa | pb:
2452 rel = "|"
2455 rel = "|"
2453 ui.write(_("a: %s\n") % pa)
2456 ui.write(_("a: %s\n") % pa)
2454 ui.write(_("b: %s\n") % pb)
2457 ui.write(_("b: %s\n") % pb)
2455 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2458 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2456 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2459 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2457 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2460 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2458 pa.distance(pb), rel))
2461 pa.distance(pb), rel))
2459
2462
2460 @command('debugrebuilddirstate|debugrebuildstate',
2463 @command('debugrebuilddirstate|debugrebuildstate',
2461 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2464 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2462 _('[-r REV]'))
2465 _('[-r REV]'))
2463 def debugrebuilddirstate(ui, repo, rev):
2466 def debugrebuilddirstate(ui, repo, rev):
2464 """rebuild the dirstate as it would look like for the given revision
2467 """rebuild the dirstate as it would look like for the given revision
2465
2468
2466 If no revision is specified the first current parent will be used.
2469 If no revision is specified the first current parent will be used.
2467
2470
2468 The dirstate will be set to the files of the given revision.
2471 The dirstate will be set to the files of the given revision.
2469 The actual working directory content or existing dirstate
2472 The actual working directory content or existing dirstate
2470 information such as adds or removes is not considered.
2473 information such as adds or removes is not considered.
2471
2474
2472 One use of this command is to make the next :hg:`status` invocation
2475 One use of this command is to make the next :hg:`status` invocation
2473 check the actual file content.
2476 check the actual file content.
2474 """
2477 """
2475 ctx = scmutil.revsingle(repo, rev)
2478 ctx = scmutil.revsingle(repo, rev)
2476 wlock = repo.wlock()
2479 wlock = repo.wlock()
2477 try:
2480 try:
2478 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2481 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2479 finally:
2482 finally:
2480 wlock.release()
2483 wlock.release()
2481
2484
2482 @command('debugrename',
2485 @command('debugrename',
2483 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2486 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2484 _('[-r REV] FILE'))
2487 _('[-r REV] FILE'))
2485 def debugrename(ui, repo, file1, *pats, **opts):
2488 def debugrename(ui, repo, file1, *pats, **opts):
2486 """dump rename information"""
2489 """dump rename information"""
2487
2490
2488 ctx = scmutil.revsingle(repo, opts.get('rev'))
2491 ctx = scmutil.revsingle(repo, opts.get('rev'))
2489 m = scmutil.match(ctx, (file1,) + pats, opts)
2492 m = scmutil.match(ctx, (file1,) + pats, opts)
2490 for abs in ctx.walk(m):
2493 for abs in ctx.walk(m):
2491 fctx = ctx[abs]
2494 fctx = ctx[abs]
2492 o = fctx.filelog().renamed(fctx.filenode())
2495 o = fctx.filelog().renamed(fctx.filenode())
2493 rel = m.rel(abs)
2496 rel = m.rel(abs)
2494 if o:
2497 if o:
2495 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2498 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2496 else:
2499 else:
2497 ui.write(_("%s not renamed\n") % rel)
2500 ui.write(_("%s not renamed\n") % rel)
2498
2501
2499 @command('debugrevlog',
2502 @command('debugrevlog',
2500 [('c', 'changelog', False, _('open changelog')),
2503 [('c', 'changelog', False, _('open changelog')),
2501 ('m', 'manifest', False, _('open manifest')),
2504 ('m', 'manifest', False, _('open manifest')),
2502 ('d', 'dump', False, _('dump index data'))],
2505 ('d', 'dump', False, _('dump index data'))],
2503 _('-c|-m|FILE'),
2506 _('-c|-m|FILE'),
2504 optionalrepo=True)
2507 optionalrepo=True)
2505 def debugrevlog(ui, repo, file_=None, **opts):
2508 def debugrevlog(ui, repo, file_=None, **opts):
2506 """show data and statistics about a revlog"""
2509 """show data and statistics about a revlog"""
2507 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2510 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2508
2511
2509 if opts.get("dump"):
2512 if opts.get("dump"):
2510 numrevs = len(r)
2513 numrevs = len(r)
2511 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2514 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2512 " rawsize totalsize compression heads\n")
2515 " rawsize totalsize compression heads\n")
2513 ts = 0
2516 ts = 0
2514 heads = set()
2517 heads = set()
2515 for rev in xrange(numrevs):
2518 for rev in xrange(numrevs):
2516 dbase = r.deltaparent(rev)
2519 dbase = r.deltaparent(rev)
2517 if dbase == -1:
2520 if dbase == -1:
2518 dbase = rev
2521 dbase = rev
2519 cbase = r.chainbase(rev)
2522 cbase = r.chainbase(rev)
2520 p1, p2 = r.parentrevs(rev)
2523 p1, p2 = r.parentrevs(rev)
2521 rs = r.rawsize(rev)
2524 rs = r.rawsize(rev)
2522 ts = ts + rs
2525 ts = ts + rs
2523 heads -= set(r.parentrevs(rev))
2526 heads -= set(r.parentrevs(rev))
2524 heads.add(rev)
2527 heads.add(rev)
2525 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d %11d %5d\n" %
2528 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d %11d %5d\n" %
2526 (rev, p1, p2, r.start(rev), r.end(rev),
2529 (rev, p1, p2, r.start(rev), r.end(rev),
2527 r.start(dbase), r.start(cbase),
2530 r.start(dbase), r.start(cbase),
2528 r.start(p1), r.start(p2),
2531 r.start(p1), r.start(p2),
2529 rs, ts, ts / r.end(rev), len(heads)))
2532 rs, ts, ts / r.end(rev), len(heads)))
2530 return 0
2533 return 0
2531
2534
2532 v = r.version
2535 v = r.version
2533 format = v & 0xFFFF
2536 format = v & 0xFFFF
2534 flags = []
2537 flags = []
2535 gdelta = False
2538 gdelta = False
2536 if v & revlog.REVLOGNGINLINEDATA:
2539 if v & revlog.REVLOGNGINLINEDATA:
2537 flags.append('inline')
2540 flags.append('inline')
2538 if v & revlog.REVLOGGENERALDELTA:
2541 if v & revlog.REVLOGGENERALDELTA:
2539 gdelta = True
2542 gdelta = True
2540 flags.append('generaldelta')
2543 flags.append('generaldelta')
2541 if not flags:
2544 if not flags:
2542 flags = ['(none)']
2545 flags = ['(none)']
2543
2546
2544 nummerges = 0
2547 nummerges = 0
2545 numfull = 0
2548 numfull = 0
2546 numprev = 0
2549 numprev = 0
2547 nump1 = 0
2550 nump1 = 0
2548 nump2 = 0
2551 nump2 = 0
2549 numother = 0
2552 numother = 0
2550 nump1prev = 0
2553 nump1prev = 0
2551 nump2prev = 0
2554 nump2prev = 0
2552 chainlengths = []
2555 chainlengths = []
2553
2556
2554 datasize = [None, 0, 0L]
2557 datasize = [None, 0, 0L]
2555 fullsize = [None, 0, 0L]
2558 fullsize = [None, 0, 0L]
2556 deltasize = [None, 0, 0L]
2559 deltasize = [None, 0, 0L]
2557
2560
2558 def addsize(size, l):
2561 def addsize(size, l):
2559 if l[0] is None or size < l[0]:
2562 if l[0] is None or size < l[0]:
2560 l[0] = size
2563 l[0] = size
2561 if size > l[1]:
2564 if size > l[1]:
2562 l[1] = size
2565 l[1] = size
2563 l[2] += size
2566 l[2] += size
2564
2567
2565 numrevs = len(r)
2568 numrevs = len(r)
2566 for rev in xrange(numrevs):
2569 for rev in xrange(numrevs):
2567 p1, p2 = r.parentrevs(rev)
2570 p1, p2 = r.parentrevs(rev)
2568 delta = r.deltaparent(rev)
2571 delta = r.deltaparent(rev)
2569 if format > 0:
2572 if format > 0:
2570 addsize(r.rawsize(rev), datasize)
2573 addsize(r.rawsize(rev), datasize)
2571 if p2 != nullrev:
2574 if p2 != nullrev:
2572 nummerges += 1
2575 nummerges += 1
2573 size = r.length(rev)
2576 size = r.length(rev)
2574 if delta == nullrev:
2577 if delta == nullrev:
2575 chainlengths.append(0)
2578 chainlengths.append(0)
2576 numfull += 1
2579 numfull += 1
2577 addsize(size, fullsize)
2580 addsize(size, fullsize)
2578 else:
2581 else:
2579 chainlengths.append(chainlengths[delta] + 1)
2582 chainlengths.append(chainlengths[delta] + 1)
2580 addsize(size, deltasize)
2583 addsize(size, deltasize)
2581 if delta == rev - 1:
2584 if delta == rev - 1:
2582 numprev += 1
2585 numprev += 1
2583 if delta == p1:
2586 if delta == p1:
2584 nump1prev += 1
2587 nump1prev += 1
2585 elif delta == p2:
2588 elif delta == p2:
2586 nump2prev += 1
2589 nump2prev += 1
2587 elif delta == p1:
2590 elif delta == p1:
2588 nump1 += 1
2591 nump1 += 1
2589 elif delta == p2:
2592 elif delta == p2:
2590 nump2 += 1
2593 nump2 += 1
2591 elif delta != nullrev:
2594 elif delta != nullrev:
2592 numother += 1
2595 numother += 1
2593
2596
2594 # Adjust size min value for empty cases
2597 # Adjust size min value for empty cases
2595 for size in (datasize, fullsize, deltasize):
2598 for size in (datasize, fullsize, deltasize):
2596 if size[0] is None:
2599 if size[0] is None:
2597 size[0] = 0
2600 size[0] = 0
2598
2601
2599 numdeltas = numrevs - numfull
2602 numdeltas = numrevs - numfull
2600 numoprev = numprev - nump1prev - nump2prev
2603 numoprev = numprev - nump1prev - nump2prev
2601 totalrawsize = datasize[2]
2604 totalrawsize = datasize[2]
2602 datasize[2] /= numrevs
2605 datasize[2] /= numrevs
2603 fulltotal = fullsize[2]
2606 fulltotal = fullsize[2]
2604 fullsize[2] /= numfull
2607 fullsize[2] /= numfull
2605 deltatotal = deltasize[2]
2608 deltatotal = deltasize[2]
2606 if numrevs - numfull > 0:
2609 if numrevs - numfull > 0:
2607 deltasize[2] /= numrevs - numfull
2610 deltasize[2] /= numrevs - numfull
2608 totalsize = fulltotal + deltatotal
2611 totalsize = fulltotal + deltatotal
2609 avgchainlen = sum(chainlengths) / numrevs
2612 avgchainlen = sum(chainlengths) / numrevs
2610 compratio = totalrawsize / totalsize
2613 compratio = totalrawsize / totalsize
2611
2614
2612 basedfmtstr = '%%%dd\n'
2615 basedfmtstr = '%%%dd\n'
2613 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2616 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2614
2617
2615 def dfmtstr(max):
2618 def dfmtstr(max):
2616 return basedfmtstr % len(str(max))
2619 return basedfmtstr % len(str(max))
2617 def pcfmtstr(max, padding=0):
2620 def pcfmtstr(max, padding=0):
2618 return basepcfmtstr % (len(str(max)), ' ' * padding)
2621 return basepcfmtstr % (len(str(max)), ' ' * padding)
2619
2622
2620 def pcfmt(value, total):
2623 def pcfmt(value, total):
2621 return (value, 100 * float(value) / total)
2624 return (value, 100 * float(value) / total)
2622
2625
2623 ui.write(('format : %d\n') % format)
2626 ui.write(('format : %d\n') % format)
2624 ui.write(('flags : %s\n') % ', '.join(flags))
2627 ui.write(('flags : %s\n') % ', '.join(flags))
2625
2628
2626 ui.write('\n')
2629 ui.write('\n')
2627 fmt = pcfmtstr(totalsize)
2630 fmt = pcfmtstr(totalsize)
2628 fmt2 = dfmtstr(totalsize)
2631 fmt2 = dfmtstr(totalsize)
2629 ui.write(('revisions : ') + fmt2 % numrevs)
2632 ui.write(('revisions : ') + fmt2 % numrevs)
2630 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2633 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2631 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2634 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2632 ui.write(('revisions : ') + fmt2 % numrevs)
2635 ui.write(('revisions : ') + fmt2 % numrevs)
2633 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2636 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2634 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2637 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2635 ui.write(('revision size : ') + fmt2 % totalsize)
2638 ui.write(('revision size : ') + fmt2 % totalsize)
2636 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2639 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2637 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2640 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2638
2641
2639 ui.write('\n')
2642 ui.write('\n')
2640 fmt = dfmtstr(max(avgchainlen, compratio))
2643 fmt = dfmtstr(max(avgchainlen, compratio))
2641 ui.write(('avg chain length : ') + fmt % avgchainlen)
2644 ui.write(('avg chain length : ') + fmt % avgchainlen)
2642 ui.write(('compression ratio : ') + fmt % compratio)
2645 ui.write(('compression ratio : ') + fmt % compratio)
2643
2646
2644 if format > 0:
2647 if format > 0:
2645 ui.write('\n')
2648 ui.write('\n')
2646 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2649 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2647 % tuple(datasize))
2650 % tuple(datasize))
2648 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2651 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2649 % tuple(fullsize))
2652 % tuple(fullsize))
2650 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2653 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2651 % tuple(deltasize))
2654 % tuple(deltasize))
2652
2655
2653 if numdeltas > 0:
2656 if numdeltas > 0:
2654 ui.write('\n')
2657 ui.write('\n')
2655 fmt = pcfmtstr(numdeltas)
2658 fmt = pcfmtstr(numdeltas)
2656 fmt2 = pcfmtstr(numdeltas, 4)
2659 fmt2 = pcfmtstr(numdeltas, 4)
2657 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2660 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2658 if numprev > 0:
2661 if numprev > 0:
2659 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2662 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2660 numprev))
2663 numprev))
2661 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2664 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2662 numprev))
2665 numprev))
2663 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2666 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2664 numprev))
2667 numprev))
2665 if gdelta:
2668 if gdelta:
2666 ui.write(('deltas against p1 : ')
2669 ui.write(('deltas against p1 : ')
2667 + fmt % pcfmt(nump1, numdeltas))
2670 + fmt % pcfmt(nump1, numdeltas))
2668 ui.write(('deltas against p2 : ')
2671 ui.write(('deltas against p2 : ')
2669 + fmt % pcfmt(nump2, numdeltas))
2672 + fmt % pcfmt(nump2, numdeltas))
2670 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2673 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2671 numdeltas))
2674 numdeltas))
2672
2675
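The statistics block sizes its format strings from the largest value so columns line up; a tiny sketch of the pcfmtstr/pcfmt idea with invented counts (the trailing newline of the real format string is omitted here):

def pcfmtstr(maxvalue, padding=0):
    # Field width follows the widest value that will be printed.
    return '%%%dd %s(%%5.2f%%%%)' % (len(str(maxvalue)), ' ' * padding)

def pcfmt(value, total):
    return (value, 100 * float(value) / total)

numdeltas, numprev = 200, 150  # hypothetical counts
fmt = pcfmtstr(numdeltas)
print('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))  # 150 (75.00%)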
2673 @command('debugrevspec',
2676 @command('debugrevspec',
2674 [('', 'optimize', None, _('print parsed tree after optimizing'))],
2677 [('', 'optimize', None, _('print parsed tree after optimizing'))],
2675 _('REVSPEC'))
2678 _('REVSPEC'))
2676 def debugrevspec(ui, repo, expr, **opts):
2679 def debugrevspec(ui, repo, expr, **opts):
2677 """parse and apply a revision specification
2680 """parse and apply a revision specification
2678
2681
2679 Use --verbose to print the parsed tree before and after aliases
2682 Use --verbose to print the parsed tree before and after aliases
2680 expansion.
2683 expansion.
2681 """
2684 """
2682 if ui.verbose:
2685 if ui.verbose:
2683 tree = revset.parse(expr)[0]
2686 tree = revset.parse(expr)[0]
2684 ui.note(revset.prettyformat(tree), "\n")
2687 ui.note(revset.prettyformat(tree), "\n")
2685 newtree = revset.findaliases(ui, tree)
2688 newtree = revset.findaliases(ui, tree)
2686 if newtree != tree:
2689 if newtree != tree:
2687 ui.note(revset.prettyformat(newtree), "\n")
2690 ui.note(revset.prettyformat(newtree), "\n")
2688 if opts["optimize"]:
2691 if opts["optimize"]:
2689 weight, optimizedtree = revset.optimize(newtree, True)
2692 weight, optimizedtree = revset.optimize(newtree, True)
2690 ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
2693 ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
2691 func = revset.match(ui, expr)
2694 func = revset.match(ui, expr)
2692 for c in func(repo, revset.spanset(repo)):
2695 for c in func(repo, revset.spanset(repo)):
2693 ui.write("%s\n" % c)
2696 ui.write("%s\n" % c)
2694
2697
2695 @command('debugsetparents', [], _('REV1 [REV2]'))
2698 @command('debugsetparents', [], _('REV1 [REV2]'))
2696 def debugsetparents(ui, repo, rev1, rev2=None):
2699 def debugsetparents(ui, repo, rev1, rev2=None):
2697 """manually set the parents of the current working directory
2700 """manually set the parents of the current working directory
2698
2701
2699 This is useful for writing repository conversion tools, but should
2702 This is useful for writing repository conversion tools, but should
2700 be used with care.
2703 be used with care.
2701
2704
2702 Returns 0 on success.
2705 Returns 0 on success.
2703 """
2706 """
2704
2707
2705 r1 = scmutil.revsingle(repo, rev1).node()
2708 r1 = scmutil.revsingle(repo, rev1).node()
2706 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2709 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2707
2710
2708 wlock = repo.wlock()
2711 wlock = repo.wlock()
2709 try:
2712 try:
2710 repo.setparents(r1, r2)
2713 repo.setparents(r1, r2)
2711 finally:
2714 finally:
2712 wlock.release()
2715 wlock.release()
2713
2716
2714 @command('debugdirstate|debugstate',
2717 @command('debugdirstate|debugstate',
2715 [('', 'nodates', None, _('do not display the saved mtime')),
2718 [('', 'nodates', None, _('do not display the saved mtime')),
2716 ('', 'datesort', None, _('sort by saved mtime'))],
2719 ('', 'datesort', None, _('sort by saved mtime'))],
2717 _('[OPTION]...'))
2720 _('[OPTION]...'))
2718 def debugstate(ui, repo, nodates=None, datesort=None):
2721 def debugstate(ui, repo, nodates=None, datesort=None):
2719 """show the contents of the current dirstate"""
2722 """show the contents of the current dirstate"""
2720 timestr = ""
2723 timestr = ""
2721 showdate = not nodates
2724 showdate = not nodates
2722 if datesort:
2725 if datesort:
2723 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2726 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2724 else:
2727 else:
2725 keyfunc = None # sort by filename
2728 keyfunc = None # sort by filename
2726 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2729 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2727 if showdate:
2730 if showdate:
2728 if ent[3] == -1:
2731 if ent[3] == -1:
2729 # Pad or slice to locale representation
2732 # Pad or slice to locale representation
2730 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2733 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2731 time.localtime(0)))
2734 time.localtime(0)))
2732 timestr = 'unset'
2735 timestr = 'unset'
2733 timestr = (timestr[:locale_len] +
2736 timestr = (timestr[:locale_len] +
2734 ' ' * (locale_len - len(timestr)))
2737 ' ' * (locale_len - len(timestr)))
2735 else:
2738 else:
2736 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2739 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2737 time.localtime(ent[3]))
2740 time.localtime(ent[3]))
2738 if ent[1] & 020000:
2741 if ent[1] & 020000:
2739 mode = 'lnk'
2742 mode = 'lnk'
2740 else:
2743 else:
2741 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2744 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2742 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2745 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2743 for f in repo.dirstate.copies():
2746 for f in repo.dirstate.copies():
2744 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2747 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2745
2748
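The mode column mixes a symlink test on the file-type bits with a permission mask; a standalone sketch of that formatting (the umask is hard-coded here, where the command uses util.umask):

import stat

def format_mode(st_mode, umask=0x12):          # 0x12 == 0o022
    # Symlinks print as 'lnk'; everything else prints its permission
    # bits masked by the umask, as the loop above does.
    if stat.S_ISLNK(st_mode):
        return 'lnk'
    return '%3o' % (st_mode & 0x1FF & ~umask)  # 0x1FF == 0o777

print(format_mode(0x81B4))  # regular file with rw-rw-r-- bits -> 644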
2746 @command('debugsub',
2749 @command('debugsub',
2747 [('r', 'rev', '',
2750 [('r', 'rev', '',
2748 _('revision to check'), _('REV'))],
2751 _('revision to check'), _('REV'))],
2749 _('[-r REV] [REV]'))
2752 _('[-r REV] [REV]'))
2750 def debugsub(ui, repo, rev=None):
2753 def debugsub(ui, repo, rev=None):
2751 ctx = scmutil.revsingle(repo, rev, None)
2754 ctx = scmutil.revsingle(repo, rev, None)
2752 for k, v in sorted(ctx.substate.items()):
2755 for k, v in sorted(ctx.substate.items()):
2753 ui.write(('path %s\n') % k)
2756 ui.write(('path %s\n') % k)
2754 ui.write((' source %s\n') % v[0])
2757 ui.write((' source %s\n') % v[0])
2755 ui.write((' revision %s\n') % v[1])
2758 ui.write((' revision %s\n') % v[1])
2756
2759
2757 @command('debugsuccessorssets',
2760 @command('debugsuccessorssets',
2758 [],
2761 [],
2759 _('[REV]'))
2762 _('[REV]'))
2760 def debugsuccessorssets(ui, repo, *revs):
2763 def debugsuccessorssets(ui, repo, *revs):
2761 """show set of successors for revision
2764 """show set of successors for revision
2762
2765
2763 A successors set of changeset A is a consistent group of revisions that
2766 A successors set of changeset A is a consistent group of revisions that
2764 succeed A. It contains non-obsolete changesets only.
2767 succeed A. It contains non-obsolete changesets only.
2765
2768
2766 In most cases a changeset A has a single successors set containing a single
2769 In most cases a changeset A has a single successors set containing a single
2767 successor (changeset A replaced by A').
2770 successor (changeset A replaced by A').
2768
2771
2769 A changeset that is made obsolete with no successors is called "pruned".
2772 A changeset that is made obsolete with no successors is called "pruned".
2770 Such changesets have no successors sets at all.
2773 Such changesets have no successors sets at all.
2771
2774
2772 A changeset that has been "split" will have a successors set containing
2775 A changeset that has been "split" will have a successors set containing
2773 more than one successor.
2776 more than one successor.
2774
2777
2775 A changeset that has been rewritten in multiple different ways is called
2778 A changeset that has been rewritten in multiple different ways is called
2776 "divergent". Such changesets have multiple successor sets (each of which
2779 "divergent". Such changesets have multiple successor sets (each of which
2777 may also be split, i.e. have multiple successors).
2780 may also be split, i.e. have multiple successors).
2778
2781
2779 Results are displayed as follows::
2782 Results are displayed as follows::
2780
2783
2781 <rev1>
2784 <rev1>
2782 <successors-1A>
2785 <successors-1A>
2783 <rev2>
2786 <rev2>
2784 <successors-2A>
2787 <successors-2A>
2785 <successors-2B1> <successors-2B2> <successors-2B3>
2788 <successors-2B1> <successors-2B2> <successors-2B3>
2786
2789
2787 Here rev2 has two possible (i.e. divergent) successors sets. The first
2790 Here rev2 has two possible (i.e. divergent) successors sets. The first
2788 holds one element, whereas the second holds three (i.e. the changeset has
2791 holds one element, whereas the second holds three (i.e. the changeset has
2789 been split).
2792 been split).
2790 """
2793 """
2791 # passed to successorssets caching computation from one call to another
2794 # passed to successorssets caching computation from one call to another
2792 cache = {}
2795 cache = {}
2793 ctx2str = str
2796 ctx2str = str
2794 node2str = short
2797 node2str = short
2795 if ui.debug():
2798 if ui.debug():
2796 def ctx2str(ctx):
2799 def ctx2str(ctx):
2797 return ctx.hex()
2800 return ctx.hex()
2798 node2str = hex
2801 node2str = hex
2799 for rev in scmutil.revrange(repo, revs):
2802 for rev in scmutil.revrange(repo, revs):
2800 ctx = repo[rev]
2803 ctx = repo[rev]
2801 ui.write('%s\n'% ctx2str(ctx))
2804 ui.write('%s\n'% ctx2str(ctx))
2802 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2805 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2803 if succsset:
2806 if succsset:
2804 ui.write(' ')
2807 ui.write(' ')
2805 ui.write(node2str(succsset[0]))
2808 ui.write(node2str(succsset[0]))
2806 for node in succsset[1:]:
2809 for node in succsset[1:]:
2807 ui.write(' ')
2810 ui.write(' ')
2808 ui.write(node2str(node))
2811 ui.write(node2str(node))
2809 ui.write('\n')
2812 ui.write('\n')
2810
2813
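The documented output (a revision header, then one indented line per successors set) is easy to sketch without a repository; the rev number and short hashes below are invented:

def render_successorssets(rev, succssets):
    # An empty list of sets means the changeset was pruned and prints
    # nothing under its header.
    lines = [str(rev)]
    for succsset in succssets:
        lines.append('    ' + ' '.join(succsset))
    return '\n'.join(lines)

# A "divergent" changeset: two successors sets, the second one split in three.
print(render_successorssets(2,
                            [['1f0dee641bb7'],
                             ['2d1a3c5e7f90', '3b2c4d6e8fa1', '4c3d5e7f9ab2']]))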
2811 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
2814 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
2812 def debugwalk(ui, repo, *pats, **opts):
2815 def debugwalk(ui, repo, *pats, **opts):
2813 """show how files match on given patterns"""
2816 """show how files match on given patterns"""
2814 m = scmutil.match(repo[None], pats, opts)
2817 m = scmutil.match(repo[None], pats, opts)
2815 items = list(repo.walk(m))
2818 items = list(repo.walk(m))
2816 if not items:
2819 if not items:
2817 return
2820 return
2818 f = lambda fn: fn
2821 f = lambda fn: fn
2819 if ui.configbool('ui', 'slash') and os.sep != '/':
2822 if ui.configbool('ui', 'slash') and os.sep != '/':
2820 f = lambda fn: util.normpath(fn)
2823 f = lambda fn: util.normpath(fn)
2821 fmt = 'f %%-%ds %%-%ds %%s' % (
2824 fmt = 'f %%-%ds %%-%ds %%s' % (
2822 max([len(abs) for abs in items]),
2825 max([len(abs) for abs in items]),
2823 max([len(m.rel(abs)) for abs in items]))
2826 max([len(m.rel(abs)) for abs in items]))
2824 for abs in items:
2827 for abs in items:
2825 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2828 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2826 ui.write("%s\n" % line.rstrip())
2829 ui.write("%s\n" % line.rstrip())
2827
2830
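Column widths above are derived from the longest absolute and relative paths in the result; the same trick in isolation, with invented matches:

items = ['lib/a.py', 'README', 'docs/guide.txt']  # hypothetical absolute paths
rels = ['../a.py', '../../README', 'guide.txt']   # hypothetical relative forms
fmt = 'f %%-%ds %%-%ds %%s' % (max(len(a) for a in items),
                               max(len(r) for r in rels))
for abs_, rel in zip(items, rels):
    print((fmt % (abs_, rel, '')).rstrip())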
2828 @command('debugwireargs',
2831 @command('debugwireargs',
2829 [('', 'three', '', 'three'),
2832 [('', 'three', '', 'three'),
2830 ('', 'four', '', 'four'),
2833 ('', 'four', '', 'four'),
2831 ('', 'five', '', 'five'),
2834 ('', 'five', '', 'five'),
2832 ] + remoteopts,
2835 ] + remoteopts,
2833 _('REPO [OPTIONS]... [ONE [TWO]]'),
2836 _('REPO [OPTIONS]... [ONE [TWO]]'),
2834 norepo=True)
2837 norepo=True)
2835 def debugwireargs(ui, repopath, *vals, **opts):
2838 def debugwireargs(ui, repopath, *vals, **opts):
2836 repo = hg.peer(ui, opts, repopath)
2839 repo = hg.peer(ui, opts, repopath)
2837 for opt in remoteopts:
2840 for opt in remoteopts:
2838 del opts[opt[1]]
2841 del opts[opt[1]]
2839 args = {}
2842 args = {}
2840 for k, v in opts.iteritems():
2843 for k, v in opts.iteritems():
2841 if v:
2844 if v:
2842 args[k] = v
2845 args[k] = v
2843 # run twice to check that we don't mess up the stream for the next command
2846 # run twice to check that we don't mess up the stream for the next command
2844 res1 = repo.debugwireargs(*vals, **args)
2847 res1 = repo.debugwireargs(*vals, **args)
2845 res2 = repo.debugwireargs(*vals, **args)
2848 res2 = repo.debugwireargs(*vals, **args)
2846 ui.write("%s\n" % res1)
2849 ui.write("%s\n" % res1)
2847 if res1 != res2:
2850 if res1 != res2:
2848 ui.warn("%s\n" % res2)
2851 ui.warn("%s\n" % res2)
2849
2852
2850 @command('^diff',
2853 @command('^diff',
2851 [('r', 'rev', [], _('revision'), _('REV')),
2854 [('r', 'rev', [], _('revision'), _('REV')),
2852 ('c', 'change', '', _('change made by revision'), _('REV'))
2855 ('c', 'change', '', _('change made by revision'), _('REV'))
2853 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2856 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2854 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
2857 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
2855 inferrepo=True)
2858 inferrepo=True)
2856 def diff(ui, repo, *pats, **opts):
2859 def diff(ui, repo, *pats, **opts):
2857 """diff repository (or selected files)
2860 """diff repository (or selected files)
2858
2861
2859 Show differences between revisions for the specified files.
2862 Show differences between revisions for the specified files.
2860
2863
2861 Differences between files are shown using the unified diff format.
2864 Differences between files are shown using the unified diff format.
2862
2865
2863 .. note::
2866 .. note::
2864
2867
2865 diff may generate unexpected results for merges, as it will
2868 diff may generate unexpected results for merges, as it will
2866 default to comparing against the working directory's first
2869 default to comparing against the working directory's first
2867 parent changeset if no revisions are specified.
2870 parent changeset if no revisions are specified.
2868
2871
2869 When two revision arguments are given, then changes are shown
2872 When two revision arguments are given, then changes are shown
2870 between those revisions. If only one revision is specified then
2873 between those revisions. If only one revision is specified then
2871 that revision is compared to the working directory, and, when no
2874 that revision is compared to the working directory, and, when no
2872 revisions are specified, the working directory files are compared
2875 revisions are specified, the working directory files are compared
2873 to its parent.
2876 to its parent.
2874
2877
2875 Alternatively you can specify -c/--change with a revision to see
2878 Alternatively you can specify -c/--change with a revision to see
2876 the changes in that changeset relative to its first parent.
2879 the changes in that changeset relative to its first parent.
2877
2880
2878 Without the -a/--text option, diff will avoid generating diffs of
2881 Without the -a/--text option, diff will avoid generating diffs of
2879 files it detects as binary. With -a, diff will generate a diff
2882 files it detects as binary. With -a, diff will generate a diff
2880 anyway, probably with undesirable results.
2883 anyway, probably with undesirable results.
2881
2884
2882 Use the -g/--git option to generate diffs in the git extended diff
2885 Use the -g/--git option to generate diffs in the git extended diff
2883 format. For more information, read :hg:`help diffs`.
2886 format. For more information, read :hg:`help diffs`.
2884
2887
2885 .. container:: verbose
2888 .. container:: verbose
2886
2889
2887 Examples:
2890 Examples:
2888
2891
2889 - compare a file in the current working directory to its parent::
2892 - compare a file in the current working directory to its parent::
2890
2893
2891 hg diff foo.c
2894 hg diff foo.c
2892
2895
2893 - compare two historical versions of a directory, with rename info::
2896 - compare two historical versions of a directory, with rename info::
2894
2897
2895 hg diff --git -r 1.0:1.2 lib/
2898 hg diff --git -r 1.0:1.2 lib/
2896
2899
2897 - get change stats relative to the last change on some date::
2900 - get change stats relative to the last change on some date::
2898
2901
2899 hg diff --stat -r "date('may 2')"
2902 hg diff --stat -r "date('may 2')"
2900
2903
2901 - diff all newly-added files that contain a keyword::
2904 - diff all newly-added files that contain a keyword::
2902
2905
2903 hg diff "set:added() and grep(GNU)"
2906 hg diff "set:added() and grep(GNU)"
2904
2907
2905 - compare a revision and its parents::
2908 - compare a revision and its parents::
2906
2909
2907 hg diff -c 9353 # compare against first parent
2910 hg diff -c 9353 # compare against first parent
2908 hg diff -r 9353^:9353 # same using revset syntax
2911 hg diff -r 9353^:9353 # same using revset syntax
2909 hg diff -r 9353^2:9353 # compare against the second parent
2912 hg diff -r 9353^2:9353 # compare against the second parent
2910
2913
2911 Returns 0 on success.
2914 Returns 0 on success.
2912 """
2915 """
2913
2916
2914 revs = opts.get('rev')
2917 revs = opts.get('rev')
2915 change = opts.get('change')
2918 change = opts.get('change')
2916 stat = opts.get('stat')
2919 stat = opts.get('stat')
2917 reverse = opts.get('reverse')
2920 reverse = opts.get('reverse')
2918
2921
2919 if revs and change:
2922 if revs and change:
2920 msg = _('cannot specify --rev and --change at the same time')
2923 msg = _('cannot specify --rev and --change at the same time')
2921 raise util.Abort(msg)
2924 raise util.Abort(msg)
2922 elif change:
2925 elif change:
2923 node2 = scmutil.revsingle(repo, change, None).node()
2926 node2 = scmutil.revsingle(repo, change, None).node()
2924 node1 = repo[node2].p1().node()
2927 node1 = repo[node2].p1().node()
2925 else:
2928 else:
2926 node1, node2 = scmutil.revpair(repo, revs)
2929 node1, node2 = scmutil.revpair(repo, revs)
2927
2930
2928 if reverse:
2931 if reverse:
2929 node1, node2 = node2, node1
2932 node1, node2 = node2, node1
2930
2933
2931 diffopts = patch.diffopts(ui, opts)
2934 diffopts = patch.diffopts(ui, opts)
2932 m = scmutil.match(repo[node2], pats, opts)
2935 m = scmutil.match(repo[node2], pats, opts)
2933 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
2936 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
2934 listsubrepos=opts.get('subrepos'))
2937 listsubrepos=opts.get('subrepos'))
2935
2938
2936 @command('^export',
2939 @command('^export',
2937 [('o', 'output', '',
2940 [('o', 'output', '',
2938 _('print output to file with formatted name'), _('FORMAT')),
2941 _('print output to file with formatted name'), _('FORMAT')),
2939 ('', 'switch-parent', None, _('diff against the second parent')),
2942 ('', 'switch-parent', None, _('diff against the second parent')),
2940 ('r', 'rev', [], _('revisions to export'), _('REV')),
2943 ('r', 'rev', [], _('revisions to export'), _('REV')),
2941 ] + diffopts,
2944 ] + diffopts,
2942 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
2945 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
2943 def export(ui, repo, *changesets, **opts):
2946 def export(ui, repo, *changesets, **opts):
2944 """dump the header and diffs for one or more changesets
2947 """dump the header and diffs for one or more changesets
2945
2948
2946 Print the changeset header and diffs for one or more revisions.
2949 Print the changeset header and diffs for one or more revisions.
2947 If no revision is given, the parent of the working directory is used.
2950 If no revision is given, the parent of the working directory is used.
2948
2951
2949 The information shown in the changeset header is: author, date,
2952 The information shown in the changeset header is: author, date,
2950 branch name (if non-default), changeset hash, parent(s) and commit
2953 branch name (if non-default), changeset hash, parent(s) and commit
2951 comment.
2954 comment.
2952
2955
2953 .. note::
2956 .. note::
2954
2957
2955 export may generate unexpected diff output for merge
2958 export may generate unexpected diff output for merge
2956 changesets, as it will compare the merge changeset against its
2959 changesets, as it will compare the merge changeset against its
2957 first parent only.
2960 first parent only.
2958
2961
2959 Output may be written to a file, in which case the name of the file is
2962 Output may be written to a file, in which case the name of the file is
2960 given using a format string. The formatting rules are as follows:
2963 given using a format string. The formatting rules are as follows:
2961
2964
2962 :``%%``: literal "%" character
2965 :``%%``: literal "%" character
2963 :``%H``: changeset hash (40 hexadecimal digits)
2966 :``%H``: changeset hash (40 hexadecimal digits)
2964 :``%N``: number of patches being generated
2967 :``%N``: number of patches being generated
2965 :``%R``: changeset revision number
2968 :``%R``: changeset revision number
2966 :``%b``: basename of the exporting repository
2969 :``%b``: basename of the exporting repository
2967 :``%h``: short-form changeset hash (12 hexadecimal digits)
2970 :``%h``: short-form changeset hash (12 hexadecimal digits)
2968 :``%m``: first line of the commit message (only alphanumeric characters)
2971 :``%m``: first line of the commit message (only alphanumeric characters)
2969 :``%n``: zero-padded sequence number, starting at 1
2972 :``%n``: zero-padded sequence number, starting at 1
2970 :``%r``: zero-padded changeset revision number
2973 :``%r``: zero-padded changeset revision number
2971
2974
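    The rules above are plain token substitution on the -o/--output pattern.
    The following is a minimal, self-contained sketch of such an expansion (an
    illustration only, not the code export itself uses; the padding widths and
    the %m sanitization are assumptions made for the example)::

      import re

      def expandname(pattern, node, rev, total, seqno, reponame, firstline, revwidth=4):
          subs = {
              '%': '%',
              'H': node,                                # full 40-digit hash
              'N': str(total),                          # number of patches
              'R': str(rev),                            # revision number
              'b': reponame,                            # repository basename
              'h': node[:12],                           # short-form hash
              'm': re.sub(r'\W+', '_', firstline).strip('_'),
              'n': str(seqno).zfill(len(str(total))),   # zero-padded sequence
              'r': str(rev).zfill(revwidth),            # zero-padded revision
          }
          out, i = [], 0
          while i < len(pattern):
              if pattern[i] == '%' and i + 1 < len(pattern) and pattern[i + 1] in subs:
                  out.append(subs[pattern[i + 1]])
                  i += 2
              else:
                  out.append(pattern[i])
                  i += 1
          return ''.join(out)

      # expandname('%n-%m.patch', 'f' * 40, 9353, 3, 1, 'hg', 'fix export crash')
      # returns '1-fix_export_crash.patch'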
2972 Without the -a/--text option, export will avoid generating diffs
2975 Without the -a/--text option, export will avoid generating diffs
2973 of files it detects as binary. With -a, export will generate a
2976 of files it detects as binary. With -a, export will generate a
2974 diff anyway, probably with undesirable results.
2977 diff anyway, probably with undesirable results.
2975
2978
2976 Use the -g/--git option to generate diffs in the git extended diff
2979 Use the -g/--git option to generate diffs in the git extended diff
2977 format. See :hg:`help diffs` for more information.
2980 format. See :hg:`help diffs` for more information.
2978
2981
2979 With the --switch-parent option, the diff will be against the
2982 With the --switch-parent option, the diff will be against the
2980 second parent. This can be useful for reviewing a merge.
2983 second parent. This can be useful for reviewing a merge.
2981
2984
2982 .. container:: verbose
2985 .. container:: verbose
2983
2986
2984 Examples:
2987 Examples:
2985
2988
2986 - use export and import to transplant a bugfix to the current
2989 - use export and import to transplant a bugfix to the current
2987 branch::
2990 branch::
2988
2991
2989 hg export -r 9353 | hg import -
2992 hg export -r 9353 | hg import -
2990
2993
2991 - export all the changesets between two revisions to a file with
2994 - export all the changesets between two revisions to a file with
2992 rename information::
2995 rename information::
2993
2996
2994 hg export --git -r 123:150 > changes.txt
2997 hg export --git -r 123:150 > changes.txt
2995
2998
2996 - split outgoing changes into a series of patches with
2999 - split outgoing changes into a series of patches with
2997 descriptive names::
3000 descriptive names::
2998
3001
2999 hg export -r "outgoing()" -o "%n-%m.patch"
3002 hg export -r "outgoing()" -o "%n-%m.patch"
3000
3003
3001 Returns 0 on success.
3004 Returns 0 on success.
3002 """
3005 """
3003 changesets += tuple(opts.get('rev', []))
3006 changesets += tuple(opts.get('rev', []))
3004 if not changesets:
3007 if not changesets:
3005 changesets = ['.']
3008 changesets = ['.']
3006 revs = scmutil.revrange(repo, changesets)
3009 revs = scmutil.revrange(repo, changesets)
3007 if not revs:
3010 if not revs:
3008 raise util.Abort(_("export requires at least one changeset"))
3011 raise util.Abort(_("export requires at least one changeset"))
3009 if len(revs) > 1:
3012 if len(revs) > 1:
3010 ui.note(_('exporting patches:\n'))
3013 ui.note(_('exporting patches:\n'))
3011 else:
3014 else:
3012 ui.note(_('exporting patch:\n'))
3015 ui.note(_('exporting patch:\n'))
3013 cmdutil.export(repo, revs, template=opts.get('output'),
3016 cmdutil.export(repo, revs, template=opts.get('output'),
3014 switch_parent=opts.get('switch_parent'),
3017 switch_parent=opts.get('switch_parent'),
3015 opts=patch.diffopts(ui, opts))
3018 opts=patch.diffopts(ui, opts))
3016
3019
3017 @command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
3020 @command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
3018 def forget(ui, repo, *pats, **opts):
3021 def forget(ui, repo, *pats, **opts):
3019 """forget the specified files on the next commit
3022 """forget the specified files on the next commit
3020
3023
3021 Mark the specified files so they will no longer be tracked
3024 Mark the specified files so they will no longer be tracked
3022 after the next commit.
3025 after the next commit.
3023
3026
3024 This only removes files from the current branch, not from the
3027 This only removes files from the current branch, not from the
3025 entire project history, and it does not delete them from the
3028 entire project history, and it does not delete them from the
3026 working directory.
3029 working directory.
3027
3030
3028 To undo a forget before the next commit, see :hg:`add`.
3031 To undo a forget before the next commit, see :hg:`add`.
3029
3032
3030 .. container:: verbose
3033 .. container:: verbose
3031
3034
3032 Examples:
3035 Examples:
3033
3036
3034 - forget newly-added binary files::
3037 - forget newly-added binary files::
3035
3038
3036 hg forget "set:added() and binary()"
3039 hg forget "set:added() and binary()"
3037
3040
3038 - forget files that would be excluded by .hgignore::
3041 - forget files that would be excluded by .hgignore::
3039
3042
3040 hg forget "set:hgignore()"
3043 hg forget "set:hgignore()"
3041
3044
3042 Returns 0 on success.
3045 Returns 0 on success.
3043 """
3046 """
3044
3047
3045 if not pats:
3048 if not pats:
3046 raise util.Abort(_('no files specified'))
3049 raise util.Abort(_('no files specified'))
3047
3050
3048 m = scmutil.match(repo[None], pats, opts)
3051 m = scmutil.match(repo[None], pats, opts)
3049 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
3052 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
3050 return rejected and 1 or 0
3053 return rejected and 1 or 0
3051
3054
3052 @command(
3055 @command(
3053 'graft',
3056 'graft',
3054 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3057 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3055 ('c', 'continue', False, _('resume interrupted graft')),
3058 ('c', 'continue', False, _('resume interrupted graft')),
3056 ('e', 'edit', False, _('invoke editor on commit messages')),
3059 ('e', 'edit', False, _('invoke editor on commit messages')),
3057 ('', 'log', None, _('append graft info to log message')),
3060 ('', 'log', None, _('append graft info to log message')),
3058 ('f', 'force', False, _('force graft')),
3061 ('f', 'force', False, _('force graft')),
3059 ('D', 'currentdate', False,
3062 ('D', 'currentdate', False,
3060 _('record the current date as commit date')),
3063 _('record the current date as commit date')),
3061 ('U', 'currentuser', False,
3064 ('U', 'currentuser', False,
3062 _('record the current user as committer'))]
3065 _('record the current user as committer'))]
3063 + commitopts2 + mergetoolopts + dryrunopts,
3066 + commitopts2 + mergetoolopts + dryrunopts,
3064 _('[OPTION]... [-r] REV...'))
3067 _('[OPTION]... [-r] REV...'))
3065 def graft(ui, repo, *revs, **opts):
3068 def graft(ui, repo, *revs, **opts):
3066 '''copy changes from other branches onto the current branch
3069 '''copy changes from other branches onto the current branch
3067
3070
3068 This command uses Mercurial's merge logic to copy individual
3071 This command uses Mercurial's merge logic to copy individual
3069 changes from other branches without merging branches in the
3072 changes from other branches without merging branches in the
3070 history graph. This is sometimes known as 'backporting' or
3073 history graph. This is sometimes known as 'backporting' or
3071 'cherry-picking'. By default, graft will copy user, date, and
3074 'cherry-picking'. By default, graft will copy user, date, and
3072 description from the source changesets.
3075 description from the source changesets.
3073
3076
3074 Changesets that are ancestors of the current revision, that have
3077 Changesets that are ancestors of the current revision, that have
3075 already been grafted, or that are merges will be skipped.
3078 already been grafted, or that are merges will be skipped.
3076
3079
3077 If --log is specified, log messages will have a comment appended
3080 If --log is specified, log messages will have a comment appended
3078 of the form::
3081 of the form::
3079
3082
3080 (grafted from CHANGESETHASH)
3083 (grafted from CHANGESETHASH)
3081
3084
3082 If --force is specified, revisions will be grafted even if they
3085 If --force is specified, revisions will be grafted even if they
3083 are already ancestors of or have been grafted to the destination.
3086 are already ancestors of or have been grafted to the destination.
3084 This is useful when the revisions have since been backed out.
3087 This is useful when the revisions have since been backed out.
3085
3088
3086 If a graft merge results in conflicts, the graft process is
3089 If a graft merge results in conflicts, the graft process is
3087 interrupted so that the current merge can be manually resolved.
3090 interrupted so that the current merge can be manually resolved.
3088 Once all conflicts are addressed, the graft process can be
3091 Once all conflicts are addressed, the graft process can be
3089 continued with the -c/--continue option.
3092 continued with the -c/--continue option.
3090
3093
3091 .. note::
3094 .. note::
3092
3095
3093 The -c/--continue option does not reapply earlier options, except
3096 The -c/--continue option does not reapply earlier options, except
3094 for --force.
3097 for --force.
3095
3098
3096 .. container:: verbose
3099 .. container:: verbose
3097
3100
3098 Examples:
3101 Examples:
3099
3102
3100 - copy a single change to the stable branch and edit its description::
3103 - copy a single change to the stable branch and edit its description::
3101
3104
3102 hg update stable
3105 hg update stable
3103 hg graft --edit 9393
3106 hg graft --edit 9393
3104
3107
3105 - graft a range of changesets with one exception, updating dates::
3108 - graft a range of changesets with one exception, updating dates::
3106
3109
3107 hg graft -D "2085::2093 and not 2091"
3110 hg graft -D "2085::2093 and not 2091"
3108
3111
3109 - continue a graft after resolving conflicts::
3112 - continue a graft after resolving conflicts::
3110
3113
3111 hg graft -c
3114 hg graft -c
3112
3115
3113 - show the source of a grafted changeset::
3116 - show the source of a grafted changeset::
3114
3117
3115 hg log --debug -r .
3118 hg log --debug -r .
3116
3119
3117 See :hg:`help revisions` and :hg:`help revsets` for more about
3120 See :hg:`help revisions` and :hg:`help revsets` for more about
3118 specifying revisions.
3121 specifying revisions.
3119
3122
3120 Returns 0 on successful completion.
3123 Returns 0 on successful completion.
3121 '''
3124 '''
3122
3125
3123 revs = list(revs)
3126 revs = list(revs)
3124 revs.extend(opts['rev'])
3127 revs.extend(opts['rev'])
3125
3128
3126 if not opts.get('user') and opts.get('currentuser'):
3129 if not opts.get('user') and opts.get('currentuser'):
3127 opts['user'] = ui.username()
3130 opts['user'] = ui.username()
3128 if not opts.get('date') and opts.get('currentdate'):
3131 if not opts.get('date') and opts.get('currentdate'):
3129 opts['date'] = "%d %d" % util.makedate()
3132 opts['date'] = "%d %d" % util.makedate()
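    # util.makedate() returns a (unixtime, timezone offset) pair; it is stored
    # here in Mercurial's internal "unixtime offset" date string form.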
3130
3133
3131 editor = cmdutil.getcommiteditor(editform='graft', **opts)
3134 editor = cmdutil.getcommiteditor(editform='graft', **opts)
3132
3135
3133 cont = False
3136 cont = False
3134 if opts['continue']:
3137 if opts['continue']:
3135 cont = True
3138 cont = True
3136 if revs:
3139 if revs:
3137 raise util.Abort(_("can't specify --continue and revisions"))
3140 raise util.Abort(_("can't specify --continue and revisions"))
3138 # read in unfinished revisions
3141 # read in unfinished revisions
3139 try:
3142 try:
3140 nodes = repo.opener.read('graftstate').splitlines()
3143 nodes = repo.opener.read('graftstate').splitlines()
3141 revs = [repo[node].rev() for node in nodes]
3144 revs = [repo[node].rev() for node in nodes]
3142 except IOError, inst:
3145 except IOError, inst:
3143 if inst.errno != errno.ENOENT:
3146 if inst.errno != errno.ENOENT:
3144 raise
3147 raise
3145 raise util.Abort(_("no graft state found, can't continue"))
3148 raise util.Abort(_("no graft state found, can't continue"))
3146 else:
3149 else:
3147 cmdutil.checkunfinished(repo)
3150 cmdutil.checkunfinished(repo)
3148 cmdutil.bailifchanged(repo)
3151 cmdutil.bailifchanged(repo)
3149 if not revs:
3152 if not revs:
3150 raise util.Abort(_('no revisions specified'))
3153 raise util.Abort(_('no revisions specified'))
3151 revs = scmutil.revrange(repo, revs)
3154 revs = scmutil.revrange(repo, revs)
3152
3155
3153 # check for merges
3156 # check for merges
3154 for rev in repo.revs('%ld and merge()', revs):
3157 for rev in repo.revs('%ld and merge()', revs):
3155 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3158 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3156 revs.remove(rev)
3159 revs.remove(rev)
3157 if not revs:
3160 if not revs:
3158 return -1
3161 return -1
3159
3162
3160 # Don't check in the --continue case, in effect retaining --force across
3163 # Don't check in the --continue case, in effect retaining --force across
3161 # --continues. That's because without --force, any revisions we decided to
3164 # --continues. That's because without --force, any revisions we decided to
3162 # skip would have been filtered out here, so they wouldn't have made their
3165 # skip would have been filtered out here, so they wouldn't have made their
3163 # way to the graftstate. With --force, any revisions we would have otherwise
3166 # way to the graftstate. With --force, any revisions we would have otherwise
3164 # skipped would not have been filtered out, and if they hadn't been applied
3167 # skipped would not have been filtered out, and if they hadn't been applied
3165 # already, they'd have been in the graftstate.
3168 # already, they'd have been in the graftstate.
3166 if not (cont or opts.get('force')):
3169 if not (cont or opts.get('force')):
3167 # check for ancestors of dest branch
3170 # check for ancestors of dest branch
3168 crev = repo['.'].rev()
3171 crev = repo['.'].rev()
3169 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3172 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3170 # Cannot use x.remove(y) on smart set, this has to be a list.
3173 # Cannot use x.remove(y) on smart set, this has to be a list.
3171 # XXX make this lazy in the future
3174 # XXX make this lazy in the future
3172 revs = list(revs)
3175 revs = list(revs)
3173 # don't mutate while iterating, create a copy
3176 # don't mutate while iterating, create a copy
3174 for rev in list(revs):
3177 for rev in list(revs):
3175 if rev in ancestors:
3178 if rev in ancestors:
3176 ui.warn(_('skipping ancestor revision %s\n') % rev)
3179 ui.warn(_('skipping ancestor revision %s\n') % rev)
3177 # XXX remove on list is slow
3180 # XXX remove on list is slow
3178 revs.remove(rev)
3181 revs.remove(rev)
3179 if not revs:
3182 if not revs:
3180 return -1
3183 return -1
3181
3184
3182 # analyze revs for earlier grafts
3185 # analyze revs for earlier grafts
3183 ids = {}
3186 ids = {}
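    # ids maps each candidate changeset's hex (and, if the changeset is itself
    # a graft, the 'source' hash recorded in its extras) to its local revision,
    # so duplicates of the same change can be recognized below.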
3184 for ctx in repo.set("%ld", revs):
3187 for ctx in repo.set("%ld", revs):
3185 ids[ctx.hex()] = ctx.rev()
3188 ids[ctx.hex()] = ctx.rev()
3186 n = ctx.extra().get('source')
3189 n = ctx.extra().get('source')
3187 if n:
3190 if n:
3188 ids[n] = ctx.rev()
3191 ids[n] = ctx.rev()
3189
3192
3190 # check ancestors for earlier grafts
3193 # check ancestors for earlier grafts
3191 ui.debug('scanning for duplicate grafts\n')
3194 ui.debug('scanning for duplicate grafts\n')
3192
3195
3193 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3196 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3194 ctx = repo[rev]
3197 ctx = repo[rev]
3195 n = ctx.extra().get('source')
3198 n = ctx.extra().get('source')
3196 if n in ids:
3199 if n in ids:
3197 r = repo[n].rev()
3200 r = repo[n].rev()
3198 if r in revs:
3201 if r in revs:
3199 ui.warn(_('skipping revision %s (already grafted to %s)\n')
3202 ui.warn(_('skipping revision %s (already grafted to %s)\n')
3200 % (r, rev))
3203 % (r, rev))
3201 revs.remove(r)
3204 revs.remove(r)
3202 elif ids[n] in revs:
3205 elif ids[n] in revs:
3203 ui.warn(_('skipping already grafted revision %s '
3206 ui.warn(_('skipping already grafted revision %s '
3204 '(%s also has origin %d)\n') % (ids[n], rev, r))
3207 '(%s also has origin %d)\n') % (ids[n], rev, r))
3205 revs.remove(ids[n])
3208 revs.remove(ids[n])
3206 elif ctx.hex() in ids:
3209 elif ctx.hex() in ids:
3207 r = ids[ctx.hex()]
3210 r = ids[ctx.hex()]
3208 ui.warn(_('skipping already grafted revision %s '
3211 ui.warn(_('skipping already grafted revision %s '
3209 '(was grafted from %d)\n') % (r, rev))
3212 '(was grafted from %d)\n') % (r, rev))
3210 revs.remove(r)
3213 revs.remove(r)
3211 if not revs:
3214 if not revs:
3212 return -1
3215 return -1
3213
3216
3214 wlock = repo.wlock()
3217 wlock = repo.wlock()
3215 try:
3218 try:
3216 current = repo['.']
3219 current = repo['.']
3217 for pos, ctx in enumerate(repo.set("%ld", revs)):
3220 for pos, ctx in enumerate(repo.set("%ld", revs)):
3218
3221
3219 ui.status(_('grafting revision %s\n') % ctx.rev())
3222 ui.status(_('grafting revision %s\n') % ctx.rev())
3220 if opts.get('dry_run'):
3223 if opts.get('dry_run'):
3221 continue
3224 continue
3222
3225
3223 source = ctx.extra().get('source')
3226 source = ctx.extra().get('source')
3224 if not source:
3227 if not source:
3225 source = ctx.hex()
3228 source = ctx.hex()
3226 extra = {'source': source}
3229 extra = {'source': source}
3227 user = ctx.user()
3230 user = ctx.user()
3228 if opts.get('user'):
3231 if opts.get('user'):
3229 user = opts['user']
3232 user = opts['user']
3230 date = ctx.date()
3233 date = ctx.date()
3231 if opts.get('date'):
3234 if opts.get('date'):
3232 date = opts['date']
3235 date = opts['date']
3233 message = ctx.description()
3236 message = ctx.description()
3234 if opts.get('log'):
3237 if opts.get('log'):
3235 message += '\n(grafted from %s)' % ctx.hex()
3238 message += '\n(grafted from %s)' % ctx.hex()
3236
3239
3237 # we don't merge the first commit when continuing
3240 # we don't merge the first commit when continuing
3238 if not cont:
3241 if not cont:
3239 # perform the graft merge with p1(rev) as 'ancestor'
3242 # perform the graft merge with p1(rev) as 'ancestor'
3240 try:
3243 try:
3241 # ui.forcemerge is an internal variable, do not document
3244 # ui.forcemerge is an internal variable, do not document
3242 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
3245 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
3243 'graft')
3246 'graft')
3244 stats = mergemod.update(repo, ctx.node(), True, True, False,
3247 stats = mergemod.update(repo, ctx.node(), True, True, False,
3245 ctx.p1().node(),
3248 ctx.p1().node(),
3246 labels=['local', 'graft'])
3249 labels=['local', 'graft'])
3247 finally:
3250 finally:
3248 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
3251 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
3249 # report any conflicts
3252 # report any conflicts
3250 if stats and stats[3] > 0:
3253 if stats and stats[3] > 0:
3251 # write out state for --continue
3254 # write out state for --continue
3252 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3255 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3253 repo.opener.write('graftstate', ''.join(nodelines))
3256 repo.opener.write('graftstate', ''.join(nodelines))
3254 raise util.Abort(
3257 raise util.Abort(
3255 _("unresolved conflicts, can't continue"),
3258 _("unresolved conflicts, can't continue"),
3256 hint=_('use hg resolve and hg graft --continue'))
3259 hint=_('use hg resolve and hg graft --continue'))
3257 else:
3260 else:
3258 cont = False
3261 cont = False
3259
3262
3260 # drop the second merge parent
3263 # drop the second merge parent
3261 repo.setparents(current.node(), nullid)
3264 repo.setparents(current.node(), nullid)
3262 repo.dirstate.write()
3265 repo.dirstate.write()
3263 # fix up dirstate for copies and renames
3266 # fix up dirstate for copies and renames
3264 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
3267 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
3265
3268
3266 # commit
3269 # commit
3267 node = repo.commit(text=message, user=user,
3270 node = repo.commit(text=message, user=user,
3268 date=date, extra=extra, editor=editor)
3271 date=date, extra=extra, editor=editor)
3269 if node is None:
3272 if node is None:
3270 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
3273 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
3271 else:
3274 else:
3272 current = repo[node]
3275 current = repo[node]
3273 finally:
3276 finally:
3274 wlock.release()
3277 wlock.release()
3275
3278
3276 # remove state when we complete successfully
3279 # remove state when we complete successfully
3277 if not opts.get('dry_run'):
3280 if not opts.get('dry_run'):
3278 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3281 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3279
3282
3280 return 0
3283 return 0
3281
3284
3282 @command('grep',
3285 @command('grep',
3283 [('0', 'print0', None, _('end fields with NUL')),
3286 [('0', 'print0', None, _('end fields with NUL')),
3284 ('', 'all', None, _('print all revisions that match')),
3287 ('', 'all', None, _('print all revisions that match')),
3285 ('a', 'text', None, _('treat all files as text')),
3288 ('a', 'text', None, _('treat all files as text')),
3286 ('f', 'follow', None,
3289 ('f', 'follow', None,
3287 _('follow changeset history,'
3290 _('follow changeset history,'
3288 ' or file history across copies and renames')),
3291 ' or file history across copies and renames')),
3289 ('i', 'ignore-case', None, _('ignore case when matching')),
3292 ('i', 'ignore-case', None, _('ignore case when matching')),
3290 ('l', 'files-with-matches', None,
3293 ('l', 'files-with-matches', None,
3291 _('print only filenames and revisions that match')),
3294 _('print only filenames and revisions that match')),
3292 ('n', 'line-number', None, _('print matching line numbers')),
3295 ('n', 'line-number', None, _('print matching line numbers')),
3293 ('r', 'rev', [],
3296 ('r', 'rev', [],
3294 _('only search files changed within revision range'), _('REV')),
3297 _('only search files changed within revision range'), _('REV')),
3295 ('u', 'user', None, _('list the author (long with -v)')),
3298 ('u', 'user', None, _('list the author (long with -v)')),
3296 ('d', 'date', None, _('list the date (short with -q)')),
3299 ('d', 'date', None, _('list the date (short with -q)')),
3297 ] + walkopts,
3300 ] + walkopts,
3298 _('[OPTION]... PATTERN [FILE]...'),
3301 _('[OPTION]... PATTERN [FILE]...'),
3299 inferrepo=True)
3302 inferrepo=True)
3300 def grep(ui, repo, pattern, *pats, **opts):
3303 def grep(ui, repo, pattern, *pats, **opts):
3301 """search for a pattern in specified files and revisions
3304 """search for a pattern in specified files and revisions
3302
3305
3303 Search revisions of files for a regular expression.
3306 Search revisions of files for a regular expression.
3304
3307
3305 This command behaves differently from Unix grep. It only accepts
3308 This command behaves differently from Unix grep. It only accepts
3306 Python/Perl regexps. It searches repository history, not the
3309 Python/Perl regexps. It searches repository history, not the
3307 working directory. It always prints the revision number in which a
3310 working directory. It always prints the revision number in which a
3308 match appears.
3311 match appears.
3309
3312
3310 By default, grep only prints output for the first revision of a
3313 By default, grep only prints output for the first revision of a
3311 file in which it finds a match. To get it to print every revision
3314 file in which it finds a match. To get it to print every revision
3312 that contains a change in match status ("-" for a match that
3315 that contains a change in match status ("-" for a match that
3313 becomes a non-match, or "+" for a non-match that becomes a match),
3316 becomes a non-match, or "+" for a non-match that becomes a match),
3314 use the --all flag.
3317 use the --all flag.
3315
3318
3316 Returns 0 if a match is found, 1 otherwise.
3319 Returns 0 if a match is found, 1 otherwise.
3317 """
3320 """
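    # The "+"/"-" match-status classification described above can be sketched
    # on its own; this is an illustration only, assuming the matching lines of
    # the parent and child revisions are already available as lists of strings.
    # It mirrors the difflib-based difflinestates helper further down (difflib
    # is already imported by this module).
    def _matchchanges(parentlines, childlines):
        sm = difflib.SequenceMatcher(None, parentlines, childlines)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for line in parentlines[alo:ahi]:
                    yield '-', line      # a match that went away
            if tag in ('insert', 'replace'):
                for line in childlines[blo:bhi]:
                    yield '+', line      # a newly appearing match
    # e.g. list(_matchchanges(['foo(1)'], ['foo(1)', 'foo(2)'])) == [('+', 'foo(2)')]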
3318 reflags = re.M
3321 reflags = re.M
3319 if opts.get('ignore_case'):
3322 if opts.get('ignore_case'):
3320 reflags |= re.I
3323 reflags |= re.I
3321 try:
3324 try:
3322 regexp = util.re.compile(pattern, reflags)
3325 regexp = util.re.compile(pattern, reflags)
3323 except re.error, inst:
3326 except re.error, inst:
3324 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3327 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3325 return 1
3328 return 1
3326 sep, eol = ':', '\n'
3329 sep, eol = ':', '\n'
3327 if opts.get('print0'):
3330 if opts.get('print0'):
3328 sep = eol = '\0'
3331 sep = eol = '\0'
3329
3332
3330 getfile = util.lrucachefunc(repo.file)
3333 getfile = util.lrucachefunc(repo.file)
3331
3334
3332 def matchlines(body):
3335 def matchlines(body):
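        # yields (line number, match start column, match end column, line text)
        # for every occurrence of the pattern in the given file body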
3333 begin = 0
3336 begin = 0
3334 linenum = 0
3337 linenum = 0
3335 while begin < len(body):
3338 while begin < len(body):
3336 match = regexp.search(body, begin)
3339 match = regexp.search(body, begin)
3337 if not match:
3340 if not match:
3338 break
3341 break
3339 mstart, mend = match.span()
3342 mstart, mend = match.span()
3340 linenum += body.count('\n', begin, mstart) + 1
3343 linenum += body.count('\n', begin, mstart) + 1
3341 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3344 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3342 begin = body.find('\n', mend) + 1 or len(body) + 1
3345 begin = body.find('\n', mend) + 1 or len(body) + 1
3343 lend = begin - 1
3346 lend = begin - 1
3344 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3347 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3345
3348
3346 class linestate(object):
3349 class linestate(object):
3347 def __init__(self, line, linenum, colstart, colend):
3350 def __init__(self, line, linenum, colstart, colend):
3348 self.line = line
3351 self.line = line
3349 self.linenum = linenum
3352 self.linenum = linenum
3350 self.colstart = colstart
3353 self.colstart = colstart
3351 self.colend = colend
3354 self.colend = colend
3352
3355
3353 def __hash__(self):
3356 def __hash__(self):
3354 return hash((self.linenum, self.line))
3357 return hash((self.linenum, self.line))
3355
3358
3356 def __eq__(self, other):
3359 def __eq__(self, other):
3357 return self.line == other.line
3360 return self.line == other.line
3358
3361
3359 def __iter__(self):
3362 def __iter__(self):
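            # split the stored line into alternating unmatched/matched segments,
            # labelling matched spans 'grep.match' so the ui can highlight them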
3360 yield (self.line[:self.colstart], '')
3363 yield (self.line[:self.colstart], '')
3361 yield (self.line[self.colstart:self.colend], 'grep.match')
3364 yield (self.line[self.colstart:self.colend], 'grep.match')
3362 rest = self.line[self.colend:]
3365 rest = self.line[self.colend:]
3363 while rest != '':
3366 while rest != '':
3364 match = regexp.search(rest)
3367 match = regexp.search(rest)
3365 if not match:
3368 if not match:
3366 yield (rest, '')
3369 yield (rest, '')
3367 break
3370 break
3368 mstart, mend = match.span()
3371 mstart, mend = match.span()
3369 yield (rest[:mstart], '')
3372 yield (rest[:mstart], '')
3370 yield (rest[mstart:mend], 'grep.match')
3373 yield (rest[mstart:mend], 'grep.match')
3371 rest = rest[mend:]
3374 rest = rest[mend:]
3372
3375
3373 matches = {}
3376 matches = {}
3374 copies = {}
3377 copies = {}
3375 def grepbody(fn, rev, body):
3378 def grepbody(fn, rev, body):
3376 matches[rev].setdefault(fn, [])
3379 matches[rev].setdefault(fn, [])
3377 m = matches[rev][fn]
3380 m = matches[rev][fn]
3378 for lnum, cstart, cend, line in matchlines(body):
3381 for lnum, cstart, cend, line in matchlines(body):
3379 s = linestate(line, lnum, cstart, cend)
3382 s = linestate(line, lnum, cstart, cend)
3380 m.append(s)
3383 m.append(s)
3381
3384
3382 def difflinestates(a, b):
3385 def difflinestates(a, b):
3383 sm = difflib.SequenceMatcher(None, a, b)
3386 sm = difflib.SequenceMatcher(None, a, b)
3384 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3387 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3385 if tag == 'insert':
3388 if tag == 'insert':
3386 for i in xrange(blo, bhi):
3389 for i in xrange(blo, bhi):
3387 yield ('+', b[i])
3390 yield ('+', b[i])
3388 elif tag == 'delete':
3391 elif tag == 'delete':
3389 for i in xrange(alo, ahi):
3392 for i in xrange(alo, ahi):
3390 yield ('-', a[i])
3393 yield ('-', a[i])
3391 elif tag == 'replace':
3394 elif tag == 'replace':
3392 for i in xrange(alo, ahi):
3395 for i in xrange(alo, ahi):
3393 yield ('-', a[i])
3396 yield ('-', a[i])
3394 for i in xrange(blo, bhi):
3397 for i in xrange(blo, bhi):
3395 yield ('+', b[i])
3398 yield ('+', b[i])
3396
3399
3397 def display(fn, ctx, pstates, states):
3400 def display(fn, ctx, pstates, states):
3398 rev = ctx.rev()
3401 rev = ctx.rev()
3399 datefunc = ui.quiet and util.shortdate or util.datestr
3402 datefunc = ui.quiet and util.shortdate or util.datestr
3400 found = False
3403 found = False
3401 @util.cachefunc
3404 @util.cachefunc
3402 def binary():
3405 def binary():
3403 flog = getfile(fn)
3406 flog = getfile(fn)
3404 return util.binary(flog.read(ctx.filenode(fn)))
3407 return util.binary(flog.read(ctx.filenode(fn)))
3405
3408
3406 if opts.get('all'):
3409 if opts.get('all'):
3407 iter = difflinestates(pstates, states)
3410 iter = difflinestates(pstates, states)
3408 else:
3411 else:
3409 iter = [('', l) for l in states]
3412 iter = [('', l) for l in states]
3410 for change, l in iter:
3413 for change, l in iter:
3411 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
3414 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
3412
3415
3413 if opts.get('line_number'):
3416 if opts.get('line_number'):
3414 cols.append((str(l.linenum), 'grep.linenumber'))
3417 cols.append((str(l.linenum), 'grep.linenumber'))
3415 if opts.get('all'):
3418 if opts.get('all'):
3416 cols.append((change, 'grep.change'))
3419 cols.append((change, 'grep.change'))
3417 if opts.get('user'):
3420 if opts.get('user'):
3418 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
3421 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
3419 if opts.get('date'):
3422 if opts.get('date'):
3420 cols.append((datefunc(ctx.date()), 'grep.date'))
3423 cols.append((datefunc(ctx.date()), 'grep.date'))
3421 for col, label in cols[:-1]:
3424 for col, label in cols[:-1]:
3422 ui.write(col, label=label)
3425 ui.write(col, label=label)
3423 ui.write(sep, label='grep.sep')
3426 ui.write(sep, label='grep.sep')
3424 ui.write(cols[-1][0], label=cols[-1][1])
3427 ui.write(cols[-1][0], label=cols[-1][1])
3425 if not opts.get('files_with_matches'):
3428 if not opts.get('files_with_matches'):
3426 ui.write(sep, label='grep.sep')
3429 ui.write(sep, label='grep.sep')
3427 if not opts.get('text') and binary():
3430 if not opts.get('text') and binary():
3428 ui.write(" Binary file matches")
3431 ui.write(" Binary file matches")
3429 else:
3432 else:
3430 for s, label in l:
3433 for s, label in l:
3431 ui.write(s, label=label)
3434 ui.write(s, label=label)
3432 ui.write(eol)
3435 ui.write(eol)
3433 found = True
3436 found = True
3434 if opts.get('files_with_matches'):
3437 if opts.get('files_with_matches'):
3435 break
3438 break
3436 return found
3439 return found
3437
3440
3438 skip = {}
3441 skip = {}
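    # skip: files (and their rename sources) for which a match has already been
    # reported; without --all, further revisions of these files encountered
    # later in the walk are not shown.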
3439 revfiles = {}
3442 revfiles = {}
3440 matchfn = scmutil.match(repo[None], pats, opts)
3443 matchfn = scmutil.match(repo[None], pats, opts)
3441 found = False
3444 found = False
3442 follow = opts.get('follow')
3445 follow = opts.get('follow')
3443
3446
3444 def prep(ctx, fns):
3447 def prep(ctx, fns):
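        # prep is invoked by walkchangerevs for every revision visited: it
        # records the matching lines of each changed file in that revision and
        # in the corresponding parent version (following renames with --follow).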
3445 rev = ctx.rev()
3448 rev = ctx.rev()
3446 pctx = ctx.p1()
3449 pctx = ctx.p1()
3447 parent = pctx.rev()
3450 parent = pctx.rev()
3448 matches.setdefault(rev, {})
3451 matches.setdefault(rev, {})
3449 matches.setdefault(parent, {})
3452 matches.setdefault(parent, {})
3450 files = revfiles.setdefault(rev, [])
3453 files = revfiles.setdefault(rev, [])
3451 for fn in fns:
3454 for fn in fns:
3452 flog = getfile(fn)
3455 flog = getfile(fn)
3453 try:
3456 try:
3454 fnode = ctx.filenode(fn)
3457 fnode = ctx.filenode(fn)
3455 except error.LookupError:
3458 except error.LookupError:
3456 continue
3459 continue
3457
3460
3458 copied = flog.renamed(fnode)
3461 copied = flog.renamed(fnode)
3459 copy = follow and copied and copied[0]
3462 copy = follow and copied and copied[0]
3460 if copy:
3463 if copy:
3461 copies.setdefault(rev, {})[fn] = copy
3464 copies.setdefault(rev, {})[fn] = copy
3462 if fn in skip:
3465 if fn in skip:
3463 if copy:
3466 if copy:
3464 skip[copy] = True
3467 skip[copy] = True
3465 continue
3468 continue
3466 files.append(fn)
3469 files.append(fn)
3467
3470
3468 if fn not in matches[rev]:
3471 if fn not in matches[rev]:
3469 grepbody(fn, rev, flog.read(fnode))
3472 grepbody(fn, rev, flog.read(fnode))
3470
3473
3471 pfn = copy or fn
3474 pfn = copy or fn
3472 if pfn not in matches[parent]:
3475 if pfn not in matches[parent]:
3473 try:
3476 try:
3474 fnode = pctx.filenode(pfn)
3477 fnode = pctx.filenode(pfn)
3475 grepbody(pfn, parent, flog.read(fnode))
3478 grepbody(pfn, parent, flog.read(fnode))
3476 except error.LookupError:
3479 except error.LookupError:
3477 pass
3480 pass
3478
3481
3479 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3482 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3480 rev = ctx.rev()
3483 rev = ctx.rev()
3481 parent = ctx.p1().rev()
3484 parent = ctx.p1().rev()
3482 for fn in sorted(revfiles.get(rev, [])):
3485 for fn in sorted(revfiles.get(rev, [])):
3483 states = matches[rev][fn]
3486 states = matches[rev][fn]
3484 copy = copies.get(rev, {}).get(fn)
3487 copy = copies.get(rev, {}).get(fn)
3485 if fn in skip:
3488 if fn in skip:
3486 if copy:
3489 if copy:
3487 skip[copy] = True
3490 skip[copy] = True
3488 continue
3491 continue
3489 pstates = matches.get(parent, {}).get(copy or fn, [])
3492 pstates = matches.get(parent, {}).get(copy or fn, [])
3490 if pstates or states:
3493 if pstates or states:
3491 r = display(fn, ctx, pstates, states)
3494 r = display(fn, ctx, pstates, states)
3492 found = found or r
3495 found = found or r
3493 if r and not opts.get('all'):
3496 if r and not opts.get('all'):
3494 skip[fn] = True
3497 skip[fn] = True
3495 if copy:
3498 if copy:
3496 skip[copy] = True
3499 skip[copy] = True
3497 del matches[rev]
3500 del matches[rev]
3498 del revfiles[rev]
3501 del revfiles[rev]
3499
3502
3500 return not found
3503 return not found
3501
3504
3502 @command('heads',
3505 @command('heads',
3503 [('r', 'rev', '',
3506 [('r', 'rev', '',
3504 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3507 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3505 ('t', 'topo', False, _('show topological heads only')),
3508 ('t', 'topo', False, _('show topological heads only')),
3506 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3509 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3507 ('c', 'closed', False, _('show normal and closed branch heads')),
3510 ('c', 'closed', False, _('show normal and closed branch heads')),
3508 ] + templateopts,
3511 ] + templateopts,
3509 _('[-ct] [-r STARTREV] [REV]...'))
3512 _('[-ct] [-r STARTREV] [REV]...'))
3510 def heads(ui, repo, *branchrevs, **opts):
3513 def heads(ui, repo, *branchrevs, **opts):
3511 """show branch heads
3514 """show branch heads
3512
3515
3513 With no arguments, show all open branch heads in the repository.
3516 With no arguments, show all open branch heads in the repository.
3514 Branch heads are changesets that have no descendants on the
3517 Branch heads are changesets that have no descendants on the
3515 same branch. They are where development generally takes place and
3518 same branch. They are where development generally takes place and
3516 are the usual targets for update and merge operations.
3519 are the usual targets for update and merge operations.
3517
3520
3518 If one or more REVs are given, only open branch heads on the
3521 If one or more REVs are given, only open branch heads on the
3519 branches associated with the specified changesets are shown. This
3522 branches associated with the specified changesets are shown. This
3520 means that you can use :hg:`heads .` to see the heads on the
3523 means that you can use :hg:`heads .` to see the heads on the
3521 currently checked-out branch.
3524 currently checked-out branch.
3522
3525
3523 If -c/--closed is specified, also show branch heads marked closed
3526 If -c/--closed is specified, also show branch heads marked closed
3524 (see :hg:`commit --close-branch`).
3527 (see :hg:`commit --close-branch`).
3525
3528
3526 If STARTREV is specified, only those heads that are descendants of
3529 If STARTREV is specified, only those heads that are descendants of
3527 STARTREV will be displayed.
3530 STARTREV will be displayed.
3528
3531
3529 If -t/--topo is specified, named branch mechanics will be ignored and only
3532 If -t/--topo is specified, named branch mechanics will be ignored and only
3530 topological heads (changesets with no children) will be shown.
3533 topological heads (changesets with no children) will be shown.
3531
3534
3532 Returns 0 if matching heads are found, 1 if not.
3535 Returns 0 if matching heads are found, 1 if not.
3533 """
3536 """
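    # Toy illustration (not repository code) of "topological heads", i.e.
    # changesets with no children, using a plain dict of revision -> parents.
    _parents = {0: [], 1: [0], 2: [1], 3: [1]}        # 2 and 3 both branch off 1
    _haschildren = set(p for ps in _parents.values() for p in ps)
    _topoheads = sorted(r for r in _parents if r not in _haschildren)
    assert _topoheads == [2, 3]                       # 0 and 1 have children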
3534
3537
3535 start = None
3538 start = None
3536 if 'rev' in opts:
3539 if 'rev' in opts:
3537 start = scmutil.revsingle(repo, opts['rev'], None).node()
3540 start = scmutil.revsingle(repo, opts['rev'], None).node()
3538
3541
3539 if opts.get('topo'):
3542 if opts.get('topo'):
3540 heads = [repo[h] for h in repo.heads(start)]
3543 heads = [repo[h] for h in repo.heads(start)]
3541 else:
3544 else:
3542 heads = []
3545 heads = []
3543 for branch in repo.branchmap():
3546 for branch in repo.branchmap():
3544 heads += repo.branchheads(branch, start, opts.get('closed'))
3547 heads += repo.branchheads(branch, start, opts.get('closed'))
3545 heads = [repo[h] for h in heads]
3548 heads = [repo[h] for h in heads]
3546
3549
3547 if branchrevs:
3550 if branchrevs:
3548 branches = set(repo[br].branch() for br in branchrevs)
3551 branches = set(repo[br].branch() for br in branchrevs)
3549 heads = [h for h in heads if h.branch() in branches]
3552 heads = [h for h in heads if h.branch() in branches]
3550
3553
3551 if opts.get('active') and branchrevs:
3554 if opts.get('active') and branchrevs:
3552 dagheads = repo.heads(start)
3555 dagheads = repo.heads(start)
3553 heads = [h for h in heads if h.node() in dagheads]
3556 heads = [h for h in heads if h.node() in dagheads]
3554
3557
3555 if branchrevs:
3558 if branchrevs:
3556 haveheads = set(h.branch() for h in heads)
3559 haveheads = set(h.branch() for h in heads)
3557 if branches - haveheads:
3560 if branches - haveheads:
3558 headless = ', '.join(b for b in branches - haveheads)
3561 headless = ', '.join(b for b in branches - haveheads)
3559 msg = _('no open branch heads found on branches %s')
3562 msg = _('no open branch heads found on branches %s')
3560 if opts.get('rev'):
3563 if opts.get('rev'):
3561 msg += _(' (started at %s)') % opts['rev']
3564 msg += _(' (started at %s)') % opts['rev']
3562 ui.warn((msg + '\n') % headless)
3565 ui.warn((msg + '\n') % headless)
3563
3566
3564 if not heads:
3567 if not heads:
3565 return 1
3568 return 1
3566
3569
3567 heads = sorted(heads, key=lambda x: -x.rev())
3570 heads = sorted(heads, key=lambda x: -x.rev())
3568 displayer = cmdutil.show_changeset(ui, repo, opts)
3571 displayer = cmdutil.show_changeset(ui, repo, opts)
3569 for ctx in heads:
3572 for ctx in heads:
3570 displayer.show(ctx)
3573 displayer.show(ctx)
3571 displayer.close()
3574 displayer.close()
3572
3575
3573 @command('help',
3576 @command('help',
3574 [('e', 'extension', None, _('show only help for extensions')),
3577 [('e', 'extension', None, _('show only help for extensions')),
3575 ('c', 'command', None, _('show only help for commands')),
3578 ('c', 'command', None, _('show only help for commands')),
3576 ('k', 'keyword', '', _('show topics matching keyword')),
3579 ('k', 'keyword', '', _('show topics matching keyword')),
3577 ],
3580 ],
3578 _('[-ec] [TOPIC]'),
3581 _('[-ec] [TOPIC]'),
3579 norepo=True)
3582 norepo=True)
3580 def help_(ui, name=None, **opts):
3583 def help_(ui, name=None, **opts):
3581 """show help for a given topic or a help overview
3584 """show help for a given topic or a help overview
3582
3585
3583 With no arguments, print a list of commands with short help messages.
3586 With no arguments, print a list of commands with short help messages.
3584
3587
3585 Given a topic, extension, or command name, print help for that
3588 Given a topic, extension, or command name, print help for that
3586 topic.
3589 topic.
3587
3590
3588 Returns 0 if successful.
3591 Returns 0 if successful.
3589 """
3592 """
3590
3593
3591 textwidth = min(ui.termwidth(), 80) - 2
3594 textwidth = min(ui.termwidth(), 80) - 2
3592
3595
3593 keep = ui.verbose and ['verbose'] or []
3596 keep = ui.verbose and ['verbose'] or []
3594 text = help.help_(ui, name, **opts)
3597 text = help.help_(ui, name, **opts)
3595
3598
3596 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3599 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3597 if 'verbose' in pruned:
3600 if 'verbose' in pruned:
3598 keep.append('omitted')
3601 keep.append('omitted')
3599 else:
3602 else:
3600 keep.append('notomitted')
3603 keep.append('notomitted')
3601 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3604 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3602 ui.write(formatted)
3605 ui.write(formatted)
3603
3606
3604
3607
3605 @command('identify|id',
3608 @command('identify|id',
3606 [('r', 'rev', '',
3609 [('r', 'rev', '',
3607 _('identify the specified revision'), _('REV')),
3610 _('identify the specified revision'), _('REV')),
3608 ('n', 'num', None, _('show local revision number')),
3611 ('n', 'num', None, _('show local revision number')),
3609 ('i', 'id', None, _('show global revision id')),
3612 ('i', 'id', None, _('show global revision id')),
3610 ('b', 'branch', None, _('show branch')),
3613 ('b', 'branch', None, _('show branch')),
3611 ('t', 'tags', None, _('show tags')),
3614 ('t', 'tags', None, _('show tags')),
3612 ('B', 'bookmarks', None, _('show bookmarks')),
3615 ('B', 'bookmarks', None, _('show bookmarks')),
3613 ] + remoteopts,
3616 ] + remoteopts,
3614 _('[-nibtB] [-r REV] [SOURCE]'),
3617 _('[-nibtB] [-r REV] [SOURCE]'),
3615 optionalrepo=True)
3618 optionalrepo=True)
3616 def identify(ui, repo, source=None, rev=None,
3619 def identify(ui, repo, source=None, rev=None,
3617 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3620 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3618 """identify the working copy or specified revision
3621 """identify the working copy or specified revision
3619
3622
3620 Print a summary identifying the repository state at REV using one or
3623 Print a summary identifying the repository state at REV using one or
3621 two parent hash identifiers, followed by a "+" if the working
3624 two parent hash identifiers, followed by a "+" if the working
3622 directory has uncommitted changes, the branch name (if not default),
3625 directory has uncommitted changes, the branch name (if not default),
3623 a list of tags, and a list of bookmarks.
3626 a list of tags, and a list of bookmarks.
3624
3627
3625 When REV is not given, print a summary of the current state of the
3628 When REV is not given, print a summary of the current state of the
3626 repository.
3629 repository.
3627
3630
3628 Specifying a path to a repository root or Mercurial bundle will
3631 Specifying a path to a repository root or Mercurial bundle will
3629 cause lookup to operate on that repository/bundle.
3632 cause lookup to operate on that repository/bundle.
3630
3633
3631 .. container:: verbose
3634 .. container:: verbose
3632
3635
3633 Examples:
3636 Examples:
3634
3637
3635 - generate a build identifier for the working directory::
3638 - generate a build identifier for the working directory::
3636
3639
3637 hg id --id > build-id.dat
3640 hg id --id > build-id.dat
3638
3641
3639 - find the revision corresponding to a tag::
3642 - find the revision corresponding to a tag::
3640
3643
3641 hg id -n -r 1.3
3644 hg id -n -r 1.3
3642
3645
3643 - check the most recent revision of a remote repository::
3646 - check the most recent revision of a remote repository::
3644
3647
3645 hg id -r tip http://selenic.com/hg/
3648 hg id -r tip http://selenic.com/hg/
3646
3649
3647 Returns 0 if successful.
3650 Returns 0 if successful.
3648 """
3651 """
3649
3652
3650 if not repo and not source:
3653 if not repo and not source:
3651 raise util.Abort(_("there is no Mercurial repository here "
3654 raise util.Abort(_("there is no Mercurial repository here "
3652 "(.hg not found)"))
3655 "(.hg not found)"))
3653
3656
3654 hexfunc = ui.debugflag and hex or short
3657 hexfunc = ui.debugflag and hex or short
3655 default = not (num or id or branch or tags or bookmarks)
3658 default = not (num or id or branch or tags or bookmarks)
3656 output = []
3659 output = []
3657 revs = []
3660 revs = []
3658
3661
3659 if source:
3662 if source:
3660 source, branches = hg.parseurl(ui.expandpath(source))
3663 source, branches = hg.parseurl(ui.expandpath(source))
3661 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3664 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3662 repo = peer.local()
3665 repo = peer.local()
3663 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3666 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3664
3667
3665 if not repo:
3668 if not repo:
3666 if num or branch or tags:
3669 if num or branch or tags:
3667 raise util.Abort(
3670 raise util.Abort(
3668 _("can't query remote revision number, branch, or tags"))
3671 _("can't query remote revision number, branch, or tags"))
3669 if not rev and revs:
3672 if not rev and revs:
3670 rev = revs[0]
3673 rev = revs[0]
3671 if not rev:
3674 if not rev:
3672 rev = "tip"
3675 rev = "tip"
3673
3676
3674 remoterev = peer.lookup(rev)
3677 remoterev = peer.lookup(rev)
3675 if default or id:
3678 if default or id:
3676 output = [hexfunc(remoterev)]
3679 output = [hexfunc(remoterev)]
3677
3680
3678 def getbms():
3681 def getbms():
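            # collect the bookmarks on the remote peer that point at the
            # looked-up revision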
3679 bms = []
3682 bms = []
3680
3683
3681 if 'bookmarks' in peer.listkeys('namespaces'):
3684 if 'bookmarks' in peer.listkeys('namespaces'):
3682 hexremoterev = hex(remoterev)
3685 hexremoterev = hex(remoterev)
3683 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3686 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3684 if bmr == hexremoterev]
3687 if bmr == hexremoterev]
3685
3688
3686 return sorted(bms)
3689 return sorted(bms)
3687
3690
3688 if bookmarks:
3691 if bookmarks:
3689 output.extend(getbms())
3692 output.extend(getbms())
3690 elif default and not ui.quiet:
3693 elif default and not ui.quiet:
3691 # multiple bookmarks for a single parent separated by '/'
3694 # multiple bookmarks for a single parent separated by '/'
3692 bm = '/'.join(getbms())
3695 bm = '/'.join(getbms())
3693 if bm:
3696 if bm:
3694 output.append(bm)
3697 output.append(bm)
3695 else:
3698 else:
3696 if not rev:
3699 if not rev:
3697 ctx = repo[None]
3700 ctx = repo[None]
3698 parents = ctx.parents()
3701 parents = ctx.parents()
3699 changed = ""
3702 changed = ""
3700 if default or id or num:
3703 if default or id or num:
3701 if (util.any(repo.status())
3704 if (util.any(repo.status())
3702 or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
3705 or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
3703 changed = '+'
3706 changed = '+'
3704 if default or id:
3707 if default or id:
3705 output = ["%s%s" %
3708 output = ["%s%s" %
3706 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3709 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3707 if num:
3710 if num:
3708 output.append("%s%s" %
3711 output.append("%s%s" %
3709 ('+'.join([str(p.rev()) for p in parents]), changed))
3712 ('+'.join([str(p.rev()) for p in parents]), changed))
3710 else:
3713 else:
3711 ctx = scmutil.revsingle(repo, rev)
3714 ctx = scmutil.revsingle(repo, rev)
3712 if default or id:
3715 if default or id:
3713 output = [hexfunc(ctx.node())]
3716 output = [hexfunc(ctx.node())]
3714 if num:
3717 if num:
3715 output.append(str(ctx.rev()))
3718 output.append(str(ctx.rev()))
3716
3719
3717 if default and not ui.quiet:
3720 if default and not ui.quiet:
3718 b = ctx.branch()
3721 b = ctx.branch()
3719 if b != 'default':
3722 if b != 'default':
3720 output.append("(%s)" % b)
3723 output.append("(%s)" % b)
3721
3724
3722 # multiple tags for a single parent separated by '/'
3725 # multiple tags for a single parent separated by '/'
3723 t = '/'.join(ctx.tags())
3726 t = '/'.join(ctx.tags())
3724 if t:
3727 if t:
3725 output.append(t)
3728 output.append(t)
3726
3729
3727 # multiple bookmarks for a single parent separated by '/'
3730 # multiple bookmarks for a single parent separated by '/'
3728 bm = '/'.join(ctx.bookmarks())
3731 bm = '/'.join(ctx.bookmarks())
3729 if bm:
3732 if bm:
3730 output.append(bm)
3733 output.append(bm)
3731 else:
3734 else:
3732 if branch:
3735 if branch:
3733 output.append(ctx.branch())
3736 output.append(ctx.branch())
3734
3737
3735 if tags:
3738 if tags:
3736 output.extend(ctx.tags())
3739 output.extend(ctx.tags())
3737
3740
3738 if bookmarks:
3741 if bookmarks:
3739 output.extend(ctx.bookmarks())
3742 output.extend(ctx.bookmarks())
3740
3743
3741 ui.write("%s\n" % ' '.join(output))
3744 ui.write("%s\n" % ' '.join(output))
3742
3745
3743 @command('import|patch',
3746 @command('import|patch',
3744 [('p', 'strip', 1,
3747 [('p', 'strip', 1,
3745 _('directory strip option for patch. This has the same '
3748 _('directory strip option for patch. This has the same '
3746 'meaning as the corresponding patch option'), _('NUM')),
3749 'meaning as the corresponding patch option'), _('NUM')),
3747 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3750 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3748 ('e', 'edit', False, _('invoke editor on commit messages')),
3751 ('e', 'edit', False, _('invoke editor on commit messages')),
3749 ('f', 'force', None,
3752 ('f', 'force', None,
3750 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
3753 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
3751 ('', 'no-commit', None,
3754 ('', 'no-commit', None,
3752 _("don't commit, just update the working directory")),
3755 _("don't commit, just update the working directory")),
3753 ('', 'bypass', None,
3756 ('', 'bypass', None,
3754 _("apply patch without touching the working directory")),
3757 _("apply patch without touching the working directory")),
3755 ('', 'partial', None,
3758 ('', 'partial', None,
3756 _('commit even if some hunks fail')),
3759 _('commit even if some hunks fail')),
3757 ('', 'exact', None,
3760 ('', 'exact', None,
3758 _('apply patch to the nodes from which it was generated')),
3761 _('apply patch to the nodes from which it was generated')),
3759 ('', 'import-branch', None,
3762 ('', 'import-branch', None,
3760 _('use any branch information in patch (implied by --exact)'))] +
3763 _('use any branch information in patch (implied by --exact)'))] +
3761 commitopts + commitopts2 + similarityopts,
3764 commitopts + commitopts2 + similarityopts,
3762 _('[OPTION]... PATCH...'))
3765 _('[OPTION]... PATCH...'))
3763 def import_(ui, repo, patch1=None, *patches, **opts):
3766 def import_(ui, repo, patch1=None, *patches, **opts):
3764 """import an ordered set of patches
3767 """import an ordered set of patches
3765
3768
3766 Import a list of patches and commit them individually (unless
3769 Import a list of patches and commit them individually (unless
3767 --no-commit is specified).
3770 --no-commit is specified).
3768
3771
3769 Because import first applies changes to the working directory,
3772 Because import first applies changes to the working directory,
3770 import will abort if there are outstanding changes.
3773 import will abort if there are outstanding changes.
3771
3774
3772 You can import a patch straight from a mail message. Even patches
3775 You can import a patch straight from a mail message. Even patches
3773 as attachments work (to use the body part, it must have type
3776 as attachments work (to use the body part, it must have type
3774 text/plain or text/x-patch). The From and Subject headers of the email
3777 text/plain or text/x-patch). The From and Subject headers of the email
3775 message are used as the default committer and commit message. All
3778 message are used as the default committer and commit message. All
3776 text/plain body parts before the first diff are added to the commit
3779 text/plain body parts before the first diff are added to the commit
3777 message.
3780 message.
3778
3781
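    The mail handling described above can be approximated with the standard
    library. The following is a rough, illustrative sketch only (it is not
    Mercurial's own mail parser and assumes plain, unencoded text/plain
    parts)::

      import email

      def mailmeta(text):
          msg = email.message_from_string(text)
          user = msg.get('From', '')                  # default committer
          lines = [msg.get('Subject', '')]            # first line of the message
          for part in msg.walk():
              if part.get_content_type() != 'text/plain':
                  continue
              for line in part.get_payload().splitlines():
                  if line.startswith('diff '):        # stop at the first diff
                      return user, '\n'.join(lines)
                  lines.append(line)
          return user, '\n'.join(lines)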
3779 If the imported patch was generated by :hg:`export`, user and
3782 If the imported patch was generated by :hg:`export`, user and
3780 description from patch override values from message headers and
3783 description from patch override values from message headers and
3781 body. Values given on command line with -m/--message and -u/--user
3784 body. Values given on command line with -m/--message and -u/--user
3782 override these.
3785 override these.
3783
3786
3784 If --exact is specified, import will set the working directory to
3787 If --exact is specified, import will set the working directory to
3785 the parent of each patch before applying it, and will abort if the
3788 the parent of each patch before applying it, and will abort if the
3786 resulting changeset has a different ID than the one recorded in
3789 resulting changeset has a different ID than the one recorded in
3787 the patch. This may happen due to character set problems or other
3790 the patch. This may happen due to character set problems or other
3788 deficiencies in the text patch format.
3791 deficiencies in the text patch format.
3789
3792
3790 Use --bypass to apply and commit patches directly to the
3793 Use --bypass to apply and commit patches directly to the
3791 repository, not touching the working directory. Without --exact,
3794 repository, not touching the working directory. Without --exact,
3792 patches will be applied on top of the working directory parent
3795 patches will be applied on top of the working directory parent
3793 revision.
3796 revision.
3794
3797
3795 With -s/--similarity, hg will attempt to discover renames and
3798 With -s/--similarity, hg will attempt to discover renames and
3796 copies in the patch in the same way as :hg:`addremove`.
3799 copies in the patch in the same way as :hg:`addremove`.
3797
3800
3798 Use --partial to ensure a changeset will be created from the patch
3801 Use --partial to ensure a changeset will be created from the patch
3799 even if some hunks fail to apply. Hunks that fail to apply will be
3802 even if some hunks fail to apply. Hunks that fail to apply will be
3800 written to a <target-file>.rej file. Conflicts can then be resolved
3803 written to a <target-file>.rej file. Conflicts can then be resolved
3801 by hand before :hg:`commit --amend` is run to update the created
3804 by hand before :hg:`commit --amend` is run to update the created
3802 changeset. This flag exists to let people import patches that
3805 changeset. This flag exists to let people import patches that
3803 partially apply without losing the associated metadata (author,
3806 partially apply without losing the associated metadata (author,
3804 date, description, ...). Note that when none of the hunk applies
3807 date, description, ...). Note that when none of the hunk applies
3805 cleanly, :hg:`import --partial` will create an empty changeset,
3808 cleanly, :hg:`import --partial` will create an empty changeset,
3806 importing only the patch metadata.
3809 importing only the patch metadata.
3807
3810
3808 To read a patch from standard input, use "-" as the patch name. If
3811 To read a patch from standard input, use "-" as the patch name. If
3809 a URL is specified, the patch will be downloaded from it.
3812 a URL is specified, the patch will be downloaded from it.
3810 See :hg:`help dates` for a list of formats valid for -d/--date.
3813 See :hg:`help dates` for a list of formats valid for -d/--date.

.. container:: verbose

  Examples:

  - import a traditional patch from a website and detect renames::

      hg import -s 80 http://example.com/bugfix.patch

  - import a changeset from an hgweb server::

      hg import http://www.selenic.com/hg/rev/5ca8c111e9aa

  - import all the patches in a Unix-style mbox::

      hg import incoming-patches.mbox

  - attempt to exactly restore an exported changeset (not always
    possible)::

      hg import --exact proposed-fix.patch

Returns 0 on success, 1 on partial success (see --partial).
"""
3835
3838
3836 if not patch1:
3839 if not patch1:
3837 raise util.Abort(_('need at least one patch to import'))
3840 raise util.Abort(_('need at least one patch to import'))
3838
3841
3839 patches = (patch1,) + patches
3842 patches = (patch1,) + patches
3840
3843
3841 date = opts.get('date')
3844 date = opts.get('date')
3842 if date:
3845 if date:
3843 opts['date'] = util.parsedate(date)
3846 opts['date'] = util.parsedate(date)
3844
3847
3845 update = not opts.get('bypass')
3848 update = not opts.get('bypass')
3846 if not update and opts.get('no_commit'):
3849 if not update and opts.get('no_commit'):
3847 raise util.Abort(_('cannot use --no-commit with --bypass'))
3850 raise util.Abort(_('cannot use --no-commit with --bypass'))
3848 try:
3851 try:
3849 sim = float(opts.get('similarity') or 0)
3852 sim = float(opts.get('similarity') or 0)
3850 except ValueError:
3853 except ValueError:
3851 raise util.Abort(_('similarity must be a number'))
3854 raise util.Abort(_('similarity must be a number'))
3852 if sim < 0 or sim > 100:
3855 if sim < 0 or sim > 100:
3853 raise util.Abort(_('similarity must be between 0 and 100'))
3856 raise util.Abort(_('similarity must be between 0 and 100'))
3854 if sim and not update:
3857 if sim and not update:
3855 raise util.Abort(_('cannot use --similarity with --bypass'))
3858 raise util.Abort(_('cannot use --similarity with --bypass'))
3856
3859
3857 if update:
3860 if update:
3858 cmdutil.checkunfinished(repo)
3861 cmdutil.checkunfinished(repo)
3859 if (opts.get('exact') or not opts.get('force')) and update:
3862 if (opts.get('exact') or not opts.get('force')) and update:
3860 cmdutil.bailifchanged(repo)
3863 cmdutil.bailifchanged(repo)
3861
3864
3862 base = opts["base"]
3865 base = opts["base"]
3863 wlock = lock = tr = None
3866 wlock = lock = tr = None
3864 msgs = []
3867 msgs = []
3865 ret = 0
3868 ret = 0
3866
3869
3867
3870
3868 try:
3871 try:
3869 try:
3872 try:
3870 wlock = repo.wlock()
3873 wlock = repo.wlock()
3871 if not opts.get('no_commit'):
3874 if not opts.get('no_commit'):
3872 lock = repo.lock()
3875 lock = repo.lock()
3873 tr = repo.transaction('import')
3876 tr = repo.transaction('import')
3874 parents = repo.parents()
3877 parents = repo.parents()
3875 for patchurl in patches:
3878 for patchurl in patches:
3876 if patchurl == '-':
3879 if patchurl == '-':
3877 ui.status(_('applying patch from stdin\n'))
3880 ui.status(_('applying patch from stdin\n'))
3878 patchfile = ui.fin
3881 patchfile = ui.fin
3879 patchurl = 'stdin' # for error message
3882 patchurl = 'stdin' # for error message
3880 else:
3883 else:
3881 patchurl = os.path.join(base, patchurl)
3884 patchurl = os.path.join(base, patchurl)
3882 ui.status(_('applying %s\n') % patchurl)
3885 ui.status(_('applying %s\n') % patchurl)
3883 patchfile = hg.openpath(ui, patchurl)
3886 patchfile = hg.openpath(ui, patchurl)
3884
3887
3885 haspatch = False
3888 haspatch = False
3886 for hunk in patch.split(patchfile):
3889 for hunk in patch.split(patchfile):
3887 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3890 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3888 parents, opts,
3891 parents, opts,
3889 msgs, hg.clean)
3892 msgs, hg.clean)
3890 if msg:
3893 if msg:
3891 haspatch = True
3894 haspatch = True
3892 ui.note(msg + '\n')
3895 ui.note(msg + '\n')
3893 if update or opts.get('exact'):
3896 if update or opts.get('exact'):
3894 parents = repo.parents()
3897 parents = repo.parents()
3895 else:
3898 else:
3896 parents = [repo[node]]
3899 parents = [repo[node]]
3897 if rej:
3900 if rej:
3898 ui.write_err(_("patch applied partially\n"))
3901 ui.write_err(_("patch applied partially\n"))
3899 ui.write_err(_("(fix the .rej files and run "
3902 ui.write_err(_("(fix the .rej files and run "
3900 "`hg commit --amend`)\n"))
3903 "`hg commit --amend`)\n"))
3901 ret = 1
3904 ret = 1
3902 break
3905 break
3903
3906
3904 if not haspatch:
3907 if not haspatch:
3905 raise util.Abort(_('%s: no diffs found') % patchurl)
3908 raise util.Abort(_('%s: no diffs found') % patchurl)
3906
3909
3907 if tr:
3910 if tr:
3908 tr.close()
3911 tr.close()
3909 if msgs:
3912 if msgs:
3910 repo.savecommitmessage('\n* * *\n'.join(msgs))
3913 repo.savecommitmessage('\n* * *\n'.join(msgs))
3911 return ret
3914 return ret
3912 except: # re-raises
3915 except: # re-raises
3913 # wlock.release() indirectly calls dirstate.write(): since
3916 # wlock.release() indirectly calls dirstate.write(): since
3914 # we're crashing, we do not want to change the working dir
3917 # we're crashing, we do not want to change the working dir
3915 # parent after all, so make sure it writes nothing
3918 # parent after all, so make sure it writes nothing
3916 repo.dirstate.invalidate()
3919 repo.dirstate.invalidate()
3917 raise
3920 raise
3918 finally:
3921 finally:
3919 if tr:
3922 if tr:
3920 tr.release()
3923 tr.release()
3921 release(lock, wlock)
3924 release(lock, wlock)
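# Illustrative sketch (standalone example, not part of Mercurial's API).
# The import docstring above describes the --partial workflow: hunks that
# fail to apply are written to <target-file>.rej files, the command exits
# with status 1, and the created changeset is then fixed up with
# `hg commit --amend`. A minimal sketch of that loop, driving the hg
# command line with the standard subprocess module; the patch path is a
# hypothetical example.
def _example_import_partial(patch_path='proposed-fix.patch'):
    import subprocess
    # exit status 0 = clean import, 1 = partial success (see docstring)
    status = subprocess.call(['hg', 'import', '--partial', patch_path])
    if status == 1:
        print('some hunks were rejected; fix the *.rej files, then run:')
        print('  hg commit --amend')
    return status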
3922
3925
3923 @command('incoming|in',
3926 @command('incoming|in',
3924 [('f', 'force', None,
3927 [('f', 'force', None,
3925 _('run even if remote repository is unrelated')),
3928 _('run even if remote repository is unrelated')),
3926 ('n', 'newest-first', None, _('show newest record first')),
3929 ('n', 'newest-first', None, _('show newest record first')),
3927 ('', 'bundle', '',
3930 ('', 'bundle', '',
3928 _('file to store the bundles into'), _('FILE')),
3931 _('file to store the bundles into'), _('FILE')),
3929 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3932 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3930 ('B', 'bookmarks', False, _("compare bookmarks")),
3933 ('B', 'bookmarks', False, _("compare bookmarks")),
3931 ('b', 'branch', [],
3934 ('b', 'branch', [],
3932 _('a specific branch you would like to pull'), _('BRANCH')),
3935 _('a specific branch you would like to pull'), _('BRANCH')),
3933 ] + logopts + remoteopts + subrepoopts,
3936 ] + logopts + remoteopts + subrepoopts,
3934 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3937 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3935 def incoming(ui, repo, source="default", **opts):
3938 def incoming(ui, repo, source="default", **opts):
"""show new changesets found in source

Show new changesets found in the specified path/URL or the default
pull location. These are the changesets that would have been pulled
if a pull had been requested at the time you issued this command.

For remote repositories, using --bundle avoids downloading the
changesets twice if the incoming command is followed by a pull.

See pull for valid source format details.

.. container:: verbose

  Examples:

  - show incoming changes with patches and full description::

      hg incoming -vp

  - show incoming changes excluding merges, store a bundle::

      hg in -vpM --bundle incoming.hg
      hg pull incoming.hg

  - briefly list changes inside a bundle::

      hg in changes.hg -T "{desc|firstline}\\n"

Returns 0 if there are incoming changes, 1 otherwise.
"""
3966 if opts.get('graph'):
3969 if opts.get('graph'):
3967 cmdutil.checkunsupportedgraphflags([], opts)
3970 cmdutil.checkunsupportedgraphflags([], opts)
3968 def display(other, chlist, displayer):
3971 def display(other, chlist, displayer):
3969 revdag = cmdutil.graphrevs(other, chlist, opts)
3972 revdag = cmdutil.graphrevs(other, chlist, opts)
3970 showparents = [ctx.node() for ctx in repo[None].parents()]
3973 showparents = [ctx.node() for ctx in repo[None].parents()]
3971 cmdutil.displaygraph(ui, revdag, displayer, showparents,
3974 cmdutil.displaygraph(ui, revdag, displayer, showparents,
3972 graphmod.asciiedges)
3975 graphmod.asciiedges)
3973
3976
3974 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3977 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3975 return 0
3978 return 0
3976
3979
3977 if opts.get('bundle') and opts.get('subrepos'):
3980 if opts.get('bundle') and opts.get('subrepos'):
3978 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3981 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3979
3982
3980 if opts.get('bookmarks'):
3983 if opts.get('bookmarks'):
3981 source, branches = hg.parseurl(ui.expandpath(source),
3984 source, branches = hg.parseurl(ui.expandpath(source),
3982 opts.get('branch'))
3985 opts.get('branch'))
3983 other = hg.peer(repo, opts, source)
3986 other = hg.peer(repo, opts, source)
3984 if 'bookmarks' not in other.listkeys('namespaces'):
3987 if 'bookmarks' not in other.listkeys('namespaces'):
3985 ui.warn(_("remote doesn't support bookmarks\n"))
3988 ui.warn(_("remote doesn't support bookmarks\n"))
3986 return 0
3989 return 0
3987 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3990 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3988 return bookmarks.diff(ui, repo, other)
3991 return bookmarks.diff(ui, repo, other)
3989
3992
3990 repo._subtoppath = ui.expandpath(source)
3993 repo._subtoppath = ui.expandpath(source)
3991 try:
3994 try:
3992 return hg.incoming(ui, repo, source, opts)
3995 return hg.incoming(ui, repo, source, opts)
3993 finally:
3996 finally:
3994 del repo._subtoppath
3997 del repo._subtoppath
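# Illustrative sketch (standalone example, not part of Mercurial's API).
# The incoming docstring above notes that --bundle stores the fetched
# changesets so a following pull does not download them again, and that
# the command exits 0 only when incoming changes exist. A minimal sketch
# of that pattern; 'incoming.hg' is just the bundle name used in the
# docstring example.
def _example_incoming_then_pull(source='default', bundle='incoming.hg'):
    import subprocess
    if subprocess.call(['hg', 'incoming', '--bundle', bundle, source]) == 0:
        # changesets were found and saved to the bundle; pull from it
        return subprocess.call(['hg', 'pull', bundle])
    return 1  # nothing incoming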
3995
3998
3996
3999
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
norepo=True)
def init(ui, dest=".", **opts):
"""create a new repository in the given directory

Initialize a new repository in the given directory. If the given
directory does not exist, it will be created.

If no directory is given, the current directory is used.

It is possible to specify an ``ssh://`` URL as the destination.
See :hg:`help urls` for more information.

Returns 0 on success.
"""
4012 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4015 hg.peer(ui, opts, ui.expandpath(dest), create=True)
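# Illustrative sketch (standalone example, not part of Mercurial's API).
# As the init docstring explains, the destination may be a local directory
# (created if missing) or an ``ssh://`` URL. A minimal sketch that creates
# a fresh repository in a temporary directory:
def _example_init_tmp_repo():
    import subprocess, tempfile
    dest = tempfile.mkdtemp(prefix='hg-example-')
    subprocess.check_call(['hg', 'init', dest])
    return dest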
4013
4016
4014 @command('locate',
4017 @command('locate',
4015 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
4018 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
4016 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4019 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4017 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
4020 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
4018 ] + walkopts,
4021 ] + walkopts,
4019 _('[OPTION]... [PATTERN]...'))
4022 _('[OPTION]... [PATTERN]...'))
4020 def locate(ui, repo, *pats, **opts):
4023 def locate(ui, repo, *pats, **opts):
4021 """locate files matching specific patterns
4024 """locate files matching specific patterns
4022
4025
4023 Print files under Mercurial control in the working directory whose
4026 Print files under Mercurial control in the working directory whose
4024 names match the given patterns.
4027 names match the given patterns.
4025
4028
4026 By default, this command searches all directories in the working
4029 By default, this command searches all directories in the working
4027 directory. To search just the current directory and its
4030 directory. To search just the current directory and its
4028 subdirectories, use "--include .".
4031 subdirectories, use "--include .".
4029
4032
4030 If no patterns are given to match, this command prints the names
4033 If no patterns are given to match, this command prints the names
4031 of all files under Mercurial control in the working directory.
4034 of all files under Mercurial control in the working directory.
4032
4035
4033 If you want to feed the output of this command into the "xargs"
4036 If you want to feed the output of this command into the "xargs"
4034 command, use the -0 option to both this command and "xargs". This
4037 command, use the -0 option to both this command and "xargs". This
4035 will avoid the problem of "xargs" treating single filenames that
4038 will avoid the problem of "xargs" treating single filenames that
4036 contain whitespace as multiple filenames.
4039 contain whitespace as multiple filenames.
4037
4040
4038 Returns 0 if a match is found, 1 otherwise.
4041 Returns 0 if a match is found, 1 otherwise.
4039 """
4042 """
4040 end = opts.get('print0') and '\0' or '\n'
4043 end = opts.get('print0') and '\0' or '\n'
4041 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
4044 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
4042
4045
4043 ret = 1
4046 ret = 1
4044 ctx = repo[rev]
4047 ctx = repo[rev]
4045 m = scmutil.match(ctx, pats, opts, default='relglob')
4048 m = scmutil.match(ctx, pats, opts, default='relglob')
4046 m.bad = lambda x, y: False
4049 m.bad = lambda x, y: False
4047
4050
4048 for abs in ctx.matches(m):
4051 for abs in ctx.matches(m):
4049 if opts.get('fullpath'):
4052 if opts.get('fullpath'):
4050 ui.write(repo.wjoin(abs), end)
4053 ui.write(repo.wjoin(abs), end)
4051 else:
4054 else:
4052 ui.write(((pats and m.rel(abs)) or abs), end)
4055 ui.write(((pats and m.rel(abs)) or abs), end)
4053 ret = 0
4056 ret = 0
4054
4057
4055 return ret
4058 return ret
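# Illustrative sketch (standalone example, not part of Mercurial's API).
# The locate docstring above recommends -0/--print0 so that filenames
# containing whitespace survive further processing (e.g. with xargs -0).
# The same property makes the output easy to split safely from a script;
# with no pattern, all files under Mercurial control are listed:
def _example_locate_all_files():
    import subprocess
    out = subprocess.check_output(['hg', 'locate', '-0'])
    # names are NUL-separated; drop the empty trailing field
    return [name for name in out.decode().split('\0') if name]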
4056
4059
4057 @command('^log|history',
4060 @command('^log|history',
4058 [('f', 'follow', None,
4061 [('f', 'follow', None,
4059 _('follow changeset history, or file history across copies and renames')),
4062 _('follow changeset history, or file history across copies and renames')),
4060 ('', 'follow-first', None,
4063 ('', 'follow-first', None,
4061 _('only follow the first parent of merge changesets (DEPRECATED)')),
4064 _('only follow the first parent of merge changesets (DEPRECATED)')),
4062 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
4065 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
4063 ('C', 'copies', None, _('show copied files')),
4066 ('C', 'copies', None, _('show copied files')),
4064 ('k', 'keyword', [],
4067 ('k', 'keyword', [],
4065 _('do case-insensitive search for a given text'), _('TEXT')),
4068 _('do case-insensitive search for a given text'), _('TEXT')),
4066 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
4069 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
4067 ('', 'removed', None, _('include revisions where files were removed')),
4070 ('', 'removed', None, _('include revisions where files were removed')),
4068 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4071 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4069 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4072 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4070 ('', 'only-branch', [],
4073 ('', 'only-branch', [],
4071 _('show only changesets within the given named branch (DEPRECATED)'),
4074 _('show only changesets within the given named branch (DEPRECATED)'),
4072 _('BRANCH')),
4075 _('BRANCH')),
4073 ('b', 'branch', [],
4076 ('b', 'branch', [],
4074 _('show changesets within the given named branch'), _('BRANCH')),
4077 _('show changesets within the given named branch'), _('BRANCH')),
4075 ('P', 'prune', [],
4078 ('P', 'prune', [],
4076 _('do not display revision or any of its ancestors'), _('REV')),
4079 _('do not display revision or any of its ancestors'), _('REV')),
4077 ] + logopts + walkopts,
4080 ] + logopts + walkopts,
4078 _('[OPTION]... [FILE]'),
4081 _('[OPTION]... [FILE]'),
4079 inferrepo=True)
4082 inferrepo=True)
4080 def log(ui, repo, *pats, **opts):
4083 def log(ui, repo, *pats, **opts):
4081 """show revision history of entire repository or files
4084 """show revision history of entire repository or files
4082
4085
4083 Print the revision history of the specified files or the entire
4086 Print the revision history of the specified files or the entire
4084 project.
4087 project.
4085
4088
4086 If no revision range is specified, the default is ``tip:0`` unless
4089 If no revision range is specified, the default is ``tip:0`` unless
4087 --follow is set, in which case the working directory parent is
4090 --follow is set, in which case the working directory parent is
4088 used as the starting revision.
4091 used as the starting revision.
4089
4092
4090 File history is shown without following rename or copy history of
4093 File history is shown without following rename or copy history of
4091 files. Use -f/--follow with a filename to follow history across
4094 files. Use -f/--follow with a filename to follow history across
4092 renames and copies. --follow without a filename will only show
4095 renames and copies. --follow without a filename will only show
4093 ancestors or descendants of the starting revision.
4096 ancestors or descendants of the starting revision.
4094
4097
4095 By default this command prints revision number and changeset id,
4098 By default this command prints revision number and changeset id,
4096 tags, non-trivial parents, user, date and time, and a summary for
4099 tags, non-trivial parents, user, date and time, and a summary for
4097 each commit. When the -v/--verbose switch is used, the list of
4100 each commit. When the -v/--verbose switch is used, the list of
4098 changed files and full commit message are shown.
4101 changed files and full commit message are shown.
4099
4102
4100 With --graph the revisions are shown as an ASCII art DAG with the most
4103 With --graph the revisions are shown as an ASCII art DAG with the most
4101 recent changeset at the top.
4104 recent changeset at the top.
4102 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4105 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4103 and '+' represents a fork where the changeset from the lines below is a
4106 and '+' represents a fork where the changeset from the lines below is a
4104 parent of the 'o' merge on the same line.
4107 parent of the 'o' merge on the same line.
4105
4108
4106 .. note::
4109 .. note::
4107
4110
4108 log -p/--patch may generate unexpected diff output for merge
4111 log -p/--patch may generate unexpected diff output for merge
4109 changesets, as it will only compare the merge changeset against
4112 changesets, as it will only compare the merge changeset against
4110 its first parent. Also, only files different from BOTH parents
4113 its first parent. Also, only files different from BOTH parents
4111 will appear in files:.
4114 will appear in files:.
4112
4115
4113 .. note::
4116 .. note::
4114
4117
4115 for performance reasons, log FILE may omit duplicate changes
4118 for performance reasons, log FILE may omit duplicate changes
4116 made on branches and will not show deletions. To see all
4119 made on branches and will not show deletions. To see all
4117 changes including duplicates and deletions, use the --removed
4120 changes including duplicates and deletions, use the --removed
4118 switch.
4121 switch.
4119
4122
4120 .. container:: verbose
4123 .. container:: verbose
4121
4124
4122 Some examples:
4125 Some examples:
4123
4126
4124 - changesets with full descriptions and file lists::
4127 - changesets with full descriptions and file lists::
4125
4128
4126 hg log -v
4129 hg log -v
4127
4130
4128 - changesets ancestral to the working directory::
4131 - changesets ancestral to the working directory::
4129
4132
4130 hg log -f
4133 hg log -f
4131
4134
4132 - last 10 commits on the current branch::
4135 - last 10 commits on the current branch::
4133
4136
4134 hg log -l 10 -b .
4137 hg log -l 10 -b .
4135
4138
4136 - changesets showing all modifications of a file, including removals::
4139 - changesets showing all modifications of a file, including removals::
4137
4140
4138 hg log --removed file.c
4141 hg log --removed file.c
4139
4142
4140 - all changesets that touch a directory, with diffs, excluding merges::
4143 - all changesets that touch a directory, with diffs, excluding merges::
4141
4144
4142 hg log -Mp lib/
4145 hg log -Mp lib/
4143
4146
4144 - all revision numbers that match a keyword::
4147 - all revision numbers that match a keyword::
4145
4148
4146 hg log -k bug --template "{rev}\\n"
4149 hg log -k bug --template "{rev}\\n"
4147
4150
4148 - list available log templates::
4151 - list available log templates::
4149
4152
4150 hg log -T list
4153 hg log -T list
4151
4154
- check if a given changeset is included in a tagged release::
4153
4156
4154 hg log -r "a21ccf and ancestor(1.9)"
4157 hg log -r "a21ccf and ancestor(1.9)"
4155
4158
4156 - find all changesets by some user in a date range::
4159 - find all changesets by some user in a date range::
4157
4160
4158 hg log -k alice -d "may 2008 to jul 2008"
4161 hg log -k alice -d "may 2008 to jul 2008"
4159
4162
4160 - summary of all changesets after the last tag::
4163 - summary of all changesets after the last tag::
4161
4164
4162 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4165 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4163
4166
4164 See :hg:`help dates` for a list of formats valid for -d/--date.
4167 See :hg:`help dates` for a list of formats valid for -d/--date.
4165
4168
4166 See :hg:`help revisions` and :hg:`help revsets` for more about
4169 See :hg:`help revisions` and :hg:`help revsets` for more about
4167 specifying revisions.
4170 specifying revisions.
4168
4171
4169 See :hg:`help templates` for more about pre-packaged styles and
4172 See :hg:`help templates` for more about pre-packaged styles and
4170 specifying custom templates.
4173 specifying custom templates.
4171
4174
4172 Returns 0 on success.
4175 Returns 0 on success.
4173 """
4176 """
4174 if opts.get('graph'):
4177 if opts.get('graph'):
4175 return cmdutil.graphlog(ui, repo, *pats, **opts)
4178 return cmdutil.graphlog(ui, repo, *pats, **opts)
4176
4179
4177 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4180 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4178 limit = cmdutil.loglimit(opts)
4181 limit = cmdutil.loglimit(opts)
4179 count = 0
4182 count = 0
4180
4183
4181 getrenamed = None
4184 getrenamed = None
4182 if opts.get('copies'):
4185 if opts.get('copies'):
4183 endrev = None
4186 endrev = None
4184 if opts.get('rev'):
4187 if opts.get('rev'):
4185 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4188 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4186 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4189 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4187
4190
4188 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4191 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4189 for rev in revs:
4192 for rev in revs:
4190 if count == limit:
4193 if count == limit:
4191 break
4194 break
4192 ctx = repo[rev]
4195 ctx = repo[rev]
4193 copies = None
4196 copies = None
4194 if getrenamed is not None and rev:
4197 if getrenamed is not None and rev:
4195 copies = []
4198 copies = []
4196 for fn in ctx.files():
4199 for fn in ctx.files():
4197 rename = getrenamed(fn, rev)
4200 rename = getrenamed(fn, rev)
4198 if rename:
4201 if rename:
4199 copies.append((fn, rename[0]))
4202 copies.append((fn, rename[0]))
4200 revmatchfn = filematcher and filematcher(ctx.rev()) or None
4203 revmatchfn = filematcher and filematcher(ctx.rev()) or None
4201 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4204 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4202 if displayer.flush(rev):
4205 if displayer.flush(rev):
4203 count += 1
4206 count += 1
4204
4207
4205 displayer.close()
4208 displayer.close()
4206
4209
4207 @command('manifest',
4210 @command('manifest',
4208 [('r', 'rev', '', _('revision to display'), _('REV')),
4211 [('r', 'rev', '', _('revision to display'), _('REV')),
4209 ('', 'all', False, _("list files from all revisions"))],
4212 ('', 'all', False, _("list files from all revisions"))],
4210 _('[-r REV]'))
4213 _('[-r REV]'))
4211 def manifest(ui, repo, node=None, rev=None, **opts):
4214 def manifest(ui, repo, node=None, rev=None, **opts):
4212 """output the current or given revision of the project manifest
4215 """output the current or given revision of the project manifest
4213
4216
4214 Print a list of version controlled files for the given revision.
4217 Print a list of version controlled files for the given revision.
4215 If no revision is given, the first parent of the working directory
4218 If no revision is given, the first parent of the working directory
4216 is used, or the null revision if no revision is checked out.
4219 is used, or the null revision if no revision is checked out.
4217
4220
4218 With -v, print file permissions, symlink and executable bits.
4221 With -v, print file permissions, symlink and executable bits.
4219 With --debug, print file revision hashes.
4222 With --debug, print file revision hashes.
4220
4223
4221 If option --all is specified, the list of all files from all revisions
4224 If option --all is specified, the list of all files from all revisions
4222 is printed. This includes deleted and renamed files.
4225 is printed. This includes deleted and renamed files.
4223
4226
4224 Returns 0 on success.
4227 Returns 0 on success.
4225 """
4228 """
4226
4229
4227 fm = ui.formatter('manifest', opts)
4230 fm = ui.formatter('manifest', opts)
4228
4231
4229 if opts.get('all'):
4232 if opts.get('all'):
4230 if rev or node:
4233 if rev or node:
4231 raise util.Abort(_("can't specify a revision with --all"))
4234 raise util.Abort(_("can't specify a revision with --all"))
4232
4235
4233 res = []
4236 res = []
4234 prefix = "data/"
4237 prefix = "data/"
4235 suffix = ".i"
4238 suffix = ".i"
4236 plen = len(prefix)
4239 plen = len(prefix)
4237 slen = len(suffix)
4240 slen = len(suffix)
4238 lock = repo.lock()
4241 lock = repo.lock()
4239 try:
4242 try:
4240 for fn, b, size in repo.store.datafiles():
4243 for fn, b, size in repo.store.datafiles():
4241 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4244 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4242 res.append(fn[plen:-slen])
4245 res.append(fn[plen:-slen])
4243 finally:
4246 finally:
4244 lock.release()
4247 lock.release()
4245 for f in res:
4248 for f in res:
4246 fm.startitem()
4249 fm.startitem()
4247 fm.write("path", '%s\n', f)
4250 fm.write("path", '%s\n', f)
4248 fm.end()
4251 fm.end()
4249 return
4252 return
4250
4253
4251 if rev and node:
4254 if rev and node:
4252 raise util.Abort(_("please specify just one revision"))
4255 raise util.Abort(_("please specify just one revision"))
4253
4256
4254 if not node:
4257 if not node:
4255 node = rev
4258 node = rev
4256
4259
4257 char = {'l': '@', 'x': '*', '': ''}
4260 char = {'l': '@', 'x': '*', '': ''}
4258 mode = {'l': '644', 'x': '755', '': '644'}
4261 mode = {'l': '644', 'x': '755', '': '644'}
4259 ctx = scmutil.revsingle(repo, node)
4262 ctx = scmutil.revsingle(repo, node)
4260 mf = ctx.manifest()
4263 mf = ctx.manifest()
4261 for f in ctx:
4264 for f in ctx:
4262 fm.startitem()
4265 fm.startitem()
4263 fl = ctx[f].flags()
4266 fl = ctx[f].flags()
4264 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4267 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4265 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4268 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4266 fm.write('path', '%s\n', f)
4269 fm.write('path', '%s\n', f)
4267 fm.end()
4270 fm.end()
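# Illustrative sketch (standalone example, not part of Mercurial's API).
# The --all branch above lists every file ever tracked by stripping the
# "data/" prefix and ".i" suffix from the store's filelog names. The same
# trimming, isolated as a plain function over hypothetical (name, size)
# pairs standing in for store entries:
def _example_trim_store_names(datafiles):
    prefix, suffix = "data/", ".i"
    plen, slen = len(prefix), len(suffix)
    return [fn[plen:-slen] for fn, size in datafiles
            if size != 0 and fn.startswith(prefix) and fn.endswith(suffix)]

# _example_trim_store_names([("data/foo.c.i", 120), ("data/README.i", 40)])
# -> ['foo.c', 'README']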
4268
4271
4269 @command('^merge',
4272 @command('^merge',
4270 [('f', 'force', None,
4273 [('f', 'force', None,
4271 _('force a merge including outstanding changes (DEPRECATED)')),
4274 _('force a merge including outstanding changes (DEPRECATED)')),
4272 ('r', 'rev', '', _('revision to merge'), _('REV')),
4275 ('r', 'rev', '', _('revision to merge'), _('REV')),
4273 ('P', 'preview', None,
4276 ('P', 'preview', None,
4274 _('review revisions to merge (no merge is performed)'))
4277 _('review revisions to merge (no merge is performed)'))
4275 ] + mergetoolopts,
4278 ] + mergetoolopts,
4276 _('[-P] [-f] [[-r] REV]'))
4279 _('[-P] [-f] [[-r] REV]'))
4277 def merge(ui, repo, node=None, **opts):
4280 def merge(ui, repo, node=None, **opts):
4278 """merge working directory with another revision
4281 """merge working directory with another revision
4279
4282
4280 The current working directory is updated with all changes made in
4283 The current working directory is updated with all changes made in
4281 the requested revision since the last common predecessor revision.
4284 the requested revision since the last common predecessor revision.
4282
4285
4283 Files that changed between either parent are marked as changed for
4286 Files that changed between either parent are marked as changed for
4284 the next commit and a commit must be performed before any further
4287 the next commit and a commit must be performed before any further
4285 updates to the repository are allowed. The next commit will have
4288 updates to the repository are allowed. The next commit will have
4286 two parents.
4289 two parents.
4287
4290
4288 ``--tool`` can be used to specify the merge tool used for file
4291 ``--tool`` can be used to specify the merge tool used for file
4289 merges. It overrides the HGMERGE environment variable and your
4292 merges. It overrides the HGMERGE environment variable and your
4290 configuration files. See :hg:`help merge-tools` for options.
4293 configuration files. See :hg:`help merge-tools` for options.
4291
4294
If no revision is specified, the working directory's parent is a
head revision, and the current branch contains exactly one other
head, that other head is merged by default. Otherwise, an
explicit revision with which to merge must be provided.
4296
4299
4297 :hg:`resolve` must be used to resolve unresolved files.
4300 :hg:`resolve` must be used to resolve unresolved files.
4298
4301
4299 To undo an uncommitted merge, use :hg:`update --clean .` which
4302 To undo an uncommitted merge, use :hg:`update --clean .` which
4300 will check out a clean copy of the original merge parent, losing
4303 will check out a clean copy of the original merge parent, losing
4301 all changes.
4304 all changes.
4302
4305
4303 Returns 0 on success, 1 if there are unresolved files.
4306 Returns 0 on success, 1 if there are unresolved files.
4304 """
4307 """
4305
4308
4306 if opts.get('rev') and node:
4309 if opts.get('rev') and node:
4307 raise util.Abort(_("please specify just one revision"))
4310 raise util.Abort(_("please specify just one revision"))
4308 if not node:
4311 if not node:
4309 node = opts.get('rev')
4312 node = opts.get('rev')
4310
4313
4311 if node:
4314 if node:
4312 node = scmutil.revsingle(repo, node).node()
4315 node = scmutil.revsingle(repo, node).node()
4313
4316
4314 if not node and repo._bookmarkcurrent:
4317 if not node and repo._bookmarkcurrent:
4315 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4318 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4316 curhead = repo[repo._bookmarkcurrent].node()
4319 curhead = repo[repo._bookmarkcurrent].node()
4317 if len(bmheads) == 2:
4320 if len(bmheads) == 2:
4318 if curhead == bmheads[0]:
4321 if curhead == bmheads[0]:
4319 node = bmheads[1]
4322 node = bmheads[1]
4320 else:
4323 else:
4321 node = bmheads[0]
4324 node = bmheads[0]
4322 elif len(bmheads) > 2:
4325 elif len(bmheads) > 2:
4323 raise util.Abort(_("multiple matching bookmarks to merge - "
4326 raise util.Abort(_("multiple matching bookmarks to merge - "
4324 "please merge with an explicit rev or bookmark"),
4327 "please merge with an explicit rev or bookmark"),
4325 hint=_("run 'hg heads' to see all heads"))
4328 hint=_("run 'hg heads' to see all heads"))
4326 elif len(bmheads) <= 1:
4329 elif len(bmheads) <= 1:
4327 raise util.Abort(_("no matching bookmark to merge - "
4330 raise util.Abort(_("no matching bookmark to merge - "
4328 "please merge with an explicit rev or bookmark"),
4331 "please merge with an explicit rev or bookmark"),
4329 hint=_("run 'hg heads' to see all heads"))
4332 hint=_("run 'hg heads' to see all heads"))
4330
4333
4331 if not node and not repo._bookmarkcurrent:
4334 if not node and not repo._bookmarkcurrent:
4332 branch = repo[None].branch()
4335 branch = repo[None].branch()
4333 bheads = repo.branchheads(branch)
4336 bheads = repo.branchheads(branch)
4334 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4337 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4335
4338
4336 if len(nbhs) > 2:
4339 if len(nbhs) > 2:
4337 raise util.Abort(_("branch '%s' has %d heads - "
4340 raise util.Abort(_("branch '%s' has %d heads - "
4338 "please merge with an explicit rev")
4341 "please merge with an explicit rev")
4339 % (branch, len(bheads)),
4342 % (branch, len(bheads)),
4340 hint=_("run 'hg heads .' to see heads"))
4343 hint=_("run 'hg heads .' to see heads"))
4341
4344
4342 parent = repo.dirstate.p1()
4345 parent = repo.dirstate.p1()
4343 if len(nbhs) <= 1:
4346 if len(nbhs) <= 1:
4344 if len(bheads) > 1:
4347 if len(bheads) > 1:
4345 raise util.Abort(_("heads are bookmarked - "
4348 raise util.Abort(_("heads are bookmarked - "
4346 "please merge with an explicit rev"),
4349 "please merge with an explicit rev"),
4347 hint=_("run 'hg heads' to see all heads"))
4350 hint=_("run 'hg heads' to see all heads"))
4348 if len(repo.heads()) > 1:
4351 if len(repo.heads()) > 1:
4349 raise util.Abort(_("branch '%s' has one head - "
4352 raise util.Abort(_("branch '%s' has one head - "
4350 "please merge with an explicit rev")
4353 "please merge with an explicit rev")
4351 % branch,
4354 % branch,
4352 hint=_("run 'hg heads' to see all heads"))
4355 hint=_("run 'hg heads' to see all heads"))
4353 msg, hint = _('nothing to merge'), None
4356 msg, hint = _('nothing to merge'), None
4354 if parent != repo.lookup(branch):
4357 if parent != repo.lookup(branch):
4355 hint = _("use 'hg update' instead")
4358 hint = _("use 'hg update' instead")
4356 raise util.Abort(msg, hint=hint)
4359 raise util.Abort(msg, hint=hint)
4357
4360
4358 if parent not in bheads:
4361 if parent not in bheads:
4359 raise util.Abort(_('working directory not at a head revision'),
4362 raise util.Abort(_('working directory not at a head revision'),
4360 hint=_("use 'hg update' or merge with an "
4363 hint=_("use 'hg update' or merge with an "
4361 "explicit revision"))
4364 "explicit revision"))
4362 if parent == nbhs[0]:
4365 if parent == nbhs[0]:
4363 node = nbhs[-1]
4366 node = nbhs[-1]
4364 else:
4367 else:
4365 node = nbhs[0]
4368 node = nbhs[0]
4366
4369
4367 if opts.get('preview'):
4370 if opts.get('preview'):
4368 # find nodes that are ancestors of p2 but not of p1
4371 # find nodes that are ancestors of p2 but not of p1
4369 p1 = repo.lookup('.')
4372 p1 = repo.lookup('.')
4370 p2 = repo.lookup(node)
4373 p2 = repo.lookup(node)
4371 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4374 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4372
4375
4373 displayer = cmdutil.show_changeset(ui, repo, opts)
4376 displayer = cmdutil.show_changeset(ui, repo, opts)
4374 for node in nodes:
4377 for node in nodes:
4375 displayer.show(repo[node])
4378 displayer.show(repo[node])
4376 displayer.close()
4379 displayer.close()
4377 return 0
4380 return 0
4378
4381
4379 try:
4382 try:
4380 # ui.forcemerge is an internal variable, do not document
4383 # ui.forcemerge is an internal variable, do not document
4381 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4384 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4382 return hg.merge(repo, node, force=opts.get('force'))
4385 return hg.merge(repo, node, force=opts.get('force'))
4383 finally:
4386 finally:
4384 ui.setconfig('ui', 'forcemerge', '', 'merge')
4387 ui.setconfig('ui', 'forcemerge', '', 'merge')
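# Illustrative sketch (standalone example, not part of Mercurial's API).
# The merge docstring above describes the usual flow: optionally preview
# with -P/--preview, merge (exit status 1 means unresolved files), resolve,
# then commit the two-parent changeset. A minimal sketch of that sequence
# for the common "one other head" case:
def _example_merge_other_head():
    import subprocess
    subprocess.call(['hg', 'merge', '--preview'])  # show what would be merged
    if subprocess.call(['hg', 'merge']) != 0:
        print('unresolved files - run `hg resolve`, then `hg commit`')
        return 1
    return subprocess.call(['hg', 'commit', '-m', 'merge'])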
4385
4388
4386 @command('outgoing|out',
4389 @command('outgoing|out',
4387 [('f', 'force', None, _('run even when the destination is unrelated')),
4390 [('f', 'force', None, _('run even when the destination is unrelated')),
4388 ('r', 'rev', [],
4391 ('r', 'rev', [],
4389 _('a changeset intended to be included in the destination'), _('REV')),
4392 _('a changeset intended to be included in the destination'), _('REV')),
4390 ('n', 'newest-first', None, _('show newest record first')),
4393 ('n', 'newest-first', None, _('show newest record first')),
4391 ('B', 'bookmarks', False, _('compare bookmarks')),
4394 ('B', 'bookmarks', False, _('compare bookmarks')),
4392 ('b', 'branch', [], _('a specific branch you would like to push'),
4395 ('b', 'branch', [], _('a specific branch you would like to push'),
4393 _('BRANCH')),
4396 _('BRANCH')),
4394 ] + logopts + remoteopts + subrepoopts,
4397 ] + logopts + remoteopts + subrepoopts,
4395 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4398 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4396 def outgoing(ui, repo, dest=None, **opts):
4399 def outgoing(ui, repo, dest=None, **opts):
4397 """show changesets not found in the destination
4400 """show changesets not found in the destination
4398
4401
4399 Show changesets not found in the specified destination repository
4402 Show changesets not found in the specified destination repository
4400 or the default push location. These are the changesets that would
4403 or the default push location. These are the changesets that would
4401 be pushed if a push was requested.
4404 be pushed if a push was requested.
4402
4405
4403 See pull for details of valid destination formats.
4406 See pull for details of valid destination formats.
4404
4407
4405 Returns 0 if there are outgoing changes, 1 otherwise.
4408 Returns 0 if there are outgoing changes, 1 otherwise.
4406 """
4409 """
4407 if opts.get('graph'):
4410 if opts.get('graph'):
4408 cmdutil.checkunsupportedgraphflags([], opts)
4411 cmdutil.checkunsupportedgraphflags([], opts)
4409 o, other = hg._outgoing(ui, repo, dest, opts)
4412 o, other = hg._outgoing(ui, repo, dest, opts)
4410 if not o:
4413 if not o:
4411 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4414 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4412 return
4415 return
4413
4416
4414 revdag = cmdutil.graphrevs(repo, o, opts)
4417 revdag = cmdutil.graphrevs(repo, o, opts)
4415 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4418 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4416 showparents = [ctx.node() for ctx in repo[None].parents()]
4419 showparents = [ctx.node() for ctx in repo[None].parents()]
4417 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4420 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4418 graphmod.asciiedges)
4421 graphmod.asciiedges)
4419 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4422 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4420 return 0
4423 return 0
4421
4424
4422 if opts.get('bookmarks'):
4425 if opts.get('bookmarks'):
4423 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4426 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4424 dest, branches = hg.parseurl(dest, opts.get('branch'))
4427 dest, branches = hg.parseurl(dest, opts.get('branch'))
4425 other = hg.peer(repo, opts, dest)
4428 other = hg.peer(repo, opts, dest)
4426 if 'bookmarks' not in other.listkeys('namespaces'):
4429 if 'bookmarks' not in other.listkeys('namespaces'):
4427 ui.warn(_("remote doesn't support bookmarks\n"))
4430 ui.warn(_("remote doesn't support bookmarks\n"))
4428 return 0
4431 return 0
4429 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4432 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4430 return bookmarks.diff(ui, other, repo)
4433 return bookmarks.diff(ui, other, repo)
4431
4434
4432 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4435 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4433 try:
4436 try:
4434 return hg.outgoing(ui, repo, dest, opts)
4437 return hg.outgoing(ui, repo, dest, opts)
4435 finally:
4438 finally:
4436 del repo._subtoppath
4439 del repo._subtoppath
4437
4440
4438 @command('parents',
4441 @command('parents',
4439 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4442 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4440 ] + templateopts,
4443 ] + templateopts,
4441 _('[-r REV] [FILE]'),
4444 _('[-r REV] [FILE]'),
4442 inferrepo=True)
4445 inferrepo=True)
4443 def parents(ui, repo, file_=None, **opts):
4446 def parents(ui, repo, file_=None, **opts):
4444 """show the parents of the working directory or revision
4447 """show the parents of the working directory or revision
4445
4448
4446 Print the working directory's parent revisions. If a revision is
4449 Print the working directory's parent revisions. If a revision is
4447 given via -r/--rev, the parent of that revision will be printed.
4450 given via -r/--rev, the parent of that revision will be printed.
4448 If a file argument is given, the revision in which the file was
4451 If a file argument is given, the revision in which the file was
4449 last changed (before the working directory revision or the
4452 last changed (before the working directory revision or the
4450 argument to --rev if given) is printed.
4453 argument to --rev if given) is printed.
4451
4454
4452 Returns 0 on success.
4455 Returns 0 on success.
4453 """
4456 """
4454
4457
4455 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4458 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4456
4459
4457 if file_:
4460 if file_:
4458 m = scmutil.match(ctx, (file_,), opts)
4461 m = scmutil.match(ctx, (file_,), opts)
4459 if m.anypats() or len(m.files()) != 1:
4462 if m.anypats() or len(m.files()) != 1:
4460 raise util.Abort(_('can only specify an explicit filename'))
4463 raise util.Abort(_('can only specify an explicit filename'))
4461 file_ = m.files()[0]
4464 file_ = m.files()[0]
4462 filenodes = []
4465 filenodes = []
4463 for cp in ctx.parents():
4466 for cp in ctx.parents():
4464 if not cp:
4467 if not cp:
4465 continue
4468 continue
4466 try:
4469 try:
4467 filenodes.append(cp.filenode(file_))
4470 filenodes.append(cp.filenode(file_))
4468 except error.LookupError:
4471 except error.LookupError:
4469 pass
4472 pass
4470 if not filenodes:
4473 if not filenodes:
4471 raise util.Abort(_("'%s' not found in manifest!") % file_)
4474 raise util.Abort(_("'%s' not found in manifest!") % file_)
4472 p = []
4475 p = []
4473 for fn in filenodes:
4476 for fn in filenodes:
4474 fctx = repo.filectx(file_, fileid=fn)
4477 fctx = repo.filectx(file_, fileid=fn)
4475 p.append(fctx.node())
4478 p.append(fctx.node())
4476 else:
4479 else:
4477 p = [cp.node() for cp in ctx.parents()]
4480 p = [cp.node() for cp in ctx.parents()]
4478
4481
4479 displayer = cmdutil.show_changeset(ui, repo, opts)
4482 displayer = cmdutil.show_changeset(ui, repo, opts)
4480 for n in p:
4483 for n in p:
4481 if n != nullid:
4484 if n != nullid:
4482 displayer.show(repo[n])
4485 displayer.show(repo[n])
4483 displayer.close()
4486 displayer.close()
4484
4487
4485 @command('paths', [], _('[NAME]'), optionalrepo=True)
4488 @command('paths', [], _('[NAME]'), optionalrepo=True)
4486 def paths(ui, repo, search=None):
4489 def paths(ui, repo, search=None):
4487 """show aliases for remote repositories
4490 """show aliases for remote repositories
4488
4491
4489 Show definition of symbolic path name NAME. If no name is given,
4492 Show definition of symbolic path name NAME. If no name is given,
4490 show definition of all available names.
4493 show definition of all available names.
4491
4494
4492 Option -q/--quiet suppresses all output when searching for NAME
4495 Option -q/--quiet suppresses all output when searching for NAME
4493 and shows only the path names when listing all definitions.
4496 and shows only the path names when listing all definitions.
4494
4497
4495 Path names are defined in the [paths] section of your
4498 Path names are defined in the [paths] section of your
4496 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4499 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4497 repository, ``.hg/hgrc`` is used, too.
4500 repository, ``.hg/hgrc`` is used, too.
4498
4501
4499 The path names ``default`` and ``default-push`` have a special
4502 The path names ``default`` and ``default-push`` have a special
4500 meaning. When performing a push or pull operation, they are used
4503 meaning. When performing a push or pull operation, they are used
4501 as fallbacks if no location is specified on the command-line.
4504 as fallbacks if no location is specified on the command-line.
4502 When ``default-push`` is set, it will be used for push and
4505 When ``default-push`` is set, it will be used for push and
4503 ``default`` will be used for pull; otherwise ``default`` is used
4506 ``default`` will be used for pull; otherwise ``default`` is used
4504 as the fallback for both. When cloning a repository, the clone
4507 as the fallback for both. When cloning a repository, the clone
4505 source is written as ``default`` in ``.hg/hgrc``. Note that
4508 source is written as ``default`` in ``.hg/hgrc``. Note that
4506 ``default`` and ``default-push`` apply to all inbound (e.g.
4509 ``default`` and ``default-push`` apply to all inbound (e.g.
4507 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4510 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4508 :hg:`bundle`) operations.
4511 :hg:`bundle`) operations.
4509
4512
4510 See :hg:`help urls` for more information.
4513 See :hg:`help urls` for more information.
4511
4514
4512 Returns 0 on success.
4515 Returns 0 on success.
4513 """
4516 """
4514 if search:
4517 if search:
4515 for name, path in ui.configitems("paths"):
4518 for name, path in ui.configitems("paths"):
4516 if name == search:
4519 if name == search:
4517 ui.status("%s\n" % util.hidepassword(path))
4520 ui.status("%s\n" % util.hidepassword(path))
4518 return
4521 return
4519 if not ui.quiet:
4522 if not ui.quiet:
4520 ui.warn(_("not found!\n"))
4523 ui.warn(_("not found!\n"))
4521 return 1
4524 return 1
4522 else:
4525 else:
4523 for name, path in ui.configitems("paths"):
4526 for name, path in ui.configitems("paths"):
4524 if ui.quiet:
4527 if ui.quiet:
4525 ui.write("%s\n" % name)
4528 ui.write("%s\n" % name)
4526 else:
4529 else:
4527 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4530 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
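# Illustrative sketch (standalone example, not part of Mercurial's API).
# The paths docstring above gives ``default`` and ``default-push`` special
# meaning: push prefers ``default-push`` and falls back to ``default``,
# while pull only uses ``default``. The fallback rule, isolated over a
# plain dict standing in for the [paths] section of a config file:
def _example_pick_path(paths, pushing):
    if pushing and 'default-push' in paths:
        return paths['default-push']
    return paths.get('default')

# _example_pick_path({'default': 'https://example.com/repo'}, pushing=True)
# -> 'https://example.com/repo'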
4528
4531
4529 @command('phase',
4532 @command('phase',
4530 [('p', 'public', False, _('set changeset phase to public')),
4533 [('p', 'public', False, _('set changeset phase to public')),
4531 ('d', 'draft', False, _('set changeset phase to draft')),
4534 ('d', 'draft', False, _('set changeset phase to draft')),
4532 ('s', 'secret', False, _('set changeset phase to secret')),
4535 ('s', 'secret', False, _('set changeset phase to secret')),
4533 ('f', 'force', False, _('allow to move boundary backward')),
4536 ('f', 'force', False, _('allow to move boundary backward')),
4534 ('r', 'rev', [], _('target revision'), _('REV')),
4537 ('r', 'rev', [], _('target revision'), _('REV')),
4535 ],
4538 ],
4536 _('[-p|-d|-s] [-f] [-r] REV...'))
4539 _('[-p|-d|-s] [-f] [-r] REV...'))
4537 def phase(ui, repo, *revs, **opts):
4540 def phase(ui, repo, *revs, **opts):
4538 """set or show the current phase name
4541 """set or show the current phase name
4539
4542
4540 With no argument, show the phase name of specified revisions.
4543 With no argument, show the phase name of specified revisions.
4541
4544
4542 With one of -p/--public, -d/--draft or -s/--secret, change the
4545 With one of -p/--public, -d/--draft or -s/--secret, change the
4543 phase value of the specified revisions.
4546 phase value of the specified revisions.
4544
4547
Unless -f/--force is specified, :hg:`phase` won't move changesets from a
lower phase to a higher phase. Phases are ordered as follows::

  public < draft < secret
4549
4552
4550 Returns 0 on success, 1 if no phases were changed or some could not
4553 Returns 0 on success, 1 if no phases were changed or some could not
4551 be changed.
4554 be changed.
4552 """
4555 """
4553 # search for a unique phase argument
4556 # search for a unique phase argument
4554 targetphase = None
4557 targetphase = None
4555 for idx, name in enumerate(phases.phasenames):
4558 for idx, name in enumerate(phases.phasenames):
4556 if opts[name]:
4559 if opts[name]:
4557 if targetphase is not None:
4560 if targetphase is not None:
4558 raise util.Abort(_('only one phase can be specified'))
4561 raise util.Abort(_('only one phase can be specified'))
4559 targetphase = idx
4562 targetphase = idx
4560
4563
4561 # look for specified revision
4564 # look for specified revision
4562 revs = list(revs)
4565 revs = list(revs)
4563 revs.extend(opts['rev'])
4566 revs.extend(opts['rev'])
4564 if not revs:
4567 if not revs:
4565 raise util.Abort(_('no revisions specified'))
4568 raise util.Abort(_('no revisions specified'))
4566
4569
4567 revs = scmutil.revrange(repo, revs)
4570 revs = scmutil.revrange(repo, revs)
4568
4571
4569 lock = None
4572 lock = None
4570 ret = 0
4573 ret = 0
4571 if targetphase is None:
4574 if targetphase is None:
4572 # display
4575 # display
4573 for r in revs:
4576 for r in revs:
4574 ctx = repo[r]
4577 ctx = repo[r]
4575 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4578 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4576 else:
4579 else:
4577 tr = None
4580 tr = None
4578 lock = repo.lock()
4581 lock = repo.lock()
4579 try:
4582 try:
4580 tr = repo.transaction("phase")
4583 tr = repo.transaction("phase")
4581 # set phase
4584 # set phase
4582 if not revs:
4585 if not revs:
4583 raise util.Abort(_('empty revision set'))
4586 raise util.Abort(_('empty revision set'))
4584 nodes = [repo[r].node() for r in revs]
4587 nodes = [repo[r].node() for r in revs]
4585 olddata = repo._phasecache.getphaserevs(repo)[:]
4588 olddata = repo._phasecache.getphaserevs(repo)[:]
4586 phases.advanceboundary(repo, tr, targetphase, nodes)
4589 phases.advanceboundary(repo, tr, targetphase, nodes)
4587 if opts['force']:
4590 if opts['force']:
4588 phases.retractboundary(repo, tr, targetphase, nodes)
4591 phases.retractboundary(repo, tr, targetphase, nodes)
4589 tr.close()
4592 tr.close()
4590 finally:
4593 finally:
4591 if tr is not None:
4594 if tr is not None:
4592 tr.release()
4595 tr.release()
4593 lock.release()
4596 lock.release()
# moving revisions from public to draft may hide them;
# we have to check the result on an unfiltered repository
4596 unfi = repo.unfiltered()
4599 unfi = repo.unfiltered()
4597 newdata = repo._phasecache.getphaserevs(unfi)
4600 newdata = repo._phasecache.getphaserevs(unfi)
4598 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4601 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4599 cl = unfi.changelog
4602 cl = unfi.changelog
4600 rejected = [n for n in nodes
4603 rejected = [n for n in nodes
4601 if newdata[cl.rev(n)] < targetphase]
4604 if newdata[cl.rev(n)] < targetphase]
4602 if rejected:
4605 if rejected:
4603 ui.warn(_('cannot move %i changesets to a higher '
4606 ui.warn(_('cannot move %i changesets to a higher '
4604 'phase, use --force\n') % len(rejected))
4607 'phase, use --force\n') % len(rejected))
4605 ret = 1
4608 ret = 1
4606 if changes:
4609 if changes:
4607 msg = _('phase changed for %i changesets\n') % changes
4610 msg = _('phase changed for %i changesets\n') % changes
4608 if ret:
4611 if ret:
4609 ui.status(msg)
4612 ui.status(msg)
4610 else:
4613 else:
4611 ui.note(msg)
4614 ui.note(msg)
4612 else:
4615 else:
4613 ui.warn(_('no phases changed\n'))
4616 ui.warn(_('no phases changed\n'))
4614 ret = 1
4617 ret = 1
4615 return ret
4618 return ret
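# Illustrative sketch (standalone example, not part of Mercurial's API).
# The phase docstring above orders phases as public < draft < secret and
# only lets a changeset move to a higher phase when --force is given.
# That rule, isolated as a small check over phase names:
def _example_phase_move_allowed(current, target, force=False):
    order = ['public', 'draft', 'secret']
    # lowering (or keeping) the phase is always allowed; raising it,
    # e.g. public -> draft, requires --force
    return force or order.index(target) <= order.index(current)

# _example_phase_move_allowed('draft', 'public') -> True
# _example_phase_move_allowed('public', 'draft') -> False (needs --force)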
4616
4619
def postincoming(ui, repo, modheads, optupdate, checkout):
    if modheads == 0:
        return
    if optupdate:
        checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
        try:
            ret = hg.update(repo, checkout)
        except util.Abort, inst:
            ui.warn(_("not updating: %s\n") % str(inst))
            if inst.hint:
                ui.warn(_("(%s)\n") % inst.hint)
            return 0
        if not ret and not checkout:
            if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
                ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
        return ret
    if modheads > 1:
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                        "merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

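# Illustrative sketch (not part of commands.py): ignoring the --update path,
# the hint printed by postincoming() depends only on how many heads the
# incoming changesets added and how many heads the current branch now has.
# The helper name is invented for the example.
def _example_postincoming_hint(modheads, currentbranchheads):
    if modheads == 0:
        return None                   # nothing pulled, nothing to report
    if modheads == 1:
        return "(run 'hg update' to get a working copy)"
    if currentbranchheads == modheads:
        return "(run 'hg heads' to see heads, 'hg merge' to merge)"
    if currentbranchheads > 1:
        return "(run 'hg heads .' to see heads, 'hg merge' to merge)"
    return "(run 'hg heads' to see heads)"
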
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.peer(repo, opts, source)
    try:
        ui.status(_('pulling from %s\n') % util.hidepassword(source))
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))

        remotebookmarks = other.listkeys('bookmarks')

        if opts.get('bookmark'):
            if not revs:
                revs = []
            for b in opts['bookmark']:
                if b not in remotebookmarks:
                    raise util.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                revs = [other.lookup(rev) for rev in revs]
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)

        modheads = repo.pull(other, heads=revs, force=opts.get('force'))
        bookmarks.updatefromremote(ui, repo, remotebookmarks, source)
        if checkout:
            checkout = str(repo.changelog.rev(other.lookup(checkout)))
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

        finally:
            del repo._subtoppath

        # update specified bookmarks
        if opts.get('bookmark'):
            marks = repo._bookmarks
            for b in opts['bookmark']:
                # explicit pull overrides local bookmark if any
                ui.status(_("importing bookmark %s\n") % b)
                marks[b] = repo[remotebookmarks[b]].node()
            marks.write()
    finally:
        other.close()
    return ret

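# Illustrative sketch (not part of commands.py): how the -B/--bookmark names
# passed to pull are resolved before anything is transferred. Each requested
# bookmark must exist in the remote's 'bookmarks' listkeys namespace and
# contributes its node to the pulled heads. The plain dict stands in for
# other.listkeys('bookmarks'), and KeyError replaces util.Abort so the
# example stays self-contained.
def _example_bookmark_heads(requested, remotebookmarks):
    heads = []
    for b in requested:
        if b not in remotebookmarks:
            raise KeyError('remote bookmark %s not found!' % b)
        heads.append(remotebookmarks[b])
    return heads

# _example_bookmark_heads(['stable'], {'stable': '<40-hex-node>'})
#   -> ['<40-hex-node>']
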
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    try:
        other = hg.peer(repo, opts, dest)
    except error.RepoError:
        if dest == "default-push":
            raise util.Abort(_("default repository not configured!"),
                    hint=_('see the "path" section in "hg help config"'))
        else:
            raise

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                return not result
    finally:
        del repo._subtoppath
    result = repo.push(other, opts.get('force'), revs=revs,
                       newbranch=opts.get('new_branch'))

    result = not result

    if opts.get('bookmark'):
        bresult = bookmarks.pushtoremote(ui, repo, other, opts['bookmark'])
        if bresult == 2:
            return 2
        if not result and bresult:
            result = 2

    return result

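# Illustrative sketch (not part of commands.py): how the -B/--bookmark
# arguments above are folded into --rev before the actual push. The helper
# name and the plain stand-ins for repo._bookmarks and opts are invented for
# the example only.
def _example_translate_bookmarks(opts, local_bookmarks):
    # every pushed bookmark becomes a --rev entry; a bookmark that no longer
    # exists locally maps to the null revision so its deletion propagates and
    # -B keeps working alongside -r/-b
    for b in opts.get('bookmark', []):
        if b in local_bookmarks:
            opts.setdefault('rev', []).append(b)
        else:
            opts.setdefault('rev', []).append("null")
    return opts

# _example_translate_bookmarks({'bookmark': ['feature']}, {'feature': 1})
#   -> {'bookmark': ['feature'], 'rev': ['feature']}
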
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('remove (and delete) file even if added or modified')),
    ] + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      Note that remove never deletes files in Added [A] state from the
      working directory, not even if option --force is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    ret = 0
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    # warn about failure to delete explicit files/dirs
    wctx = repo[None]
    for f in m.files():
        if f in repo.dirstate or f in wctx.dirs():
            continue
        if os.path.exists(m.rel(f)):
            if os.path.isdir(m.rel(f)):
                ui.warn(_('not removing %s: no tracked files\n') % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        list = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(list):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(list)
    finally:
        wlock.release()

    return ret

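# Illustrative sketch (not part of commands.py): the remove behaviour table
# from the docstring above as a lookup, mapping an option combination and a
# file state to the action taken. W = warn, R = remove from branch,
# D = delete from disk. The literal only restates the documentation.
_EXAMPLE_REMOVE_ACTIONS = {
    #           Added      Clean       Modified    Missing
    'none': {'A': 'W',  'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R',  'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W',  'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R',  'C': 'R',  'M': 'R',  '!': 'R'},
}

# e.g. _EXAMPLE_REMOVE_ACTIONS['-f']['M'] == 'RD': --force removes a modified
# file from the branch and deletes it from disk.
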
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()

@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    Note that Mercurial will not let you commit files with unresolved
    merge conflicts. You must use :hg:`resolve -m ...` before you can
    commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in 'all mark unmark list no_status'.split()]

    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified'),
                         hint=('use --all to remerge all files'))

    wlock = repo.wlock()
    try:
        ms = mergemod.mergestate(repo)

        if not ms.active() and not show:
            raise util.Abort(
                _('resolve command not applicable when not merging'))

        m = scmutil.match(repo[None], pats, opts)
        ret = 0
        didwork = False

        for f in ms:
            if not m(f):
                continue

            didwork = True

            if show:
                if nostatus:
                    ui.write("%s\n" % f)
                else:
                    ui.write("%s %s\n" % (ms[f].upper(), f),
                             label='resolve.' +
                             {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                try:
                    # resolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    if ms.resolve(f, wctx):
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

        ms.commit()

        if not didwork and pats:
            ui.warn(_("arguments do not match paths that need resolving\n"))

    finally:
        wlock.release()

    # Nudge users into finishing an unfinished operation. We don't print
    # this with the list/show operation because we want list/show to remain
    # machine readable.
    if not list(ms.unresolved()) and not show:
        ui.status(_('(no more unresolved files)\n'))

    return ret

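# Illustrative sketch (not part of commands.py): what resolve does for a
# single file in the merge state, depending on which mode was requested.
# The mergestate interface used here (ms[f], ms.mark, ms.resolve) mirrors
# the calls visible above; the helper name is invented for the example.
def _example_resolve_one(ms, f, wctx, show=False, mark=False, unmark=False):
    if show:
        return '%s %s' % (ms[f].upper(), f)  # 'U path' or 'R path'
    if mark:
        ms.mark(f, "r")                      # declare the conflict fixed
    elif unmark:
        ms.mark(f, "u")                      # put the file back in conflict
    else:
        return ms.resolve(f, wctx)           # re-run the merge tool
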
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    """

    if opts.get("date"):
        if opts.get("rev"):
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise util.Abort(_('uncommitted merge with no revision specified'),
                         hint=_('use "hg update" or see "hg help revert"'))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    if not pats and not opts.get('all'):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise util.Abort(msg, hint=hint)
        dirty = util.any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise util.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)

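# Illustrative sketch (not part of commands.py): how revert picks the hint it
# attaches to the "no files or directories specified" abort above. Plain
# booleans and a revision string stand in for the real context objects, and
# the helper name is invented for the example.
def _example_revert_hint(merging, target_is_parent, dirty, rev):
    if merging:
        return ("uncommitted merge, use --all to discard all changes,"
                " or 'hg update -C .' to abort the merge")
    if not target_is_parent:
        if dirty:
            return ("uncommitted changes, use --all to discard all"
                    " changes, or 'hg update %s' to update") % rev
        return ("use --all to revert all files,"
                " or 'hg update %s' to update") % rev
    if dirty:
        return "uncommitted changes, use --all to discard all changes"
    return "use --all to revert all files"
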
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

    To avoid permanent data loss, rollback will refuse to rollback a
    commit transaction if it isn't checked out. Use --force to
    override this protection.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    return repo.rollback(dryrun=opts.get('dry_run'),
                         force=opts.get('force'))

@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    ui.write(repo.root + "\n")

@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    if opts["stdio"] and opts["cmdserver"]:
        raise util.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    if opts["cmdserver"]:
        s = commandserver.server(ui, repo, opts["cmdserver"])
        return s.serve()

    # this way we can check if something was given in the command-line
    if opts.get('port'):
        opts['port'] = util.getport(opts.get('port'))

    baseui = repo and repo.baseui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog certificate encoding")
    for o in optlist.split():
        val = opts.get(o, '')
        if val in (None, ''): # should check against default options instead
            continue
        baseui.setconfig("web", o, val, 'serve')
        if repo and repo.ui != baseui:
            repo.ui.setconfig("web", o, val, 'serve')

    o = opts.get('web_conf') or opts.get('webdir_conf')
    if not o:
        if not repo:
            raise error.RepoError(_("there is no Mercurial repository"
                                    " here (.hg not found)"))
        o = repo

    app = hgweb.hgweb(o, baseui=baseui)
    service = httpservice(ui, app, opts)
    cmdutil.service(opts, initfn=service.init, runfn=service.run)

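# Illustrative notes (not part of commands.py): the access-control caveat in
# the docstring above translates into configuration on the served repository.
# A minimal .hg/hgrc allowing anonymous pushes would look like the stanza
# below (assumption: web.push_ssl also has to be disabled when serving plain
# HTTP, which goes beyond what this file states):
#
#   [web]
#   allow_push = *
#   push_ssl = False
#
# and an invocation that picks a free port and logs to files:
#
#   hg serve --port 0 --accesslog access.log --errorlog error.log
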
class httpservice(object):
    def __init__(self, ui, app, opts):
        self.ui = ui
        self.app = app
        self.opts = opts

    def init(self):
        util.setsignalhandler()
        self.httpd = hgweb_server.create_server(self.ui, self.app)

        if self.opts['port'] and not self.ui.verbose:
            return

        if self.httpd.prefix:
            prefix = self.httpd.prefix.strip('/') + '/'
        else:
            prefix = ''

        port = ':%d' % self.httpd.port
        if port == ':80':
            port = ''

        bindaddr = self.httpd.addr
        if bindaddr == '0.0.0.0':
            bindaddr = '*'
        elif ':' in bindaddr: # IPv6
            bindaddr = '[%s]' % bindaddr

        fqaddr = self.httpd.fqaddr
        if ':' in fqaddr:
            fqaddr = '[%s]' % fqaddr
        if self.opts['port']:
            write = self.ui.status
        else:
            write = self.ui.write
        write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
              (fqaddr, port, prefix, bindaddr, self.httpd.port))
        self.ui.flush() # avoid buffering of status message

    def run(self):
        self.httpd.serve_forever()

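# Illustrative sketch (not part of commands.py): how httpservice.init()
# composes the public part of its "listening at ..." line for a few sample
# bindings. The helper name is invented and the port is taken as an int for
# brevity; the formatting rules mirror the code above.
def _example_listen_url(fqaddr, port, prefix):
    if ':' in fqaddr:                          # IPv6 literals get bracketed
        fqaddr = '[%s]' % fqaddr
    portpart = '' if port == 80 else ':%d' % port
    prefixpart = prefix.strip('/') + '/' if prefix else ''
    return 'http://%s%s/%s' % (fqaddr, portpart, prefixpart)

# _example_listen_url('localhost', 8000, '')      -> 'http://localhost:8000/'
# _example_listen_url('hg.example.com', 80, 'hg') -> 'http://hg.example.com/hg/'
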
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       status may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    cwd = (pats and repo.getcwd()) or ''
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5457 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5455 copy = copies.pathcopies(repo[node1], repo[node2])
5458 copy = copies.pathcopies(repo[node1], repo[node2])
5456
5459
5457 fm = ui.formatter('status', opts)
5460 fm = ui.formatter('status', opts)
5458 fmt = '%s' + end
5461 fmt = '%s' + end
5459 showchar = not opts.get('no_status')
5462 showchar = not opts.get('no_status')
5460
5463
5461 for state, char, files in changestates:
5464 for state, char, files in changestates:
5462 if state in show:
5465 if state in show:
5463 label = 'status.' + state
5466 label = 'status.' + state
5464 for f in files:
5467 for f in files:
5465 fm.startitem()
5468 fm.startitem()
5466 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5469 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5467 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5470 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5468 if f in copy:
5471 if f in copy:
5469 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5472 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5470 label='status.copied')
5473 label='status.copied')
5471 fm.end()
5474 fm.end()
5472
5475
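
The option handling in the status command above is compact, so here is a standalone sketch (plain Python, not Mercurial's API; the helper name and the opts dict are illustrative) of how the "show only ..." flags, --all and --quiet combine into the default -mardu behaviour, and how each selected state is paired with its one-letter code:

    # Illustrative only: mirrors the selection logic of the status command
    # with plain data structures.
    states = 'modified added removed deleted unknown ignored clean'.split()
    codes = dict(zip(states, 'MAR!?IC'))   # e.g. codes['deleted'] == '!'

    def selected_states(opts, quiet=False):
        # explicit "show only ..." flags win
        show = [k for k in states if opts.get(k)]
        if opts.get('all'):
            show += (states[:4] + ['clean']) if quiet else states
        if not show:
            # default: -mardu, or -mard when --quiet hides unknown files
            show = states[:4] if quiet else states[:5]
        return show

    print(selected_states({}))                # the -mardu default
    print(selected_states({'added': True}))   # ['added']
    print(codes['unknown'])                   # '?'
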
5473 @command('^summary|sum',
5476 @command('^summary|sum',
5474 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5477 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5475 def summary(ui, repo, **opts):
5478 def summary(ui, repo, **opts):
5476 """summarize working directory state
5479 """summarize working directory state
5477
5480
5478 This generates a brief summary of the working directory state,
5481 This generates a brief summary of the working directory state,
5479 including parents, branch, commit status, and available updates.
5482 including parents, branch, commit status, and available updates.
5480
5483
5481 With the --remote option, this will check the default paths for
5484 With the --remote option, this will check the default paths for
5482 incoming and outgoing changes. This can be time-consuming.
5485 incoming and outgoing changes. This can be time-consuming.
5483
5486
5484 Returns 0 on success.
5487 Returns 0 on success.
5485 """
5488 """
5486
5489
5487 ctx = repo[None]
5490 ctx = repo[None]
5488 parents = ctx.parents()
5491 parents = ctx.parents()
5489 pnode = parents[0].node()
5492 pnode = parents[0].node()
5490 marks = []
5493 marks = []
5491
5494
5492 for p in parents:
5495 for p in parents:
5493 # label with log.changeset (instead of log.parent) since this
5496 # label with log.changeset (instead of log.parent) since this
5494 # shows a working directory parent *changeset*:
5497 # shows a working directory parent *changeset*:
5495 # i18n: column positioning for "hg summary"
5498 # i18n: column positioning for "hg summary"
5496 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5499 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5497 label='log.changeset changeset.%s' % p.phasestr())
5500 label='log.changeset changeset.%s' % p.phasestr())
5498 ui.write(' '.join(p.tags()), label='log.tag')
5501 ui.write(' '.join(p.tags()), label='log.tag')
5499 if p.bookmarks():
5502 if p.bookmarks():
5500 marks.extend(p.bookmarks())
5503 marks.extend(p.bookmarks())
5501 if p.rev() == -1:
5504 if p.rev() == -1:
5502 if not len(repo):
5505 if not len(repo):
5503 ui.write(_(' (empty repository)'))
5506 ui.write(_(' (empty repository)'))
5504 else:
5507 else:
5505 ui.write(_(' (no revision checked out)'))
5508 ui.write(_(' (no revision checked out)'))
5506 ui.write('\n')
5509 ui.write('\n')
5507 if p.description():
5510 if p.description():
5508 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5511 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5509 label='log.summary')
5512 label='log.summary')
5510
5513
5511 branch = ctx.branch()
5514 branch = ctx.branch()
5512 bheads = repo.branchheads(branch)
5515 bheads = repo.branchheads(branch)
5513 # i18n: column positioning for "hg summary"
5516 # i18n: column positioning for "hg summary"
5514 m = _('branch: %s\n') % branch
5517 m = _('branch: %s\n') % branch
5515 if branch != 'default':
5518 if branch != 'default':
5516 ui.write(m, label='log.branch')
5519 ui.write(m, label='log.branch')
5517 else:
5520 else:
5518 ui.status(m, label='log.branch')
5521 ui.status(m, label='log.branch')
5519
5522
5520 if marks:
5523 if marks:
5521 current = repo._bookmarkcurrent
5524 current = repo._bookmarkcurrent
5522 # i18n: column positioning for "hg summary"
5525 # i18n: column positioning for "hg summary"
5523 ui.write(_('bookmarks:'), label='log.bookmark')
5526 ui.write(_('bookmarks:'), label='log.bookmark')
5524 if current is not None:
5527 if current is not None:
5525 if current in marks:
5528 if current in marks:
5526 ui.write(' *' + current, label='bookmarks.current')
5529 ui.write(' *' + current, label='bookmarks.current')
5527 marks.remove(current)
5530 marks.remove(current)
5528 else:
5531 else:
5529 ui.write(' [%s]' % current, label='bookmarks.current')
5532 ui.write(' [%s]' % current, label='bookmarks.current')
5530 for m in marks:
5533 for m in marks:
5531 ui.write(' ' + m, label='log.bookmark')
5534 ui.write(' ' + m, label='log.bookmark')
5532 ui.write('\n', label='log.bookmark')
5535 ui.write('\n', label='log.bookmark')
5533
5536
5534 st = list(repo.status(unknown=True))[:6]
5537 st = list(repo.status(unknown=True))[:6]
5535
5538
5536 c = repo.dirstate.copies()
5539 c = repo.dirstate.copies()
5537 copied, renamed = [], []
5540 copied, renamed = [], []
5538 for d, s in c.iteritems():
5541 for d, s in c.iteritems():
5539 if s in st[2]:
5542 if s in st[2]:
5540 st[2].remove(s)
5543 st[2].remove(s)
5541 renamed.append(d)
5544 renamed.append(d)
5542 else:
5545 else:
5543 copied.append(d)
5546 copied.append(d)
5544 if d in st[1]:
5547 if d in st[1]:
5545 st[1].remove(d)
5548 st[1].remove(d)
5546 st.insert(3, renamed)
5549 st.insert(3, renamed)
5547 st.insert(4, copied)
5550 st.insert(4, copied)
5548
5551
5549 ms = mergemod.mergestate(repo)
5552 ms = mergemod.mergestate(repo)
5550 st.append([f for f in ms if ms[f] == 'u'])
5553 st.append([f for f in ms if ms[f] == 'u'])
5551
5554
5552 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5555 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5553 st.append(subs)
5556 st.append(subs)
5554
5557
5555 labels = [ui.label(_('%d modified'), 'status.modified'),
5558 labels = [ui.label(_('%d modified'), 'status.modified'),
5556 ui.label(_('%d added'), 'status.added'),
5559 ui.label(_('%d added'), 'status.added'),
5557 ui.label(_('%d removed'), 'status.removed'),
5560 ui.label(_('%d removed'), 'status.removed'),
5558 ui.label(_('%d renamed'), 'status.copied'),
5561 ui.label(_('%d renamed'), 'status.copied'),
5559 ui.label(_('%d copied'), 'status.copied'),
5562 ui.label(_('%d copied'), 'status.copied'),
5560 ui.label(_('%d deleted'), 'status.deleted'),
5563 ui.label(_('%d deleted'), 'status.deleted'),
5561 ui.label(_('%d unknown'), 'status.unknown'),
5564 ui.label(_('%d unknown'), 'status.unknown'),
5562 ui.label(_('%d ignored'), 'status.ignored'),
5565 ui.label(_('%d ignored'), 'status.ignored'),
5563 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5566 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5564 ui.label(_('%d subrepos'), 'status.modified')]
5567 ui.label(_('%d subrepos'), 'status.modified')]
5565 t = []
5568 t = []
5566 for s, l in zip(st, labels):
5569 for s, l in zip(st, labels):
5567 if s:
5570 if s:
5568 t.append(l % len(s))
5571 t.append(l % len(s))
5569
5572
5570 t = ', '.join(t)
5573 t = ', '.join(t)
5571 cleanworkdir = False
5574 cleanworkdir = False
5572
5575
5573 if repo.vfs.exists('updatestate'):
5576 if repo.vfs.exists('updatestate'):
5574 t += _(' (interrupted update)')
5577 t += _(' (interrupted update)')
5575 elif len(parents) > 1:
5578 elif len(parents) > 1:
5576 t += _(' (merge)')
5579 t += _(' (merge)')
5577 elif branch != parents[0].branch():
5580 elif branch != parents[0].branch():
5578 t += _(' (new branch)')
5581 t += _(' (new branch)')
5579 elif (parents[0].closesbranch() and
5582 elif (parents[0].closesbranch() and
5580 pnode in repo.branchheads(branch, closed=True)):
5583 pnode in repo.branchheads(branch, closed=True)):
5581 t += _(' (head closed)')
5584 t += _(' (head closed)')
5582 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5585 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5583 t += _(' (clean)')
5586 t += _(' (clean)')
5584 cleanworkdir = True
5587 cleanworkdir = True
5585 elif pnode not in bheads:
5588 elif pnode not in bheads:
5586 t += _(' (new branch head)')
5589 t += _(' (new branch head)')
5587
5590
5588 if cleanworkdir:
5591 if cleanworkdir:
5589 # i18n: column positioning for "hg summary"
5592 # i18n: column positioning for "hg summary"
5590 ui.status(_('commit: %s\n') % t.strip())
5593 ui.status(_('commit: %s\n') % t.strip())
5591 else:
5594 else:
5592 # i18n: column positioning for "hg summary"
5595 # i18n: column positioning for "hg summary"
5593 ui.write(_('commit: %s\n') % t.strip())
5596 ui.write(_('commit: %s\n') % t.strip())
5594
5597
5595 # all ancestors of branch heads - all ancestors of parent = new csets
5598 # all ancestors of branch heads - all ancestors of parent = new csets
5596 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5599 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5597 bheads))
5600 bheads))
5598
5601
5599 if new == 0:
5602 if new == 0:
5600 # i18n: column positioning for "hg summary"
5603 # i18n: column positioning for "hg summary"
5601 ui.status(_('update: (current)\n'))
5604 ui.status(_('update: (current)\n'))
5602 elif pnode not in bheads:
5605 elif pnode not in bheads:
5603 # i18n: column positioning for "hg summary"
5606 # i18n: column positioning for "hg summary"
5604 ui.write(_('update: %d new changesets (update)\n') % new)
5607 ui.write(_('update: %d new changesets (update)\n') % new)
5605 else:
5608 else:
5606 # i18n: column positioning for "hg summary"
5609 # i18n: column positioning for "hg summary"
5607 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5610 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5608 (new, len(bheads)))
5611 (new, len(bheads)))
5609
5612
5610 cmdutil.summaryhooks(ui, repo)
5613 cmdutil.summaryhooks(ui, repo)
5611
5614
5612 if opts.get('remote'):
5615 if opts.get('remote'):
5613 needsincoming, needsoutgoing = True, True
5616 needsincoming, needsoutgoing = True, True
5614 else:
5617 else:
5615 needsincoming, needsoutgoing = False, False
5618 needsincoming, needsoutgoing = False, False
5616 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5619 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5617 if i:
5620 if i:
5618 needsincoming = True
5621 needsincoming = True
5619 if o:
5622 if o:
5620 needsoutgoing = True
5623 needsoutgoing = True
5621 if not needsincoming and not needsoutgoing:
5624 if not needsincoming and not needsoutgoing:
5622 return
5625 return
5623
5626
5624 def getincoming():
5627 def getincoming():
5625 source, branches = hg.parseurl(ui.expandpath('default'))
5628 source, branches = hg.parseurl(ui.expandpath('default'))
5626 sbranch = branches[0]
5629 sbranch = branches[0]
5627 try:
5630 try:
5628 other = hg.peer(repo, {}, source)
5631 other = hg.peer(repo, {}, source)
5629 except error.RepoError:
5632 except error.RepoError:
5630 if opts.get('remote'):
5633 if opts.get('remote'):
5631 raise
5634 raise
5632 return source, sbranch, None, None, None
5635 return source, sbranch, None, None, None
5633 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5636 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5634 if revs:
5637 if revs:
5635 revs = [other.lookup(rev) for rev in revs]
5638 revs = [other.lookup(rev) for rev in revs]
5636 ui.debug('comparing with %s\n' % util.hidepassword(source))
5639 ui.debug('comparing with %s\n' % util.hidepassword(source))
5637 repo.ui.pushbuffer()
5640 repo.ui.pushbuffer()
5638 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5641 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5639 repo.ui.popbuffer()
5642 repo.ui.popbuffer()
5640 return source, sbranch, other, commoninc, commoninc[1]
5643 return source, sbranch, other, commoninc, commoninc[1]
5641
5644
5642 if needsincoming:
5645 if needsincoming:
5643 source, sbranch, sother, commoninc, incoming = getincoming()
5646 source, sbranch, sother, commoninc, incoming = getincoming()
5644 else:
5647 else:
5645 source = sbranch = sother = commoninc = incoming = None
5648 source = sbranch = sother = commoninc = incoming = None
5646
5649
5647 def getoutgoing():
5650 def getoutgoing():
5648 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5651 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5649 dbranch = branches[0]
5652 dbranch = branches[0]
5650 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5653 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5651 if source != dest:
5654 if source != dest:
5652 try:
5655 try:
5653 dother = hg.peer(repo, {}, dest)
5656 dother = hg.peer(repo, {}, dest)
5654 except error.RepoError:
5657 except error.RepoError:
5655 if opts.get('remote'):
5658 if opts.get('remote'):
5656 raise
5659 raise
5657 return dest, dbranch, None, None
5660 return dest, dbranch, None, None
5658 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5661 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5659 elif sother is None:
5662 elif sother is None:
5660 # there is no explicit destination peer, but the source one is invalid
5663 # there is no explicit destination peer, but the source one is invalid
5661 return dest, dbranch, None, None
5664 return dest, dbranch, None, None
5662 else:
5665 else:
5663 dother = sother
5666 dother = sother
5664 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5667 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5665 common = None
5668 common = None
5666 else:
5669 else:
5667 common = commoninc
5670 common = commoninc
5668 if revs:
5671 if revs:
5669 revs = [repo.lookup(rev) for rev in revs]
5672 revs = [repo.lookup(rev) for rev in revs]
5670 repo.ui.pushbuffer()
5673 repo.ui.pushbuffer()
5671 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5674 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5672 commoninc=common)
5675 commoninc=common)
5673 repo.ui.popbuffer()
5676 repo.ui.popbuffer()
5674 return dest, dbranch, dother, outgoing
5677 return dest, dbranch, dother, outgoing
5675
5678
5676 if needsoutgoing:
5679 if needsoutgoing:
5677 dest, dbranch, dother, outgoing = getoutgoing()
5680 dest, dbranch, dother, outgoing = getoutgoing()
5678 else:
5681 else:
5679 dest = dbranch = dother = outgoing = None
5682 dest = dbranch = dother = outgoing = None
5680
5683
5681 if opts.get('remote'):
5684 if opts.get('remote'):
5682 t = []
5685 t = []
5683 if incoming:
5686 if incoming:
5684 t.append(_('1 or more incoming'))
5687 t.append(_('1 or more incoming'))
5685 o = outgoing.missing
5688 o = outgoing.missing
5686 if o:
5689 if o:
5687 t.append(_('%d outgoing') % len(o))
5690 t.append(_('%d outgoing') % len(o))
5688 other = dother or sother
5691 other = dother or sother
5689 if 'bookmarks' in other.listkeys('namespaces'):
5692 if 'bookmarks' in other.listkeys('namespaces'):
5690 lmarks = repo.listkeys('bookmarks')
5693 lmarks = repo.listkeys('bookmarks')
5691 rmarks = other.listkeys('bookmarks')
5694 rmarks = other.listkeys('bookmarks')
5692 diff = set(rmarks) - set(lmarks)
5695 diff = set(rmarks) - set(lmarks)
5693 if len(diff) > 0:
5696 if len(diff) > 0:
5694 t.append(_('%d incoming bookmarks') % len(diff))
5697 t.append(_('%d incoming bookmarks') % len(diff))
5695 diff = set(lmarks) - set(rmarks)
5698 diff = set(lmarks) - set(rmarks)
5696 if len(diff) > 0:
5699 if len(diff) > 0:
5697 t.append(_('%d outgoing bookmarks') % len(diff))
5700 t.append(_('%d outgoing bookmarks') % len(diff))
5698
5701
5699 if t:
5702 if t:
5700 # i18n: column positioning for "hg summary"
5703 # i18n: column positioning for "hg summary"
5701 ui.write(_('remote: %s\n') % (', '.join(t)))
5704 ui.write(_('remote: %s\n') % (', '.join(t)))
5702 else:
5705 else:
5703 # i18n: column positioning for "hg summary"
5706 # i18n: column positioning for "hg summary"
5704 ui.status(_('remote: (synced)\n'))
5707 ui.status(_('remote: (synced)\n'))
5705
5708
5706 cmdutil.summaryremotehooks(ui, repo, opts,
5709 cmdutil.summaryremotehooks(ui, repo, opts,
5707 ((source, sbranch, sother, commoninc),
5710 ((source, sbranch, sother, commoninc),
5708 (dest, dbranch, dother, outgoing)))
5711 (dest, dbranch, dother, outgoing)))
5709
5712
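
For the --remote output above, the incoming and outgoing bookmark counts come from plain set differences between the local and remote 'bookmarks' listkeys namespaces. A minimal standalone sketch of that comparison (the helper name is hypothetical, not an hg function):

    def bookmark_traffic(local_marks, remote_marks):
        # bookmarks only on the remote are incoming, bookmarks only known
        # locally are outgoing
        incoming = set(remote_marks) - set(local_marks)
        outgoing = set(local_marks) - set(remote_marks)
        parts = []
        if incoming:
            parts.append('%d incoming bookmarks' % len(incoming))
        if outgoing:
            parts.append('%d outgoing bookmarks' % len(outgoing))
        return ', '.join(parts) if parts else '(synced)'

    print(bookmark_traffic({'stable'}, {'stable', 'release-3.0'}))
    # -> 1 incoming bookmarks
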
5710 @command('tag',
5713 @command('tag',
5711 [('f', 'force', None, _('force tag')),
5714 [('f', 'force', None, _('force tag')),
5712 ('l', 'local', None, _('make the tag local')),
5715 ('l', 'local', None, _('make the tag local')),
5713 ('r', 'rev', '', _('revision to tag'), _('REV')),
5716 ('r', 'rev', '', _('revision to tag'), _('REV')),
5714 ('', 'remove', None, _('remove a tag')),
5717 ('', 'remove', None, _('remove a tag')),
5715 # -l/--local is already there, commitopts cannot be used
5718 # -l/--local is already there, commitopts cannot be used
5716 ('e', 'edit', None, _('invoke editor on commit messages')),
5719 ('e', 'edit', None, _('invoke editor on commit messages')),
5717 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5720 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5718 ] + commitopts2,
5721 ] + commitopts2,
5719 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5722 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5720 def tag(ui, repo, name1, *names, **opts):
5723 def tag(ui, repo, name1, *names, **opts):
5721 """add one or more tags for the current or given revision
5724 """add one or more tags for the current or given revision
5722
5725
5723 Name a particular revision using <name>.
5726 Name a particular revision using <name>.
5724
5727
5725 Tags are used to name particular revisions of the repository and are
5728 Tags are used to name particular revisions of the repository and are
5726 very useful to compare different revisions, to go back to significant
5729 very useful to compare different revisions, to go back to significant
5727 earlier versions or to mark branch points as releases, etc. Changing
5730 earlier versions or to mark branch points as releases, etc. Changing
5728 an existing tag is normally disallowed; use -f/--force to override.
5731 an existing tag is normally disallowed; use -f/--force to override.
5729
5732
5730 If no revision is given, the parent of the working directory is
5733 If no revision is given, the parent of the working directory is
5731 used.
5734 used.
5732
5735
5733 To facilitate version control, distribution, and merging of tags,
5736 To facilitate version control, distribution, and merging of tags,
5734 they are stored as a file named ".hgtags" which is managed similarly
5737 they are stored as a file named ".hgtags" which is managed similarly
5735 to other project files and can be hand-edited if necessary. This
5738 to other project files and can be hand-edited if necessary. This
5736 also means that tagging creates a new commit. The file
5739 also means that tagging creates a new commit. The file
5737 ".hg/localtags" is used for local tags (not shared among
5740 ".hg/localtags" is used for local tags (not shared among
5738 repositories).
5741 repositories).
5739
5742
5740 Tag commits are usually made at the head of a branch. If the parent
5743 Tag commits are usually made at the head of a branch. If the parent
5741 of the working directory is not a branch head, :hg:`tag` aborts; use
5744 of the working directory is not a branch head, :hg:`tag` aborts; use
5742 -f/--force to force the tag commit to be based on a non-head
5745 -f/--force to force the tag commit to be based on a non-head
5743 changeset.
5746 changeset.
5744
5747
5745 See :hg:`help dates` for a list of formats valid for -d/--date.
5748 See :hg:`help dates` for a list of formats valid for -d/--date.
5746
5749
5747 Since tag names have priority over branch names during revision
5750 Since tag names have priority over branch names during revision
5748 lookup, using an existing branch name as a tag name is discouraged.
5751 lookup, using an existing branch name as a tag name is discouraged.
5749
5752
5750 Returns 0 on success.
5753 Returns 0 on success.
5751 """
5754 """
5752 wlock = lock = None
5755 wlock = lock = None
5753 try:
5756 try:
5754 wlock = repo.wlock()
5757 wlock = repo.wlock()
5755 lock = repo.lock()
5758 lock = repo.lock()
5756 rev_ = "."
5759 rev_ = "."
5757 names = [t.strip() for t in (name1,) + names]
5760 names = [t.strip() for t in (name1,) + names]
5758 if len(names) != len(set(names)):
5761 if len(names) != len(set(names)):
5759 raise util.Abort(_('tag names must be unique'))
5762 raise util.Abort(_('tag names must be unique'))
5760 for n in names:
5763 for n in names:
5761 scmutil.checknewlabel(repo, n, 'tag')
5764 scmutil.checknewlabel(repo, n, 'tag')
5762 if not n:
5765 if not n:
5763 raise util.Abort(_('tag names cannot consist entirely of '
5766 raise util.Abort(_('tag names cannot consist entirely of '
5764 'whitespace'))
5767 'whitespace'))
5765 if opts.get('rev') and opts.get('remove'):
5768 if opts.get('rev') and opts.get('remove'):
5766 raise util.Abort(_("--rev and --remove are incompatible"))
5769 raise util.Abort(_("--rev and --remove are incompatible"))
5767 if opts.get('rev'):
5770 if opts.get('rev'):
5768 rev_ = opts['rev']
5771 rev_ = opts['rev']
5769 message = opts.get('message')
5772 message = opts.get('message')
5770 if opts.get('remove'):
5773 if opts.get('remove'):
5771 expectedtype = opts.get('local') and 'local' or 'global'
5774 expectedtype = opts.get('local') and 'local' or 'global'
5772 for n in names:
5775 for n in names:
5773 if not repo.tagtype(n):
5776 if not repo.tagtype(n):
5774 raise util.Abort(_("tag '%s' does not exist") % n)
5777 raise util.Abort(_("tag '%s' does not exist") % n)
5775 if repo.tagtype(n) != expectedtype:
5778 if repo.tagtype(n) != expectedtype:
5776 if expectedtype == 'global':
5779 if expectedtype == 'global':
5777 raise util.Abort(_("tag '%s' is not a global tag") % n)
5780 raise util.Abort(_("tag '%s' is not a global tag") % n)
5778 else:
5781 else:
5779 raise util.Abort(_("tag '%s' is not a local tag") % n)
5782 raise util.Abort(_("tag '%s' is not a local tag") % n)
5780 rev_ = nullid
5783 rev_ = nullid
5781 if not message:
5784 if not message:
5782 # we don't translate commit messages
5785 # we don't translate commit messages
5783 message = 'Removed tag %s' % ', '.join(names)
5786 message = 'Removed tag %s' % ', '.join(names)
5784 elif not opts.get('force'):
5787 elif not opts.get('force'):
5785 for n in names:
5788 for n in names:
5786 if n in repo.tags():
5789 if n in repo.tags():
5787 raise util.Abort(_("tag '%s' already exists "
5790 raise util.Abort(_("tag '%s' already exists "
5788 "(use -f to force)") % n)
5791 "(use -f to force)") % n)
5789 if not opts.get('local'):
5792 if not opts.get('local'):
5790 p1, p2 = repo.dirstate.parents()
5793 p1, p2 = repo.dirstate.parents()
5791 if p2 != nullid:
5794 if p2 != nullid:
5792 raise util.Abort(_('uncommitted merge'))
5795 raise util.Abort(_('uncommitted merge'))
5793 bheads = repo.branchheads()
5796 bheads = repo.branchheads()
5794 if not opts.get('force') and bheads and p1 not in bheads:
5797 if not opts.get('force') and bheads and p1 not in bheads:
5795 raise util.Abort(_('not at a branch head (use -f to force)'))
5798 raise util.Abort(_('not at a branch head (use -f to force)'))
5796 r = scmutil.revsingle(repo, rev_).node()
5799 r = scmutil.revsingle(repo, rev_).node()
5797
5800
5798 if not message:
5801 if not message:
5799 # we don't translate commit messages
5802 # we don't translate commit messages
5800 message = ('Added tag %s for changeset %s' %
5803 message = ('Added tag %s for changeset %s' %
5801 (', '.join(names), short(r)))
5804 (', '.join(names), short(r)))
5802
5805
5803 date = opts.get('date')
5806 date = opts.get('date')
5804 if date:
5807 if date:
5805 date = util.parsedate(date)
5808 date = util.parsedate(date)
5806
5809
5807 if opts.get('remove'):
5810 if opts.get('remove'):
5808 editform = 'tag.remove'
5811 editform = 'tag.remove'
5809 else:
5812 else:
5810 editform = 'tag.add'
5813 editform = 'tag.add'
5811 editor = cmdutil.getcommiteditor(editform=editform, **opts)
5814 editor = cmdutil.getcommiteditor(editform=editform, **opts)
5812
5815
5813 # don't allow tagging the null rev
5816 # don't allow tagging the null rev
5814 if (not opts.get('remove') and
5817 if (not opts.get('remove') and
5815 scmutil.revsingle(repo, rev_).rev() == nullrev):
5818 scmutil.revsingle(repo, rev_).rev() == nullrev):
5816 raise util.Abort(_("cannot tag null revision"))
5819 raise util.Abort(_("cannot tag null revision"))
5817
5820
5818 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
5821 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
5819 editor=editor)
5822 editor=editor)
5820 finally:
5823 finally:
5821 release(lock, wlock)
5824 release(lock, wlock)
5822
5825
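
Before anything is committed, the tag command validates the requested names: duplicates and whitespace-only names abort. A condensed standalone sketch of that check (the function name and exception type are illustrative, not hg internals):

    def validate_tag_names(names):
        names = [n.strip() for n in names]
        if len(names) != len(set(names)):
            raise ValueError('tag names must be unique')
        for n in names:
            if not n:
                raise ValueError('tag names cannot consist entirely of whitespace')
        return names

    print(validate_tag_names(['v1.0', 'release-1.0']))   # ['v1.0', 'release-1.0']
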
5823 @command('tags', [], '')
5826 @command('tags', [], '')
5824 def tags(ui, repo, **opts):
5827 def tags(ui, repo, **opts):
5825 """list repository tags
5828 """list repository tags
5826
5829
5827 This lists both regular and local tags. When the -v/--verbose
5830 This lists both regular and local tags. When the -v/--verbose
5828 switch is used, a third column "local" is printed for local tags.
5831 switch is used, a third column "local" is printed for local tags.
5829
5832
5830 Returns 0 on success.
5833 Returns 0 on success.
5831 """
5834 """
5832
5835
5833 fm = ui.formatter('tags', opts)
5836 fm = ui.formatter('tags', opts)
5834 hexfunc = ui.debugflag and hex or short
5837 hexfunc = ui.debugflag and hex or short
5835 tagtype = ""
5838 tagtype = ""
5836
5839
5837 for t, n in reversed(repo.tagslist()):
5840 for t, n in reversed(repo.tagslist()):
5838 hn = hexfunc(n)
5841 hn = hexfunc(n)
5839 label = 'tags.normal'
5842 label = 'tags.normal'
5840 tagtype = ''
5843 tagtype = ''
5841 if repo.tagtype(t) == 'local':
5844 if repo.tagtype(t) == 'local':
5842 label = 'tags.local'
5845 label = 'tags.local'
5843 tagtype = 'local'
5846 tagtype = 'local'
5844
5847
5845 fm.startitem()
5848 fm.startitem()
5846 fm.write('tag', '%s', t, label=label)
5849 fm.write('tag', '%s', t, label=label)
5847 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5850 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5848 fm.condwrite(not ui.quiet, 'rev id', fmt,
5851 fm.condwrite(not ui.quiet, 'rev id', fmt,
5849 repo.changelog.rev(n), hn, label=label)
5852 repo.changelog.rev(n), hn, label=label)
5850 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5853 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5851 tagtype, label=label)
5854 tagtype, label=label)
5852 fm.plain('\n')
5855 fm.plain('\n')
5853 fm.end()
5856 fm.end()
5854
5857
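
The listing above pads every tag name to roughly a 30-column field before the "rev:node" pair so the columns line up in non-quiet output (the real code measures display width with encoding.colwidth rather than len). A rough standalone sketch of that layout:

    def format_tag_line(tag, rev, node, quiet=False):
        line = tag
        if not quiet:
            # pad the name, then append "  rev:node"
            line += ' ' * max(0, 30 - len(tag)) + ' %5d:%s' % (rev, node)
        return line

    print(format_tag_line('tip', 12345, 'abcdef012345'))
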
5855 @command('tip',
5858 @command('tip',
5856 [('p', 'patch', None, _('show patch')),
5859 [('p', 'patch', None, _('show patch')),
5857 ('g', 'git', None, _('use git extended diff format')),
5860 ('g', 'git', None, _('use git extended diff format')),
5858 ] + templateopts,
5861 ] + templateopts,
5859 _('[-p] [-g]'))
5862 _('[-p] [-g]'))
5860 def tip(ui, repo, **opts):
5863 def tip(ui, repo, **opts):
5861 """show the tip revision (DEPRECATED)
5864 """show the tip revision (DEPRECATED)
5862
5865
5863 The tip revision (usually just called the tip) is the changeset
5866 The tip revision (usually just called the tip) is the changeset
5864 most recently added to the repository (and therefore the most
5867 most recently added to the repository (and therefore the most
5865 recently changed head).
5868 recently changed head).
5866
5869
5867 If you have just made a commit, that commit will be the tip. If
5870 If you have just made a commit, that commit will be the tip. If
5868 you have just pulled changes from another repository, the tip of
5871 you have just pulled changes from another repository, the tip of
5869 that repository becomes the current tip. The "tip" tag is special
5872 that repository becomes the current tip. The "tip" tag is special
5870 and cannot be renamed or assigned to a different changeset.
5873 and cannot be renamed or assigned to a different changeset.
5871
5874
5872 This command is deprecated, please use :hg:`heads` instead.
5875 This command is deprecated, please use :hg:`heads` instead.
5873
5876
5874 Returns 0 on success.
5877 Returns 0 on success.
5875 """
5878 """
5876 displayer = cmdutil.show_changeset(ui, repo, opts)
5879 displayer = cmdutil.show_changeset(ui, repo, opts)
5877 displayer.show(repo['tip'])
5880 displayer.show(repo['tip'])
5878 displayer.close()
5881 displayer.close()
5879
5882
5880 @command('unbundle',
5883 @command('unbundle',
5881 [('u', 'update', None,
5884 [('u', 'update', None,
5882 _('update to new branch head if changesets were unbundled'))],
5885 _('update to new branch head if changesets were unbundled'))],
5883 _('[-u] FILE...'))
5886 _('[-u] FILE...'))
5884 def unbundle(ui, repo, fname1, *fnames, **opts):
5887 def unbundle(ui, repo, fname1, *fnames, **opts):
5885 """apply one or more changegroup files
5888 """apply one or more changegroup files
5886
5889
5887 Apply one or more compressed changegroup files generated by the
5890 Apply one or more compressed changegroup files generated by the
5888 bundle command.
5891 bundle command.
5889
5892
5890 Returns 0 on success, 1 if an update has unresolved files.
5893 Returns 0 on success, 1 if an update has unresolved files.
5891 """
5894 """
5892 fnames = (fname1,) + fnames
5895 fnames = (fname1,) + fnames
5893
5896
5894 lock = repo.lock()
5897 lock = repo.lock()
5895 try:
5898 try:
5896 for fname in fnames:
5899 for fname in fnames:
5897 f = hg.openpath(ui, fname)
5900 f = hg.openpath(ui, fname)
5898 gen = exchange.readbundle(ui, f, fname)
5901 gen = exchange.readbundle(ui, f, fname)
5899 modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
5902 modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
5900 'bundle:' + fname)
5903 'bundle:' + fname)
5901 finally:
5904 finally:
5902 lock.release()
5905 lock.release()
5903
5906
5904 return postincoming(ui, repo, modheads, opts.get('update'), None)
5907 return postincoming(ui, repo, modheads, opts.get('update'), None)
5905
5908
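
The loop above takes a single repository lock, applies every bundle file in order, and only releases the lock once all files are processed; the update/return handling happens after the lock is dropped. A generic sketch of that pattern (the callables and dummy lock are placeholders, not hg functions):

    def apply_all(paths, acquire_lock, apply_one):
        lock = acquire_lock()
        try:
            result = None
            for path in paths:
                result = apply_one(path)   # result of the last file wins
        finally:
            lock.release()
        return result

    class _DummyLock(object):
        def release(self):
            pass

    print(apply_all(['a.hg', 'b.hg'], _DummyLock, lambda p: p))   # -> b.hg
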
5906 @command('^update|up|checkout|co',
5909 @command('^update|up|checkout|co',
5907 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5910 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5908 ('c', 'check', None,
5911 ('c', 'check', None,
5909 _('update across branches if no uncommitted changes')),
5912 _('update across branches if no uncommitted changes')),
5910 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5913 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5911 ('r', 'rev', '', _('revision'), _('REV'))
5914 ('r', 'rev', '', _('revision'), _('REV'))
5912 ] + mergetoolopts,
5915 ] + mergetoolopts,
5913 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5916 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5914 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5917 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5915 tool=None):
5918 tool=None):
5916 """update working directory (or switch revisions)
5919 """update working directory (or switch revisions)
5917
5920
5918 Update the repository's working directory to the specified
5921 Update the repository's working directory to the specified
5919 changeset. If no changeset is specified, update to the tip of the
5922 changeset. If no changeset is specified, update to the tip of the
5920 current named branch and move the current bookmark (see :hg:`help
5923 current named branch and move the current bookmark (see :hg:`help
5921 bookmarks`).
5924 bookmarks`).
5922
5925
5923 Update sets the working directory's parent revision to the specified
5926 Update sets the working directory's parent revision to the specified
5924 changeset (see :hg:`help parents`).
5927 changeset (see :hg:`help parents`).
5925
5928
5926 If the changeset is not a descendant or ancestor of the working
5929 If the changeset is not a descendant or ancestor of the working
5927 directory's parent, the update is aborted. With the -c/--check
5930 directory's parent, the update is aborted. With the -c/--check
5928 option, the working directory is checked for uncommitted changes; if
5931 option, the working directory is checked for uncommitted changes; if
5929 none are found, the working directory is updated to the specified
5932 none are found, the working directory is updated to the specified
5930 changeset.
5933 changeset.
5931
5934
5932 .. container:: verbose
5935 .. container:: verbose
5933
5936
5934 The following rules apply when the working directory contains
5937 The following rules apply when the working directory contains
5935 uncommitted changes:
5938 uncommitted changes:
5936
5939
5937 1. If neither -c/--check nor -C/--clean is specified, and if
5940 1. If neither -c/--check nor -C/--clean is specified, and if
5938 the requested changeset is an ancestor or descendant of
5941 the requested changeset is an ancestor or descendant of
5939 the working directory's parent, the uncommitted changes
5942 the working directory's parent, the uncommitted changes
5940 are merged into the requested changeset and the merged
5943 are merged into the requested changeset and the merged
5941 result is left uncommitted. If the requested changeset is
5944 result is left uncommitted. If the requested changeset is
5942 not an ancestor or descendant (that is, it is on another
5945 not an ancestor or descendant (that is, it is on another
5943 branch), the update is aborted and the uncommitted changes
5946 branch), the update is aborted and the uncommitted changes
5944 are preserved.
5947 are preserved.
5945
5948
5946 2. With the -c/--check option, the update is aborted and the
5949 2. With the -c/--check option, the update is aborted and the
5947 uncommitted changes are preserved.
5950 uncommitted changes are preserved.
5948
5951
5949 3. With the -C/--clean option, uncommitted changes are discarded and
5952 3. With the -C/--clean option, uncommitted changes are discarded and
5950 the working directory is updated to the requested changeset.
5953 the working directory is updated to the requested changeset.
5951
5954
5952 To cancel an uncommitted merge (and lose your changes), use
5955 To cancel an uncommitted merge (and lose your changes), use
5953 :hg:`update --clean .`.
5956 :hg:`update --clean .`.
5954
5957
5955 Use null as the changeset to remove the working directory (like
5958 Use null as the changeset to remove the working directory (like
5956 :hg:`clone -U`).
5959 :hg:`clone -U`).
5957
5960
5958 If you want to revert just one file to an older revision, use
5961 If you want to revert just one file to an older revision, use
5959 :hg:`revert [-r REV] NAME`.
5962 :hg:`revert [-r REV] NAME`.
5960
5963
5961 See :hg:`help dates` for a list of formats valid for -d/--date.
5964 See :hg:`help dates` for a list of formats valid for -d/--date.
5962
5965
5963 Returns 0 on success, 1 if there are unresolved files.
5966 Returns 0 on success, 1 if there are unresolved files.
5964 """
5967 """
5965 if rev and node:
5968 if rev and node:
5966 raise util.Abort(_("please specify just one revision"))
5969 raise util.Abort(_("please specify just one revision"))
5967
5970
5968 if rev is None or rev == '':
5971 if rev is None or rev == '':
5969 rev = node
5972 rev = node
5970
5973
5971 cmdutil.clearunfinished(repo)
5974 cmdutil.clearunfinished(repo)
5972
5975
5973 # with no argument, we also move the current bookmark, if any
5976 # with no argument, we also move the current bookmark, if any
5974 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
5977 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
5975
5978
5976 # if we defined a bookmark, we have to remember the original bookmark name
5979 # if we defined a bookmark, we have to remember the original bookmark name
5977 brev = rev
5980 brev = rev
5978 rev = scmutil.revsingle(repo, rev, rev).rev()
5981 rev = scmutil.revsingle(repo, rev, rev).rev()
5979
5982
5980 if check and clean:
5983 if check and clean:
5981 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5984 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5982
5985
5983 if date:
5986 if date:
5984 if rev is not None:
5987 if rev is not None:
5985 raise util.Abort(_("you can't specify a revision and a date"))
5988 raise util.Abort(_("you can't specify a revision and a date"))
5986 rev = cmdutil.finddate(ui, repo, date)
5989 rev = cmdutil.finddate(ui, repo, date)
5987
5990
5988 if check:
5991 if check:
5989 c = repo[None]
5992 c = repo[None]
5990 if c.dirty(merge=False, branch=False, missing=True):
5993 if c.dirty(merge=False, branch=False, missing=True):
5991 raise util.Abort(_("uncommitted changes"))
5994 raise util.Abort(_("uncommitted changes"))
5992 if rev is None:
5995 if rev is None:
5993 rev = repo[repo[None].branch()].rev()
5996 rev = repo[repo[None].branch()].rev()
5994 mergemod._checkunknown(repo, repo[None], repo[rev])
5997 mergemod._checkunknown(repo, repo[None], repo[rev])
5995
5998
5996 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5999 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5997
6000
5998 if clean:
6001 if clean:
5999 ret = hg.clean(repo, rev)
6002 ret = hg.clean(repo, rev)
6000 else:
6003 else:
6001 ret = hg.update(repo, rev)
6004 ret = hg.update(repo, rev)
6002
6005
6003 if not ret and movemarkfrom:
6006 if not ret and movemarkfrom:
6004 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
6007 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
6005 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
6008 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
6006 elif brev in repo._bookmarks:
6009 elif brev in repo._bookmarks:
6007 bookmarks.setcurrent(repo, brev)
6010 bookmarks.setcurrent(repo, brev)
6008 ui.status(_("(activating bookmark %s)\n") % brev)
6011 ui.status(_("(activating bookmark %s)\n") % brev)
6009 elif brev:
6012 elif brev:
6010 if repo._bookmarkcurrent:
6013 if repo._bookmarkcurrent:
6011 ui.status(_("(leaving bookmark %s)\n") %
6014 ui.status(_("(leaving bookmark %s)\n") %
6012 repo._bookmarkcurrent)
6015 repo._bookmarkcurrent)
6013 bookmarks.unsetcurrent(repo)
6016 bookmarks.unsetcurrent(repo)
6014
6017
6015 return ret
6018 return ret
6016
6019
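
The three rules in the update docstring reduce to a small decision table. The sketch below (standalone and hypothetical, not hg internals) shows how --clean, --check, a dirty working directory and a linear (ancestor/descendant) target interact:

    def update_behaviour(dirty, clean_flag, check_flag, linear):
        if clean_flag:
            return 'discard uncommitted changes, update'
        if check_flag and dirty:
            return 'abort: uncommitted changes'
        if not dirty:
            return 'update'
        if linear:
            return 'merge uncommitted changes into the target, leave them uncommitted'
        return 'abort: update crosses branches'

    print(update_behaviour(dirty=True, clean_flag=False, check_flag=False, linear=False))
    # -> abort: update crosses branches
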
6017 @command('verify', [])
6020 @command('verify', [])
6018 def verify(ui, repo):
6021 def verify(ui, repo):
6019 """verify the integrity of the repository
6022 """verify the integrity of the repository
6020
6023
6021 Verify the integrity of the current repository.
6024 Verify the integrity of the current repository.
6022
6025
6023 This will perform an extensive check of the repository's
6026 This will perform an extensive check of the repository's
6024 integrity, validating the hashes and checksums of each entry in
6027 integrity, validating the hashes and checksums of each entry in
6025 the changelog, manifest, and tracked files, as well as the
6028 the changelog, manifest, and tracked files, as well as the
6026 integrity of their crosslinks and indices.
6029 integrity of their crosslinks and indices.
6027
6030
6028 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
6031 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
6029 for more information about recovery from corruption of the
6032 for more information about recovery from corruption of the
6030 repository.
6033 repository.
6031
6034
6032 Returns 0 on success, 1 if errors are encountered.
6035 Returns 0 on success, 1 if errors are encountered.
6033 """
6036 """
6034 return hg.verify(repo)
6037 return hg.verify(repo)
6035
6038
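
Most of the verify work happens inside hg.verify(); conceptually each stored revision is rehashed and compared against its recorded node, and the cross-links between changelog, manifest and filelogs are checked. A toy illustration of the rehash-and-compare step only (purely illustrative data, not the revlog format):

    import hashlib

    def check_entries(entries):
        """entries: iterable of (name, data, expected_sha1_hex)."""
        errors = 0
        for name, data, expected in entries:
            if hashlib.sha1(data).hexdigest() != expected:
                print('%s: checksum mismatch' % name)
                errors += 1
        return 1 if errors else 0

    ok = hashlib.sha1(b'hello').hexdigest()
    print(check_entries([('a.txt', b'hello', ok)]))   # -> 0
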
6036 @command('version', [], norepo=True)
6039 @command('version', [], norepo=True)
6037 def version_(ui):
6040 def version_(ui):
6038 """output version and copyright information"""
6041 """output version and copyright information"""
6039 ui.write(_("Mercurial Distributed SCM (version %s)\n")
6042 ui.write(_("Mercurial Distributed SCM (version %s)\n")
6040 % util.version())
6043 % util.version())
6041 ui.status(_(
6044 ui.status(_(
6042 "(see http://mercurial.selenic.com for more information)\n"
6045 "(see http://mercurial.selenic.com for more information)\n"
6043 "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
6046 "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
6044 "This is free software; see the source for copying conditions. "
6047 "This is free software; see the source for copying conditions. "
6045 "There is NO\nwarranty; "
6048 "There is NO\nwarranty; "
6046 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6049 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6047 ))
6050 ))
6048
6051
6049 ui.note(_("\nEnabled extensions:\n\n"))
6052 ui.note(_("\nEnabled extensions:\n\n"))
6050 if ui.verbose:
6053 if ui.verbose:
6051 # format names and versions into columns
6054 # format names and versions into columns
6052 names = []
6055 names = []
6053 vers = []
6056 vers = []
6054 for name, module in extensions.extensions():
6057 for name, module in extensions.extensions():
6055 names.append(name)
6058 names.append(name)
6056 vers.append(extensions.moduleversion(module))
6059 vers.append(extensions.moduleversion(module))
6057 if names:
6060 if names:
6058 maxnamelen = max(len(n) for n in names)
6061 maxnamelen = max(len(n) for n in names)
6059 for i, name in enumerate(names):
6062 for i, name in enumerate(names):
6060 ui.write(" %-*s %s\n" % (maxnamelen, name, vers[i]))
6063 ui.write(" %-*s %s\n" % (maxnamelen, name, vers[i]))
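
The verbose branch above lines up extension names and versions by padding each name to the longest one with a "%-*s" specifier. A standalone illustration with made-up data:

    names, vers = ['color', 'rebase', 'mq'], ['', 'internal', '']
    maxnamelen = max(len(n) for n in names)
    for name, ver in zip(names, vers):
        print(" %-*s %s" % (maxnamelen, name, ver))
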
@@ -1,1692 +1,1694 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, short, hex, bin
8 from node import nullid, nullrev, short, hex, bin
9 from i18n import _
9 from i18n import _
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 import match as matchmod
11 import match as matchmod
12 import os, errno, stat
12 import os, errno, stat
13 import obsolete as obsmod
13 import obsolete as obsmod
14 import repoview
14 import repoview
15 import fileset
15 import fileset
16 import revlog
16 import revlog
17
17
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 class basectx(object):
20 class basectx(object):
21 """A basectx object represents the common logic for its children:
21 """A basectx object represents the common logic for its children:
22 changectx: read-only context that is already present in the repo,
22 changectx: read-only context that is already present in the repo,
23 workingctx: a context that represents the working directory and can
23 workingctx: a context that represents the working directory and can
24 be committed,
24 be committed,
25 memctx: a context that represents changes in-memory and can also
25 memctx: a context that represents changes in-memory and can also
26 be committed."""
26 be committed."""
27 def __new__(cls, repo, changeid='', *args, **kwargs):
27 def __new__(cls, repo, changeid='', *args, **kwargs):
28 if isinstance(changeid, basectx):
28 if isinstance(changeid, basectx):
29 return changeid
29 return changeid
30
30
31 o = super(basectx, cls).__new__(cls)
31 o = super(basectx, cls).__new__(cls)
32
32
33 o._repo = repo
33 o._repo = repo
34 o._rev = nullrev
34 o._rev = nullrev
35 o._node = nullid
35 o._node = nullid
36
36
37 return o
37 return o
38
38
39 def __str__(self):
39 def __str__(self):
40 return short(self.node())
40 return short(self.node())
41
41
42 def __int__(self):
42 def __int__(self):
43 return self.rev()
43 return self.rev()
44
44
45 def __repr__(self):
45 def __repr__(self):
46 return "<%s %s>" % (type(self).__name__, str(self))
46 return "<%s %s>" % (type(self).__name__, str(self))
47
47
48 def __eq__(self, other):
48 def __eq__(self, other):
49 try:
49 try:
50 return type(self) == type(other) and self._rev == other._rev
50 return type(self) == type(other) and self._rev == other._rev
51 except AttributeError:
51 except AttributeError:
52 return False
52 return False
53
53
54 def __ne__(self, other):
54 def __ne__(self, other):
55 return not (self == other)
55 return not (self == other)
56
56
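
Two details of basectx are worth calling out: __new__ passes an existing context straight through (so wrapping a context in a context is a no-op), and equality is defined purely by type and revision number. A stripped-down standalone model of just those two behaviours (a toy class, not mercurial.context):

    class Ctx(object):
        def __new__(cls, changeid=None):
            if isinstance(changeid, Ctx):   # already a context: return it as-is
                return changeid
            o = super(Ctx, cls).__new__(cls)
            o._rev = changeid
            return o
        def __eq__(self, other):
            try:
                return type(self) == type(other) and self._rev == other._rev
            except AttributeError:
                return False
        def __ne__(self, other):
            return not (self == other)

    a = Ctx(5)
    assert Ctx(a) is a
    assert Ctx(5) == Ctx(5) and Ctx(5) != Ctx(6)
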
57 def __contains__(self, key):
57 def __contains__(self, key):
58 return key in self._manifest
58 return key in self._manifest
59
59
60 def __getitem__(self, key):
60 def __getitem__(self, key):
61 return self.filectx(key)
61 return self.filectx(key)
62
62
63 def __iter__(self):
63 def __iter__(self):
64 for f in sorted(self._manifest):
64 for f in sorted(self._manifest):
65 yield f
65 yield f
66
66
67 def _manifestmatches(self, match, s):
67 def _manifestmatches(self, match, s):
68 """generate a new manifest filtered by the match argument
68 """generate a new manifest filtered by the match argument
69
69
70 This method is for internal use only and mainly exists to provide an
70 This method is for internal use only and mainly exists to provide an
71 object oriented way for other contexts to customize the manifest
71 object oriented way for other contexts to customize the manifest
72 generation.
72 generation.
73 """
73 """
74 if match.always():
74 if match.always():
75 return self.manifest().copy()
75 return self.manifest().copy()
76
76
77 files = match.files()
77 files = match.files()
78 if (match.matchfn == match.exact or
78 if (match.matchfn == match.exact or
79 (not match.anypats() and util.all(fn in self for fn in files))):
79 (not match.anypats() and util.all(fn in self for fn in files))):
80 return self.manifest().intersectfiles(files)
80 return self.manifest().intersectfiles(files)
81
81
82 mf = self.manifest().copy()
82 mf = self.manifest().copy()
83 for fn in mf.keys():
83 for fn in mf.keys():
84 if not match(fn):
84 if not match(fn):
85 del mf[fn]
85 del mf[fn]
86 return mf
86 return mf
87
87
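
_manifestmatches boils down to "copy the manifest, drop everything the matcher rejects", with fast paths for the always-matcher and exact file lists. A plain-dict sketch of the general case (the predicate stands in for a match object; names are illustrative):

    def filter_manifest(manifest, match=None):
        if match is None:                  # "always" matcher: plain copy
            return dict(manifest)
        return {fn: node for fn, node in manifest.items() if match(fn)}

    mf = {'a.py': 'n1', 'b.txt': 'n2'}
    print(filter_manifest(mf, lambda fn: fn.endswith('.py')))   # {'a.py': 'n1'}
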
88 def _matchstatus(self, other, s, match, listignored, listclean,
88 def _matchstatus(self, other, s, match, listignored, listclean,
89 listunknown):
89 listunknown):
90 """return match.always if match is None
90 """return match.always if match is None
91
91
92 This internal method provides a way for child objects to override the
92 This internal method provides a way for child objects to override the
93 match operator.
93 match operator.
94 """
94 """
95 return match or matchmod.always(self._repo.root, self._repo.getcwd())
95 return match or matchmod.always(self._repo.root, self._repo.getcwd())
96
96
97 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
97 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
98 """provide a hook to allow child objects to preprocess status results
98 """provide a hook to allow child objects to preprocess status results
99
99
100 For example, this allows other contexts, such as workingctx, to query
100 For example, this allows other contexts, such as workingctx, to query
101 the dirstate before comparing the manifests.
101 the dirstate before comparing the manifests.
102 """
102 """
103 # load earliest manifest first for caching reasons
103 # load earliest manifest first for caching reasons
104 if self.rev() < other.rev():
104 if self.rev() < other.rev():
105 self.manifest()
105 self.manifest()
106 return s
106 return s
107
107
108 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
108 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
109 """provide a hook to allow child objects to postprocess status results
109 """provide a hook to allow child objects to postprocess status results
110
110
111 For example, this allows other contexts, such as workingctx, to filter
111 For example, this allows other contexts, such as workingctx, to filter
112 suspect symlinks in the case of FAT32 and NTFS filesystems.
112 suspect symlinks in the case of FAT32 and NTFS filesystems.
113 """
113 """
114 return s
114 return s
115
115
116 def _buildstatus(self, other, s, match, listignored, listclean,
116 def _buildstatus(self, other, s, match, listignored, listclean,
117 listunknown):
117 listunknown):
118 """build a status with respect to another context"""
118 """build a status with respect to another context"""
119 mf1 = other._manifestmatches(match, s)
119 mf1 = other._manifestmatches(match, s)
120 mf2 = self._manifestmatches(match, s)
120 mf2 = self._manifestmatches(match, s)
121
121
122 modified, added, clean = [], [], []
122 modified, added, clean = [], [], []
123 deleted, unknown, ignored = s[3], s[4], s[5]
123 deleted, unknown, ignored = s[3], s[4], s[5]
124 withflags = mf1.withflags() | mf2.withflags()
124 withflags = mf1.withflags() | mf2.withflags()
125 for fn, mf2node in mf2.iteritems():
125 for fn, mf2node in mf2.iteritems():
126 if fn in mf1:
126 if fn in mf1:
127 if (fn not in deleted and
127 if (fn not in deleted and
128 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
128 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
129 (mf1[fn] != mf2node and
129 (mf1[fn] != mf2node and
130 (mf2node or self[fn].cmp(other[fn]))))):
130 (mf2node or self[fn].cmp(other[fn]))))):
131 modified.append(fn)
131 modified.append(fn)
132 elif listclean:
132 elif listclean:
133 clean.append(fn)
133 clean.append(fn)
134 del mf1[fn]
134 del mf1[fn]
135 elif fn not in deleted:
135 elif fn not in deleted:
136 added.append(fn)
136 added.append(fn)
137 removed = mf1.keys()
137 removed = mf1.keys()
138 if removed:
138 if removed:
139 # need to filter files if they are already reported as removed
139 # need to filter files if they are already reported as removed
140 unknown = [fn for fn in unknown if fn not in mf1]
140 unknown = [fn for fn in unknown if fn not in mf1]
141 ignored = [fn for fn in ignored if fn not in mf1]
141 ignored = [fn for fn in ignored if fn not in mf1]
142
142
143 return [modified, added, removed, deleted, unknown, ignored, clean]
143 return [modified, added, removed, deleted, unknown, ignored, clean]
144
144
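
_buildstatus is essentially a two-manifest diff: entries present in both with different nodes (or flags) are modified, entries only in the newer manifest are added, and whatever is left of the older manifest afterwards is removed. A toy version over plain dicts, ignoring flags, deletions and the cmp fallback (names and data are illustrative):

    def diff_manifests(mf_old, mf_new):
        mf_old = dict(mf_old)              # don't mutate the caller's dict
        modified, added, clean = [], [], []
        for fn, node in mf_new.items():
            if fn in mf_old:
                (modified if mf_old[fn] != node else clean).append(fn)
                del mf_old[fn]
            else:
                added.append(fn)
        removed = sorted(mf_old)           # leftovers existed only in mf_old
        return modified, added, removed, clean

    print(diff_manifests({'a': 'n1', 'b': 'n2'}, {'a': 'n1', 'c': 'n3'}))
    # -> ([], ['c'], ['b'], ['a'])
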
145 @propertycache
145 @propertycache
146 def substate(self):
146 def substate(self):
147 return subrepo.state(self, self._repo.ui)
147 return subrepo.state(self, self._repo.ui)
148
148
149 def subrev(self, subpath):
149 def subrev(self, subpath):
150 return self.substate[subpath][1]
150 return self.substate[subpath][1]
151
151
152 def rev(self):
152 def rev(self):
153 return self._rev
153 return self._rev
154 def node(self):
154 def node(self):
155 return self._node
155 return self._node
156 def hex(self):
156 def hex(self):
157 return hex(self.node())
157 return hex(self.node())
158 def manifest(self):
158 def manifest(self):
159 return self._manifest
159 return self._manifest
160 def phasestr(self):
160 def phasestr(self):
161 return phases.phasenames[self.phase()]
161 return phases.phasenames[self.phase()]
162 def mutable(self):
162 def mutable(self):
163 return self.phase() > phases.public
163 return self.phase() > phases.public
164
164
165 def getfileset(self, expr):
165 def getfileset(self, expr):
166 return fileset.getfileset(self, expr)
166 return fileset.getfileset(self, expr)
167
167
168 def obsolete(self):
168 def obsolete(self):
169 """True if the changeset is obsolete"""
169 """True if the changeset is obsolete"""
170 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
170 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
171
171
172 def extinct(self):
172 def extinct(self):
173 """True if the changeset is extinct"""
173 """True if the changeset is extinct"""
174 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
174 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
175
175
176 def unstable(self):
176 def unstable(self):
177 """True if the changeset is not obsolete but its ancestors are"""
177 """True if the changeset is not obsolete but its ancestors are"""
178 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
178 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
179
179
180 def bumped(self):
180 def bumped(self):
181 """True if the changeset tries to be a successor of a public changeset
181 """True if the changeset tries to be a successor of a public changeset
182
182
183 Only non-public and non-obsolete changesets may be bumped.
183 Only non-public and non-obsolete changesets may be bumped.
184 """
184 """
185 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
185 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
186
187 def divergent(self):
188 """Is a successor of a changeset with multiple possible successor sets
189
190 Only non-public and non-obsolete changesets may be divergent.
191 """
192 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
193
194 def troubled(self):
195 """True if the changeset is either unstable, bumped or divergent"""
196 return self.unstable() or self.bumped() or self.divergent()
197
198 def troubles(self):
199 """return the list of troubles affecting this changeset.
200
201 Troubles are returned as strings. Possible values are:
202 - unstable,
203 - bumped,
204 - divergent.
205 """
206 troubles = []
207 if self.unstable():
208 troubles.append('unstable')
209 if self.bumped():
210 troubles.append('bumped')
211 if self.divergent():
212 troubles.append('divergent')
213 return troubles
214
214
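# A minimal usage sketch, assuming a hypothetical local repository object
# ``repo`` with obsolescence enabled; the predicates above are thin
# wrappers around obsmod.getrevs():
#
#     ctx = repo['tip']
#     if ctx.troubled():
#         print '%s is %s' % (ctx, ', '.join(ctx.troubles()))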
215 def parents(self):
215 def parents(self):
216 """return contexts for each parent changeset"""
216 """return contexts for each parent changeset"""
217 return self._parents
217 return self._parents
218
218
219 def p1(self):
219 def p1(self):
220 return self._parents[0]
220 return self._parents[0]
221
221
222 def p2(self):
222 def p2(self):
223 if len(self._parents) == 2:
223 if len(self._parents) == 2:
224 return self._parents[1]
224 return self._parents[1]
225 return changectx(self._repo, -1)
225 return changectx(self._repo, -1)
226
226
227 def _fileinfo(self, path):
227 def _fileinfo(self, path):
228 if '_manifest' in self.__dict__:
228 if '_manifest' in self.__dict__:
229 try:
229 try:
230 return self._manifest[path], self._manifest.flags(path)
230 return self._manifest[path], self._manifest.flags(path)
231 except KeyError:
231 except KeyError:
232 raise error.ManifestLookupError(self._node, path,
232 raise error.ManifestLookupError(self._node, path,
233 _('not found in manifest'))
233 _('not found in manifest'))
234 if '_manifestdelta' in self.__dict__ or path in self.files():
234 if '_manifestdelta' in self.__dict__ or path in self.files():
235 if path in self._manifestdelta:
235 if path in self._manifestdelta:
236 return (self._manifestdelta[path],
236 return (self._manifestdelta[path],
237 self._manifestdelta.flags(path))
237 self._manifestdelta.flags(path))
238 node, flag = self._repo.manifest.find(self._changeset[0], path)
238 node, flag = self._repo.manifest.find(self._changeset[0], path)
239 if not node:
239 if not node:
240 raise error.ManifestLookupError(self._node, path,
240 raise error.ManifestLookupError(self._node, path,
241 _('not found in manifest'))
241 _('not found in manifest'))
242
242
243 return node, flag
243 return node, flag
244
244
245 def filenode(self, path):
245 def filenode(self, path):
246 return self._fileinfo(path)[0]
246 return self._fileinfo(path)[0]
247
247
248 def flags(self, path):
248 def flags(self, path):
249 try:
249 try:
250 return self._fileinfo(path)[1]
250 return self._fileinfo(path)[1]
251 except error.LookupError:
251 except error.LookupError:
252 return ''
252 return ''
253
253
254 def sub(self, path):
254 def sub(self, path):
255 return subrepo.subrepo(self, path)
255 return subrepo.subrepo(self, path)
256
256
257 def match(self, pats=[], include=None, exclude=None, default='glob'):
257 def match(self, pats=[], include=None, exclude=None, default='glob'):
258 r = self._repo
258 r = self._repo
259 return matchmod.match(r.root, r.getcwd(), pats,
259 return matchmod.match(r.root, r.getcwd(), pats,
260 include, exclude, default,
260 include, exclude, default,
261 auditor=r.auditor, ctx=self)
261 auditor=r.auditor, ctx=self)
262
262
263 def diff(self, ctx2=None, match=None, **opts):
263 def diff(self, ctx2=None, match=None, **opts):
264 """Returns a diff generator for the given contexts and matcher"""
264 """Returns a diff generator for the given contexts and matcher"""
265 if ctx2 is None:
265 if ctx2 is None:
266 ctx2 = self.p1()
266 ctx2 = self.p1()
267 if ctx2 is not None:
267 if ctx2 is not None:
268 ctx2 = self._repo[ctx2]
268 ctx2 = self._repo[ctx2]
269 diffopts = patch.diffopts(self._repo.ui, opts)
269 diffopts = patch.diffopts(self._repo.ui, opts)
270 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
270 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
271
271
272 @propertycache
272 @propertycache
273 def _dirs(self):
273 def _dirs(self):
274 return scmutil.dirs(self._manifest)
274 return scmutil.dirs(self._manifest)
275
275
276 def dirs(self):
276 def dirs(self):
277 return self._dirs
277 return self._dirs
278
278
279 def dirty(self, missing=False, merge=True, branch=True):
279 def dirty(self, missing=False, merge=True, branch=True):
280 return False
280 return False
281
281
282 def status(self, other=None, match=None, listignored=False,
282 def status(self, other=None, match=None, listignored=False,
283 listclean=False, listunknown=False, listsubrepos=False):
283 listclean=False, listunknown=False, listsubrepos=False):
284 """return status of files between two nodes or between a node and the
285 working directory.
286
287 If other is None, compare this node with the working directory.
288
289 returns (modified, added, removed, deleted, unknown, ignored, clean)
290 """
291
291
292 ctx1 = self
292 ctx1 = self
293 ctx2 = self._repo[other]
293 ctx2 = self._repo[other]
294
294
295 # This next code block is, admittedly, fragile logic that tests for
295 # This next code block is, admittedly, fragile logic that tests for
296 # reversing the contexts and wouldn't need to exist if it weren't for
296 # reversing the contexts and wouldn't need to exist if it weren't for
297 # the fast (and common) code path of comparing the working directory
297 # the fast (and common) code path of comparing the working directory
298 # with its first parent.
298 # with its first parent.
299 #
299 #
300 # What we're aiming for here is the ability to call:
300 # What we're aiming for here is the ability to call:
301 #
301 #
302 # workingctx.status(parentctx)
302 # workingctx.status(parentctx)
303 #
303 #
304 # If we always built the manifest for each context and compared those,
304 # If we always built the manifest for each context and compared those,
305 # then we'd be done. But the special case of the above call means we
305 # then we'd be done. But the special case of the above call means we
306 # just copy the manifest of the parent.
306 # just copy the manifest of the parent.
307 reversed = False
307 reversed = False
308 if (not isinstance(ctx1, changectx)
308 if (not isinstance(ctx1, changectx)
309 and isinstance(ctx2, changectx)):
309 and isinstance(ctx2, changectx)):
310 reversed = True
310 reversed = True
311 ctx1, ctx2 = ctx2, ctx1
311 ctx1, ctx2 = ctx2, ctx1
312
312
313 r = [[], [], [], [], [], [], []]
313 r = [[], [], [], [], [], [], []]
314 match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
314 match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
315 listunknown)
315 listunknown)
316 r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
316 r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
317 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
317 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
318 listunknown)
318 listunknown)
319 r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
319 r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
320 listunknown)
320 listunknown)
321
321
322 if reversed:
322 if reversed:
323 # reverse added and removed
323 # reverse added and removed
324 r[1], r[2] = r[2], r[1]
324 r[1], r[2] = r[2], r[1]
325
325
326 if listsubrepos:
326 if listsubrepos:
327 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
327 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
328 rev2 = ctx2.subrev(subpath)
328 rev2 = ctx2.subrev(subpath)
329 try:
329 try:
330 submatch = matchmod.narrowmatcher(subpath, match)
330 submatch = matchmod.narrowmatcher(subpath, match)
331 s = sub.status(rev2, match=submatch, ignored=listignored,
331 s = sub.status(rev2, match=submatch, ignored=listignored,
332 clean=listclean, unknown=listunknown,
332 clean=listclean, unknown=listunknown,
333 listsubrepos=True)
333 listsubrepos=True)
334 for rfiles, sfiles in zip(r, s):
334 for rfiles, sfiles in zip(r, s):
335 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
335 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
336 except error.LookupError:
336 except error.LookupError:
337 self._repo.ui.status(_("skipping missing "
337 self._repo.ui.status(_("skipping missing "
338 "subrepository: %s\n") % subpath)
338 "subrepository: %s\n") % subpath)
339
339
340 for l in r:
340 for l in r:
341 l.sort()
341 l.sort()
342
342
343 # we return a tuple to signify that this list isn't changing
343 # we return a tuple to signify that this list isn't changing
344 return tuple(r)
344 return tuple(r)
345
345
346
346
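# A minimal sketch of how status() is typically consumed, assuming a
# hypothetical repository object ``repo``; comparing the working directory
# with its first parent takes the fast path discussed above:
#
#     wctx = repo[None]
#     st = wctx.status(wctx.p1(), listclean=True)
#     modified, added, removed, deleted, unknown, ignored, clean = st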
347 def makememctx(repo, parents, text, user, date, branch, files, store,
347 def makememctx(repo, parents, text, user, date, branch, files, store,
348 editor=None):
348 editor=None):
349 def getfilectx(repo, memctx, path):
349 def getfilectx(repo, memctx, path):
350 data, (islink, isexec), copied = store.getfile(path)
350 data, (islink, isexec), copied = store.getfile(path)
351 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
351 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
352 copied=copied, memctx=memctx)
352 copied=copied, memctx=memctx)
353 extra = {}
353 extra = {}
354 if branch:
354 if branch:
355 extra['branch'] = encoding.fromlocal(branch)
355 extra['branch'] = encoding.fromlocal(branch)
356 ctx = memctx(repo, parents, text, files, getfilectx, user,
356 ctx = memctx(repo, parents, text, files, getfilectx, user,
357 date, extra, editor)
357 date, extra, editor)
358 return ctx
358 return ctx
359
359
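# A minimal sketch of the store interface makememctx() expects, using a
# hypothetical in-memory mapping; getfile() must return the
# (data, (islink, isexec), copied) tuple unpacked by getfilectx above:
#
#     class simplestore(object):
#         def __init__(self, files):
#             self._files = files                    # {path: data}
#         def getfile(self, path):
#             return self._files[path], (False, False), None
#
#     ctx = makememctx(repo, (p1node, p2node), 'a message', 'a user', None,
#                      None, sorted(files), simplestore(files))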
360 class changectx(basectx):
360 class changectx(basectx):
361 """A changecontext object makes access to data related to a particular
361 """A changecontext object makes access to data related to a particular
362 changeset convenient. It represents a read-only context already present in
362 changeset convenient. It represents a read-only context already present in
363 the repo."""
363 the repo."""
364 def __init__(self, repo, changeid=''):
364 def __init__(self, repo, changeid=''):
365 """changeid is a revision number, node, or tag"""
365 """changeid is a revision number, node, or tag"""
366
366
367 # since basectx.__new__ already took care of copying the object, we
367 # since basectx.__new__ already took care of copying the object, we
368 # don't need to do anything in __init__, so we just exit here
368 # don't need to do anything in __init__, so we just exit here
369 if isinstance(changeid, basectx):
369 if isinstance(changeid, basectx):
370 return
370 return
371
371
372 if changeid == '':
372 if changeid == '':
373 changeid = '.'
373 changeid = '.'
374 self._repo = repo
374 self._repo = repo
375
375
376 if isinstance(changeid, int):
376 if isinstance(changeid, int):
377 try:
377 try:
378 self._node = repo.changelog.node(changeid)
378 self._node = repo.changelog.node(changeid)
379 except IndexError:
379 except IndexError:
380 raise error.RepoLookupError(
380 raise error.RepoLookupError(
381 _("unknown revision '%s'") % changeid)
381 _("unknown revision '%s'") % changeid)
382 self._rev = changeid
382 self._rev = changeid
383 return
383 return
384 if isinstance(changeid, long):
384 if isinstance(changeid, long):
385 changeid = str(changeid)
385 changeid = str(changeid)
386 if changeid == '.':
386 if changeid == '.':
387 self._node = repo.dirstate.p1()
387 self._node = repo.dirstate.p1()
388 self._rev = repo.changelog.rev(self._node)
388 self._rev = repo.changelog.rev(self._node)
389 return
389 return
390 if changeid == 'null':
390 if changeid == 'null':
391 self._node = nullid
391 self._node = nullid
392 self._rev = nullrev
392 self._rev = nullrev
393 return
393 return
394 if changeid == 'tip':
394 if changeid == 'tip':
395 self._node = repo.changelog.tip()
395 self._node = repo.changelog.tip()
396 self._rev = repo.changelog.rev(self._node)
396 self._rev = repo.changelog.rev(self._node)
397 return
397 return
398 if len(changeid) == 20:
398 if len(changeid) == 20:
399 try:
399 try:
400 self._node = changeid
400 self._node = changeid
401 self._rev = repo.changelog.rev(changeid)
401 self._rev = repo.changelog.rev(changeid)
402 return
402 return
403 except LookupError:
403 except LookupError:
404 pass
404 pass
405
405
406 try:
406 try:
407 r = int(changeid)
407 r = int(changeid)
408 if str(r) != changeid:
408 if str(r) != changeid:
409 raise ValueError
409 raise ValueError
410 l = len(repo.changelog)
410 l = len(repo.changelog)
411 if r < 0:
411 if r < 0:
412 r += l
412 r += l
413 if r < 0 or r >= l:
413 if r < 0 or r >= l:
414 raise ValueError
414 raise ValueError
415 self._rev = r
415 self._rev = r
416 self._node = repo.changelog.node(r)
416 self._node = repo.changelog.node(r)
417 return
417 return
418 except (ValueError, OverflowError, IndexError):
418 except (ValueError, OverflowError, IndexError):
419 pass
419 pass
420
420
421 if len(changeid) == 40:
421 if len(changeid) == 40:
422 try:
422 try:
423 self._node = bin(changeid)
423 self._node = bin(changeid)
424 self._rev = repo.changelog.rev(self._node)
424 self._rev = repo.changelog.rev(self._node)
425 return
425 return
426 except (TypeError, LookupError):
426 except (TypeError, LookupError):
427 pass
427 pass
428
428
429 if changeid in repo._bookmarks:
429 if changeid in repo._bookmarks:
430 self._node = repo._bookmarks[changeid]
430 self._node = repo._bookmarks[changeid]
431 self._rev = repo.changelog.rev(self._node)
431 self._rev = repo.changelog.rev(self._node)
432 return
432 return
433 if changeid in repo._tagscache.tags:
433 if changeid in repo._tagscache.tags:
434 self._node = repo._tagscache.tags[changeid]
434 self._node = repo._tagscache.tags[changeid]
435 self._rev = repo.changelog.rev(self._node)
435 self._rev = repo.changelog.rev(self._node)
436 return
436 return
437 try:
437 try:
438 self._node = repo.branchtip(changeid)
438 self._node = repo.branchtip(changeid)
439 self._rev = repo.changelog.rev(self._node)
439 self._rev = repo.changelog.rev(self._node)
440 return
440 return
441 except error.RepoLookupError:
441 except error.RepoLookupError:
442 pass
442 pass
443
443
444 self._node = repo.changelog._partialmatch(changeid)
444 self._node = repo.changelog._partialmatch(changeid)
445 if self._node is not None:
445 if self._node is not None:
446 self._rev = repo.changelog.rev(self._node)
446 self._rev = repo.changelog.rev(self._node)
447 return
447 return
448
448
449 # lookup failed
449 # lookup failed
450 # check if it might have come from damaged dirstate
450 # check if it might have come from damaged dirstate
451 #
451 #
452 # XXX we could avoid the unfiltered if we had a recognizable exception
452 # XXX we could avoid the unfiltered if we had a recognizable exception
453 # for filtered changeset access
453 # for filtered changeset access
454 if changeid in repo.unfiltered().dirstate.parents():
454 if changeid in repo.unfiltered().dirstate.parents():
455 raise error.Abort(_("working directory has unknown parent '%s'!")
455 raise error.Abort(_("working directory has unknown parent '%s'!")
456 % short(changeid))
456 % short(changeid))
457 try:
457 try:
458 if len(changeid) == 20:
458 if len(changeid) == 20:
459 changeid = hex(changeid)
459 changeid = hex(changeid)
460 except TypeError:
460 except TypeError:
461 pass
461 pass
462 raise error.RepoLookupError(
462 raise error.RepoLookupError(
463 _("unknown revision '%s'") % changeid)
463 _("unknown revision '%s'") % changeid)
464
464
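# A minimal sketch of the lookup forms handled above, assuming a
# hypothetical repository object ``repo``; changeset specs given to
# repo[...] typically end up in changectx.__init__:
#
#     repo['.']         # first parent of the working directory
#     repo['tip']       # repo.changelog.tip()
#     repo['null']      # the null revision
#     repo[0]           # a revision number
#     repo['a1b2c3d']   # hex prefix, bookmark, tag or branch name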
465 def __hash__(self):
465 def __hash__(self):
466 try:
466 try:
467 return hash(self._rev)
467 return hash(self._rev)
468 except AttributeError:
468 except AttributeError:
469 return id(self)
469 return id(self)
470
470
471 def __nonzero__(self):
471 def __nonzero__(self):
472 return self._rev != nullrev
472 return self._rev != nullrev
473
473
474 @propertycache
474 @propertycache
475 def _changeset(self):
475 def _changeset(self):
476 return self._repo.changelog.read(self.rev())
476 return self._repo.changelog.read(self.rev())
477
477
478 @propertycache
478 @propertycache
479 def _manifest(self):
479 def _manifest(self):
480 return self._repo.manifest.read(self._changeset[0])
480 return self._repo.manifest.read(self._changeset[0])
481
481
482 @propertycache
482 @propertycache
483 def _manifestdelta(self):
483 def _manifestdelta(self):
484 return self._repo.manifest.readdelta(self._changeset[0])
484 return self._repo.manifest.readdelta(self._changeset[0])
485
485
486 @propertycache
486 @propertycache
487 def _parents(self):
487 def _parents(self):
488 p = self._repo.changelog.parentrevs(self._rev)
488 p = self._repo.changelog.parentrevs(self._rev)
489 if p[1] == nullrev:
489 if p[1] == nullrev:
490 p = p[:-1]
490 p = p[:-1]
491 return [changectx(self._repo, x) for x in p]
491 return [changectx(self._repo, x) for x in p]
492
492
493 def changeset(self):
493 def changeset(self):
494 return self._changeset
494 return self._changeset
495 def manifestnode(self):
495 def manifestnode(self):
496 return self._changeset[0]
496 return self._changeset[0]
497
497
498 def user(self):
498 def user(self):
499 return self._changeset[1]
499 return self._changeset[1]
500 def date(self):
500 def date(self):
501 return self._changeset[2]
501 return self._changeset[2]
502 def files(self):
502 def files(self):
503 return self._changeset[3]
503 return self._changeset[3]
504 def description(self):
504 def description(self):
505 return self._changeset[4]
505 return self._changeset[4]
506 def branch(self):
506 def branch(self):
507 return encoding.tolocal(self._changeset[5].get("branch"))
507 return encoding.tolocal(self._changeset[5].get("branch"))
508 def closesbranch(self):
508 def closesbranch(self):
509 return 'close' in self._changeset[5]
509 return 'close' in self._changeset[5]
510 def extra(self):
510 def extra(self):
511 return self._changeset[5]
511 return self._changeset[5]
512 def tags(self):
512 def tags(self):
513 return self._repo.nodetags(self._node)
513 return self._repo.nodetags(self._node)
514 def bookmarks(self):
514 def bookmarks(self):
515 return self._repo.nodebookmarks(self._node)
515 return self._repo.nodebookmarks(self._node)
516 def phase(self):
516 def phase(self):
517 return self._repo._phasecache.phase(self._repo, self._rev)
517 return self._repo._phasecache.phase(self._repo, self._rev)
518 def hidden(self):
518 def hidden(self):
519 return self._rev in repoview.filterrevs(self._repo, 'visible')
519 return self._rev in repoview.filterrevs(self._repo, 'visible')
520
520
521 def children(self):
521 def children(self):
522 """return contexts for each child changeset"""
522 """return contexts for each child changeset"""
523 c = self._repo.changelog.children(self._node)
523 c = self._repo.changelog.children(self._node)
524 return [changectx(self._repo, x) for x in c]
524 return [changectx(self._repo, x) for x in c]
525
525
526 def ancestors(self):
526 def ancestors(self):
527 for a in self._repo.changelog.ancestors([self._rev]):
527 for a in self._repo.changelog.ancestors([self._rev]):
528 yield changectx(self._repo, a)
528 yield changectx(self._repo, a)
529
529
530 def descendants(self):
530 def descendants(self):
531 for d in self._repo.changelog.descendants([self._rev]):
531 for d in self._repo.changelog.descendants([self._rev]):
532 yield changectx(self._repo, d)
532 yield changectx(self._repo, d)
533
533
534 def filectx(self, path, fileid=None, filelog=None):
534 def filectx(self, path, fileid=None, filelog=None):
535 """get a file context from this changeset"""
535 """get a file context from this changeset"""
536 if fileid is None:
536 if fileid is None:
537 fileid = self.filenode(path)
537 fileid = self.filenode(path)
538 return filectx(self._repo, path, fileid=fileid,
538 return filectx(self._repo, path, fileid=fileid,
539 changectx=self, filelog=filelog)
539 changectx=self, filelog=filelog)
540
540
541 def ancestor(self, c2, warn=False):
541 def ancestor(self, c2, warn=False):
542 """
542 """
543 return the "best" ancestor context of self and c2
543 return the "best" ancestor context of self and c2
544 """
544 """
545 # deal with workingctxs
545 # deal with workingctxs
546 n2 = c2._node
546 n2 = c2._node
547 if n2 is None:
547 if n2 is None:
548 n2 = c2._parents[0]._node
548 n2 = c2._parents[0]._node
549 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
549 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
550 if not cahs:
550 if not cahs:
551 anc = nullid
551 anc = nullid
552 elif len(cahs) == 1:
552 elif len(cahs) == 1:
553 anc = cahs[0]
553 anc = cahs[0]
554 else:
554 else:
555 for r in self._repo.ui.configlist('merge', 'preferancestor'):
555 for r in self._repo.ui.configlist('merge', 'preferancestor'):
556 if r == '*':
557 continue
556 ctx = changectx(self._repo, r)
558 ctx = changectx(self._repo, r)
557 anc = ctx.node()
559 anc = ctx.node()
558 if anc in cahs:
560 if anc in cahs:
559 break
561 break
560 else:
562 else:
561 anc = self._repo.changelog.ancestor(self._node, n2)
563 anc = self._repo.changelog.ancestor(self._node, n2)
562 if warn:
564 if warn:
563 self._repo.ui.status(
565 self._repo.ui.status(
564 (_("note: using %s as ancestor of %s and %s\n") %
566 (_("note: using %s as ancestor of %s and %s\n") %
565 (short(anc), short(self._node), short(n2))) +
567 (short(anc), short(self._node), short(n2))) +
566 ''.join(_(" alternatively, use --config "
568 ''.join(_(" alternatively, use --config "
567 "merge.preferancestor=%s\n") %
569 "merge.preferancestor=%s\n") %
568 short(n) for n in sorted(cahs) if n != anc))
570 short(n) for n in sorted(cahs) if n != anc))
569 return changectx(self._repo, anc)
571 return changectx(self._repo, anc)
570
572
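# A minimal configuration sketch for the preferancestor handling above:
# when several common ancestor heads exist, one can be picked explicitly
# through the value read by configlist('merge', 'preferancestor'), e.g.
# (with a hypothetical node prefix)
#
#     [merge]
#     preferancestor = 1f7b0de80e11
#
# The special value '*' is skipped above, which keeps the default choice
# made by repo.changelog.ancestor().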
571 def descendant(self, other):
573 def descendant(self, other):
572 """True if other is descendant of this changeset"""
574 """True if other is descendant of this changeset"""
573 return self._repo.changelog.descendant(self._rev, other._rev)
575 return self._repo.changelog.descendant(self._rev, other._rev)
574
576
575 def walk(self, match):
577 def walk(self, match):
576 fset = set(match.files())
578 fset = set(match.files())
577 # for dirstate.walk, files=['.'] means "walk the whole tree".
579 # for dirstate.walk, files=['.'] means "walk the whole tree".
578 # follow that here, too
580 # follow that here, too
579 fset.discard('.')
581 fset.discard('.')
580
582
581 # avoid the entire walk if we're only looking for specific files
583 # avoid the entire walk if we're only looking for specific files
582 if fset and not match.anypats():
584 if fset and not match.anypats():
583 if util.all([fn in self for fn in fset]):
585 if util.all([fn in self for fn in fset]):
584 for fn in sorted(fset):
586 for fn in sorted(fset):
585 if match(fn):
587 if match(fn):
586 yield fn
588 yield fn
587 raise StopIteration
589 raise StopIteration
588
590
589 for fn in self:
591 for fn in self:
590 if fn in fset:
592 if fn in fset:
591 # specified pattern is the exact name
593 # specified pattern is the exact name
592 fset.remove(fn)
594 fset.remove(fn)
593 if match(fn):
595 if match(fn):
594 yield fn
596 yield fn
595 for fn in sorted(fset):
597 for fn in sorted(fset):
596 if fn in self._dirs:
598 if fn in self._dirs:
597 # specified pattern is a directory
599 # specified pattern is a directory
598 continue
600 continue
599 match.bad(fn, _('no such file in rev %s') % self)
601 match.bad(fn, _('no such file in rev %s') % self)
600
602
601 def matches(self, match):
603 def matches(self, match):
602 return self.walk(match)
604 return self.walk(match)
603
605
604 class basefilectx(object):
606 class basefilectx(object):
605 """A filecontext object represents the common logic for its children:
607 """A filecontext object represents the common logic for its children:
606 filectx: read-only access to a filerevision that is already present
608 filectx: read-only access to a filerevision that is already present
607 in the repo,
609 in the repo,
608 workingfilectx: a filecontext that represents files from the working
610 workingfilectx: a filecontext that represents files from the working
609 directory,
611 directory,
610 memfilectx: a filecontext that represents files in-memory."""
612 memfilectx: a filecontext that represents files in-memory."""
611 def __new__(cls, repo, path, *args, **kwargs):
613 def __new__(cls, repo, path, *args, **kwargs):
612 return super(basefilectx, cls).__new__(cls)
614 return super(basefilectx, cls).__new__(cls)
613
615
614 @propertycache
616 @propertycache
615 def _filelog(self):
617 def _filelog(self):
616 return self._repo.file(self._path)
618 return self._repo.file(self._path)
617
619
618 @propertycache
620 @propertycache
619 def _changeid(self):
621 def _changeid(self):
620 if '_changeid' in self.__dict__:
622 if '_changeid' in self.__dict__:
621 return self._changeid
623 return self._changeid
622 elif '_changectx' in self.__dict__:
624 elif '_changectx' in self.__dict__:
623 return self._changectx.rev()
625 return self._changectx.rev()
624 else:
626 else:
625 return self._filelog.linkrev(self._filerev)
627 return self._filelog.linkrev(self._filerev)
626
628
627 @propertycache
629 @propertycache
628 def _filenode(self):
630 def _filenode(self):
629 if '_fileid' in self.__dict__:
631 if '_fileid' in self.__dict__:
630 return self._filelog.lookup(self._fileid)
632 return self._filelog.lookup(self._fileid)
631 else:
633 else:
632 return self._changectx.filenode(self._path)
634 return self._changectx.filenode(self._path)
633
635
634 @propertycache
636 @propertycache
635 def _filerev(self):
637 def _filerev(self):
636 return self._filelog.rev(self._filenode)
638 return self._filelog.rev(self._filenode)
637
639
638 @propertycache
640 @propertycache
639 def _repopath(self):
641 def _repopath(self):
640 return self._path
642 return self._path
641
643
642 def __nonzero__(self):
644 def __nonzero__(self):
643 try:
645 try:
644 self._filenode
646 self._filenode
645 return True
647 return True
646 except error.LookupError:
648 except error.LookupError:
647 # file is missing
649 # file is missing
648 return False
650 return False
649
651
650 def __str__(self):
652 def __str__(self):
651 return "%s@%s" % (self.path(), self._changectx)
653 return "%s@%s" % (self.path(), self._changectx)
652
654
653 def __repr__(self):
655 def __repr__(self):
654 return "<%s %s>" % (type(self).__name__, str(self))
656 return "<%s %s>" % (type(self).__name__, str(self))
655
657
656 def __hash__(self):
658 def __hash__(self):
657 try:
659 try:
658 return hash((self._path, self._filenode))
660 return hash((self._path, self._filenode))
659 except AttributeError:
661 except AttributeError:
660 return id(self)
662 return id(self)
661
663
662 def __eq__(self, other):
664 def __eq__(self, other):
663 try:
665 try:
664 return (type(self) == type(other) and self._path == other._path
666 return (type(self) == type(other) and self._path == other._path
665 and self._filenode == other._filenode)
667 and self._filenode == other._filenode)
666 except AttributeError:
668 except AttributeError:
667 return False
669 return False
668
670
669 def __ne__(self, other):
671 def __ne__(self, other):
670 return not (self == other)
672 return not (self == other)
671
673
672 def filerev(self):
674 def filerev(self):
673 return self._filerev
675 return self._filerev
674 def filenode(self):
676 def filenode(self):
675 return self._filenode
677 return self._filenode
676 def flags(self):
678 def flags(self):
677 return self._changectx.flags(self._path)
679 return self._changectx.flags(self._path)
678 def filelog(self):
680 def filelog(self):
679 return self._filelog
681 return self._filelog
680 def rev(self):
682 def rev(self):
681 return self._changeid
683 return self._changeid
682 def linkrev(self):
684 def linkrev(self):
683 return self._filelog.linkrev(self._filerev)
685 return self._filelog.linkrev(self._filerev)
684 def node(self):
686 def node(self):
685 return self._changectx.node()
687 return self._changectx.node()
686 def hex(self):
688 def hex(self):
687 return self._changectx.hex()
689 return self._changectx.hex()
688 def user(self):
690 def user(self):
689 return self._changectx.user()
691 return self._changectx.user()
690 def date(self):
692 def date(self):
691 return self._changectx.date()
693 return self._changectx.date()
692 def files(self):
694 def files(self):
693 return self._changectx.files()
695 return self._changectx.files()
694 def description(self):
696 def description(self):
695 return self._changectx.description()
697 return self._changectx.description()
696 def branch(self):
698 def branch(self):
697 return self._changectx.branch()
699 return self._changectx.branch()
698 def extra(self):
700 def extra(self):
699 return self._changectx.extra()
701 return self._changectx.extra()
700 def phase(self):
702 def phase(self):
701 return self._changectx.phase()
703 return self._changectx.phase()
702 def phasestr(self):
704 def phasestr(self):
703 return self._changectx.phasestr()
705 return self._changectx.phasestr()
704 def manifest(self):
706 def manifest(self):
705 return self._changectx.manifest()
707 return self._changectx.manifest()
706 def changectx(self):
708 def changectx(self):
707 return self._changectx
709 return self._changectx
708
710
709 def path(self):
711 def path(self):
710 return self._path
712 return self._path
711
713
712 def isbinary(self):
714 def isbinary(self):
713 try:
715 try:
714 return util.binary(self.data())
716 return util.binary(self.data())
715 except IOError:
717 except IOError:
716 return False
718 return False
717 def isexec(self):
719 def isexec(self):
718 return 'x' in self.flags()
720 return 'x' in self.flags()
719 def islink(self):
721 def islink(self):
720 return 'l' in self.flags()
722 return 'l' in self.flags()
721
723
724 def cmp(self, fctx):
725 """compare with other file context
726
727 returns True if different from fctx.
728 """
727 if (fctx._filerev is None
729 if (fctx._filerev is None
728 and (self._repo._encodefilterpats
730 and (self._repo._encodefilterpats
729 # if file data starts with '\1\n', empty metadata block is
731 # if file data starts with '\1\n', empty metadata block is
730 # prepended, which adds 4 bytes to filelog.size().
732 # prepended, which adds 4 bytes to filelog.size().
731 or self.size() - 4 == fctx.size())
733 or self.size() - 4 == fctx.size())
732 or self.size() == fctx.size()):
734 or self.size() == fctx.size()):
733 return self._filelog.cmp(self._filenode, fctx.data())
735 return self._filelog.cmp(self._filenode, fctx.data())
734
736
735 return True
737 return True
736
738
737 def parents(self):
739 def parents(self):
738 _path = self._path
740 _path = self._path
739 fl = self._filelog
741 fl = self._filelog
740 pl = [(_path, n, fl) for n in self._filelog.parents(self._filenode)]
742 pl = [(_path, n, fl) for n in self._filelog.parents(self._filenode)]
741
743
742 r = self._filelog.renamed(self._filenode)
744 r = self._filelog.renamed(self._filenode)
743 if r:
745 if r:
744 pl[0] = (r[0], r[1], None)
746 pl[0] = (r[0], r[1], None)
745
747
746 return [filectx(self._repo, p, fileid=n, filelog=l)
748 return [filectx(self._repo, p, fileid=n, filelog=l)
747 for p, n, l in pl if n != nullid]
749 for p, n, l in pl if n != nullid]
748
750
749 def p1(self):
751 def p1(self):
750 return self.parents()[0]
752 return self.parents()[0]
751
753
752 def p2(self):
754 def p2(self):
753 p = self.parents()
755 p = self.parents()
754 if len(p) == 2:
756 if len(p) == 2:
755 return p[1]
757 return p[1]
756 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
758 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
757
759
760 def annotate(self, follow=False, linenumber=None, diffopts=None):
761 '''returns a list of tuples of (ctx, line) for each line
762 in the file, where ctx is the filectx of the node where
763 that line was last changed.
764 This returns tuples of ((ctx, linenumber), line) for each line
765 if the "linenumber" parameter is not None.
766 In such tuples, linenumber is the line's number at its first
767 appearance in the managed file.
768 To reduce annotation cost, this returns False as the fixed
769 linenumber value for every line if the "linenumber" parameter
770 is False.'''
769
771
770 if linenumber is None:
772 if linenumber is None:
771 def decorate(text, rev):
773 def decorate(text, rev):
772 return ([rev] * len(text.splitlines()), text)
774 return ([rev] * len(text.splitlines()), text)
773 elif linenumber:
775 elif linenumber:
774 def decorate(text, rev):
776 def decorate(text, rev):
775 size = len(text.splitlines())
777 size = len(text.splitlines())
776 return ([(rev, i) for i in xrange(1, size + 1)], text)
778 return ([(rev, i) for i in xrange(1, size + 1)], text)
777 else:
779 else:
778 def decorate(text, rev):
780 def decorate(text, rev):
779 return ([(rev, False)] * len(text.splitlines()), text)
781 return ([(rev, False)] * len(text.splitlines()), text)
780
782
781 def pair(parent, child):
783 def pair(parent, child):
782 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
784 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
783 refine=True)
785 refine=True)
784 for (a1, a2, b1, b2), t in blocks:
786 for (a1, a2, b1, b2), t in blocks:
785 # Changed blocks ('!') or blocks made only of blank lines ('~')
787 # Changed blocks ('!') or blocks made only of blank lines ('~')
786 # belong to the child.
788 # belong to the child.
787 if t == '=':
789 if t == '=':
788 child[0][b1:b2] = parent[0][a1:a2]
790 child[0][b1:b2] = parent[0][a1:a2]
789 return child
791 return child
790
792
791 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
793 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
792
794
793 def parents(f):
795 def parents(f):
794 pl = f.parents()
796 pl = f.parents()
795
797
796 # Don't return renamed parents if we aren't following.
798 # Don't return renamed parents if we aren't following.
797 if not follow:
799 if not follow:
798 pl = [p for p in pl if p.path() == f.path()]
800 pl = [p for p in pl if p.path() == f.path()]
799
801
800 # renamed filectx won't have a filelog yet, so set it
802 # renamed filectx won't have a filelog yet, so set it
801 # from the cache to save time
803 # from the cache to save time
802 for p in pl:
804 for p in pl:
803 if not '_filelog' in p.__dict__:
805 if not '_filelog' in p.__dict__:
804 p._filelog = getlog(p.path())
806 p._filelog = getlog(p.path())
805
807
806 return pl
808 return pl
807
809
808 # use linkrev to find the first changeset where self appeared
810 # use linkrev to find the first changeset where self appeared
809 if self.rev() != self.linkrev():
811 if self.rev() != self.linkrev():
810 base = self.filectx(self.filenode())
812 base = self.filectx(self.filenode())
811 else:
813 else:
812 base = self
814 base = self
813
815
814 # This algorithm would prefer to be recursive, but Python is a
816 # This algorithm would prefer to be recursive, but Python is a
815 # bit recursion-hostile. Instead we do an iterative
817 # bit recursion-hostile. Instead we do an iterative
816 # depth-first search.
818 # depth-first search.
817
819
818 visit = [base]
820 visit = [base]
819 hist = {}
821 hist = {}
820 pcache = {}
822 pcache = {}
821 needed = {base: 1}
823 needed = {base: 1}
822 while visit:
824 while visit:
823 f = visit[-1]
825 f = visit[-1]
824 pcached = f in pcache
826 pcached = f in pcache
825 if not pcached:
827 if not pcached:
826 pcache[f] = parents(f)
828 pcache[f] = parents(f)
827
829
828 ready = True
830 ready = True
829 pl = pcache[f]
831 pl = pcache[f]
830 for p in pl:
832 for p in pl:
831 if p not in hist:
833 if p not in hist:
832 ready = False
834 ready = False
833 visit.append(p)
835 visit.append(p)
834 if not pcached:
836 if not pcached:
835 needed[p] = needed.get(p, 0) + 1
837 needed[p] = needed.get(p, 0) + 1
836 if ready:
838 if ready:
837 visit.pop()
839 visit.pop()
838 reusable = f in hist
840 reusable = f in hist
839 if reusable:
841 if reusable:
840 curr = hist[f]
842 curr = hist[f]
841 else:
843 else:
842 curr = decorate(f.data(), f)
844 curr = decorate(f.data(), f)
843 for p in pl:
845 for p in pl:
844 if not reusable:
846 if not reusable:
845 curr = pair(hist[p], curr)
847 curr = pair(hist[p], curr)
846 if needed[p] == 1:
848 if needed[p] == 1:
847 del hist[p]
849 del hist[p]
848 del needed[p]
850 del needed[p]
849 else:
851 else:
850 needed[p] -= 1
852 needed[p] -= 1
851
853
852 hist[f] = curr
854 hist[f] = curr
853 pcache[f] = []
855 pcache[f] = []
854
856
855 return zip(hist[base][0], hist[base][1].splitlines(True))
857 return zip(hist[base][0], hist[base][1].splitlines(True))
856
858
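# A minimal usage sketch, assuming a hypothetical file context ``fctx``:
#
#     for (actx, lineno), line in fctx.annotate(follow=True,
#                                               linenumber=True):
#         print '%5d %4d: %s' % (actx.rev(), lineno, line.rstrip('\n'))
#
# With linenumber=None each item is (ctx, line) instead, as described in
# the docstring above.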
857 def ancestors(self, followfirst=False):
859 def ancestors(self, followfirst=False):
858 visit = {}
860 visit = {}
859 c = self
861 c = self
860 cut = followfirst and 1 or None
862 cut = followfirst and 1 or None
861 while True:
863 while True:
862 for parent in c.parents()[:cut]:
864 for parent in c.parents()[:cut]:
863 visit[(parent.rev(), parent.node())] = parent
865 visit[(parent.rev(), parent.node())] = parent
864 if not visit:
866 if not visit:
865 break
867 break
866 c = visit.pop(max(visit))
868 c = visit.pop(max(visit))
867 yield c
869 yield c
868
870
869 class filectx(basefilectx):
871 class filectx(basefilectx):
870 """A filecontext object makes access to data related to a particular
872 """A filecontext object makes access to data related to a particular
871 filerevision convenient."""
873 filerevision convenient."""
872 def __init__(self, repo, path, changeid=None, fileid=None,
874 def __init__(self, repo, path, changeid=None, fileid=None,
873 filelog=None, changectx=None):
875 filelog=None, changectx=None):
874 """changeid can be a changeset revision, node, or tag.
876 """changeid can be a changeset revision, node, or tag.
875 fileid can be a file revision or node."""
877 fileid can be a file revision or node."""
876 self._repo = repo
878 self._repo = repo
877 self._path = path
879 self._path = path
878
880
879 assert (changeid is not None
881 assert (changeid is not None
880 or fileid is not None
882 or fileid is not None
881 or changectx is not None), \
883 or changectx is not None), \
882 ("bad args: changeid=%r, fileid=%r, changectx=%r"
884 ("bad args: changeid=%r, fileid=%r, changectx=%r"
883 % (changeid, fileid, changectx))
885 % (changeid, fileid, changectx))
884
886
885 if filelog is not None:
887 if filelog is not None:
886 self._filelog = filelog
888 self._filelog = filelog
887
889
888 if changeid is not None:
890 if changeid is not None:
889 self._changeid = changeid
891 self._changeid = changeid
890 if changectx is not None:
892 if changectx is not None:
891 self._changectx = changectx
893 self._changectx = changectx
892 if fileid is not None:
894 if fileid is not None:
893 self._fileid = fileid
895 self._fileid = fileid
894
896
895 @propertycache
897 @propertycache
896 def _changectx(self):
898 def _changectx(self):
897 try:
899 try:
898 return changectx(self._repo, self._changeid)
900 return changectx(self._repo, self._changeid)
899 except error.RepoLookupError:
901 except error.RepoLookupError:
902 # Linkrev may point to any revision in the repository. When the
903 # repository is filtered this may lead to `filectx` trying to build a
904 # `changectx` for a filtered revision. In such a case we fall back to
905 # creating a `changectx` on the unfiltered version of the repository.
906 # This fallback should not be an issue because `changectx` objects
907 # coming from a `filectx` are not used in complex operations that
908 # care about filtering.
909 #
910 # This fallback is a cheap and dirty fix that prevents several
911 # crashes. It does not ensure that the behavior is correct. However,
912 # the behavior was not correct before filtering either, and
913 # "incorrect behavior" is seen as better than "crash".
914 #
915 # Linkrevs have several serious problems with filtering that are
916 # complicated to solve. Proper handling of the issue here should be
917 # considered when fixing the linkrev issues is on the table.
918 return changectx(self._repo.unfiltered(), self._changeid)
917
919
918 def filectx(self, fileid):
920 def filectx(self, fileid):
919 '''opens an arbitrary revision of the file without
921 '''opens an arbitrary revision of the file without
920 opening a new filelog'''
922 opening a new filelog'''
921 return filectx(self._repo, self._path, fileid=fileid,
923 return filectx(self._repo, self._path, fileid=fileid,
922 filelog=self._filelog)
924 filelog=self._filelog)
923
925
924 def data(self):
926 def data(self):
925 return self._filelog.read(self._filenode)
927 return self._filelog.read(self._filenode)
926 def size(self):
928 def size(self):
927 return self._filelog.size(self._filerev)
929 return self._filelog.size(self._filerev)
928
930
931 def renamed(self):
932 """check if the file was actually renamed in this changeset revision
933
934 If a rename is logged in the file revision, we report the copy for the changeset
935 only if the file revision's linkrev points back to the changeset in question
936 or both changeset parents contain different file revisions.
937 """
936
938
937 renamed = self._filelog.renamed(self._filenode)
939 renamed = self._filelog.renamed(self._filenode)
938 if not renamed:
940 if not renamed:
939 return renamed
941 return renamed
940
942
941 if self.rev() == self.linkrev():
943 if self.rev() == self.linkrev():
942 return renamed
944 return renamed
943
945
944 name = self.path()
946 name = self.path()
945 fnode = self._filenode
947 fnode = self._filenode
946 for p in self._changectx.parents():
948 for p in self._changectx.parents():
947 try:
949 try:
948 if fnode == p.filenode(name):
950 if fnode == p.filenode(name):
949 return None
951 return None
950 except error.LookupError:
952 except error.LookupError:
951 pass
953 pass
952 return renamed
954 return renamed
953
955
954 def children(self):
956 def children(self):
955 # hard for renames
957 # hard for renames
956 c = self._filelog.children(self._filenode)
958 c = self._filelog.children(self._filenode)
957 return [filectx(self._repo, self._path, fileid=x,
959 return [filectx(self._repo, self._path, fileid=x,
958 filelog=self._filelog) for x in c]
960 filelog=self._filelog) for x in c]
959
961
960 class committablectx(basectx):
962 class committablectx(basectx):
961 """A committablectx object provides common functionality for a context that
963 """A committablectx object provides common functionality for a context that
962 wants the ability to commit, e.g. workingctx or memctx."""
964 wants the ability to commit, e.g. workingctx or memctx."""
963 def __init__(self, repo, text="", user=None, date=None, extra=None,
965 def __init__(self, repo, text="", user=None, date=None, extra=None,
964 changes=None):
966 changes=None):
965 self._repo = repo
967 self._repo = repo
966 self._rev = None
968 self._rev = None
967 self._node = None
969 self._node = None
968 self._text = text
970 self._text = text
969 if date:
971 if date:
970 self._date = util.parsedate(date)
972 self._date = util.parsedate(date)
971 if user:
973 if user:
972 self._user = user
974 self._user = user
973 if changes:
975 if changes:
974 self._status = changes
976 self._status = changes
975
977
976 self._extra = {}
978 self._extra = {}
977 if extra:
979 if extra:
978 self._extra = extra.copy()
980 self._extra = extra.copy()
979 if 'branch' not in self._extra:
981 if 'branch' not in self._extra:
980 try:
982 try:
981 branch = encoding.fromlocal(self._repo.dirstate.branch())
983 branch = encoding.fromlocal(self._repo.dirstate.branch())
982 except UnicodeDecodeError:
984 except UnicodeDecodeError:
983 raise util.Abort(_('branch name not in UTF-8!'))
985 raise util.Abort(_('branch name not in UTF-8!'))
984 self._extra['branch'] = branch
986 self._extra['branch'] = branch
985 if self._extra['branch'] == '':
987 if self._extra['branch'] == '':
986 self._extra['branch'] = 'default'
988 self._extra['branch'] = 'default'
987
989
988 def __str__(self):
990 def __str__(self):
989 return str(self._parents[0]) + "+"
991 return str(self._parents[0]) + "+"
990
992
991 def __nonzero__(self):
993 def __nonzero__(self):
992 return True
994 return True
993
995
994 def _buildflagfunc(self):
996 def _buildflagfunc(self):
995 # Create a fallback function for getting file flags when the
997 # Create a fallback function for getting file flags when the
996 # filesystem doesn't support them
998 # filesystem doesn't support them
997
999
998 copiesget = self._repo.dirstate.copies().get
1000 copiesget = self._repo.dirstate.copies().get
999
1001
1000 if len(self._parents) < 2:
1002 if len(self._parents) < 2:
1001 # when we have one parent, it's easy: copy from parent
1003 # when we have one parent, it's easy: copy from parent
1002 man = self._parents[0].manifest()
1004 man = self._parents[0].manifest()
1003 def func(f):
1005 def func(f):
1004 f = copiesget(f, f)
1006 f = copiesget(f, f)
1005 return man.flags(f)
1007 return man.flags(f)
1006 else:
1008 else:
1007 # merges are tricky: we try to reconstruct the unstored
1009 # merges are tricky: we try to reconstruct the unstored
1008 # result from the merge (issue1802)
1010 # result from the merge (issue1802)
1009 p1, p2 = self._parents
1011 p1, p2 = self._parents
1010 pa = p1.ancestor(p2)
1012 pa = p1.ancestor(p2)
1011 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1013 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1012
1014
1013 def func(f):
1015 def func(f):
1014 f = copiesget(f, f) # may be wrong for merges with copies
1016 f = copiesget(f, f) # may be wrong for merges with copies
1015 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1017 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1016 if fl1 == fl2:
1018 if fl1 == fl2:
1017 return fl1
1019 return fl1
1018 if fl1 == fla:
1020 if fl1 == fla:
1019 return fl2
1021 return fl2
1020 if fl2 == fla:
1022 if fl2 == fla:
1021 return fl1
1023 return fl1
1022 return '' # punt for conflicts
1024 return '' # punt for conflicts
1023
1025
1024 return func
1026 return func
1025
1027
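# A small worked example of the merge branch above, with hypothetical
# flags for a path f:
#
#     fl1, fl2, fla = 'x', '', ''   # p1 made f executable, p2 left it alone
#     # fl1 != fl2 and fl1 != fla, but fl2 == fla, so func() returns fl1
#
# When all three flags differ, func() punts and returns ''.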
1026 @propertycache
1028 @propertycache
1027 def _flagfunc(self):
1029 def _flagfunc(self):
1028 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1030 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1029
1031
1030 @propertycache
1032 @propertycache
1031 def _manifest(self):
1033 def _manifest(self):
1032 """generate a manifest corresponding to the values in self._status"""
1034 """generate a manifest corresponding to the values in self._status"""
1033
1035
1034 man = self._parents[0].manifest().copy()
1036 man = self._parents[0].manifest().copy()
1035 if len(self._parents) > 1:
1037 if len(self._parents) > 1:
1036 man2 = self.p2().manifest()
1038 man2 = self.p2().manifest()
1037 def getman(f):
1039 def getman(f):
1038 if f in man:
1040 if f in man:
1039 return man
1041 return man
1040 return man2
1042 return man2
1041 else:
1043 else:
1042 getman = lambda f: man
1044 getman = lambda f: man
1043
1045
1044 copied = self._repo.dirstate.copies()
1046 copied = self._repo.dirstate.copies()
1045 ff = self._flagfunc
1047 ff = self._flagfunc
1046 modified, added, removed, deleted = self._status[:4]
1048 modified, added, removed, deleted = self._status[:4]
1047 for i, l in (("a", added), ("m", modified)):
1049 for i, l in (("a", added), ("m", modified)):
1048 for f in l:
1050 for f in l:
1049 orig = copied.get(f, f)
1051 orig = copied.get(f, f)
1050 man[f] = getman(orig).get(orig, nullid) + i
1052 man[f] = getman(orig).get(orig, nullid) + i
1051 try:
1053 try:
1052 man.set(f, ff(f))
1054 man.set(f, ff(f))
1053 except OSError:
1055 except OSError:
1054 pass
1056 pass
1055
1057
1056 for f in deleted + removed:
1058 for f in deleted + removed:
1057 if f in man:
1059 if f in man:
1058 del man[f]
1060 del man[f]
1059
1061
1060 return man
1062 return man
1061
1063
1062 @propertycache
1064 @propertycache
1063 def _status(self):
1065 def _status(self):
1064 return self._repo.status()
1066 return self._repo.status()
1065
1067
1066 @propertycache
1068 @propertycache
1067 def _user(self):
1069 def _user(self):
1068 return self._repo.ui.username()
1070 return self._repo.ui.username()
1069
1071
1070 @propertycache
1072 @propertycache
1071 def _date(self):
1073 def _date(self):
1072 return util.makedate()
1074 return util.makedate()
1073
1075
1074 def subrev(self, subpath):
1076 def subrev(self, subpath):
1075 return None
1077 return None
1076
1078
1077 def user(self):
1079 def user(self):
1078 return self._user or self._repo.ui.username()
1080 return self._user or self._repo.ui.username()
1079 def date(self):
1081 def date(self):
1080 return self._date
1082 return self._date
1081 def description(self):
1083 def description(self):
1082 return self._text
1084 return self._text
1083 def files(self):
1085 def files(self):
1084 return sorted(self._status[0] + self._status[1] + self._status[2])
1086 return sorted(self._status[0] + self._status[1] + self._status[2])
1085
1087
1086 def modified(self):
1088 def modified(self):
1087 return self._status[0]
1089 return self._status[0]
1088 def added(self):
1090 def added(self):
1089 return self._status[1]
1091 return self._status[1]
1090 def removed(self):
1092 def removed(self):
1091 return self._status[2]
1093 return self._status[2]
1092 def deleted(self):
1094 def deleted(self):
1093 return self._status[3]
1095 return self._status[3]
1094 def unknown(self):
1096 def unknown(self):
1095 return self._status[4]
1097 return self._status[4]
1096 def ignored(self):
1098 def ignored(self):
1097 return self._status[5]
1099 return self._status[5]
1098 def clean(self):
1100 def clean(self):
1099 return self._status[6]
1101 return self._status[6]
1100 def branch(self):
1102 def branch(self):
1101 return encoding.tolocal(self._extra['branch'])
1103 return encoding.tolocal(self._extra['branch'])
1102 def closesbranch(self):
1104 def closesbranch(self):
1103 return 'close' in self._extra
1105 return 'close' in self._extra
1104 def extra(self):
1106 def extra(self):
1105 return self._extra
1107 return self._extra
1106
1108
1107 def tags(self):
1109 def tags(self):
1108 t = []
1110 t = []
1109 for p in self.parents():
1111 for p in self.parents():
1110 t.extend(p.tags())
1112 t.extend(p.tags())
1111 return t
1113 return t
1112
1114
1113 def bookmarks(self):
1115 def bookmarks(self):
1114 b = []
1116 b = []
1115 for p in self.parents():
1117 for p in self.parents():
1116 b.extend(p.bookmarks())
1118 b.extend(p.bookmarks())
1117 return b
1119 return b
1118
1120
1119 def phase(self):
1121 def phase(self):
1120 phase = phases.draft # default phase to draft
1122 phase = phases.draft # default phase to draft
1121 for p in self.parents():
1123 for p in self.parents():
1122 phase = max(phase, p.phase())
1124 phase = max(phase, p.phase())
1123 return phase
1125 return phase
1124
1126
1125 def hidden(self):
1127 def hidden(self):
1126 return False
1128 return False
1127
1129
1128 def children(self):
1130 def children(self):
1129 return []
1131 return []
1130
1132
1131 def flags(self, path):
1133 def flags(self, path):
1132 if '_manifest' in self.__dict__:
1134 if '_manifest' in self.__dict__:
1133 try:
1135 try:
1134 return self._manifest.flags(path)
1136 return self._manifest.flags(path)
1135 except KeyError:
1137 except KeyError:
1136 return ''
1138 return ''
1137
1139
1138 try:
1140 try:
1139 return self._flagfunc(path)
1141 return self._flagfunc(path)
1140 except OSError:
1142 except OSError:
1141 return ''
1143 return ''
1142
1144
1143 def ancestor(self, c2):
1145 def ancestor(self, c2):
1144 """return the ancestor context of self and c2"""
1146 """return the ancestor context of self and c2"""
1145 return self._parents[0].ancestor(c2) # punt on two parents for now
1147 return self._parents[0].ancestor(c2) # punt on two parents for now
1146
1148
1147 def walk(self, match):
1149 def walk(self, match):
1148 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1150 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1149 True, False))
1151 True, False))
1150
1152
1151 def matches(self, match):
1153 def matches(self, match):
1152 return sorted(self._repo.dirstate.matches(match))
1154 return sorted(self._repo.dirstate.matches(match))
1153
1155
1154 def ancestors(self):
1156 def ancestors(self):
1155 for a in self._repo.changelog.ancestors(
1157 for a in self._repo.changelog.ancestors(
1156 [p.rev() for p in self._parents]):
1158 [p.rev() for p in self._parents]):
1157 yield changectx(self._repo, a)
1159 yield changectx(self._repo, a)
1158
1160
1159 def markcommitted(self, node):
1161 def markcommitted(self, node):
1160 """Perform post-commit cleanup necessary after committing this ctx
1162 """Perform post-commit cleanup necessary after committing this ctx
1161
1163
1162 Specifically, this updates backing stores this working context
1164 Specifically, this updates backing stores this working context
1163 wraps to reflect the fact that the changes reflected by this
1165 wraps to reflect the fact that the changes reflected by this
1164 workingctx have been committed. For example, it marks
1166 workingctx have been committed. For example, it marks
1165 modified and added files as normal in the dirstate.
1167 modified and added files as normal in the dirstate.
1166
1168
1167 """
1169 """
1168
1170
1169 for f in self.modified() + self.added():
1171 for f in self.modified() + self.added():
1170 self._repo.dirstate.normal(f)
1172 self._repo.dirstate.normal(f)
1171 for f in self.removed():
1173 for f in self.removed():
1172 self._repo.dirstate.drop(f)
1174 self._repo.dirstate.drop(f)
1173 self._repo.dirstate.setparents(node)
1175 self._repo.dirstate.setparents(node)
1174
1176
1175 def dirs(self):
1177 def dirs(self):
1176 return self._repo.dirstate.dirs()
1178 return self._repo.dirstate.dirs()
1177
1179
1178 class workingctx(committablectx):
1180 class workingctx(committablectx):
1179 """A workingctx object makes access to data related to
1181 """A workingctx object makes access to data related to
1180 the current working directory convenient.
1182 the current working directory convenient.
1181 date - any valid date string or (unixtime, offset), or None.
1183 date - any valid date string or (unixtime, offset), or None.
1182 user - username string, or None.
1184 user - username string, or None.
1183 extra - a dictionary of extra values, or None.
1185 extra - a dictionary of extra values, or None.
1184 changes - a list of file lists as returned by localrepo.status()
1186 changes - a list of file lists as returned by localrepo.status()
1185 or None to use the repository status.
1187 or None to use the repository status.
1186 """
1188 """
1187 def __init__(self, repo, text="", user=None, date=None, extra=None,
1189 def __init__(self, repo, text="", user=None, date=None, extra=None,
1188 changes=None):
1190 changes=None):
1189 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1191 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1190
1192
1191 def __iter__(self):
1193 def __iter__(self):
1192 d = self._repo.dirstate
1194 d = self._repo.dirstate
1193 for f in d:
1195 for f in d:
1194 if d[f] != 'r':
1196 if d[f] != 'r':
1195 yield f
1197 yield f
1196
1198
1197 def __contains__(self, key):
1199 def __contains__(self, key):
1198 return self._repo.dirstate[key] not in "?r"
1200 return self._repo.dirstate[key] not in "?r"
1199
1201
1200 @propertycache
1202 @propertycache
1201 def _parents(self):
1203 def _parents(self):
1202 p = self._repo.dirstate.parents()
1204 p = self._repo.dirstate.parents()
1203 if p[1] == nullid:
1205 if p[1] == nullid:
1204 p = p[:-1]
1206 p = p[:-1]
1205 return [changectx(self._repo, x) for x in p]
1207 return [changectx(self._repo, x) for x in p]
1206
1208
1207 def filectx(self, path, filelog=None):
1209 def filectx(self, path, filelog=None):
1208 """get a file context from the working directory"""
1210 """get a file context from the working directory"""
1209 return workingfilectx(self._repo, path, workingctx=self,
1211 return workingfilectx(self._repo, path, workingctx=self,
1210 filelog=filelog)
1212 filelog=filelog)
1211
1213
1212 def dirty(self, missing=False, merge=True, branch=True):
1214 def dirty(self, missing=False, merge=True, branch=True):
1213 "check whether a working directory is modified"
1215 "check whether a working directory is modified"
1214 # check subrepos first
1216 # check subrepos first
1215 for s in sorted(self.substate):
1217 for s in sorted(self.substate):
1216 if self.sub(s).dirty():
1218 if self.sub(s).dirty():
1217 return True
1219 return True
1218 # check current working dir
1220 # check current working dir
1219 return ((merge and self.p2()) or
1221 return ((merge and self.p2()) or
1220 (branch and self.branch() != self.p1().branch()) or
1222 (branch and self.branch() != self.p1().branch()) or
1221 self.modified() or self.added() or self.removed() or
1223 self.modified() or self.added() or self.removed() or
1222 (missing and self.deleted()))
1224 (missing and self.deleted()))
1223
1225
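A minimal usage sketch of dirty() as a pre-flight check (illustrative only: the surrounding caller, the repo object, and the abort message are assumptions, not part of this change):

    wctx = repo[None]              # workingctx for the current working directory
    if wctx.dirty(missing=True):   # missing=True also counts files deleted from disk
        raise util.Abort(_("uncommitted changes in working directory"))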
1224 def add(self, list, prefix=""):
1226 def add(self, list, prefix=""):
1225 join = lambda f: os.path.join(prefix, f)
1227 join = lambda f: os.path.join(prefix, f)
1226 wlock = self._repo.wlock()
1228 wlock = self._repo.wlock()
1227 ui, ds = self._repo.ui, self._repo.dirstate
1229 ui, ds = self._repo.ui, self._repo.dirstate
1228 try:
1230 try:
1229 rejected = []
1231 rejected = []
1230 lstat = self._repo.wvfs.lstat
1232 lstat = self._repo.wvfs.lstat
1231 for f in list:
1233 for f in list:
1232 scmutil.checkportable(ui, join(f))
1234 scmutil.checkportable(ui, join(f))
1233 try:
1235 try:
1234 st = lstat(f)
1236 st = lstat(f)
1235 except OSError:
1237 except OSError:
1236 ui.warn(_("%s does not exist!\n") % join(f))
1238 ui.warn(_("%s does not exist!\n") % join(f))
1237 rejected.append(f)
1239 rejected.append(f)
1238 continue
1240 continue
1239 if st.st_size > 10000000:
1241 if st.st_size > 10000000:
1240 ui.warn(_("%s: up to %d MB of RAM may be required "
1242 ui.warn(_("%s: up to %d MB of RAM may be required "
1241 "to manage this file\n"
1243 "to manage this file\n"
1242 "(use 'hg revert %s' to cancel the "
1244 "(use 'hg revert %s' to cancel the "
1243 "pending addition)\n")
1245 "pending addition)\n")
1244 % (f, 3 * st.st_size // 1000000, join(f)))
1246 % (f, 3 * st.st_size // 1000000, join(f)))
1245 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1247 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1246 ui.warn(_("%s not added: only files and symlinks "
1248 ui.warn(_("%s not added: only files and symlinks "
1247 "supported currently\n") % join(f))
1249 "supported currently\n") % join(f))
1248 rejected.append(f)
1250 rejected.append(f)
1249 elif ds[f] in 'amn':
1251 elif ds[f] in 'amn':
1250 ui.warn(_("%s already tracked!\n") % join(f))
1252 ui.warn(_("%s already tracked!\n") % join(f))
1251 elif ds[f] == 'r':
1253 elif ds[f] == 'r':
1252 ds.normallookup(f)
1254 ds.normallookup(f)
1253 else:
1255 else:
1254 ds.add(f)
1256 ds.add(f)
1255 return rejected
1257 return rejected
1256 finally:
1258 finally:
1257 wlock.release()
1259 wlock.release()
1258
1260
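A hedged sketch of a caller of add(); the file names are hypothetical, and the return value is the list of paths that could not be added (missing files or unsupported types):

    rejected = repo[None].add(['README', 'docs/guide.txt'])
    for f in rejected:
        repo.ui.warn("could not add %s\n" % f)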
1259 def forget(self, files, prefix=""):
1261 def forget(self, files, prefix=""):
1260 join = lambda f: os.path.join(prefix, f)
1262 join = lambda f: os.path.join(prefix, f)
1261 wlock = self._repo.wlock()
1263 wlock = self._repo.wlock()
1262 try:
1264 try:
1263 rejected = []
1265 rejected = []
1264 for f in files:
1266 for f in files:
1265 if f not in self._repo.dirstate:
1267 if f not in self._repo.dirstate:
1266 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1268 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1267 rejected.append(f)
1269 rejected.append(f)
1268 elif self._repo.dirstate[f] != 'a':
1270 elif self._repo.dirstate[f] != 'a':
1269 self._repo.dirstate.remove(f)
1271 self._repo.dirstate.remove(f)
1270 else:
1272 else:
1271 self._repo.dirstate.drop(f)
1273 self._repo.dirstate.drop(f)
1272 return rejected
1274 return rejected
1273 finally:
1275 finally:
1274 wlock.release()
1276 wlock.release()
1275
1277
1276 def undelete(self, list):
1278 def undelete(self, list):
1277 pctxs = self.parents()
1279 pctxs = self.parents()
1278 wlock = self._repo.wlock()
1280 wlock = self._repo.wlock()
1279 try:
1281 try:
1280 for f in list:
1282 for f in list:
1281 if self._repo.dirstate[f] != 'r':
1283 if self._repo.dirstate[f] != 'r':
1282 self._repo.ui.warn(_("%s not removed!\n") % f)
1284 self._repo.ui.warn(_("%s not removed!\n") % f)
1283 else:
1285 else:
1284 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1286 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1285 t = fctx.data()
1287 t = fctx.data()
1286 self._repo.wwrite(f, t, fctx.flags())
1288 self._repo.wwrite(f, t, fctx.flags())
1287 self._repo.dirstate.normal(f)
1289 self._repo.dirstate.normal(f)
1288 finally:
1290 finally:
1289 wlock.release()
1291 wlock.release()
1290
1292
1291 def copy(self, source, dest):
1293 def copy(self, source, dest):
1292 try:
1294 try:
1293 st = self._repo.wvfs.lstat(dest)
1295 st = self._repo.wvfs.lstat(dest)
1294 except OSError, err:
1296 except OSError, err:
1295 if err.errno != errno.ENOENT:
1297 if err.errno != errno.ENOENT:
1296 raise
1298 raise
1297 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1299 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1298 return
1300 return
1299 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1301 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1300 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1302 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1301 "symbolic link\n") % dest)
1303 "symbolic link\n") % dest)
1302 else:
1304 else:
1303 wlock = self._repo.wlock()
1305 wlock = self._repo.wlock()
1304 try:
1306 try:
1305 if self._repo.dirstate[dest] in '?r':
1307 if self._repo.dirstate[dest] in '?r':
1306 self._repo.dirstate.add(dest)
1308 self._repo.dirstate.add(dest)
1307 self._repo.dirstate.copy(source, dest)
1309 self._repo.dirstate.copy(source, dest)
1308 finally:
1310 finally:
1309 wlock.release()
1311 wlock.release()
1310
1312
1311 def _filtersuspectsymlink(self, files):
1313 def _filtersuspectsymlink(self, files):
1312 if not files or self._repo.dirstate._checklink:
1314 if not files or self._repo.dirstate._checklink:
1313 return files
1315 return files
1314
1316
1315 # Symlink placeholders may get non-symlink-like contents
1317 # Symlink placeholders may get non-symlink-like contents
1316 # via user error or dereferencing by NFS or Samba servers,
1318 # via user error or dereferencing by NFS or Samba servers,
1317 # so we filter out any placeholders that don't look like a
1319 # so we filter out any placeholders that don't look like a
1318 # symlink
1320 # symlink
1319 sane = []
1321 sane = []
1320 for f in files:
1322 for f in files:
1321 if self.flags(f) == 'l':
1323 if self.flags(f) == 'l':
1322 d = self[f].data()
1324 d = self[f].data()
1323 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1325 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1324 self._repo.ui.debug('ignoring suspect symlink placeholder'
1326 self._repo.ui.debug('ignoring suspect symlink placeholder'
1325 ' "%s"\n' % f)
1327 ' "%s"\n' % f)
1326 continue
1328 continue
1327 sane.append(f)
1329 sane.append(f)
1328 return sane
1330 return sane
1329
1331
1330 def _checklookup(self, files):
1332 def _checklookup(self, files):
1331 # check for any possibly clean files
1333 # check for any possibly clean files
1332 if not files:
1334 if not files:
1333 return [], []
1335 return [], []
1334
1336
1335 modified = []
1337 modified = []
1336 fixup = []
1338 fixup = []
1337 pctx = self._parents[0]
1339 pctx = self._parents[0]
1338 # do a full compare of any files that might have changed
1340 # do a full compare of any files that might have changed
1339 for f in sorted(files):
1341 for f in sorted(files):
1340 if (f not in pctx or self.flags(f) != pctx.flags(f)
1342 if (f not in pctx or self.flags(f) != pctx.flags(f)
1341 or pctx[f].cmp(self[f])):
1343 or pctx[f].cmp(self[f])):
1342 modified.append(f)
1344 modified.append(f)
1343 else:
1345 else:
1344 fixup.append(f)
1346 fixup.append(f)
1345
1347
1346 # update dirstate for files that are actually clean
1348 # update dirstate for files that are actually clean
1347 if fixup:
1349 if fixup:
1348 try:
1350 try:
1349 # updating the dirstate is optional
1351 # updating the dirstate is optional
1350 # so we don't wait on the lock
1352 # so we don't wait on the lock
1351 # wlock can invalidate the dirstate, so cache normal _after_
1353 # wlock can invalidate the dirstate, so cache normal _after_
1352 # taking the lock
1354 # taking the lock
1353 wlock = self._repo.wlock(False)
1355 wlock = self._repo.wlock(False)
1354 normal = self._repo.dirstate.normal
1356 normal = self._repo.dirstate.normal
1355 try:
1357 try:
1356 for f in fixup:
1358 for f in fixup:
1357 normal(f)
1359 normal(f)
1358 finally:
1360 finally:
1359 wlock.release()
1361 wlock.release()
1360 except error.LockError:
1362 except error.LockError:
1361 pass
1363 pass
1362 return modified, fixup
1364 return modified, fixup
1363
1365
1364 def _manifestmatches(self, match, s):
1366 def _manifestmatches(self, match, s):
1365 """Slow path for workingctx
1367 """Slow path for workingctx
1366
1368
1367 The fast path is when we compare the working directory to its parent
1369 The fast path is when we compare the working directory to its parent
1368 which means this function is comparing with a non-parent; therefore we
1370 which means this function is comparing with a non-parent; therefore we
1369 need to build a manifest and return what matches.
1371 need to build a manifest and return what matches.
1370 """
1372 """
1371 mf = self._repo['.']._manifestmatches(match, s)
1373 mf = self._repo['.']._manifestmatches(match, s)
1372 modified, added, removed = s[0:3]
1374 modified, added, removed = s[0:3]
1373 for f in modified + added:
1375 for f in modified + added:
1374 mf[f] = None
1376 mf[f] = None
1375 mf.set(f, self.flags(f))
1377 mf.set(f, self.flags(f))
1376 for f in removed:
1378 for f in removed:
1377 if f in mf:
1379 if f in mf:
1378 del mf[f]
1380 del mf[f]
1379 return mf
1381 return mf
1380
1382
1381 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
1383 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
1382 """override the parent hook with a dirstate query
1384 """override the parent hook with a dirstate query
1383
1385
1384 We use this prestatus hook to populate the status with information from
1386 We use this prestatus hook to populate the status with information from
1385 the dirstate.
1387 the dirstate.
1386 """
1388 """
1387 # doesn't need to call super; if that changes, be aware that super
1389 # doesn't need to call super; if that changes, be aware that super
1388 # calls self.manifest which would slow down the common case of calling
1390 # calls self.manifest which would slow down the common case of calling
1389 # status against a workingctx's parent
1391 # status against a workingctx's parent
1390 return self._dirstatestatus(match, listignored, listclean, listunknown)
1392 return self._dirstatestatus(match, listignored, listclean, listunknown)
1391
1393
1392 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
1394 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
1393 """override the parent hook with a filter for suspect symlinks
1395 """override the parent hook with a filter for suspect symlinks
1394
1396
1395 We use this poststatus hook to filter out symlinks that might have
1397 We use this poststatus hook to filter out symlinks that might have
1396 accidentally ended up with the entire contents of the file they are
1398 accidentally ended up with the entire contents of the file they are
1397 supposed to be linking to.
1399 supposed to be linking to.
1398 """
1400 """
1399 s[0] = self._filtersuspectsymlink(s[0])
1401 s[0] = self._filtersuspectsymlink(s[0])
1400 self._status = s[:]
1402 self._status = s[:]
1401 return s
1403 return s
1402
1404
1403 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1405 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1404 unknown=False):
1406 unknown=False):
1405 '''Gets the status from the dirstate -- internal use only.'''
1407 '''Gets the status from the dirstate -- internal use only.'''
1406 listignored, listclean, listunknown = ignored, clean, unknown
1408 listignored, listclean, listunknown = ignored, clean, unknown
1407 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1409 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1408 subrepos = []
1410 subrepos = []
1409 if '.hgsub' in self:
1411 if '.hgsub' in self:
1410 subrepos = sorted(self.substate)
1412 subrepos = sorted(self.substate)
1411 s = self._repo.dirstate.status(match, subrepos, listignored,
1413 s = self._repo.dirstate.status(match, subrepos, listignored,
1412 listclean, listunknown)
1414 listclean, listunknown)
1413 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1415 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1414
1416
1415 # check for any possibly clean files
1417 # check for any possibly clean files
1416 if cmp:
1418 if cmp:
1417 modified2, fixup = self._checklookup(cmp)
1419 modified2, fixup = self._checklookup(cmp)
1418 modified += modified2
1420 modified += modified2
1419
1421
1420 # update dirstate for files that are actually clean
1422 # update dirstate for files that are actually clean
1421 if fixup and listclean:
1423 if fixup and listclean:
1422 clean += fixup
1424 clean += fixup
1423
1425
1424 return [modified, added, removed, deleted, unknown, ignored, clean]
1426 return [modified, added, removed, deleted, unknown, ignored, clean]
1425
1427
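For illustration, the 7-element list returned above unpacks as follows; calling the underscore-prefixed helper directly is only done here to show the ordering (real callers go through status()):

    modified, added, removed, deleted, unknown, ignored, clean = \
        repo[None]._dirstatestatus(ignored=True, clean=True, unknown=True)
    print len(modified), "modified and", len(unknown), "unknown files"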
1426 def _buildstatus(self, other, s, match, listignored, listclean,
1428 def _buildstatus(self, other, s, match, listignored, listclean,
1427 listunknown):
1429 listunknown):
1428 """build a status with respect to another context
1430 """build a status with respect to another context
1429
1431
1430 This includes logic for maintaining the fast path of status when
1432 This includes logic for maintaining the fast path of status when
1431 comparing the working directory against its parent, which is to skip
1433 comparing the working directory against its parent, which is to skip
1432 building a new manifest if self (working directory) is not comparing
1434 building a new manifest if self (working directory) is not comparing
1433 against its parent (repo['.']).
1435 against its parent (repo['.']).
1434 """
1436 """
1435 if other != self._repo['.']:
1437 if other != self._repo['.']:
1436 s = super(workingctx, self)._buildstatus(other, s, match,
1438 s = super(workingctx, self)._buildstatus(other, s, match,
1437 listignored, listclean,
1439 listignored, listclean,
1438 listunknown)
1440 listunknown)
1439 return s
1441 return s
1440
1442
1441 def _matchstatus(self, other, s, match, listignored, listclean,
1443 def _matchstatus(self, other, s, match, listignored, listclean,
1442 listunknown):
1444 listunknown):
1443 """override the match method with a filter for directory patterns
1445 """override the match method with a filter for directory patterns
1444
1446
1445 We use inheritance to customize the match.bad method only in cases of
1447 We use inheritance to customize the match.bad method only in cases of
1446 workingctx since it belongs only to the working directory when
1448 workingctx since it belongs only to the working directory when
1447 comparing against the parent changeset.
1449 comparing against the parent changeset.
1448
1450
1449 If we aren't comparing against the working directory's parent, then we
1451 If we aren't comparing against the working directory's parent, then we
1450 just use the default match object sent to us.
1452 just use the default match object sent to us.
1451 """
1453 """
1452 superself = super(workingctx, self)
1454 superself = super(workingctx, self)
1453 match = superself._matchstatus(other, s, match, listignored, listclean,
1455 match = superself._matchstatus(other, s, match, listignored, listclean,
1454 listunknown)
1456 listunknown)
1455 if other != self._repo['.']:
1457 if other != self._repo['.']:
1456 def bad(f, msg):
1458 def bad(f, msg):
1457 # 'f' may be a directory pattern from 'match.files()',
1459 # 'f' may be a directory pattern from 'match.files()',
1458 # so 'f not in ctx1' is not enough
1460 # so 'f not in ctx1' is not enough
1459 if f not in other and f not in other.dirs():
1461 if f not in other and f not in other.dirs():
1460 self._repo.ui.warn('%s: %s\n' %
1462 self._repo.ui.warn('%s: %s\n' %
1461 (self._repo.dirstate.pathto(f), msg))
1463 (self._repo.dirstate.pathto(f), msg))
1462 match.bad = bad
1464 match.bad = bad
1463 return match
1465 return match
1464
1466
1465 def status(self, other='.', match=None, listignored=False,
1467 def status(self, other='.', match=None, listignored=False,
1466 listclean=False, listunknown=False, listsubrepos=False):
1468 listclean=False, listunknown=False, listsubrepos=False):
1467 # yet to be determined: what to do if 'other' is a 'workingctx' or a
1469 # yet to be determined: what to do if 'other' is a 'workingctx' or a
1468 # 'memctx'?
1470 # 'memctx'?
1469 s = super(workingctx, self).status(other, match, listignored, listclean,
1471 s = super(workingctx, self).status(other, match, listignored, listclean,
1470 listunknown, listsubrepos)
1472 listunknown, listsubrepos)
1471 # calling 'super' subtly reversed the contexts, so we flip the results
1473 # calling 'super' subtly reversed the contexts, so we flip the results
1472 # (s[1] is 'added' and s[2] is 'removed')
1474 # (s[1] is 'added' and s[2] is 'removed')
1473 s = list(s)
1475 s = list(s)
1474 s[1], s[2] = s[2], s[1]
1476 s[1], s[2] = s[2], s[1]
1475 return tuple(s)
1477 return tuple(s)
1476
1478
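A sketch of comparing the working directory against an arbitrary revision; '1.0' is a hypothetical tag, and the comment restates the index meaning after the flip above:

    st = repo[None].status(other='1.0', listunknown=True)
    # st[0]=modified, st[1]=added, st[2]=removed, all expressed from the
    # working directory's point of view relative to revision '1.0'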
1477 class committablefilectx(basefilectx):
1479 class committablefilectx(basefilectx):
1478 """A committablefilectx provides common functionality for a file context
1480 """A committablefilectx provides common functionality for a file context
1479 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1481 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1480 def __init__(self, repo, path, filelog=None, ctx=None):
1482 def __init__(self, repo, path, filelog=None, ctx=None):
1481 self._repo = repo
1483 self._repo = repo
1482 self._path = path
1484 self._path = path
1483 self._changeid = None
1485 self._changeid = None
1484 self._filerev = self._filenode = None
1486 self._filerev = self._filenode = None
1485
1487
1486 if filelog is not None:
1488 if filelog is not None:
1487 self._filelog = filelog
1489 self._filelog = filelog
1488 if ctx:
1490 if ctx:
1489 self._changectx = ctx
1491 self._changectx = ctx
1490
1492
1491 def __nonzero__(self):
1493 def __nonzero__(self):
1492 return True
1494 return True
1493
1495
1494 def parents(self):
1496 def parents(self):
1495 '''return parent filectxs, following copies if necessary'''
1497 '''return parent filectxs, following copies if necessary'''
1496 def filenode(ctx, path):
1498 def filenode(ctx, path):
1497 return ctx._manifest.get(path, nullid)
1499 return ctx._manifest.get(path, nullid)
1498
1500
1499 path = self._path
1501 path = self._path
1500 fl = self._filelog
1502 fl = self._filelog
1501 pcl = self._changectx._parents
1503 pcl = self._changectx._parents
1502 renamed = self.renamed()
1504 renamed = self.renamed()
1503
1505
1504 if renamed:
1506 if renamed:
1505 pl = [renamed + (None,)]
1507 pl = [renamed + (None,)]
1506 else:
1508 else:
1507 pl = [(path, filenode(pcl[0], path), fl)]
1509 pl = [(path, filenode(pcl[0], path), fl)]
1508
1510
1509 for pc in pcl[1:]:
1511 for pc in pcl[1:]:
1510 pl.append((path, filenode(pc, path), fl))
1512 pl.append((path, filenode(pc, path), fl))
1511
1513
1512 return [filectx(self._repo, p, fileid=n, filelog=l)
1514 return [filectx(self._repo, p, fileid=n, filelog=l)
1513 for p, n, l in pl if n != nullid]
1515 for p, n, l in pl if n != nullid]
1514
1516
1515 def children(self):
1517 def children(self):
1516 return []
1518 return []
1517
1519
1518 class workingfilectx(committablefilectx):
1520 class workingfilectx(committablefilectx):
1519 """A workingfilectx object makes access to data related to a particular
1521 """A workingfilectx object makes access to data related to a particular
1520 file in the working directory convenient."""
1522 file in the working directory convenient."""
1521 def __init__(self, repo, path, filelog=None, workingctx=None):
1523 def __init__(self, repo, path, filelog=None, workingctx=None):
1522 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1524 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1523
1525
1524 @propertycache
1526 @propertycache
1525 def _changectx(self):
1527 def _changectx(self):
1526 return workingctx(self._repo)
1528 return workingctx(self._repo)
1527
1529
1528 def data(self):
1530 def data(self):
1529 return self._repo.wread(self._path)
1531 return self._repo.wread(self._path)
1530 def renamed(self):
1532 def renamed(self):
1531 rp = self._repo.dirstate.copied(self._path)
1533 rp = self._repo.dirstate.copied(self._path)
1532 if not rp:
1534 if not rp:
1533 return None
1535 return None
1534 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1536 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1535
1537
1536 def size(self):
1538 def size(self):
1537 return self._repo.wvfs.lstat(self._path).st_size
1539 return self._repo.wvfs.lstat(self._path).st_size
1538 def date(self):
1540 def date(self):
1539 t, tz = self._changectx.date()
1541 t, tz = self._changectx.date()
1540 try:
1542 try:
1541 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1543 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1542 except OSError, err:
1544 except OSError, err:
1543 if err.errno != errno.ENOENT:
1545 if err.errno != errno.ENOENT:
1544 raise
1546 raise
1545 return (t, tz)
1547 return (t, tz)
1546
1548
1547 def cmp(self, fctx):
1549 def cmp(self, fctx):
1548 """compare with other file context
1550 """compare with other file context
1549
1551
1550 returns True if different than fctx.
1552 returns True if different than fctx.
1551 """
1553 """
1552 # fctx should be a filectx (not a workingfilectx)
1554 # fctx should be a filectx (not a workingfilectx)
1553 # invert comparison to reuse the same code path
1555 # invert comparison to reuse the same code path
1554 return fctx.cmp(self)
1556 return fctx.cmp(self)
1555
1557
1556 def remove(self, ignoremissing=False):
1558 def remove(self, ignoremissing=False):
1557 """wraps unlink for a repo's working directory"""
1559 """wraps unlink for a repo's working directory"""
1558 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1560 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1559
1561
1560 def write(self, data, flags):
1562 def write(self, data, flags):
1561 """wraps repo.wwrite"""
1563 """wraps repo.wwrite"""
1562 self._repo.wwrite(self._path, data, flags)
1564 self._repo.wwrite(self._path, data, flags)
1563
1565
1564 class memctx(committablectx):
1566 class memctx(committablectx):
1565 """Use memctx to perform in-memory commits via localrepo.commitctx().
1567 """Use memctx to perform in-memory commits via localrepo.commitctx().
1566
1568
1567 Revision information is supplied at initialization time while
1569 Revision information is supplied at initialization time while
1568 related files data is made available through a callback
1570 related files data is made available through a callback
1569 mechanism. 'repo' is the current localrepo, 'parents' is a
1571 mechanism. 'repo' is the current localrepo, 'parents' is a
1570 sequence of two parent revisions identifiers (pass None for every
1572 sequence of two parent revisions identifiers (pass None for every
1571 missing parent), 'text' is the commit message and 'files' lists
1573 missing parent), 'text' is the commit message and 'files' lists
1572 names of files touched by the revision (normalized and relative to
1574 names of files touched by the revision (normalized and relative to
1573 repository root).
1575 repository root).
1574
1576
1575 filectxfn(repo, memctx, path) is a callable receiving the
1577 filectxfn(repo, memctx, path) is a callable receiving the
1576 repository, the current memctx object and the normalized path of
1578 repository, the current memctx object and the normalized path of
1577 requested file, relative to repository root. It is fired by the
1579 requested file, relative to repository root. It is fired by the
1578 commit function for every file in 'files', but the calling order is
1580 commit function for every file in 'files', but the calling order is
1579 undefined. If the file is available in the revision being
1581 undefined. If the file is available in the revision being
1580 committed (updated or added), filectxfn returns a memfilectx
1582 committed (updated or added), filectxfn returns a memfilectx
1581 object. If the file was removed, filectxfn raises an
1583 object. If the file was removed, filectxfn raises an
1582 IOError. Moved files are represented by marking the source file
1584 IOError. Moved files are represented by marking the source file
1583 removed and the new file added with copy information (see
1585 removed and the new file added with copy information (see
1584 memfilectx).
1586 memfilectx).
1585
1587
1586 user receives the committer name and defaults to current
1588 user receives the committer name and defaults to current
1587 repository username, date is the commit date in any format
1589 repository username, date is the commit date in any format
1588 supported by util.parsedate() and defaults to current date, extra
1590 supported by util.parsedate() and defaults to current date, extra
1589 is a dictionary of metadata or is left empty.
1591 is a dictionary of metadata or is left empty.
1590 """
1592 """
1591 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1593 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1592 date=None, extra=None, editor=False):
1594 date=None, extra=None, editor=False):
1593 super(memctx, self).__init__(repo, text, user, date, extra)
1595 super(memctx, self).__init__(repo, text, user, date, extra)
1594 self._rev = None
1596 self._rev = None
1595 self._node = None
1597 self._node = None
1596 parents = [(p or nullid) for p in parents]
1598 parents = [(p or nullid) for p in parents]
1597 p1, p2 = parents
1599 p1, p2 = parents
1598 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1600 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1599 files = sorted(set(files))
1601 files = sorted(set(files))
1600 self._status = [files, [], [], [], []]
1602 self._status = [files, [], [], [], []]
1601 self._filectxfn = filectxfn
1603 self._filectxfn = filectxfn
1602 self.substate = {}
1604 self.substate = {}
1603
1605
1604 # if filectxfn is not callable, wrap it in a function
1606 # if filectxfn is not callable, wrap it in a function
1605 if not callable(filectxfn):
1607 if not callable(filectxfn):
1606 def getfilectx(repo, memctx, path):
1608 def getfilectx(repo, memctx, path):
1607 fctx = filectxfn[path]
1609 fctx = filectxfn[path]
1608 # this is weird but apparently we only keep track of one parent
1610 # this is weird but apparently we only keep track of one parent
1609 # (why not only store that instead of a tuple?)
1611 # (why not only store that instead of a tuple?)
1610 copied = fctx.renamed()
1612 copied = fctx.renamed()
1611 if copied:
1613 if copied:
1612 copied = copied[0]
1614 copied = copied[0]
1613 return memfilectx(repo, path, fctx.data(),
1615 return memfilectx(repo, path, fctx.data(),
1614 islink=fctx.islink(), isexec=fctx.isexec(),
1616 islink=fctx.islink(), isexec=fctx.isexec(),
1615 copied=copied, memctx=memctx)
1617 copied=copied, memctx=memctx)
1616 self._filectxfn = getfilectx
1618 self._filectxfn = getfilectx
1617
1619
1618 self._extra = extra and extra.copy() or {}
1620 self._extra = extra and extra.copy() or {}
1619 if self._extra.get('branch', '') == '':
1621 if self._extra.get('branch', '') == '':
1620 self._extra['branch'] = 'default'
1622 self._extra['branch'] = 'default'
1621
1623
1622 if editor:
1624 if editor:
1623 self._text = editor(self._repo, self, [])
1625 self._text = editor(self._repo, self, [])
1624 self._repo.savecommitmessage(self._text)
1626 self._repo.savecommitmessage(self._text)
1625
1627
1626 def filectx(self, path, filelog=None):
1628 def filectx(self, path, filelog=None):
1627 """get a file context from the working directory"""
1629 """get a file context from the working directory"""
1628 return self._filectxfn(self._repo, self, path)
1630 return self._filectxfn(self._repo, self, path)
1629
1631
1630 def commit(self):
1632 def commit(self):
1631 """commit context to the repo"""
1633 """commit context to the repo"""
1632 return self._repo.commitctx(self)
1634 return self._repo.commitctx(self)
1633
1635
1634 @propertycache
1636 @propertycache
1635 def _manifest(self):
1637 def _manifest(self):
1636 """generate a manifest based on the return values of filectxfn"""
1638 """generate a manifest based on the return values of filectxfn"""
1637
1639
1638 # keep this simple for now; just worry about p1
1640 # keep this simple for now; just worry about p1
1639 pctx = self._parents[0]
1641 pctx = self._parents[0]
1640 man = pctx.manifest().copy()
1642 man = pctx.manifest().copy()
1641
1643
1642 for f, fnode in man.iteritems():
1644 for f, fnode in man.iteritems():
1643 p1node = nullid
1645 p1node = nullid
1644 p2node = nullid
1646 p2node = nullid
1645 p = pctx[f].parents() # if file isn't in pctx, check p2?
1647 p = pctx[f].parents() # if file isn't in pctx, check p2?
1646 if len(p) > 0:
1648 if len(p) > 0:
1647 p1node = p[0].node()
1649 p1node = p[0].node()
1648 if len(p) > 1:
1650 if len(p) > 1:
1649 p2node = p[1].node()
1651 p2node = p[1].node()
1650 man[f] = revlog.hash(self[f].data(), p1node, p2node)
1652 man[f] = revlog.hash(self[f].data(), p1node, p2node)
1651
1653
1652 return man
1654 return man
1653
1655
1654
1656
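A hedged end-to-end example of the memctx/memfilectx API documented above, creating a single-file commit entirely in memory; the repository, parent revision, user, and file contents are assumptions made for illustration:

    def getfilectx(repo, mctx, path):
        # return an in-memory file context for each file named in 'files'
        return memfilectx(repo, path, data='hello world\n',
                          islink=False, isexec=False, memctx=mctx)

    p1 = repo['tip'].node()
    mctx = memctx(repo, parents=(p1, None), text='add greeting',
                  files=['greeting.txt'], filectxfn=getfilectx,
                  user='editor <editor@example.com>')
    node = mctx.commit()       # delegates to repo.commitctx(self)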
1655 class memfilectx(committablefilectx):
1657 class memfilectx(committablefilectx):
1656 """memfilectx represents an in-memory file to commit.
1658 """memfilectx represents an in-memory file to commit.
1657
1659
1658 See memctx and committablefilectx for more details.
1660 See memctx and committablefilectx for more details.
1659 """
1661 """
1660 def __init__(self, repo, path, data, islink=False,
1662 def __init__(self, repo, path, data, islink=False,
1661 isexec=False, copied=None, memctx=None):
1663 isexec=False, copied=None, memctx=None):
1662 """
1664 """
1663 path is the normalized file path relative to repository root.
1665 path is the normalized file path relative to repository root.
1664 data is the file content as a string.
1666 data is the file content as a string.
1665 islink is True if the file is a symbolic link.
1667 islink is True if the file is a symbolic link.
1666 isexec is True if the file is executable.
1668 isexec is True if the file is executable.
1667 copied is the source file path if current file was copied in the
1669 copied is the source file path if current file was copied in the
1668 revision being committed, or None."""
1670 revision being committed, or None."""
1669 super(memfilectx, self).__init__(repo, path, None, memctx)
1671 super(memfilectx, self).__init__(repo, path, None, memctx)
1670 self._data = data
1672 self._data = data
1671 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1673 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1672 self._copied = None
1674 self._copied = None
1673 if copied:
1675 if copied:
1674 self._copied = (copied, nullid)
1676 self._copied = (copied, nullid)
1675
1677
1676 def data(self):
1678 def data(self):
1677 return self._data
1679 return self._data
1678 def size(self):
1680 def size(self):
1679 return len(self.data())
1681 return len(self.data())
1680 def flags(self):
1682 def flags(self):
1681 return self._flags
1683 return self._flags
1682 def renamed(self):
1684 def renamed(self):
1683 return self._copied
1685 return self._copied
1684
1686
1685 def remove(self, ignoremissing=False):
1687 def remove(self, ignoremissing=False):
1686 """wraps unlink for a repo's working directory"""
1688 """wraps unlink for a repo's working directory"""
1687 # need to figure out what to do here
1689 # need to figure out what to do here
1688 del self._changectx[self._path]
1690 del self._changectx[self._path]
1689
1691
1690 def write(self, data, flags):
1692 def write(self, data, flags):
1691 """wraps repo.wwrite"""
1693 """wraps repo.wwrite"""
1692 self._data = data
1694 self._data = data
@@ -1,366 +1,373 b''
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
10 import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
11 import branchmap
11 import branchmap
12
12
13 def findcommonincoming(repo, remote, heads=None, force=False):
13 def findcommonincoming(repo, remote, heads=None, force=False):
14 """Return a tuple (common, anyincoming, heads) used to identify the common
14 """Return a tuple (common, anyincoming, heads) used to identify the common
15 subset of nodes between repo and remote.
15 subset of nodes between repo and remote.
16
16
17 "common" is a list of (at least) the heads of the common subset.
17 "common" is a list of (at least) the heads of the common subset.
18 "anyincoming" is testable as a boolean indicating if any nodes are missing
18 "anyincoming" is testable as a boolean indicating if any nodes are missing
19 locally. If remote does not support getbundle, this actually is a list of
19 locally. If remote does not support getbundle, this actually is a list of
20 roots of the nodes that would be incoming, to be supplied to
20 roots of the nodes that would be incoming, to be supplied to
21 changegroupsubset. No code except for pull should be relying on this fact
21 changegroupsubset. No code except for pull should be relying on this fact
22 any longer.
22 any longer.
23 "heads" is either the supplied heads, or else the remote's heads.
23 "heads" is either the supplied heads, or else the remote's heads.
24
24
25 If you pass heads and they are all known locally, the response lists just
25 If you pass heads and they are all known locally, the response lists just
26 these heads in "common" and in "heads".
26 these heads in "common" and in "heads".
27
27
28 Please use findcommonoutgoing to compute the set of outgoing nodes to give
28 Please use findcommonoutgoing to compute the set of outgoing nodes to give
29 extensions a good hook into outgoing.
29 extensions a good hook into outgoing.
30 """
30 """
31
31
32 if not remote.capable('getbundle'):
32 if not remote.capable('getbundle'):
33 return treediscovery.findcommonincoming(repo, remote, heads, force)
33 return treediscovery.findcommonincoming(repo, remote, heads, force)
34
34
35 if heads:
35 if heads:
36 allknown = True
36 allknown = True
37 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
37 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
38 for h in heads:
38 for h in heads:
39 if not knownnode(h):
39 if not knownnode(h):
40 allknown = False
40 allknown = False
41 break
41 break
42 if allknown:
42 if allknown:
43 return (heads, False, heads)
43 return (heads, False, heads)
44
44
45 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
45 res = setdiscovery.findcommonheads(repo.ui, repo, remote,
46 abortwhenunrelated=not force)
46 abortwhenunrelated=not force)
47 common, anyinc, srvheads = res
47 common, anyinc, srvheads = res
48 return (list(common), anyinc, heads or list(srvheads))
48 return (list(common), anyinc, heads or list(srvheads))
49
49
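A sketch of consuming the (common, anyincoming, heads) tuple on the pull side; 'remote' stands for an assumed peer object and is not defined in this module:

    common, anyinc, rheads = findcommonincoming(repo, remote)
    if not anyinc:
        repo.ui.status(_("no changes found\n"))
    else:
        repo.ui.debug("remote heads: %s\n"
                      % ' '.join(short(h) for h in rheads))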
50 class outgoing(object):
50 class outgoing(object):
51 '''Represents the set of nodes present in a local repo but not in a
51 '''Represents the set of nodes present in a local repo but not in a
52 (possibly) remote one.
52 (possibly) remote one.
53
53
54 Members:
54 Members:
55
55
56 missing is a list of all nodes present in local but not in remote.
56 missing is a list of all nodes present in local but not in remote.
57 common is a list of all nodes shared between the two repos.
57 common is a list of all nodes shared between the two repos.
58 excluded is the list of missing changesets that shouldn't be sent remotely.
58 excluded is the list of missing changesets that shouldn't be sent remotely.
59 missingheads is the list of heads of missing.
59 missingheads is the list of heads of missing.
60 commonheads is the list of heads of common.
60 commonheads is the list of heads of common.
61
61
62 The sets are computed on demand from the heads, unless provided upfront
62 The sets are computed on demand from the heads, unless provided upfront
63 by discovery.'''
63 by discovery.'''
64
64
65 def __init__(self, revlog, commonheads, missingheads):
65 def __init__(self, revlog, commonheads, missingheads):
66 self.commonheads = commonheads
66 self.commonheads = commonheads
67 self.missingheads = missingheads
67 self.missingheads = missingheads
68 self._revlog = revlog
68 self._revlog = revlog
69 self._common = None
69 self._common = None
70 self._missing = None
70 self._missing = None
71 self.excluded = []
71 self.excluded = []
72
72
73 def _computecommonmissing(self):
73 def _computecommonmissing(self):
74 sets = self._revlog.findcommonmissing(self.commonheads,
74 sets = self._revlog.findcommonmissing(self.commonheads,
75 self.missingheads)
75 self.missingheads)
76 self._common, self._missing = sets
76 self._common, self._missing = sets
77
77
78 @util.propertycache
78 @util.propertycache
79 def common(self):
79 def common(self):
80 if self._common is None:
80 if self._common is None:
81 self._computecommonmissing()
81 self._computecommonmissing()
82 return self._common
82 return self._common
83
83
84 @util.propertycache
84 @util.propertycache
85 def missing(self):
85 def missing(self):
86 if self._missing is None:
86 if self._missing is None:
87 self._computecommonmissing()
87 self._computecommonmissing()
88 return self._missing
88 return self._missing
89
89
90 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
90 def findcommonoutgoing(repo, other, onlyheads=None, force=False,
91 commoninc=None, portable=False):
91 commoninc=None, portable=False):
92 '''Return an outgoing instance to identify the nodes present in repo but
92 '''Return an outgoing instance to identify the nodes present in repo but
93 not in other.
93 not in other.
94
94
95 If onlyheads is given, only nodes ancestral to nodes in onlyheads
95 If onlyheads is given, only nodes ancestral to nodes in onlyheads
96 (inclusive) are included. If you already know the local repo's heads,
96 (inclusive) are included. If you already know the local repo's heads,
97 passing them in onlyheads is faster than letting them be recomputed here.
97 passing them in onlyheads is faster than letting them be recomputed here.
98
98
99 If commoninc is given, it must be the result of a prior call to
99 If commoninc is given, it must be the result of a prior call to
100 findcommonincoming(repo, other, force) to avoid recomputing it here.
100 findcommonincoming(repo, other, force) to avoid recomputing it here.
101
101
102 If portable is given, compute more conservative common and missingheads,
102 If portable is given, compute more conservative common and missingheads,
103 to make bundles created from the instance more portable.'''
103 to make bundles created from the instance more portable.'''
104 # declare an empty outgoing object to be filled later
104 # declare an empty outgoing object to be filled later
105 og = outgoing(repo.changelog, None, None)
105 og = outgoing(repo.changelog, None, None)
106
106
107 # get common set if not provided
107 # get common set if not provided
108 if commoninc is None:
108 if commoninc is None:
109 commoninc = findcommonincoming(repo, other, force=force)
109 commoninc = findcommonincoming(repo, other, force=force)
110 og.commonheads, _any, _hds = commoninc
110 og.commonheads, _any, _hds = commoninc
111
111
112 # compute outgoing
112 # compute outgoing
113 mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
113 mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
114 if not mayexclude:
114 if not mayexclude:
115 og.missingheads = onlyheads or repo.heads()
115 og.missingheads = onlyheads or repo.heads()
116 elif onlyheads is None:
116 elif onlyheads is None:
117 # use visible heads as it should be cached
117 # use visible heads as it should be cached
118 og.missingheads = repo.filtered("served").heads()
118 og.missingheads = repo.filtered("served").heads()
119 og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
119 og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
120 else:
120 else:
121 # compute common, missing and exclude secret stuff
121 # compute common, missing and exclude secret stuff
122 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
122 sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
123 og._common, allmissing = sets
123 og._common, allmissing = sets
124 og._missing = missing = []
124 og._missing = missing = []
125 og.excluded = excluded = []
125 og.excluded = excluded = []
126 for node in allmissing:
126 for node in allmissing:
127 ctx = repo[node]
127 ctx = repo[node]
128 if ctx.phase() >= phases.secret or ctx.extinct():
128 if ctx.phase() >= phases.secret or ctx.extinct():
129 excluded.append(node)
129 excluded.append(node)
130 else:
130 else:
131 missing.append(node)
131 missing.append(node)
132 if len(missing) == len(allmissing):
132 if len(missing) == len(allmissing):
133 missingheads = onlyheads
133 missingheads = onlyheads
134 else: # update missing heads
134 else: # update missing heads
135 missingheads = phases.newheads(repo, onlyheads, excluded)
135 missingheads = phases.newheads(repo, onlyheads, excluded)
136 og.missingheads = missingheads
136 og.missingheads = missingheads
137 if portable:
137 if portable:
138 # recompute common and missingheads as if -r<rev> had been given for
138 # recompute common and missingheads as if -r<rev> had been given for
139 # each head of missing, and --base <rev> for each head of the proper
139 # each head of missing, and --base <rev> for each head of the proper
140 # ancestors of missing
140 # ancestors of missing
141 og._computecommonmissing()
141 og._computecommonmissing()
142 cl = repo.changelog
142 cl = repo.changelog
143 missingrevs = set(cl.rev(n) for n in og._missing)
143 missingrevs = set(cl.rev(n) for n in og._missing)
144 og._common = set(cl.ancestors(missingrevs)) - missingrevs
144 og._common = set(cl.ancestors(missingrevs)) - missingrevs
145 commonheads = set(og.commonheads)
145 commonheads = set(og.commonheads)
146 og.missingheads = [h for h in og.missingheads if h not in commonheads]
146 og.missingheads = [h for h in og.missingheads if h not in commonheads]
147
147
148 return og
148 return og
149
149
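And the typical push-side counterpart, again with 'remote' as an assumed peer object; only the reporting is invented for illustration:

    og = findcommonoutgoing(repo, remote)
    repo.ui.status(_("%d outgoing changesets, %d excluded as secret or extinct\n")
                   % (len(og.missing), len(og.excluded)))
    for node in og.missingheads:
        repo.ui.debug("outgoing head: %s\n" % short(node))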
150 def _headssummary(repo, remote, outgoing):
150 def _headssummary(repo, remote, outgoing):
151 """compute a summary of branch and heads status before and after push
151 """compute a summary of branch and heads status before and after push
152
152
153 return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
153 return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
154
154
155 - branch: the branch name
155 - branch: the branch name
156 - remoteheads: the list of remote heads known locally
156 - remoteheads: the list of remote heads known locally
157 None if the branch is new
157 None if the branch is new
158 - newheads: the new remote heads (known locally) with outgoing pushed
158 - newheads: the new remote heads (known locally) with outgoing pushed
159 - unsyncedheads: the list of remote heads unknown locally.
159 - unsyncedheads: the list of remote heads unknown locally.
160 """
160 """
161 cl = repo.changelog
161 cl = repo.changelog
162 headssum = {}
162 headssum = {}
163 # A. Create set of branches involved in the push.
163 # A. Create set of branches involved in the push.
164 branches = set(repo[n].branch() for n in outgoing.missing)
164 branches = set(repo[n].branch() for n in outgoing.missing)
165 remotemap = remote.branchmap()
165 remotemap = remote.branchmap()
166 newbranches = branches - set(remotemap)
166 newbranches = branches - set(remotemap)
167 branches.difference_update(newbranches)
167 branches.difference_update(newbranches)
168
168
169 # A. register remote heads
169 # A. register remote heads
170 remotebranches = set()
170 remotebranches = set()
171 for branch, heads in remote.branchmap().iteritems():
171 for branch, heads in remote.branchmap().iteritems():
172 remotebranches.add(branch)
172 remotebranches.add(branch)
173 known = []
173 known = []
174 unsynced = []
174 unsynced = []
175 knownnode = cl.hasnode # do not use nodemap until it is filtered
175 knownnode = cl.hasnode # do not use nodemap until it is filtered
176 for h in heads:
176 for h in heads:
177 if knownnode(h):
177 if knownnode(h):
178 known.append(h)
178 known.append(h)
179 else:
179 else:
180 unsynced.append(h)
180 unsynced.append(h)
181 headssum[branch] = (known, list(known), unsynced)
181 headssum[branch] = (known, list(known), unsynced)
182 # B. add new branch data
182 # B. add new branch data
183 missingctx = list(repo[n] for n in outgoing.missing)
183 missingctx = list(repo[n] for n in outgoing.missing)
184 touchedbranches = set()
184 touchedbranches = set()
185 for ctx in missingctx:
185 for ctx in missingctx:
186 branch = ctx.branch()
186 branch = ctx.branch()
187 touchedbranches.add(branch)
187 touchedbranches.add(branch)
188 if branch not in headssum:
188 if branch not in headssum:
189 headssum[branch] = (None, [], [])
189 headssum[branch] = (None, [], [])
190
190
191 # C drop data about untouched branches:
191 # C drop data about untouched branches:
192 for branch in remotebranches - touchedbranches:
192 for branch in remotebranches - touchedbranches:
193 del headssum[branch]
193 del headssum[branch]
194
194
195 # D. Update newmap with outgoing changes.
195 # D. Update newmap with outgoing changes.
196 # This will possibly add new heads and remove existing ones.
196 # This will possibly add new heads and remove existing ones.
197 newmap = branchmap.branchcache((branch, heads[1])
197 newmap = branchmap.branchcache((branch, heads[1])
198 for branch, heads in headssum.iteritems()
198 for branch, heads in headssum.iteritems()
199 if heads[0] is not None)
199 if heads[0] is not None)
200 newmap.update(repo, (ctx.rev() for ctx in missingctx))
200 newmap.update(repo, (ctx.rev() for ctx in missingctx))
201 for branch, newheads in newmap.iteritems():
201 for branch, newheads in newmap.iteritems():
202 headssum[branch][1][:] = newheads
202 headssum[branch][1][:] = newheads
203 return headssum
203 return headssum
204
204
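An illustrative walk over the returned mapping (branch name -> (remoteheads, newheads, unsyncedheads)); as noted in the docstring, remoteheads is None for a branch that does not yet exist remotely:

    for branch, (remoteheads, newheads, unsyncedheads) in headssum.iteritems():
        if remoteheads is None:
            repo.ui.debug("branch %s is new on the remote\n" % branch)
        elif len(newheads) > len(remoteheads):
            repo.ui.debug("push would add a head on branch %s\n" % branch)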
205 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
205 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
206 """Compute branchmapsummary for repo without branchmap support"""
206 """Compute branchmapsummary for repo without branchmap support"""
207
207
208 # 1-4b. old servers: Check for new topological heads.
208 # 1-4b. old servers: Check for new topological heads.
209 # Construct {old,new}map with branch = None (topological branch).
209 # Construct {old,new}map with branch = None (topological branch).
210 # (code based on update)
210 # (code based on update)
211 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
211 knownnode = repo.changelog.hasnode # no nodemap until it is filtered
212 oldheads = set(h for h in remoteheads if knownnode(h))
212 oldheads = set(h for h in remoteheads if knownnode(h))
213 # all nodes in outgoing.missing are children of either:
213 # all nodes in outgoing.missing are children of either:
214 # - an element of oldheads
214 # - an element of oldheads
215 # - another element of outgoing.missing
215 # - another element of outgoing.missing
216 # - nullrev
216 # - nullrev
217 # This explains why the new heads are very simple to compute.
217 # This explains why the new heads are very simple to compute.
218 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
218 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
219 newheads = list(c.node() for c in r)
219 newheads = list(c.node() for c in r)
220 # set some unsynced head to issue the "unsynced changes" warning
220 unsynced = inc and set([None]) or set()
221 unsynced = inc and set([None]) or set()
221 return {None: (oldheads, newheads, unsynced)}
222 return {None: (oldheads, newheads, unsynced)}
222
223
223 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False,
224 def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False,
224 newbookmarks=[]):
225 newbookmarks=[]):
225 """Check that a push won't add any outgoing head
226 """Check that a push won't add any outgoing head
226
227
227 raise Abort error and display ui message as needed.
228 raise Abort error and display ui message as needed.
228 """
229 """
229 # Check for each named branch if we're creating new remote heads.
230 # Check for each named branch if we're creating new remote heads.
230 # To be a remote head after push, node must be either:
231 # To be a remote head after push, node must be either:
231 # - unknown locally
232 # - unknown locally
232 # - a local outgoing head descended from update
233 # - a local outgoing head descended from update
233 # - a remote head that's known locally and not
234 # - a remote head that's known locally and not
234 # ancestral to an outgoing head
235 # ancestral to an outgoing head
235 if remoteheads == [nullid]:
236 if remoteheads == [nullid]:
236 # remote is empty, nothing to check.
237 # remote is empty, nothing to check.
237 return
238 return
238
239
239 if remote.capable('branchmap'):
240 if remote.capable('branchmap'):
240 headssum = _headssummary(repo, remote, outgoing)
241 headssum = _headssummary(repo, remote, outgoing)
241 else:
242 else:
242 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
243 headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
243 newbranches = [branch for branch, heads in headssum.iteritems()
244 newbranches = [branch for branch, heads in headssum.iteritems()
244 if heads[0] is None]
245 if heads[0] is None]
245 # 1. Check for new branches on the remote.
246 # 1. Check for new branches on the remote.
246 if newbranches and not newbranch: # new branch requires --new-branch
247 if newbranches and not newbranch: # new branch requires --new-branch
247 branchnames = ', '.join(sorted(newbranches))
248 branchnames = ', '.join(sorted(newbranches))
248 raise util.Abort(_("push creates new remote branches: %s!")
249 raise util.Abort(_("push creates new remote branches: %s!")
249 % branchnames,
250 % branchnames,
250 hint=_("use 'hg push --new-branch' to create"
251 hint=_("use 'hg push --new-branch' to create"
251 " new remote branches"))
252 " new remote branches"))
252
253
253 # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
254 # 2. Compute newly pushed bookmarks. We don't warn about bookmarked heads.
254 localbookmarks = repo._bookmarks
255 localbookmarks = repo._bookmarks
255 remotebookmarks = remote.listkeys('bookmarks')
256 remotebookmarks = remote.listkeys('bookmarks')
256 bookmarkedheads = set()
257 bookmarkedheads = set()
257 for bm in localbookmarks:
258 for bm in localbookmarks:
258 rnode = remotebookmarks.get(bm)
259 rnode = remotebookmarks.get(bm)
259 if rnode and rnode in repo:
260 if rnode and rnode in repo:
260 lctx, rctx = repo[bm], repo[rnode]
261 lctx, rctx = repo[bm], repo[rnode]
261 if bookmarks.validdest(repo, rctx, lctx):
262 if bookmarks.validdest(repo, rctx, lctx):
262 bookmarkedheads.add(lctx.node())
263 bookmarkedheads.add(lctx.node())
263 else:
264 else:
264 if bm in newbookmarks:
265 if bm in newbookmarks:
265 bookmarkedheads.add(repo[bm].node())
266 bookmarkedheads.add(repo[bm].node())
266
267
267 # 3. Check for new heads.
268 # 3. Check for new heads.
268 # If there are more heads after the push than before, a suitable
269 # If there are more heads after the push than before, a suitable
269 # error message, depending on unsynced status, is displayed.
270 # error message, depending on unsynced status, is displayed.
270 error = None
271 error = None
271 allmissing = set(outgoing.missing)
272 allmissing = set(outgoing.missing)
272 allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
273 allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
273 allfuturecommon.update(allmissing)
274 allfuturecommon.update(allmissing)
274 for branch, heads in sorted(headssum.iteritems()):
275 for branch, heads in sorted(headssum.iteritems()):
275 remoteheads, newheads, unsyncedheads = heads
276 remoteheads, newheads, unsyncedheads = heads
276 candidate_newhs = set(newheads)
277 candidate_newhs = set(newheads)
277 # add unsynced data
278 # add unsynced data
278 if remoteheads is None:
279 if remoteheads is None:
279 oldhs = set()
280 oldhs = set()
280 else:
281 else:
281 oldhs = set(remoteheads)
282 oldhs = set(remoteheads)
282 oldhs.update(unsyncedheads)
283 oldhs.update(unsyncedheads)
283 candidate_newhs.update(unsyncedheads)
284 candidate_newhs.update(unsyncedheads)
284 dhs = None # delta heads, the new heads on branch
285 dhs = None # delta heads, the new heads on branch
285 discardedheads = set()
286 discardedheads = set()
286 if repo.obsstore:
287 if repo.obsstore:
287 # remove future heads which are actually obsoleted by another
288 # remove future heads which are actually obsoleted by another
288 # pushed element:
289 # pushed element:
289 #
290 #
290 # XXX as above, there are several cases this code does not handle
291 # XXX as above, there are several cases this code does not handle
291 # XXX properly
292 # XXX properly
292 #
293 #
293 # (1) if <nh> is public, it won't be affected by obsolescence markers
294 # (1) if <nh> is public, it won't be affected by obsolescence markers
294 # and a new head is created
295 # and a new head is created
295 #
296 #
296 # (2) if the new heads have ancestors which are not obsolete and
297 # (2) if the new heads have ancestors which are not obsolete and
297 # not ancestors of any other heads, we will have a new head too.
298 # not ancestors of any other heads, we will have a new head too.
298 #
299 #
299 # These two cases will be easy to handle for known changesets but
300 # These two cases will be easy to handle for known changesets but
300 # are much trickier for unsynced changes.
301 # are much trickier for unsynced changes.
301 newhs = set()
302 newhs = set()
302 for nh in candidate_newhs:
303 for nh in candidate_newhs:
303 if nh in repo and repo[nh].phase() <= phases.public:
304 if nh in repo and repo[nh].phase() <= phases.public:
304 newhs.add(nh)
305 newhs.add(nh)
305 else:
306 else:
306 for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
307 for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
307 if suc != nh and suc in allfuturecommon:
308 if suc != nh and suc in allfuturecommon:
308 discardedheads.add(nh)
309 discardedheads.add(nh)
309 break
310 break
310 else:
311 else:
311 newhs.add(nh)
312 newhs.add(nh)
312 else:
313 else:
313 newhs = candidate_newhs
314 newhs = candidate_newhs
314 unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
315 unsynced = sorted(h for h in unsyncedheads if h not in discardedheads)
315 if unsynced:
316 if unsynced:
316 if len(unsynced) <= 4 or repo.ui.verbose:
317 if None in unsynced:
318 # old remote, no heads data
319 heads = None
320 elif len(unsynced) <= 4 or repo.ui.verbose:
317 heads = ' '.join(short(h) for h in unsynced)
321 heads = ' '.join(short(h) for h in unsynced)
318 else:
322 else:
319 heads = (' '.join(short(h) for h in unsynced[:4]) +
323 heads = (' '.join(short(h) for h in unsynced[:4]) +
320 ' ' + _("and %s others") % (len(unsynced) - 4))
324 ' ' + _("and %s others") % (len(unsynced) - 4))
321 if branch is None:
325 if heads is None:
326 repo.ui.status(_("remote has heads that are "
327 "not known locally\n"))
328 elif branch is None:
322 repo.ui.status(_("remote has heads that are "
329 repo.ui.status(_("remote has heads that are "
323 "not known locally: %s\n") % heads)
330 "not known locally: %s\n") % heads)
324 else:
331 else:
325 repo.ui.status(_("remote has heads on branch '%s' that are "
332 repo.ui.status(_("remote has heads on branch '%s' that are "
326 "not known locally: %s\n") % (branch, heads))
333 "not known locally: %s\n") % (branch, heads))
327 if remoteheads is None:
334 if remoteheads is None:
328 if len(newhs) > 1:
335 if len(newhs) > 1:
329 dhs = list(newhs)
336 dhs = list(newhs)
330 if error is None:
337 if error is None:
331 error = (_("push creates new branch '%s' "
338 error = (_("push creates new branch '%s' "
332 "with multiple heads") % (branch))
339 "with multiple heads") % (branch))
333 hint = _("merge or"
340 hint = _("merge or"
334 " see \"hg help push\" for details about"
341 " see \"hg help push\" for details about"
335 " pushing new heads")
342 " pushing new heads")
336 elif len(newhs) > len(oldhs):
343 elif len(newhs) > len(oldhs):
337 # remove bookmarked or existing remote heads from the new heads list
344 # remove bookmarked or existing remote heads from the new heads list
338 dhs = sorted(newhs - bookmarkedheads - oldhs)
345 dhs = sorted(newhs - bookmarkedheads - oldhs)
339 if dhs:
346 if dhs:
340 if error is None:
347 if error is None:
341 if branch not in ('default', None):
348 if branch not in ('default', None):
342 error = _("push creates new remote head %s "
349 error = _("push creates new remote head %s "
343 "on branch '%s'!") % (short(dhs[0]), branch)
350 "on branch '%s'!") % (short(dhs[0]), branch)
344 elif repo[dhs[0]].bookmarks():
351 elif repo[dhs[0]].bookmarks():
345 error = _("push creates new remote head %s "
352 error = _("push creates new remote head %s "
346 "with bookmark '%s'!") % (
353 "with bookmark '%s'!") % (
347 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
354 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
348 else:
355 else:
349 error = _("push creates new remote head %s!"
356 error = _("push creates new remote head %s!"
350 ) % short(dhs[0])
357 ) % short(dhs[0])
351 if unsyncedheads:
358 if unsyncedheads:
352 hint = _("pull and merge or"
359 hint = _("pull and merge or"
353 " see \"hg help push\" for details about"
360 " see \"hg help push\" for details about"
354 " pushing new heads")
361 " pushing new heads")
355 else:
362 else:
356 hint = _("merge or"
363 hint = _("merge or"
357 " see \"hg help push\" for details about"
364 " see \"hg help push\" for details about"
358 " pushing new heads")
365 " pushing new heads")
359 if branch is None:
366 if branch is None:
360 repo.ui.note(_("new remote heads:\n"))
367 repo.ui.note(_("new remote heads:\n"))
361 else:
368 else:
362 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
369 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
363 for h in dhs:
370 for h in dhs:
364 repo.ui.note((" %s\n") % short(h))
371 repo.ui.note((" %s\n") % short(h))
365 if error:
372 if error:
366 raise util.Abort(error, hint=hint)
373 raise util.Abort(error, hint=hint)
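The block above boils down to a per-branch head count: take the heads the remote already has (plus any unsynced ones), compute the heads it would have after the push, discard heads that are obsoleted by pushed successors or covered by pushed bookmarks, and abort if anything new is left over. A minimal standalone sketch of that accounting, with hypothetical names and inputs (this is not the Mercurial API):

def would_create_new_heads(oldheads, newheads, bookmarkedheads):
    """Return the offending 'delta heads' that would make the push abort."""
    if not oldheads:
        # brand new branch on the remote: more than one head is already
        # too many (mirrors the len(newhs) > 1 case above)
        return sorted(newheads)[1:]
    # existing branch: only heads that are neither bookmarked nor already
    # present on the remote count as new
    return sorted(set(newheads) - set(bookmarkedheads) - set(oldheads))

# one existing remote head, two future heads, none bookmarked
# -> one offending head, so the push would be refused
assert would_create_new_heads({'a'}, {'a', 'b'}, set()) == ['b']
# the extra head is bookmarked -> no complaint
assert would_create_new_heads({'a'}, {'a', 'b'}, {'b'}) == []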
@@ -1,1149 +1,1149 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import struct
8 import struct
9
9
10 from node import nullid, nullrev, hex, bin
10 from node import nullid, nullrev, hex, bin
11 from i18n import _
11 from i18n import _
12 from mercurial import obsolete
12 from mercurial import obsolete
13 import error as errormod, util, filemerge, copies, subrepo, worker, dicthelpers
13 import error as errormod, util, filemerge, copies, subrepo, worker, dicthelpers
14 import errno, os, shutil
14 import errno, os, shutil
15
15
16 _pack = struct.pack
16 _pack = struct.pack
17 _unpack = struct.unpack
17 _unpack = struct.unpack
18
18
19 def _droponode(data):
19 def _droponode(data):
20 # used for compatibility with v1
20 # used for compatibility with v1
21 bits = data.split("\0")
21 bits = data.split("\0")
22 bits = bits[:-2] + bits[-1:]
22 bits = bits[:-2] + bits[-1:]
23 return "\0".join(bits)
23 return "\0".join(bits)
24
24
25 class mergestate(object):
25 class mergestate(object):
26 '''track 3-way merge state of individual files
26 '''track 3-way merge state of individual files
27
27
28 it is stored on disk when needed. Two files are used: one with an old
28 it is stored on disk when needed. Two files are used: one with an old
29 format, one with a new format. Both contain similar data, but the new
29 format, one with a new format. Both contain similar data, but the new
30 format can store new kinds of fields.
30 format can store new kinds of fields.
31
31
32 The current new format is a list of arbitrary records of the form:
32 The current new format is a list of arbitrary records of the form:
33
33
34 [type][length][content]
34 [type][length][content]
35
35
36 Type is a single character, length is a 4-byte integer, content is an
36 Type is a single character, length is a 4-byte integer, content is an
37 arbitrary sequence of bytes of length `length`.
37 arbitrary sequence of bytes of length `length`.
38
38
39 Type should be a letter. Capital-letter records are mandatory: Mercurial
39 Type should be a letter. Capital-letter records are mandatory: Mercurial
40 should abort if they are unknown. Lower-case records can be safely ignored.
40 should abort if they are unknown. Lower-case records can be safely ignored.
41
41
42 Currently known record:
42 Currently known record:
43
43
44 L: the node of the "local" part of the merge (hexified version)
44 L: the node of the "local" part of the merge (hexified version)
45 O: the node of the "other" part of the merge (hexified version)
45 O: the node of the "other" part of the merge (hexified version)
46 F: a file to be merged entry
46 F: a file to be merged entry
47 '''
47 '''
48 statepathv1 = "merge/state"
48 statepathv1 = "merge/state"
49 statepathv2 = "merge/state2"
49 statepathv2 = "merge/state2"
50
50
51 def __init__(self, repo):
51 def __init__(self, repo):
52 self._repo = repo
52 self._repo = repo
53 self._dirty = False
53 self._dirty = False
54 self._read()
54 self._read()
55
55
56 def reset(self, node=None, other=None):
56 def reset(self, node=None, other=None):
57 self._state = {}
57 self._state = {}
58 self._local = None
58 self._local = None
59 self._other = None
59 self._other = None
60 if node:
60 if node:
61 self._local = node
61 self._local = node
62 self._other = other
62 self._other = other
63 shutil.rmtree(self._repo.join("merge"), True)
63 shutil.rmtree(self._repo.join("merge"), True)
64 self._dirty = False
64 self._dirty = False
65
65
66 def _read(self):
66 def _read(self):
67 """Analyse each record content to restore a serialized state from disk
67 """Analyse each record content to restore a serialized state from disk
68
68
69 This function processes the "record" entries produced by the de-serialization
69 This function processes the "record" entries produced by the de-serialization
70 of the on-disk file.
70 of the on-disk file.
71 """
71 """
72 self._state = {}
72 self._state = {}
73 self._local = None
73 self._local = None
74 self._other = None
74 self._other = None
75 records = self._readrecords()
75 records = self._readrecords()
76 for rtype, record in records:
76 for rtype, record in records:
77 if rtype == 'L':
77 if rtype == 'L':
78 self._local = bin(record)
78 self._local = bin(record)
79 elif rtype == 'O':
79 elif rtype == 'O':
80 self._other = bin(record)
80 self._other = bin(record)
81 elif rtype == "F":
81 elif rtype == "F":
82 bits = record.split("\0")
82 bits = record.split("\0")
83 self._state[bits[0]] = bits[1:]
83 self._state[bits[0]] = bits[1:]
84 elif not rtype.islower():
84 elif not rtype.islower():
85 raise util.Abort(_('unsupported merge state record: %s')
85 raise util.Abort(_('unsupported merge state record: %s')
86 % rtype)
86 % rtype)
87 self._dirty = False
87 self._dirty = False
88
88
89 def _readrecords(self):
89 def _readrecords(self):
90 """Read merge state from disk and return a list of record (TYPE, data)
90 """Read merge state from disk and return a list of record (TYPE, data)
91
91
92 We read data from both v1 and v2 files and decide which one to use.
92 We read data from both v1 and v2 files and decide which one to use.
93
93
94 V1 has been used by versions prior to 2.9.1 and contains less data than
94 V1 has been used by versions prior to 2.9.1 and contains less data than
95 v2. We read both versions and check whether any data in v2 contradicts
95 v2. We read both versions and check whether any data in v2 contradicts
96 v1. If there is no contradiction we can safely assume that both v1
96 v1. If there is no contradiction we can safely assume that both v1
97 and v2 were written at the same time and use the extra data in v2. If
97 and v2 were written at the same time and use the extra data in v2. If
98 there is a contradiction we ignore the v2 content, as we assume an old version
98 there is a contradiction we ignore the v2 content, as we assume an old version
99 of Mercurial has overwritten the mergestate file and left an old v2
99 of Mercurial has overwritten the mergestate file and left an old v2
100 file around.
100 file around.
101
101
102 returns list of record [(TYPE, data), ...]"""
102 returns list of record [(TYPE, data), ...]"""
103 v1records = self._readrecordsv1()
103 v1records = self._readrecordsv1()
104 v2records = self._readrecordsv2()
104 v2records = self._readrecordsv2()
105 oldv2 = set() # old format version of v2 record
105 oldv2 = set() # old format version of v2 record
106 for rec in v2records:
106 for rec in v2records:
107 if rec[0] == 'L':
107 if rec[0] == 'L':
108 oldv2.add(rec)
108 oldv2.add(rec)
109 elif rec[0] == 'F':
109 elif rec[0] == 'F':
110 # drop the onode data (not contained in v1)
110 # drop the onode data (not contained in v1)
111 oldv2.add(('F', _droponode(rec[1])))
111 oldv2.add(('F', _droponode(rec[1])))
112 for rec in v1records:
112 for rec in v1records:
113 if rec not in oldv2:
113 if rec not in oldv2:
114 # v1 file is newer than v2 file, use it
114 # v1 file is newer than v2 file, use it
115 # we have to infer the "other" changeset of the merge
115 # we have to infer the "other" changeset of the merge
116 # we cannot do better than that with v1 of the format
116 # we cannot do better than that with v1 of the format
117 mctx = self._repo[None].parents()[-1]
117 mctx = self._repo[None].parents()[-1]
118 v1records.append(('O', mctx.hex()))
118 v1records.append(('O', mctx.hex()))
119 # add placeholder "other" file node information
119 # add placeholder "other" file node information
120 # nobody is using it yet so we do not need to fetch the data
120 # nobody is using it yet so we do not need to fetch the data
121 # if mctx was wrong `mctx[bits[-2]]` may fail.
121 # if mctx was wrong `mctx[bits[-2]]` may fail.
122 for idx, r in enumerate(v1records):
122 for idx, r in enumerate(v1records):
123 if r[0] == 'F':
123 if r[0] == 'F':
124 bits = r[1].split("\0")
124 bits = r[1].split("\0")
125 bits.insert(-2, '')
125 bits.insert(-2, '')
126 v1records[idx] = (r[0], "\0".join(bits))
126 v1records[idx] = (r[0], "\0".join(bits))
127 return v1records
127 return v1records
128 else:
128 else:
129 return v2records
129 return v2records
130
130
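A small self-contained illustration of the reconciliation rule described above, using hypothetical record tuples rather than real mergestate data: v2 is trusted only when every v1 record is also present in v2 once v2 is reduced to the fields v1 can store.

def pick_records(v1records, oldv2):
    """Decide which on-disk format to trust; oldv2 is the v2 data reduced
    to the fields v1 knows about (the 'other node' entry dropped)."""
    if all(rec in oldv2 for rec in v1records):
        return 'v2'   # consistent: use the richer v2 data
    return 'v1'       # v1 is newer; the v2 file is stale and ignored

assert pick_records([('L', 'aaa')], {('L', 'aaa'), ('O', 'bbb')}) == 'v2'
assert pick_records([('L', 'new')], {('L', 'old'), ('O', 'bbb')}) == 'v1'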
131 def _readrecordsv1(self):
131 def _readrecordsv1(self):
132 """read on disk merge state for version 1 file
132 """read on disk merge state for version 1 file
133
133
134 returns list of record [(TYPE, data), ...]
134 returns list of record [(TYPE, data), ...]
135
135
136 Note: the "F" data from this file are one entry short
136 Note: the "F" data from this file are one entry short
137 (no "other file node" entry)
137 (no "other file node" entry)
138 """
138 """
139 records = []
139 records = []
140 try:
140 try:
141 f = self._repo.opener(self.statepathv1)
141 f = self._repo.opener(self.statepathv1)
142 for i, l in enumerate(f):
142 for i, l in enumerate(f):
143 if i == 0:
143 if i == 0:
144 records.append(('L', l[:-1]))
144 records.append(('L', l[:-1]))
145 else:
145 else:
146 records.append(('F', l[:-1]))
146 records.append(('F', l[:-1]))
147 f.close()
147 f.close()
148 except IOError, err:
148 except IOError, err:
149 if err.errno != errno.ENOENT:
149 if err.errno != errno.ENOENT:
150 raise
150 raise
151 return records
151 return records
152
152
153 def _readrecordsv2(self):
153 def _readrecordsv2(self):
154 """read on disk merge state for version 2 file
154 """read on disk merge state for version 2 file
155
155
156 returns list of record [(TYPE, data), ...]
156 returns list of record [(TYPE, data), ...]
157 """
157 """
158 records = []
158 records = []
159 try:
159 try:
160 f = self._repo.opener(self.statepathv2)
160 f = self._repo.opener(self.statepathv2)
161 data = f.read()
161 data = f.read()
162 off = 0
162 off = 0
163 end = len(data)
163 end = len(data)
164 while off < end:
164 while off < end:
165 rtype = data[off]
165 rtype = data[off]
166 off += 1
166 off += 1
167 length = _unpack('>I', data[off:(off + 4)])[0]
167 length = _unpack('>I', data[off:(off + 4)])[0]
168 off += 4
168 off += 4
169 record = data[off:(off + length)]
169 record = data[off:(off + length)]
170 off += length
170 off += length
171 records.append((rtype, record))
171 records.append((rtype, record))
172 f.close()
172 f.close()
173 except IOError, err:
173 except IOError, err:
174 if err.errno != errno.ENOENT:
174 if err.errno != errno.ENOENT:
175 raise
175 raise
176 return records
176 return records
177
177
178 def active(self):
178 def active(self):
179 """Whether mergestate is active.
179 """Whether mergestate is active.
180
180
181 Returns True if there appears to be mergestate. This is a rough proxy
181 Returns True if there appears to be mergestate. This is a rough proxy
182 for "is a merge in progress."
182 for "is a merge in progress."
183 """
183 """
184 # Check local variables before looking at filesystem for performance
184 # Check local variables before looking at filesystem for performance
185 # reasons.
185 # reasons.
186 return bool(self._local) or bool(self._state) or \
186 return bool(self._local) or bool(self._state) or \
187 self._repo.opener.exists(self.statepathv1) or \
187 self._repo.opener.exists(self.statepathv1) or \
188 self._repo.opener.exists(self.statepathv2)
188 self._repo.opener.exists(self.statepathv2)
189
189
190 def commit(self):
190 def commit(self):
191 """Write current state on disk (if necessary)"""
191 """Write current state on disk (if necessary)"""
192 if self._dirty:
192 if self._dirty:
193 records = []
193 records = []
194 records.append(("L", hex(self._local)))
194 records.append(("L", hex(self._local)))
195 records.append(("O", hex(self._other)))
195 records.append(("O", hex(self._other)))
196 for d, v in self._state.iteritems():
196 for d, v in self._state.iteritems():
197 records.append(("F", "\0".join([d] + v)))
197 records.append(("F", "\0".join([d] + v)))
198 self._writerecords(records)
198 self._writerecords(records)
199 self._dirty = False
199 self._dirty = False
200
200
201 def _writerecords(self, records):
201 def _writerecords(self, records):
202 """Write current state on disk (both v1 and v2)"""
202 """Write current state on disk (both v1 and v2)"""
203 self._writerecordsv1(records)
203 self._writerecordsv1(records)
204 self._writerecordsv2(records)
204 self._writerecordsv2(records)
205
205
206 def _writerecordsv1(self, records):
206 def _writerecordsv1(self, records):
207 """Write current state on disk in a version 1 file"""
207 """Write current state on disk in a version 1 file"""
208 f = self._repo.opener(self.statepathv1, "w")
208 f = self._repo.opener(self.statepathv1, "w")
209 irecords = iter(records)
209 irecords = iter(records)
210 lrecords = irecords.next()
210 lrecords = irecords.next()
211 assert lrecords[0] == 'L'
211 assert lrecords[0] == 'L'
212 f.write(hex(self._local) + "\n")
212 f.write(hex(self._local) + "\n")
213 for rtype, data in irecords:
213 for rtype, data in irecords:
214 if rtype == "F":
214 if rtype == "F":
215 f.write("%s\n" % _droponode(data))
215 f.write("%s\n" % _droponode(data))
216 f.close()
216 f.close()
217
217
218 def _writerecordsv2(self, records):
218 def _writerecordsv2(self, records):
219 """Write current state on disk in a version 2 file"""
219 """Write current state on disk in a version 2 file"""
220 f = self._repo.opener(self.statepathv2, "w")
220 f = self._repo.opener(self.statepathv2, "w")
221 for key, data in records:
221 for key, data in records:
222 assert len(key) == 1
222 assert len(key) == 1
223 format = ">sI%is" % len(data)
223 format = ">sI%is" % len(data)
224 f.write(_pack(format, key, len(data), data))
224 f.write(_pack(format, key, len(data), data))
225 f.close()
225 f.close()
226
226
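As a sanity check of the [type][length][content] layout handled by _readrecordsv2 and _writerecordsv2 above, here is a hedged standalone round trip using the struct module; the record values are made up and nothing here touches a real repository.

import struct

def pack_records(records):
    out = b''
    for rtype, data in records:
        # 1-byte type, 4-byte big-endian length, then the payload
        out += struct.pack('>sI%ds' % len(data), rtype, len(data), data)
    return out

def unpack_records(blob):
    records, off = [], 0
    while off < len(blob):
        rtype = blob[off:off + 1]
        length = struct.unpack('>I', blob[off + 1:off + 5])[0]
        records.append((rtype, blob[off + 5:off + 5 + length]))
        off += 5 + length
    return records

recs = [(b'L', b'0123456789abcdef' * 2 + b'01234567'),   # 40-char hex node
        (b'F', b'path/to/file\0u\0...')]
assert unpack_records(pack_records(recs)) == recs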
227 def add(self, fcl, fco, fca, fd):
227 def add(self, fcl, fco, fca, fd):
228 """add a new (potentially?) conflicting file the merge state
228 """add a new (potentially?) conflicting file the merge state
229 fcl: file context for local,
229 fcl: file context for local,
230 fco: file context for remote,
230 fco: file context for remote,
231 fca: file context for ancestors,
231 fca: file context for ancestors,
232 fd: file path of the resulting merge.
232 fd: file path of the resulting merge.
233
233
234 note: also write the local version to the `.hg/merge` directory.
234 note: also write the local version to the `.hg/merge` directory.
235 """
235 """
236 hash = util.sha1(fcl.path()).hexdigest()
236 hash = util.sha1(fcl.path()).hexdigest()
237 self._repo.opener.write("merge/" + hash, fcl.data())
237 self._repo.opener.write("merge/" + hash, fcl.data())
238 self._state[fd] = ['u', hash, fcl.path(),
238 self._state[fd] = ['u', hash, fcl.path(),
239 fca.path(), hex(fca.filenode()),
239 fca.path(), hex(fca.filenode()),
240 fco.path(), hex(fco.filenode()),
240 fco.path(), hex(fco.filenode()),
241 fcl.flags()]
241 fcl.flags()]
242 self._dirty = True
242 self._dirty = True
243
243
244 def __contains__(self, dfile):
244 def __contains__(self, dfile):
245 return dfile in self._state
245 return dfile in self._state
246
246
247 def __getitem__(self, dfile):
247 def __getitem__(self, dfile):
248 return self._state[dfile][0]
248 return self._state[dfile][0]
249
249
250 def __iter__(self):
250 def __iter__(self):
251 return iter(sorted(self._state))
251 return iter(sorted(self._state))
252
252
253 def files(self):
253 def files(self):
254 return self._state.keys()
254 return self._state.keys()
255
255
256 def mark(self, dfile, state):
256 def mark(self, dfile, state):
257 self._state[dfile][0] = state
257 self._state[dfile][0] = state
258 self._dirty = True
258 self._dirty = True
259
259
260 def unresolved(self):
260 def unresolved(self):
261 """Obtain the paths of unresolved files."""
261 """Obtain the paths of unresolved files."""
262
262
263 for f, entry in self._state.items():
263 for f, entry in self._state.items():
264 if entry[0] == 'u':
264 if entry[0] == 'u':
265 yield f
265 yield f
266
266
267 def resolve(self, dfile, wctx, labels=None):
267 def resolve(self, dfile, wctx, labels=None):
268 """rerun merge process for file path `dfile`"""
268 """rerun merge process for file path `dfile`"""
269 if self[dfile] == 'r':
269 if self[dfile] == 'r':
270 return 0
270 return 0
271 stateentry = self._state[dfile]
271 stateentry = self._state[dfile]
272 state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
272 state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
273 octx = self._repo[self._other]
273 octx = self._repo[self._other]
274 fcd = wctx[dfile]
274 fcd = wctx[dfile]
275 fco = octx[ofile]
275 fco = octx[ofile]
276 fca = self._repo.filectx(afile, fileid=anode)
276 fca = self._repo.filectx(afile, fileid=anode)
277 # "premerge" x flags
277 # "premerge" x flags
278 flo = fco.flags()
278 flo = fco.flags()
279 fla = fca.flags()
279 fla = fca.flags()
280 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
280 if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
281 if fca.node() == nullid:
281 if fca.node() == nullid:
282 self._repo.ui.warn(_('warning: cannot merge flags for %s\n') %
282 self._repo.ui.warn(_('warning: cannot merge flags for %s\n') %
283 afile)
283 afile)
284 elif flags == fla:
284 elif flags == fla:
285 flags = flo
285 flags = flo
286 # restore local
286 # restore local
287 f = self._repo.opener("merge/" + hash)
287 f = self._repo.opener("merge/" + hash)
288 self._repo.wwrite(dfile, f.read(), flags)
288 self._repo.wwrite(dfile, f.read(), flags)
289 f.close()
289 f.close()
290 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca,
290 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca,
291 labels=labels)
291 labels=labels)
292 if r is None:
292 if r is None:
293 # no real conflict
293 # no real conflict
294 del self._state[dfile]
294 del self._state[dfile]
295 self._dirty = True
295 self._dirty = True
296 elif not r:
296 elif not r:
297 self.mark(dfile, 'r')
297 self.mark(dfile, 'r')
298 return r
298 return r
299
299
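The "premerge x flags" step in resolve() above does a three-way merge of the executable bit: whichever side changed the flag relative to the ancestor wins, and the local value is kept when both sides changed it. A toy restatement of that rule (hypothetical helper; the symlink and missing-ancestor special cases are ignored here):

def merge_exec_flag(local, other, ancestor):
    """Three-way merge of the 'x' flag: whoever changed it wins;
    if both sides changed it, keep the local value."""
    if local == ancestor:
        return other
    return local

assert merge_exec_flag('', 'x', '') == 'x'    # only the other side added +x
assert merge_exec_flag('x', '', 'x') == ''    # only the other side removed it
assert merge_exec_flag('x', '', '') == 'x'    # both changed: local wins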
300 def _checkunknownfile(repo, wctx, mctx, f):
300 def _checkunknownfile(repo, wctx, mctx, f):
301 return (not repo.dirstate._ignore(f)
301 return (not repo.dirstate._ignore(f)
302 and os.path.isfile(repo.wjoin(f))
302 and os.path.isfile(repo.wjoin(f))
303 and repo.wopener.audit.check(f)
303 and repo.wopener.audit.check(f)
304 and repo.dirstate.normalize(f) not in repo.dirstate
304 and repo.dirstate.normalize(f) not in repo.dirstate
305 and mctx[f].cmp(wctx[f]))
305 and mctx[f].cmp(wctx[f]))
306
306
307 def _checkunknown(repo, wctx, mctx):
307 def _checkunknown(repo, wctx, mctx):
308 "check for collisions between unknown files and files in mctx"
308 "check for collisions between unknown files and files in mctx"
309
309
310 error = False
310 error = False
311 for f in mctx:
311 for f in mctx:
312 if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
312 if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
313 error = True
313 error = True
314 wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
314 wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
315 if error:
315 if error:
316 raise util.Abort(_("untracked files in working directory differ "
316 raise util.Abort(_("untracked files in working directory differ "
317 "from files in requested revision"))
317 "from files in requested revision"))
318
318
319 def _forgetremoved(wctx, mctx, branchmerge):
319 def _forgetremoved(wctx, mctx, branchmerge):
320 """
320 """
321 Forget removed files
321 Forget removed files
322
322
323 If we're jumping between revisions (as opposed to merging), and if
323 If we're jumping between revisions (as opposed to merging), and if
324 neither the working directory nor the target rev has the file,
324 neither the working directory nor the target rev has the file,
325 then we need to remove it from the dirstate, to prevent the
325 then we need to remove it from the dirstate, to prevent the
326 dirstate from listing the file when it is no longer in the
326 dirstate from listing the file when it is no longer in the
327 manifest.
327 manifest.
328
328
329 If we're merging, and the other revision has removed a file
329 If we're merging, and the other revision has removed a file
330 that is not present in the working directory, we need to mark it
330 that is not present in the working directory, we need to mark it
331 as removed.
331 as removed.
332 """
332 """
333
333
334 ractions = []
334 ractions = []
335 factions = xactions = []
335 factions = xactions = []
336 if branchmerge:
336 if branchmerge:
337 xactions = ractions
337 xactions = ractions
338 for f in wctx.deleted():
338 for f in wctx.deleted():
339 if f not in mctx:
339 if f not in mctx:
340 xactions.append((f, None, "forget deleted"))
340 xactions.append((f, None, "forget deleted"))
341
341
342 if not branchmerge:
342 if not branchmerge:
343 for f in wctx.removed():
343 for f in wctx.removed():
344 if f not in mctx:
344 if f not in mctx:
345 factions.append((f, None, "forget removed"))
345 factions.append((f, None, "forget removed"))
346
346
347 return ractions, factions
347 return ractions, factions
348
348
349 def _checkcollision(repo, wmf, actions):
349 def _checkcollision(repo, wmf, actions):
350 # build provisional merged manifest up
350 # build provisional merged manifest up
351 pmmf = set(wmf)
351 pmmf = set(wmf)
352
352
353 if actions:
353 if actions:
354 # k, dr, e and rd are no-op
354 # k, dr, e and rd are no-op
355 for m in 'a', 'f', 'g', 'cd', 'dc':
355 for m in 'a', 'f', 'g', 'cd', 'dc':
356 for f, args, msg in actions[m]:
356 for f, args, msg in actions[m]:
357 pmmf.add(f)
357 pmmf.add(f)
358 for f, args, msg in actions['r']:
358 for f, args, msg in actions['r']:
359 pmmf.discard(f)
359 pmmf.discard(f)
360 for f, args, msg in actions['dm']:
360 for f, args, msg in actions['dm']:
361 f2, flags = args
361 f2, flags = args
362 pmmf.discard(f2)
362 pmmf.discard(f2)
363 pmmf.add(f)
363 pmmf.add(f)
364 for f, args, msg in actions['dg']:
364 for f, args, msg in actions['dg']:
365 f2, flags = args
365 f2, flags = args
366 pmmf.add(f)
366 pmmf.add(f)
367 for f, args, msg in actions['m']:
367 for f, args, msg in actions['m']:
368 f1, f2, fa, move, anc = args
368 f1, f2, fa, move, anc = args
369 if move:
369 if move:
370 pmmf.discard(f1)
370 pmmf.discard(f1)
371 pmmf.add(f)
371 pmmf.add(f)
372
372
373 # check case-folding collision in provisional merged manifest
373 # check case-folding collision in provisional merged manifest
374 foldmap = {}
374 foldmap = {}
375 for f in sorted(pmmf):
375 for f in sorted(pmmf):
376 fold = util.normcase(f)
376 fold = util.normcase(f)
377 if fold in foldmap:
377 if fold in foldmap:
378 raise util.Abort(_("case-folding collision between %s and %s")
378 raise util.Abort(_("case-folding collision between %s and %s")
379 % (f, foldmap[fold]))
379 % (f, foldmap[fold]))
380 foldmap[fold] = f
380 foldmap[fold] = f
381
381
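_checkcollision above guards case-insensitive filesystems: after building the provisional merged manifest it refuses any two paths that normalize to the same case-folded key. A toy version of that check (plain lower() stands in for util.normcase, and the inputs are invented):

def find_collision(paths):
    foldmap = {}
    for f in sorted(paths):
        fold = f.lower()              # stand-in for util.normcase()
        if fold in foldmap:
            return foldmap[fold], f   # first colliding pair found
        foldmap[fold] = f
    return None

assert find_collision({'README', 'readme', 'setup.py'}) == ('README', 'readme')
assert find_collision({'a.txt', 'b.txt'}) is None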
382 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, partial,
382 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, partial,
383 acceptremote, followcopies):
383 acceptremote, followcopies):
384 """
384 """
385 Merge p1 and p2 with ancestor pa and generate merge action list
385 Merge p1 and p2 with ancestor pa and generate merge action list
386
386
387 branchmerge and force are as passed in to update
387 branchmerge and force are as passed in to update
388 partial = function to filter file lists
388 partial = function to filter file lists
389 acceptremote = accept the incoming changes without prompting
389 acceptremote = accept the incoming changes without prompting
390 """
390 """
391
391
392 actions = dict((m, []) for m in 'a f g cd dc r dm dg m dr e rd k'.split())
392 actions = dict((m, []) for m in 'a f g cd dc r dm dg m dr e rd k'.split())
393 copy, movewithdir = {}, {}
393 copy, movewithdir = {}, {}
394
394
395 # manifests fetched in order are going to be faster, so prime the caches
395 # manifests fetched in order are going to be faster, so prime the caches
396 [x.manifest() for x in
396 [x.manifest() for x in
397 sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
397 sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
398
398
399 if followcopies:
399 if followcopies:
400 ret = copies.mergecopies(repo, wctx, p2, pa)
400 ret = copies.mergecopies(repo, wctx, p2, pa)
401 copy, movewithdir, diverge, renamedelete = ret
401 copy, movewithdir, diverge, renamedelete = ret
402 for of, fl in diverge.iteritems():
402 for of, fl in diverge.iteritems():
403 actions['dr'].append((of, (fl,), "divergent renames"))
403 actions['dr'].append((of, (fl,), "divergent renames"))
404 for of, fl in renamedelete.iteritems():
404 for of, fl in renamedelete.iteritems():
405 actions['rd'].append((of, (fl,), "rename and delete"))
405 actions['rd'].append((of, (fl,), "rename and delete"))
406
406
407 repo.ui.note(_("resolving manifests\n"))
407 repo.ui.note(_("resolving manifests\n"))
408 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
408 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
409 % (bool(branchmerge), bool(force), bool(partial)))
409 % (bool(branchmerge), bool(force), bool(partial)))
410 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
410 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
411
411
412 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
412 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
413 copied = set(copy.values())
413 copied = set(copy.values())
414 copied.update(movewithdir.values())
414 copied.update(movewithdir.values())
415
415
416 if '.hgsubstate' in m1:
416 if '.hgsubstate' in m1:
417 # check whether sub state is modified
417 # check whether sub state is modified
418 for s in sorted(wctx.substate):
418 for s in sorted(wctx.substate):
419 if wctx.sub(s).dirty():
419 if wctx.sub(s).dirty():
420 m1['.hgsubstate'] += "+"
420 m1['.hgsubstate'] += "+"
421 break
421 break
422
422
423 aborts = []
423 aborts = []
424 # Compare manifests
424 # Compare manifests
425 fdiff = dicthelpers.diff(m1, m2)
425 fdiff = dicthelpers.diff(m1, m2)
426 flagsdiff = m1.flagsdiff(m2)
426 flagsdiff = m1.flagsdiff(m2)
427 diff12 = dicthelpers.join(fdiff, flagsdiff)
427 diff12 = dicthelpers.join(fdiff, flagsdiff)
428
428
429 for f, (n12, fl12) in diff12.iteritems():
429 for f, (n12, fl12) in diff12.iteritems():
430 if n12:
430 if n12:
431 n1, n2 = n12
431 n1, n2 = n12
432 else: # file contents didn't change, but flags did
432 else: # file contents didn't change, but flags did
433 n1 = n2 = m1.get(f, None)
433 n1 = n2 = m1.get(f, None)
434 if n1 is None:
434 if n1 is None:
435 # Since n1 == n2, the file isn't present in m2 either. This
435 # Since n1 == n2, the file isn't present in m2 either. This
436 # means that the file was removed or deleted locally and
436 # means that the file was removed or deleted locally and
437 # removed remotely, but that residual entries remain in flags.
437 # removed remotely, but that residual entries remain in flags.
438 # This can happen in manifests generated by workingctx.
438 # This can happen in manifests generated by workingctx.
439 continue
439 continue
440 if fl12:
440 if fl12:
441 fl1, fl2 = fl12
441 fl1, fl2 = fl12
442 else: # flags didn't change, file contents did
442 else: # flags didn't change, file contents did
443 fl1 = fl2 = m1.flags(f)
443 fl1 = fl2 = m1.flags(f)
444
444
445 if partial and not partial(f):
445 if partial and not partial(f):
446 continue
446 continue
447 if n1 and n2:
447 if n1 and n2:
448 fa = f
448 fa = f
449 a = ma.get(f, nullid)
449 a = ma.get(f, nullid)
450 if a == nullid:
450 if a == nullid:
451 fa = copy.get(f, f)
451 fa = copy.get(f, f)
452 # Note: f as default is wrong - we can't really make a 3-way
452 # Note: f as default is wrong - we can't really make a 3-way
453 # merge without an ancestor file.
453 # merge without an ancestor file.
454 fla = ma.flags(fa)
454 fla = ma.flags(fa)
455 nol = 'l' not in fl1 + fl2 + fla
455 nol = 'l' not in fl1 + fl2 + fla
456 if n2 == a and fl2 == fla:
456 if n2 == a and fl2 == fla:
457 actions['k'].append((f, (), "keep")) # remote unchanged
457 actions['k'].append((f, (), "keep")) # remote unchanged
458 elif n1 == a and fl1 == fla: # local unchanged - use remote
458 elif n1 == a and fl1 == fla: # local unchanged - use remote
459 if n1 == n2: # optimization: keep local content
459 if n1 == n2: # optimization: keep local content
460 actions['e'].append((f, (fl2,), "update permissions"))
460 actions['e'].append((f, (fl2,), "update permissions"))
461 else:
461 else:
462 actions['g'].append((f, (fl2,), "remote is newer"))
462 actions['g'].append((f, (fl2,), "remote is newer"))
463 elif nol and n2 == a: # remote only changed 'x'
463 elif nol and n2 == a: # remote only changed 'x'
464 actions['e'].append((f, (fl2,), "update permissions"))
464 actions['e'].append((f, (fl2,), "update permissions"))
465 elif nol and n1 == a: # local only changed 'x'
465 elif nol and n1 == a: # local only changed 'x'
466 actions['g'].append((f, (fl1,), "remote is newer"))
466 actions['g'].append((f, (fl1,), "remote is newer"))
467 else: # both changed something
467 else: # both changed something
468 actions['m'].append((f, (f, f, fa, False, pa.node()),
468 actions['m'].append((f, (f, f, fa, False, pa.node()),
469 "versions differ"))
469 "versions differ"))
470 elif f in copied: # files we'll deal with on m2 side
470 elif f in copied: # files we'll deal with on m2 side
471 pass
471 pass
472 elif n1 and f in movewithdir: # directory rename, move local
472 elif n1 and f in movewithdir: # directory rename, move local
473 f2 = movewithdir[f]
473 f2 = movewithdir[f]
474 actions['dm'].append((f2, (f, fl1),
474 actions['dm'].append((f2, (f, fl1),
475 "remote directory rename - move from " + f))
475 "remote directory rename - move from " + f))
476 elif n1 and f in copy:
476 elif n1 and f in copy:
477 f2 = copy[f]
477 f2 = copy[f]
478 actions['m'].append((f, (f, f2, f2, False, pa.node()),
478 actions['m'].append((f, (f, f2, f2, False, pa.node()),
479 "local copied/moved from " + f2))
479 "local copied/moved from " + f2))
480 elif n1 and f in ma: # clean, a different, no remote
480 elif n1 and f in ma: # clean, a different, no remote
481 if n1 != ma[f]:
481 if n1 != ma[f]:
482 if acceptremote:
482 if acceptremote:
483 actions['r'].append((f, None, "remote delete"))
483 actions['r'].append((f, None, "remote delete"))
484 else:
484 else:
485 actions['cd'].append((f, None, "prompt changed/deleted"))
485 actions['cd'].append((f, None, "prompt changed/deleted"))
486 elif n1[20:] == "a": # added, no remote
486 elif n1[20:] == "a": # added, no remote
487 actions['f'].append((f, None, "remote deleted"))
487 actions['f'].append((f, None, "remote deleted"))
488 else:
488 else:
489 actions['r'].append((f, None, "other deleted"))
489 actions['r'].append((f, None, "other deleted"))
490 elif n2 and f in movewithdir:
490 elif n2 and f in movewithdir:
491 f2 = movewithdir[f]
491 f2 = movewithdir[f]
492 actions['dg'].append((f2, (f, fl2),
492 actions['dg'].append((f2, (f, fl2),
493 "local directory rename - get from " + f))
493 "local directory rename - get from " + f))
494 elif n2 and f in copy:
494 elif n2 and f in copy:
495 f2 = copy[f]
495 f2 = copy[f]
496 if f2 in m2:
496 if f2 in m2:
497 actions['m'].append((f, (f2, f, f2, False, pa.node()),
497 actions['m'].append((f, (f2, f, f2, False, pa.node()),
498 "remote copied from " + f2))
498 "remote copied from " + f2))
499 else:
499 else:
500 actions['m'].append((f, (f2, f, f2, True, pa.node()),
500 actions['m'].append((f, (f2, f, f2, True, pa.node()),
501 "remote moved from " + f2))
501 "remote moved from " + f2))
502 elif n2 and f not in ma:
502 elif n2 and f not in ma:
503 # local unknown, remote created: the logic is described by the
503 # local unknown, remote created: the logic is described by the
504 # following table:
504 # following table:
505 #
505 #
506 # force branchmerge different | action
506 # force branchmerge different | action
507 # n * n | get
507 # n * n | get
508 # n * y | abort
508 # n * y | abort
509 # y n * | get
509 # y n * | get
510 # y y n | get
510 # y y n | get
511 # y y y | merge
511 # y y y | merge
512 #
512 #
513 # Checking whether the files are different is expensive, so we
513 # Checking whether the files are different is expensive, so we
514 # don't do that when we can avoid it.
514 # don't do that when we can avoid it.
515 if force and not branchmerge:
515 if force and not branchmerge:
516 actions['g'].append((f, (fl2,), "remote created"))
516 actions['g'].append((f, (fl2,), "remote created"))
517 else:
517 else:
518 different = _checkunknownfile(repo, wctx, p2, f)
518 different = _checkunknownfile(repo, wctx, p2, f)
519 if force and branchmerge and different:
519 if force and branchmerge and different:
520 # FIXME: This is wrong - f is not in ma ...
520 # FIXME: This is wrong - f is not in ma ...
521 actions['m'].append((f, (f, f, f, False, pa.node()),
521 actions['m'].append((f, (f, f, f, False, pa.node()),
522 "remote differs from untracked local"))
522 "remote differs from untracked local"))
523 elif not force and different:
523 elif not force and different:
524 aborts.append((f, "ud"))
524 aborts.append((f, "ud"))
525 else:
525 else:
526 actions['g'].append((f, (fl2,), "remote created"))
526 actions['g'].append((f, (fl2,), "remote created"))
527 elif n2 and n2 != ma[f]:
527 elif n2 and n2 != ma[f]:
528 different = _checkunknownfile(repo, wctx, p2, f)
528 different = _checkunknownfile(repo, wctx, p2, f)
529 if not force and different:
529 if not force and different:
530 aborts.append((f, "ud"))
530 aborts.append((f, "ud"))
531 else:
531 else:
532 # if different: old untracked f may be overwritten and lost
532 # if different: old untracked f may be overwritten and lost
533 if acceptremote:
533 if acceptremote:
534 actions['g'].append((f, (m2.flags(f),),
534 actions['g'].append((f, (m2.flags(f),),
535 "remote recreating"))
535 "remote recreating"))
536 else:
536 else:
537 actions['dc'].append((f, (m2.flags(f),),
537 actions['dc'].append((f, (m2.flags(f),),
538 "prompt deleted/changed"))
538 "prompt deleted/changed"))
539
539
540 for f, m in sorted(aborts):
540 for f, m in sorted(aborts):
541 if m == "ud":
541 if m == "ud":
542 repo.ui.warn(_("%s: untracked file differs\n") % f)
542 repo.ui.warn(_("%s: untracked file differs\n") % f)
543 else: assert False, m
543 else: assert False, m
544 if aborts:
544 if aborts:
545 raise util.Abort(_("untracked files in working directory differ "
545 raise util.Abort(_("untracked files in working directory differ "
546 "from files in requested revision"))
546 "from files in requested revision"))
547
547
548 if not util.checkcase(repo.path):
548 if not util.checkcase(repo.path):
549 # check collision between files only in p2 for clean update
549 # check collision between files only in p2 for clean update
550 if (not branchmerge and
550 if (not branchmerge and
551 (force or not wctx.dirty(missing=True, branch=False))):
551 (force or not wctx.dirty(missing=True, branch=False))):
552 _checkcollision(repo, m2, None)
552 _checkcollision(repo, m2, None)
553 else:
553 else:
554 _checkcollision(repo, m1, actions)
554 _checkcollision(repo, m1, actions)
555
555
556 return actions
556 return actions
557
557
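The "local unknown, remote created" table inside manifestmerge above compresses to a tiny decision function; this sketch just restates that table (a hypothetical helper, not part of the module) so the branch order is easier to follow.

def unknown_file_action(force, branchmerge, different):
    """Mirror of the force/branchmerge/different table in manifestmerge."""
    if force and not branchmerge:
        return 'get'                      # y n * -> get
    if not force and different:
        return 'abort'                    # n * y -> abort (untracked differs)
    if force and branchmerge and different:
        return 'merge'                    # y y y -> merge
    return 'get'                          # remaining rows -> get

assert unknown_file_action(False, False, False) == 'get'
assert unknown_file_action(False, True, True) == 'abort'
assert unknown_file_action(True, True, False) == 'get'
assert unknown_file_action(True, True, True) == 'merge'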
558 def batchremove(repo, actions):
558 def batchremove(repo, actions):
559 """apply removes to the working directory
559 """apply removes to the working directory
560
560
561 yields tuples for progress updates
561 yields tuples for progress updates
562 """
562 """
563 verbose = repo.ui.verbose
563 verbose = repo.ui.verbose
564 unlink = util.unlinkpath
564 unlink = util.unlinkpath
565 wjoin = repo.wjoin
565 wjoin = repo.wjoin
566 audit = repo.wopener.audit
566 audit = repo.wopener.audit
567 i = 0
567 i = 0
568 for f, args, msg in actions:
568 for f, args, msg in actions:
569 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
569 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
570 if verbose:
570 if verbose:
571 repo.ui.note(_("removing %s\n") % f)
571 repo.ui.note(_("removing %s\n") % f)
572 audit(f)
572 audit(f)
573 try:
573 try:
574 unlink(wjoin(f), ignoremissing=True)
574 unlink(wjoin(f), ignoremissing=True)
575 except OSError, inst:
575 except OSError, inst:
576 repo.ui.warn(_("update failed to remove %s: %s!\n") %
576 repo.ui.warn(_("update failed to remove %s: %s!\n") %
577 (f, inst.strerror))
577 (f, inst.strerror))
578 if i == 100:
578 if i == 100:
579 yield i, f
579 yield i, f
580 i = 0
580 i = 0
581 i += 1
581 i += 1
582 if i > 0:
582 if i > 0:
583 yield i, f
583 yield i, f
584
584
585 def batchget(repo, mctx, actions):
585 def batchget(repo, mctx, actions):
586 """apply gets to the working directory
586 """apply gets to the working directory
587
587
588 mctx is the context to get from
588 mctx is the context to get from
589
589
590 yields tuples for progress updates
590 yields tuples for progress updates
591 """
591 """
592 verbose = repo.ui.verbose
592 verbose = repo.ui.verbose
593 fctx = mctx.filectx
593 fctx = mctx.filectx
594 wwrite = repo.wwrite
594 wwrite = repo.wwrite
595 i = 0
595 i = 0
596 for f, args, msg in actions:
596 for f, args, msg in actions:
597 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
597 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
598 if verbose:
598 if verbose:
599 repo.ui.note(_("getting %s\n") % f)
599 repo.ui.note(_("getting %s\n") % f)
600 wwrite(f, fctx(f).data(), args[0])
600 wwrite(f, fctx(f).data(), args[0])
601 if i == 100:
601 if i == 100:
602 yield i, f
602 yield i, f
603 i = 0
603 i = 0
604 i += 1
604 i += 1
605 if i > 0:
605 if i > 0:
606 yield i, f
606 yield i, f
607
607
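batchremove and batchget above share a small batching idiom: do the per-file work immediately, but only yield a progress tuple roughly every 100 files plus a final flush, so worker processes do not flood the UI. A standalone sketch of the same pattern (generic items instead of merge actions):

def batched_progress(items, chunk=100):
    i = 0
    item = None
    for item in items:
        # ... the real per-item work would happen here ...
        if i == chunk:
            yield i, item   # report one full chunk
            i = 0
        i += 1
    if i > 0:
        yield i, item       # flush whatever is left

assert [n for n, _ in batched_progress(range(250))] == [100, 100, 50]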
608 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
608 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
609 """apply the merge action list to the working directory
609 """apply the merge action list to the working directory
610
610
611 wctx is the working copy context
611 wctx is the working copy context
612 mctx is the context to be merged into the working copy
612 mctx is the context to be merged into the working copy
613
613
614 Return a tuple of counts (updated, merged, removed, unresolved) that
614 Return a tuple of counts (updated, merged, removed, unresolved) that
615 describes how many files were affected by the update.
615 describes how many files were affected by the update.
616 """
616 """
617
617
618 updated, merged, removed, unresolved = 0, 0, 0, 0
618 updated, merged, removed, unresolved = 0, 0, 0, 0
619 ms = mergestate(repo)
619 ms = mergestate(repo)
620 ms.reset(wctx.p1().node(), mctx.node())
620 ms.reset(wctx.p1().node(), mctx.node())
621 moves = []
621 moves = []
622 for m, l in actions.items():
622 for m, l in actions.items():
623 l.sort()
623 l.sort()
624
624
625 # prescan for merges
625 # prescan for merges
626 for f, args, msg in actions['m']:
626 for f, args, msg in actions['m']:
627 f1, f2, fa, move, anc = args
627 f1, f2, fa, move, anc = args
628 if f == '.hgsubstate': # merged internally
628 if f == '.hgsubstate': # merged internally
629 continue
629 continue
630 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
630 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
631 fcl = wctx[f1]
631 fcl = wctx[f1]
632 fco = mctx[f2]
632 fco = mctx[f2]
633 actx = repo[anc]
633 actx = repo[anc]
634 if fa in actx:
634 if fa in actx:
635 fca = actx[fa]
635 fca = actx[fa]
636 else:
636 else:
637 fca = repo.filectx(f1, fileid=nullrev)
637 fca = repo.filectx(f1, fileid=nullrev)
638 ms.add(fcl, fco, fca, f)
638 ms.add(fcl, fco, fca, f)
639 if f1 != f and move:
639 if f1 != f and move:
640 moves.append(f1)
640 moves.append(f1)
641
641
642 audit = repo.wopener.audit
642 audit = repo.wopener.audit
643 _updating = _('updating')
643 _updating = _('updating')
644 _files = _('files')
644 _files = _('files')
645 progress = repo.ui.progress
645 progress = repo.ui.progress
646
646
647 # remove renamed files after safely stored
647 # remove renamed files after safely stored
648 for f in moves:
648 for f in moves:
649 if os.path.lexists(repo.wjoin(f)):
649 if os.path.lexists(repo.wjoin(f)):
650 repo.ui.debug("removing %s\n" % f)
650 repo.ui.debug("removing %s\n" % f)
651 audit(f)
651 audit(f)
652 util.unlinkpath(repo.wjoin(f))
652 util.unlinkpath(repo.wjoin(f))
653
653
654 numupdates = sum(len(l) for m, l in actions.items() if m != 'k')
654 numupdates = sum(len(l) for m, l in actions.items() if m != 'k')
655
655
656 if [a for a in actions['r'] if a[0] == '.hgsubstate']:
656 if [a for a in actions['r'] if a[0] == '.hgsubstate']:
657 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
657 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
658
658
659 # remove in parallel (must come first)
659 # remove in parallel (must come first)
660 z = 0
660 z = 0
661 prog = worker.worker(repo.ui, 0.001, batchremove, (repo,), actions['r'])
661 prog = worker.worker(repo.ui, 0.001, batchremove, (repo,), actions['r'])
662 for i, item in prog:
662 for i, item in prog:
663 z += i
663 z += i
664 progress(_updating, z, item=item, total=numupdates, unit=_files)
664 progress(_updating, z, item=item, total=numupdates, unit=_files)
665 removed = len(actions['r'])
665 removed = len(actions['r'])
666
666
667 # get in parallel
667 # get in parallel
668 prog = worker.worker(repo.ui, 0.001, batchget, (repo, mctx), actions['g'])
668 prog = worker.worker(repo.ui, 0.001, batchget, (repo, mctx), actions['g'])
669 for i, item in prog:
669 for i, item in prog:
670 z += i
670 z += i
671 progress(_updating, z, item=item, total=numupdates, unit=_files)
671 progress(_updating, z, item=item, total=numupdates, unit=_files)
672 updated = len(actions['g'])
672 updated = len(actions['g'])
673
673
674 if [a for a in actions['g'] if a[0] == '.hgsubstate']:
674 if [a for a in actions['g'] if a[0] == '.hgsubstate']:
675 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
675 subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
676
676
677 # forget (manifest only, just log it) (must come first)
677 # forget (manifest only, just log it) (must come first)
678 for f, args, msg in actions['f']:
678 for f, args, msg in actions['f']:
679 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
679 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
680 z += 1
680 z += 1
681 progress(_updating, z, item=f, total=numupdates, unit=_files)
681 progress(_updating, z, item=f, total=numupdates, unit=_files)
682
682
683 # re-add (manifest only, just log it)
683 # re-add (manifest only, just log it)
684 for f, args, msg in actions['a']:
684 for f, args, msg in actions['a']:
685 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
685 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
686 z += 1
686 z += 1
687 progress(_updating, z, item=f, total=numupdates, unit=_files)
687 progress(_updating, z, item=f, total=numupdates, unit=_files)
688
688
689 # keep (noop, just log it)
689 # keep (noop, just log it)
690 for f, args, msg in actions['k']:
690 for f, args, msg in actions['k']:
691 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
691 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
692 # no progress
692 # no progress
693
693
694 # merge
694 # merge
695 for f, args, msg in actions['m']:
695 for f, args, msg in actions['m']:
696 repo.ui.debug(" %s: %s -> m\n" % (f, msg))
696 repo.ui.debug(" %s: %s -> m\n" % (f, msg))
697 z += 1
697 z += 1
698 progress(_updating, z, item=f, total=numupdates, unit=_files)
698 progress(_updating, z, item=f, total=numupdates, unit=_files)
699 f1, f2, fa, move, anc = args
699 f1, f2, fa, move, anc = args
700 if f == '.hgsubstate': # subrepo states need updating
700 if f == '.hgsubstate': # subrepo states need updating
701 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
701 subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
702 overwrite)
702 overwrite)
703 continue
703 continue
704 audit(f)
704 audit(f)
705 r = ms.resolve(f, wctx, labels=labels)
705 r = ms.resolve(f, wctx, labels=labels)
706 if r is not None and r > 0:
706 if r is not None and r > 0:
707 unresolved += 1
707 unresolved += 1
708 else:
708 else:
709 if r is None:
709 if r is None:
710 updated += 1
710 updated += 1
711 else:
711 else:
712 merged += 1
712 merged += 1
713
713
714 # directory rename, move local
714 # directory rename, move local
715 for f, args, msg in actions['dm']:
715 for f, args, msg in actions['dm']:
716 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
716 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
717 z += 1
717 z += 1
718 progress(_updating, z, item=f, total=numupdates, unit=_files)
718 progress(_updating, z, item=f, total=numupdates, unit=_files)
719 f0, flags = args
719 f0, flags = args
720 repo.ui.note(_("moving %s to %s\n") % (f0, f))
720 repo.ui.note(_("moving %s to %s\n") % (f0, f))
721 audit(f)
721 audit(f)
722 repo.wwrite(f, wctx.filectx(f0).data(), flags)
722 repo.wwrite(f, wctx.filectx(f0).data(), flags)
723 util.unlinkpath(repo.wjoin(f0))
723 util.unlinkpath(repo.wjoin(f0))
724 updated += 1
724 updated += 1
725
725
726 # local directory rename, get
726 # local directory rename, get
727 for f, args, msg in actions['dg']:
727 for f, args, msg in actions['dg']:
728 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
728 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
729 z += 1
729 z += 1
730 progress(_updating, z, item=f, total=numupdates, unit=_files)
730 progress(_updating, z, item=f, total=numupdates, unit=_files)
731 f0, flags = args
731 f0, flags = args
732 repo.ui.note(_("getting %s to %s\n") % (f0, f))
732 repo.ui.note(_("getting %s to %s\n") % (f0, f))
733 repo.wwrite(f, mctx.filectx(f0).data(), flags)
733 repo.wwrite(f, mctx.filectx(f0).data(), flags)
734 updated += 1
734 updated += 1
735
735
736 # divergent renames
736 # divergent renames
737 for f, args, msg in actions['dr']:
737 for f, args, msg in actions['dr']:
738 repo.ui.debug(" %s: %s -> dr\n" % (f, msg))
738 repo.ui.debug(" %s: %s -> dr\n" % (f, msg))
739 z += 1
739 z += 1
740 progress(_updating, z, item=f, total=numupdates, unit=_files)
740 progress(_updating, z, item=f, total=numupdates, unit=_files)
741 fl, = args
741 fl, = args
742 repo.ui.warn(_("note: possible conflict - %s was renamed "
742 repo.ui.warn(_("note: possible conflict - %s was renamed "
743 "multiple times to:\n") % f)
743 "multiple times to:\n") % f)
744 for nf in fl:
744 for nf in fl:
745 repo.ui.warn(" %s\n" % nf)
745 repo.ui.warn(" %s\n" % nf)
746
746
747 # rename and delete
747 # rename and delete
748 for f, args, msg in actions['rd']:
748 for f, args, msg in actions['rd']:
749 repo.ui.debug(" %s: %s -> rd\n" % (f, msg))
749 repo.ui.debug(" %s: %s -> rd\n" % (f, msg))
750 z += 1
750 z += 1
751 progress(_updating, z, item=f, total=numupdates, unit=_files)
751 progress(_updating, z, item=f, total=numupdates, unit=_files)
752 fl, = args
752 fl, = args
753 repo.ui.warn(_("note: possible conflict - %s was deleted "
753 repo.ui.warn(_("note: possible conflict - %s was deleted "
754 "and renamed to:\n") % f)
754 "and renamed to:\n") % f)
755 for nf in fl:
755 for nf in fl:
756 repo.ui.warn(" %s\n" % nf)
756 repo.ui.warn(" %s\n" % nf)
757
757
758 # exec
758 # exec
759 for f, args, msg in actions['e']:
759 for f, args, msg in actions['e']:
760 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
760 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
761 z += 1
761 z += 1
762 progress(_updating, z, item=f, total=numupdates, unit=_files)
762 progress(_updating, z, item=f, total=numupdates, unit=_files)
763 flags, = args
763 flags, = args
764 audit(f)
764 audit(f)
765 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
765 util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
766 updated += 1
766 updated += 1
767
767
768 ms.commit()
768 ms.commit()
769 progress(_updating, None, total=numupdates, unit=_files)
769 progress(_updating, None, total=numupdates, unit=_files)
770
770
771 return updated, merged, removed, unresolved
771 return updated, merged, removed, unresolved
772
772
773 def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial,
773 def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, partial,
774 acceptremote, followcopies):
774 acceptremote, followcopies):
775 "Calculate the actions needed to merge mctx into wctx using ancestors"
775 "Calculate the actions needed to merge mctx into wctx using ancestors"
776
776
777 if len(ancestors) == 1: # default
777 if len(ancestors) == 1: # default
778 actions = manifestmerge(repo, wctx, mctx, ancestors[0],
778 actions = manifestmerge(repo, wctx, mctx, ancestors[0],
779 branchmerge, force,
779 branchmerge, force,
780 partial, acceptremote, followcopies)
780 partial, acceptremote, followcopies)
781
781
782 else: # only when merge.preferancestor=* - experimentalish code
782 else: # only when merge.preferancestor=* - experimentalish code
783 repo.ui.status(
783 repo.ui.status(
784 _("note: merging %s and %s using bids from ancestors %s\n") %
784 _("note: merging %s and %s using bids from ancestors %s\n") %
785 (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))
785 (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors)))
786
786
787 # Call for bids
787 # Call for bids
788 fbids = {} # mapping filename to bids (action method to list of actions)
788 fbids = {} # mapping filename to bids (action method to list of actions)
789 for ancestor in ancestors:
789 for ancestor in ancestors:
790 repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
790 repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
791 actions = manifestmerge(repo, wctx, mctx, ancestor,
791 actions = manifestmerge(repo, wctx, mctx, ancestor,
792 branchmerge, force,
792 branchmerge, force,
793 partial, acceptremote, followcopies)
793 partial, acceptremote, followcopies)
794 for m, l in sorted(actions.items()):
794 for m, l in sorted(actions.items()):
795 for a in l:
795 for a in l:
796 f, args, msg = a
796 f, args, msg = a
797 repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
797 repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
798 if f in fbids:
798 if f in fbids:
799 d = fbids[f]
799 d = fbids[f]
800 if m in d:
800 if m in d:
801 d[m].append(a)
801 d[m].append(a)
802 else:
802 else:
803 d[m] = [a]
803 d[m] = [a]
804 else:
804 else:
805 fbids[f] = {m: [a]}
805 fbids[f] = {m: [a]}
806
806
807 # Pick the best bid for each file
807 # Pick the best bid for each file
808 repo.ui.note(_('\nauction for merging merge bids\n'))
808 repo.ui.note(_('\nauction for merging merge bids\n'))
809 actions = dict((m, []) for m in actions.keys())
809 actions = dict((m, []) for m in actions.keys())
810 for f, bids in sorted(fbids.items()):
810 for f, bids in sorted(fbids.items()):
811 # bids is a mapping from action method to list of actions
811 # bids is a mapping from action method to list of actions
812 # Consensus?
812 # Consensus?
813 if len(bids) == 1: # all bids are the same kind of method
813 if len(bids) == 1: # all bids are the same kind of method
814 m, l = bids.items()[0]
814 m, l = bids.items()[0]
815 if util.all(a == l[0] for a in l[1:]): # len(bids) is > 1
815 if util.all(a == l[0] for a in l[1:]): # len(bids) is > 1
816 repo.ui.note(" %s: consensus for %s\n" % (f, m))
816 repo.ui.note(" %s: consensus for %s\n" % (f, m))
817 actions[m].append(l[0])
817 actions[m].append(l[0])
818 continue
818 continue
819 # If keep is an option, just do it.
819 # If keep is an option, just do it.
820 if "k" in bids:
820 if "k" in bids:
821 repo.ui.note(" %s: picking 'keep' action\n" % f)
821 repo.ui.note(" %s: picking 'keep' action\n" % f)
822 actions['k'].append(bids["k"][0])
822 actions['k'].append(bids["k"][0])
823 continue
823 continue
824 # If there are gets and they all agree [how could they not?], do it.
824 # If there are gets and they all agree [how could they not?], do it.
825 if "g" in bids:
825 if "g" in bids:
826 ga0 = bids["g"][0]
826 ga0 = bids["g"][0]
827 if util.all(a == ga0 for a in bids["g"][1:]):
827 if util.all(a == ga0 for a in bids["g"][1:]):
828 repo.ui.note(" %s: picking 'get' action\n" % f)
828 repo.ui.note(" %s: picking 'get' action\n" % f)
829 actions['g'].append(ga0)
829 actions['g'].append(ga0)
830 continue
830 continue
831 # TODO: Consider other simple actions such as mode changes
831 # TODO: Consider other simple actions such as mode changes
832 # Handle inefficient democrazy.
832 # Handle inefficient democrazy.
833 repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
833 repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
834 for m, l in sorted(bids.items()):
834 for m, l in sorted(bids.items()):
835 for _f, args, msg in l:
835 for _f, args, msg in l:
836 repo.ui.note(' %s -> %s\n' % (msg, m))
836 repo.ui.note(' %s -> %s\n' % (msg, m))
837 # Pick random action. TODO: Instead, prompt user when resolving
837 # Pick random action. TODO: Instead, prompt user when resolving
838 m, l = bids.items()[0]
838 m, l = bids.items()[0]
839 repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
839 repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
840 (f, m))
840 (f, m))
841 actions[m].append(l[0])
841 actions[m].append(l[0])
842 continue
842 continue
843 repo.ui.note(_('end of auction\n\n'))
843 repo.ui.note(_('end of auction\n\n'))
844
844
845 # Prompt and create actions. TODO: Move this towards resolve phase.
845 # Prompt and create actions. TODO: Move this towards resolve phase.
846 for f, args, msg in actions['cd']:
846 for f, args, msg in actions['cd']:
847 if repo.ui.promptchoice(
847 if repo.ui.promptchoice(
848 _("local changed %s which remote deleted\n"
848 _("local changed %s which remote deleted\n"
849 "use (c)hanged version or (d)elete?"
849 "use (c)hanged version or (d)elete?"
850 "$$ &Changed $$ &Delete") % f, 0):
850 "$$ &Changed $$ &Delete") % f, 0):
851 actions['r'].append((f, None, "prompt delete"))
851 actions['r'].append((f, None, "prompt delete"))
852 else:
852 else:
853 actions['a'].append((f, None, "prompt keep"))
853 actions['a'].append((f, None, "prompt keep"))
854 del actions['cd'][:]
854 del actions['cd'][:]
855
855
856 for f, args, msg in actions['dc']:
856 for f, args, msg in actions['dc']:
857 flags, = args
857 flags, = args
858 if repo.ui.promptchoice(
858 if repo.ui.promptchoice(
859 _("remote changed %s which local deleted\n"
859 _("remote changed %s which local deleted\n"
860 "use (c)hanged version or leave (d)eleted?"
860 "use (c)hanged version or leave (d)eleted?"
861 "$$ &Changed $$ &Deleted") % f, 0) == 0:
861 "$$ &Changed $$ &Deleted") % f, 0) == 0:
862 actions['g'].append((f, (flags,), "prompt recreating"))
862 actions['g'].append((f, (flags,), "prompt recreating"))
863 del actions['dc'][:]
863 del actions['dc'][:]
864
864
865 if wctx.rev() is None:
865 if wctx.rev() is None:
866 ractions, factions = _forgetremoved(wctx, mctx, branchmerge)
866 ractions, factions = _forgetremoved(wctx, mctx, branchmerge)
867 actions['r'].extend(ractions)
867 actions['r'].extend(ractions)
868 actions['f'].extend(factions)
868 actions['f'].extend(factions)
869
869
870 return actions
870 return actions
871
871
872 def recordupdates(repo, actions, branchmerge):
872 def recordupdates(repo, actions, branchmerge):
873 "record merge actions to the dirstate"
873 "record merge actions to the dirstate"
874 # remove (must come first)
874 # remove (must come first)
875 for f, args, msg in actions['r']:
875 for f, args, msg in actions['r']:
876 if branchmerge:
876 if branchmerge:
877 repo.dirstate.remove(f)
877 repo.dirstate.remove(f)
878 else:
878 else:
879 repo.dirstate.drop(f)
879 repo.dirstate.drop(f)
880
880
881 # forget (must come first)
881 # forget (must come first)
882 for f, args, msg in actions['f']:
882 for f, args, msg in actions['f']:
883 repo.dirstate.drop(f)
883 repo.dirstate.drop(f)
884
884
885 # re-add
885 # re-add
886 for f, args, msg in actions['a']:
886 for f, args, msg in actions['a']:
887 if not branchmerge:
887 if not branchmerge:
888 repo.dirstate.add(f)
888 repo.dirstate.add(f)
889
889
890 # exec change
890 # exec change
891 for f, args, msg in actions['e']:
891 for f, args, msg in actions['e']:
892 repo.dirstate.normallookup(f)
892 repo.dirstate.normallookup(f)
893
893
894 # keep
894 # keep
895 for f, args, msg in actions['k']:
895 for f, args, msg in actions['k']:
896 pass
896 pass
897
897
898 # get
898 # get
899 for f, args, msg in actions['g']:
899 for f, args, msg in actions['g']:
900 if branchmerge:
900 if branchmerge:
901 repo.dirstate.otherparent(f)
901 repo.dirstate.otherparent(f)
902 else:
902 else:
903 repo.dirstate.normal(f)
903 repo.dirstate.normal(f)
904
904
905 # merge
905 # merge
906 for f, args, msg in actions['m']:
906 for f, args, msg in actions['m']:
907 f1, f2, fa, move, anc = args
907 f1, f2, fa, move, anc = args
908 if branchmerge:
908 if branchmerge:
909 # We've done a branch merge, mark this file as merged
909 # We've done a branch merge, mark this file as merged
910 # so that we properly record the merger later
910 # so that we properly record the merger later
911 repo.dirstate.merge(f)
911 repo.dirstate.merge(f)
912 if f1 != f2: # copy/rename
912 if f1 != f2: # copy/rename
913 if move:
913 if move:
914 repo.dirstate.remove(f1)
914 repo.dirstate.remove(f1)
915 if f1 != f:
915 if f1 != f:
916 repo.dirstate.copy(f1, f)
916 repo.dirstate.copy(f1, f)
917 else:
917 else:
918 repo.dirstate.copy(f2, f)
918 repo.dirstate.copy(f2, f)
919 else:
919 else:
920 # We've update-merged a locally modified file, so
920 # We've update-merged a locally modified file, so
921 # we set the dirstate to emulate a normal checkout
921 # we set the dirstate to emulate a normal checkout
922 # of that file some time in the past. Thus our
922 # of that file some time in the past. Thus our
923 # merge will appear as a normal local file
923 # merge will appear as a normal local file
924 # modification.
924 # modification.
925 if f2 == f: # file not locally copied/moved
925 if f2 == f: # file not locally copied/moved
926 repo.dirstate.normallookup(f)
926 repo.dirstate.normallookup(f)
927 if move:
927 if move:
928 repo.dirstate.drop(f1)
928 repo.dirstate.drop(f1)
929
929
930 # directory rename, move local
930 # directory rename, move local
931 for f, args, msg in actions['dm']:
931 for f, args, msg in actions['dm']:
932 f0, flag = args
932 f0, flag = args
933 if f0 not in repo.dirstate:
933 if f0 not in repo.dirstate:
934 # untracked file moved
934 # untracked file moved
935 continue
935 continue
936 if branchmerge:
936 if branchmerge:
937 repo.dirstate.add(f)
937 repo.dirstate.add(f)
938 repo.dirstate.remove(f0)
938 repo.dirstate.remove(f0)
939 repo.dirstate.copy(f0, f)
939 repo.dirstate.copy(f0, f)
940 else:
940 else:
941 repo.dirstate.normal(f)
941 repo.dirstate.normal(f)
942 repo.dirstate.drop(f0)
942 repo.dirstate.drop(f0)
943
943
944 # directory rename, get
944 # directory rename, get
945 for f, args, msg in actions['dg']:
945 for f, args, msg in actions['dg']:
946 f0, flag = args
946 f0, flag = args
947 if branchmerge:
947 if branchmerge:
948 repo.dirstate.add(f)
948 repo.dirstate.add(f)
949 repo.dirstate.copy(f0, f)
949 repo.dirstate.copy(f0, f)
950 else:
950 else:
951 repo.dirstate.normal(f)
951 repo.dirstate.normal(f)
952
952
953 def update(repo, node, branchmerge, force, partial, ancestor=None,
953 def update(repo, node, branchmerge, force, partial, ancestor=None,
954 mergeancestor=False, labels=None):
954 mergeancestor=False, labels=None):
955 """
955 """
956 Perform a merge between the working directory and the given node
956 Perform a merge between the working directory and the given node
957
957
958 node = the node to update to, or None if unspecified
958 node = the node to update to, or None if unspecified
959 branchmerge = whether to merge between branches
959 branchmerge = whether to merge between branches
960 force = whether to force branch merging or file overwriting
960 force = whether to force branch merging or file overwriting
961 partial = a function to filter file lists (dirstate not updated)
961 partial = a function to filter file lists (dirstate not updated)
962 mergeancestor = whether it is merging with an ancestor. If true,
962 mergeancestor = whether it is merging with an ancestor. If true,
963 we should accept the incoming changes for any prompts that occur.
963 we should accept the incoming changes for any prompts that occur.
964 If false, merging with an ancestor (fast-forward) is only allowed
964 If false, merging with an ancestor (fast-forward) is only allowed
965 between different named branches. This flag is used by the rebase extension
965 between different named branches. This flag is used by the rebase extension
966 as a temporary fix and should be avoided in general.
966 as a temporary fix and should be avoided in general.
967
967
968 The table below shows all the behaviors of the update command
968 The table below shows all the behaviors of the update command
969 given the -c and -C or no options, whether the working directory
969 given the -c and -C or no options, whether the working directory
970 is dirty, whether a revision is specified, and the relationship of
970 is dirty, whether a revision is specified, and the relationship of
971 the parent rev to the target rev (linear, on the same named
971 the parent rev to the target rev (linear, on the same named
972 branch, or on another named branch).
972 branch, or on another named branch).
973
973
974 This logic is tested by test-update-branches.t.
974 This logic is tested by test-update-branches.t.
975
975
976 -c -C dirty rev | linear same cross
976 -c -C dirty rev | linear same cross
977 n n n n | ok (1) x
977 n n n n | ok (1) x
978 n n n y | ok ok ok
978 n n n y | ok ok ok
979 n n y n | merge (2) (2)
979 n n y n | merge (2) (2)
980 n n y y | merge (3) (3)
980 n n y y | merge (3) (3)
981 n y * * | --- discard ---
981 n y * * | --- discard ---
982 y n y * | --- (4) ---
982 y n y * | --- (4) ---
983 y n n * | --- ok ---
983 y n n * | --- ok ---
984 y y * * | --- (5) ---
984 y y * * | --- (5) ---
985
985
986 x = can't happen
986 x = can't happen
987 * = don't-care
987 * = don't-care
988 1 = abort: not a linear update (merge or update --check to force update)
988 1 = abort: not a linear update (merge or update --check to force update)
989 2 = abort: uncommitted changes (commit and merge, or update --clean to
989 2 = abort: uncommitted changes (commit and merge, or update --clean to
990 discard changes)
990 discard changes)
991 3 = abort: uncommitted changes (commit or update --clean to discard changes)
991 3 = abort: uncommitted changes (commit or update --clean to discard changes)
992 4 = abort: uncommitted changes (checked in commands.py)
992 4 = abort: uncommitted changes (checked in commands.py)
993 5 = incompatible options (checked in commands.py)
993 5 = incompatible options (checked in commands.py)
994
994
995 Return the same tuple as applyupdates().
995 Return the same tuple as applyupdates().
996 """
996 """
997
997
998 onode = node
998 onode = node
999 wlock = repo.wlock()
999 wlock = repo.wlock()
1000 try:
1000 try:
1001 wc = repo[None]
1001 wc = repo[None]
1002 pl = wc.parents()
1002 pl = wc.parents()
1003 p1 = pl[0]
1003 p1 = pl[0]
1004 pas = [None]
1004 pas = [None]
1005 if ancestor:
1005 if ancestor:
1006 pas = [repo[ancestor]]
1006 pas = [repo[ancestor]]
1007
1007
1008 if node is None:
1008 if node is None:
1009 # Here is where we should consider bookmarks, divergent bookmarks,
1009 # Here is where we should consider bookmarks, divergent bookmarks,
1010 # foreground changesets (successors), and tip of current branch;
1010 # foreground changesets (successors), and tip of current branch;
1011 # but currently we are only checking the branch tips.
1011 # but currently we are only checking the branch tips.
1012 try:
1012 try:
1013 node = repo.branchtip(wc.branch())
1013 node = repo.branchtip(wc.branch())
1014 except errormod.RepoLookupError:
1014 except errormod.RepoLookupError:
1015 if wc.branch() == "default": # no default branch!
1015 if wc.branch() == "default": # no default branch!
1016 node = repo.lookup("tip") # update to tip
1016 node = repo.lookup("tip") # update to tip
1017 else:
1017 else:
1018 raise util.Abort(_("branch %s not found") % wc.branch())
1018 raise util.Abort(_("branch %s not found") % wc.branch())
1019
1019
1020 if p1.obsolete() and not p1.children():
1020 if p1.obsolete() and not p1.children():
1021 # allow updating to successors
1021 # allow updating to successors
1022 successors = obsolete.successorssets(repo, p1.node())
1022 successors = obsolete.successorssets(repo, p1.node())
1023
1023
1024 # behavior of certain cases is as follows,
1024 # behavior of certain cases is as follows,
1025 #
1025 #
1026 # divergent changesets: update to highest rev, similar to what
1026 # divergent changesets: update to highest rev, similar to what
1027 # is currently done when there is more than one head
1027 # is currently done when there is more than one head
1028 # (i.e. 'tip')
1028 # (i.e. 'tip')
1029 #
1029 #
1030 # replaced changesets: same as divergent except we know there
1030 # replaced changesets: same as divergent except we know there
1031 # is no conflict
1031 # is no conflict
1032 #
1032 #
1033 # pruned changeset: no update is done; though, we could
1033 # pruned changeset: no update is done; though, we could
1034 # consider updating to the first non-obsolete parent,
1034 # consider updating to the first non-obsolete parent,
1035 # similar to what is current done for 'hg prune'
1035 # similar to what is current done for 'hg prune'
1036
1036
1037 if successors:
1037 if successors:
1038 # flattening the list here handles both divergent (len > 1)
1038 # flattening the list here handles both divergent (len > 1)
1039 # and the usual case (len = 1)
1039 # and the usual case (len = 1)
1040 successors = [n for sub in successors for n in sub]
1040 successors = [n for sub in successors for n in sub]
1041
1041
1042 # get the max revision for the given successors set,
1042 # get the max revision for the given successors set,
1043 # i.e. the 'tip' of a set
1043 # i.e. the 'tip' of a set
1044 node = repo.revs("max(%ln)", successors)[0]
1044 node = repo.revs("max(%ln)", successors)[0]
1045 pas = [p1]
1045 pas = [p1]
1046
1046
1047 overwrite = force and not branchmerge
1047 overwrite = force and not branchmerge
1048
1048
1049 p2 = repo[node]
1049 p2 = repo[node]
1050 if pas[0] is None:
1050 if pas[0] is None:
1051 if repo.ui.config("merge", "preferancestor") == '*':
1051 if repo.ui.config("merge", "preferancestor") == '*':
1052 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1052 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1053 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1053 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1054 else:
1054 else:
1055 pas = [p1.ancestor(p2, warn=True)]
1055 pas = [p1.ancestor(p2, warn=branchmerge)]
1056
1056
1057 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
1057 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
1058
1058
1059 ### check phase
1059 ### check phase
1060 if not overwrite and len(pl) > 1:
1060 if not overwrite and len(pl) > 1:
1061 raise util.Abort(_("outstanding uncommitted merges"))
1061 raise util.Abort(_("outstanding uncommitted merges"))
1062 if branchmerge:
1062 if branchmerge:
1063 if pas == [p2]:
1063 if pas == [p2]:
1064 raise util.Abort(_("merging with a working directory ancestor"
1064 raise util.Abort(_("merging with a working directory ancestor"
1065 " has no effect"))
1065 " has no effect"))
1066 elif pas == [p1]:
1066 elif pas == [p1]:
1067 if not mergeancestor and p1.branch() == p2.branch():
1067 if not mergeancestor and p1.branch() == p2.branch():
1068 raise util.Abort(_("nothing to merge"),
1068 raise util.Abort(_("nothing to merge"),
1069 hint=_("use 'hg update' "
1069 hint=_("use 'hg update' "
1070 "or check 'hg heads'"))
1070 "or check 'hg heads'"))
1071 if not force and (wc.files() or wc.deleted()):
1071 if not force and (wc.files() or wc.deleted()):
1072 raise util.Abort(_("uncommitted changes"),
1072 raise util.Abort(_("uncommitted changes"),
1073 hint=_("use 'hg status' to list changes"))
1073 hint=_("use 'hg status' to list changes"))
1074 for s in sorted(wc.substate):
1074 for s in sorted(wc.substate):
1075 if wc.sub(s).dirty():
1075 if wc.sub(s).dirty():
1076 raise util.Abort(_("uncommitted changes in "
1076 raise util.Abort(_("uncommitted changes in "
1077 "subrepository '%s'") % s)
1077 "subrepository '%s'") % s)
1078
1078
1079 elif not overwrite:
1079 elif not overwrite:
1080 if p1 == p2: # no-op update
1080 if p1 == p2: # no-op update
1081 # call the hooks and exit early
1081 # call the hooks and exit early
1082 repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
1082 repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
1083 repo.hook('update', parent1=xp2, parent2='', error=0)
1083 repo.hook('update', parent1=xp2, parent2='', error=0)
1084 return 0, 0, 0, 0
1084 return 0, 0, 0, 0
1085
1085
1086 if pas not in ([p1], [p2]): # nonlinear
1086 if pas not in ([p1], [p2]): # nonlinear
1087 dirty = wc.dirty(missing=True)
1087 dirty = wc.dirty(missing=True)
1088 if dirty or onode is None:
1088 if dirty or onode is None:
1089 # Branching is a bit strange to ensure we do the minimal
1089 # Branching is a bit strange to ensure we do the minimal
1090 # amount of calls to obsolete.foreground.
1090 # amount of calls to obsolete.foreground.
1091 foreground = obsolete.foreground(repo, [p1.node()])
1091 foreground = obsolete.foreground(repo, [p1.node()])
1092 # note: the <node> variable contains a random identifier
1092 # note: the <node> variable contains a random identifier
1093 if repo[node].node() in foreground:
1093 if repo[node].node() in foreground:
1094 pas = [p1] # allow updating to successors
1094 pas = [p1] # allow updating to successors
1095 elif dirty:
1095 elif dirty:
1096 msg = _("uncommitted changes")
1096 msg = _("uncommitted changes")
1097 if onode is None:
1097 if onode is None:
1098 hint = _("commit and merge, or update --clean to"
1098 hint = _("commit and merge, or update --clean to"
1099 " discard changes")
1099 " discard changes")
1100 else:
1100 else:
1101 hint = _("commit or update --clean to discard"
1101 hint = _("commit or update --clean to discard"
1102 " changes")
1102 " changes")
1103 raise util.Abort(msg, hint=hint)
1103 raise util.Abort(msg, hint=hint)
1104 else: # node is none
1104 else: # node is none
1105 msg = _("not a linear update")
1105 msg = _("not a linear update")
1106 hint = _("merge or update --check to force update")
1106 hint = _("merge or update --check to force update")
1107 raise util.Abort(msg, hint=hint)
1107 raise util.Abort(msg, hint=hint)
1108 else:
1108 else:
1109 # Allow jumping branches if clean and specific rev given
1109 # Allow jumping branches if clean and specific rev given
1110 pas = [p1]
1110 pas = [p1]
1111
1111
1112 followcopies = False
1112 followcopies = False
1113 if overwrite:
1113 if overwrite:
1114 pas = [wc]
1114 pas = [wc]
1115 elif pas == [p2]: # backwards
1115 elif pas == [p2]: # backwards
1116 pas = [wc.p1()]
1116 pas = [wc.p1()]
1117 elif not branchmerge and not wc.dirty(missing=True):
1117 elif not branchmerge and not wc.dirty(missing=True):
1118 pass
1118 pass
1119 elif pas[0] and repo.ui.configbool("merge", "followcopies", True):
1119 elif pas[0] and repo.ui.configbool("merge", "followcopies", True):
1120 followcopies = True
1120 followcopies = True
1121
1121
1122 ### calculate phase
1122 ### calculate phase
1123 actions = calculateupdates(repo, wc, p2, pas, branchmerge, force,
1123 actions = calculateupdates(repo, wc, p2, pas, branchmerge, force,
1124 partial, mergeancestor, followcopies)
1124 partial, mergeancestor, followcopies)
1125
1125
1126 ### apply phase
1126 ### apply phase
1127 if not branchmerge: # just jump to the new rev
1127 if not branchmerge: # just jump to the new rev
1128 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
1128 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
1129 if not partial:
1129 if not partial:
1130 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
1130 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
1131 # note that we're in the middle of an update
1131 # note that we're in the middle of an update
1132 repo.vfs.write('updatestate', p2.hex())
1132 repo.vfs.write('updatestate', p2.hex())
1133
1133
1134 stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
1134 stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
1135
1135
1136 if not partial:
1136 if not partial:
1137 repo.setparents(fp1, fp2)
1137 repo.setparents(fp1, fp2)
1138 recordupdates(repo, actions, branchmerge)
1138 recordupdates(repo, actions, branchmerge)
1139 # update completed, clear state
1139 # update completed, clear state
1140 util.unlink(repo.join('updatestate'))
1140 util.unlink(repo.join('updatestate'))
1141
1141
1142 if not branchmerge:
1142 if not branchmerge:
1143 repo.dirstate.setbranch(p2.branch())
1143 repo.dirstate.setbranch(p2.branch())
1144 finally:
1144 finally:
1145 wlock.release()
1145 wlock.release()
1146
1146
1147 if not partial:
1147 if not partial:
1148 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
1148 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
1149 return stats
1149 return stats
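The behaviour table in the docstring above is dense. As a purely illustrative aid (hypothetical helper, not Mercurial API), the sketch below encodes its rows as plain conditionals so individual cases can be checked by hand.

def update_outcome(check, clean, dirty, rev, linear):
    # check/clean correspond to -c/-C; returns the table entry as a string
    if check and clean:
        return "abort: incompatible options"            # row (5)
    if clean:
        return "discard"                                # -C always discards
    if check:
        return "abort: uncommitted changes" if dirty else "ok"
    if not dirty:
        if rev or linear:
            return "ok"
        return "abort: not a linear update"             # row (1)
    if linear:
        return "merge"
    if rev:
        return "abort: uncommitted changes (commit or update --clean)"   # (3)
    return "abort: uncommitted changes (commit and merge, or update --clean)"  # (2)

print(update_outcome(check=False, clean=False, dirty=True, rev=False, linear=True))
# -> "merge"  (table row: n n y n | linear)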
@@ -1,869 +1,871 b''
1 # obsolete.py - obsolete markers handling
1 # obsolete.py - obsolete markers handling
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """Obsolete marker handling
9 """Obsolete marker handling
10
10
11 An obsolete marker maps an old changeset to a list of new
11 An obsolete marker maps an old changeset to a list of new
12 changesets. If the list of new changesets is empty, the old changeset
12 changesets. If the list of new changesets is empty, the old changeset
13 is said to be "killed". Otherwise, the old changeset is being
13 is said to be "killed". Otherwise, the old changeset is being
14 "replaced" by the new changesets.
14 "replaced" by the new changesets.
15
15
16 Obsolete markers can be used to record and distribute changeset graph
16 Obsolete markers can be used to record and distribute changeset graph
17 transformations performed by history rewrite operations, and help
17 transformations performed by history rewrite operations, and help
18 build new tools to reconcile conflicting rewrite actions. To
18 build new tools to reconcile conflicting rewrite actions. To
19 facilitate conflict resolution, markers include various annotations
19 facilitate conflict resolution, markers include various annotations
20 besides old and new changeset identifiers, such as creation date or
20 besides old and new changeset identifiers, such as creation date or
21 author name.
21 author name.
22
22
23 The old obsoleted changeset is called a "precursor" and possible
23 The old obsoleted changeset is called a "precursor" and possible
24 replacements are called "successors". Markers that used changeset X as
24 replacements are called "successors". Markers that used changeset X as
25 a precursor are called "successor markers of X" because they hold
25 a precursor are called "successor markers of X" because they hold
26 information about the successors of X. Markers that use changeset Y as
26 information about the successors of X. Markers that use changeset Y as
27 a successor are called "precursor markers of Y" because they hold
27 a successor are called "precursor markers of Y" because they hold
28 information about the precursors of Y.
28 information about the precursors of Y.
29
29
30 Examples:
30 Examples:
31
31
32 - When changeset A is replaced by changeset A', one marker is stored:
32 - When changeset A is replaced by changeset A', one marker is stored:
33
33
34 (A, (A',))
34 (A, (A',))
35
35
36 - When changesets A and B are folded into a new changeset C, two markers are
36 - When changesets A and B are folded into a new changeset C, two markers are
37 stored:
37 stored:
38
38
39 (A, (C,)) and (B, (C,))
39 (A, (C,)) and (B, (C,))
40
40
41 - When changeset A is simply "pruned" from the graph, a marker is created:
41 - When changeset A is simply "pruned" from the graph, a marker is created:
42
42
43 (A, ())
43 (A, ())
44
44
45 - When changeset A is split into B and C, a single marker is used:
45 - When changeset A is split into B and C, a single marker is used:
46
46
47 (A, (B, C))
47 (A, (B, C))
48
48
49 We use a single marker to distinguish the "split" case from the "divergence"
49 We use a single marker to distinguish the "split" case from the "divergence"
50 case. If two independent operations rewrite the same changeset A into A' and
50 case. If two independent operations rewrite the same changeset A into A' and
51 A'', we have an error case: divergent rewriting. We can detect it because
51 A'', we have an error case: divergent rewriting. We can detect it because
52 two markers will be created independently:
52 two markers will be created independently:
53
53
54 (A, (B,)) and (A, (C,))
54 (A, (B,)) and (A, (C,))
55
55
56 Format
56 Format
57 ------
57 ------
58
58
59 Markers are stored in an append-only file stored in
59 Markers are stored in an append-only file stored in
60 '.hg/store/obsstore'.
60 '.hg/store/obsstore'.
61
61
62 The file starts with a version header:
62 The file starts with a version header:
63
63
64 - 1 unsigned byte: version number, starting at zero.
64 - 1 unsigned byte: version number, starting at zero.
65
65
66
66
67 The header is followed by the markers. Each marker is made of:
67 The header is followed by the markers. Each marker is made of:
68
68
69 - 1 unsigned byte: number of new changesets "N", can be zero.
69 - 1 unsigned byte: number of new changesets "N", can be zero.
70
70
71 - 1 unsigned 32-bit integer: metadata size "M" in bytes.
71 - 1 unsigned 32-bit integer: metadata size "M" in bytes.
72
72
73 - 1 byte: a bit field. It is reserved for flags used in common
73 - 1 byte: a bit field. It is reserved for flags used in common
74 obsolete marker operations, to avoid repeated decoding of metadata
74 obsolete marker operations, to avoid repeated decoding of metadata
75 entries.
75 entries.
76
76
77 - 20 bytes: obsoleted changeset identifier.
77 - 20 bytes: obsoleted changeset identifier.
78
78
79 - N*20 bytes: new changesets identifiers.
79 - N*20 bytes: new changesets identifiers.
80
80
81 - M bytes: metadata as a sequence of nul-terminated strings. Each
81 - M bytes: metadata as a sequence of nul-terminated strings. Each
82 string contains a key and a value, separated by a colon ':', without
82 string contains a key and a value, separated by a colon ':', without
83 additional encoding. Keys cannot contain '\0' or ':' and values
83 additional encoding. Keys cannot contain '\0' or ':' and values
84 cannot contain '\0'.
84 cannot contain '\0'.
85
85
86 """
86 """
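To make the byte layout above concrete, the following sketch packs a single marker by hand with the same big-endian layout the parser below uses ('>BIB20s' for the fixed part plus a 20-byte field per successor). It only illustrates the documented format; it is not the module's own writer.

import struct

def pack_marker(precursor, successors, flags, metadata):
    # N (uint8), M (uint32), flags (uint8), 20-byte precursor,
    # N * 20-byte successors, then M raw metadata bytes
    assert len(precursor) == 20 and all(len(s) == 20 for s in successors)
    fmt = '>BIB20s' + '20s' * len(successors)
    return struct.pack(fmt, len(successors), len(metadata), flags,
                       precursor, *successors) + metadata

prec = b'\x11' * 20
succ = b'\x22' * 20
meta = b'date:0 0\x00user:test'
blob = b'\x00' + pack_marker(prec, [succ], 0, meta)   # leading version byte 0
print(len(blob))   # 1 + 26 + 20 + 18 = 65 bytes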
87 import struct
87 import struct
88 import util, base85, node
88 import util, base85, node
89 import phases
89 import phases
90 from i18n import _
90 from i18n import _
91
91
92 _pack = struct.pack
92 _pack = struct.pack
93 _unpack = struct.unpack
93 _unpack = struct.unpack
94
94
95 _SEEK_END = 2 # os.SEEK_END was introduced in Python 2.5
95 _SEEK_END = 2 # os.SEEK_END was introduced in Python 2.5
96
96
97 # the obsolete feature is not mature enough to be enabled by default.
97 # the obsolete feature is not mature enough to be enabled by default.
98 # you have to rely on a third party extension to enable this.
98 # you have to rely on a third party extension to enable this.
99 _enabled = False
99 _enabled = False
100
100
101 # data used for parsing and writing
101 # data used for parsing and writing
102 _fmversion = 0
102 _fmversion = 0
103 _fmfixed = '>BIB20s'
103 _fmfixed = '>BIB20s'
104 _fmnode = '20s'
104 _fmnode = '20s'
105 _fmfsize = struct.calcsize(_fmfixed)
105 _fmfsize = struct.calcsize(_fmfixed)
106 _fnodesize = struct.calcsize(_fmnode)
106 _fnodesize = struct.calcsize(_fmnode)
107
107
108 ### obsolescence marker flag
108 ### obsolescence marker flag
109
109
110 ## bumpedfix flag
110 ## bumpedfix flag
111 #
111 #
112 # When a changeset A' succeeds a changeset A which became public, we call A'
112 # When a changeset A' succeeds a changeset A which became public, we call A'
113 # "bumped" because it's a successor of a public changeset
113 # "bumped" because it's a successor of a public changeset
114 #
114 #
115 # o A' (bumped)
115 # o A' (bumped)
116 # |`:
116 # |`:
117 # | o A
117 # | o A
118 # |/
118 # |/
119 # o Z
119 # o Z
120 #
120 #
121 # The way to solve this situation is to create a new changeset Ad as a child
121 # The way to solve this situation is to create a new changeset Ad as a child
122 # of A. This changeset has the same content as A'. So the diff from A to A'
122 # of A. This changeset has the same content as A'. So the diff from A to A'
123 # is the same as the diff from A to Ad. Ad is marked as a successor of A'
123 # is the same as the diff from A to Ad. Ad is marked as a successor of A'
124 #
124 #
125 # o Ad
125 # o Ad
126 # |`:
126 # |`:
127 # | x A'
127 # | x A'
128 # |'|
128 # |'|
129 # o | A
129 # o | A
130 # |/
130 # |/
131 # o Z
131 # o Z
132 #
132 #
133 # But by transitivity Ad is also a successors of A. To avoid having Ad marked
133 # But by transitivity Ad is also a successors of A. To avoid having Ad marked
134 # as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
134 # as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
135 # This flag means that the successors express the changes between the public and
135 # This flag means that the successors express the changes between the public and
136 # bumped version and fix the situation, breaking the transitivity of
136 # bumped version and fix the situation, breaking the transitivity of
137 # "bumped" here.
137 # "bumped" here.
138 bumpedfix = 1
138 bumpedfix = 1
139
139
140 def _readmarkers(data):
140 def _readmarkers(data):
141 """Read and enumerate markers from raw data"""
141 """Read and enumerate markers from raw data"""
142 off = 0
142 off = 0
143 diskversion = _unpack('>B', data[off:off + 1])[0]
143 diskversion = _unpack('>B', data[off:off + 1])[0]
144 off += 1
144 off += 1
145 if diskversion != _fmversion:
145 if diskversion != _fmversion:
146 raise util.Abort(_('parsing obsolete marker: unknown version %r')
146 raise util.Abort(_('parsing obsolete marker: unknown version %r')
147 % diskversion)
147 % diskversion)
148
148
149 # Loop on markers
149 # Loop on markers
150 l = len(data)
150 l = len(data)
151 while off + _fmfsize <= l:
151 while off + _fmfsize <= l:
152 # read fixed part
152 # read fixed part
153 cur = data[off:off + _fmfsize]
153 cur = data[off:off + _fmfsize]
154 off += _fmfsize
154 off += _fmfsize
155 nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
155 nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
156 # read replacement
156 # read replacement
157 sucs = ()
157 sucs = ()
158 if nbsuc:
158 if nbsuc:
159 s = (_fnodesize * nbsuc)
159 s = (_fnodesize * nbsuc)
160 cur = data[off:off + s]
160 cur = data[off:off + s]
161 sucs = _unpack(_fmnode * nbsuc, cur)
161 sucs = _unpack(_fmnode * nbsuc, cur)
162 off += s
162 off += s
163 # read metadata
163 # read metadata
164 # (metadata will be decoded on demand)
164 # (metadata will be decoded on demand)
165 metadata = data[off:off + mdsize]
165 metadata = data[off:off + mdsize]
166 if len(metadata) != mdsize:
166 if len(metadata) != mdsize:
167 raise util.Abort(_('parsing obsolete marker: metadata is too '
167 raise util.Abort(_('parsing obsolete marker: metadata is too '
168 'short, %d bytes expected, got %d')
168 'short, %d bytes expected, got %d')
169 % (mdsize, len(metadata)))
169 % (mdsize, len(metadata)))
170 off += mdsize
170 off += mdsize
171 yield (pre, sucs, flags, metadata)
171 yield (pre, sucs, flags, metadata)
172
172
173 def encodemeta(meta):
173 def encodemeta(meta):
174 """Return encoded metadata string to string mapping.
174 """Return encoded metadata string to string mapping.
175
175
176 Assume no ':' in key and no '\0' in both key and value."""
176 Assume no ':' in key and no '\0' in both key and value."""
177 for key, value in meta.iteritems():
177 for key, value in meta.iteritems():
178 if ':' in key or '\0' in key:
178 if ':' in key or '\0' in key:
179 raise ValueError("':' and '\0' are forbidden in metadata key")
179 raise ValueError("':' and '\0' are forbidden in metadata key")
180 if '\0' in value:
180 if '\0' in value:
181 raise ValueError("'\0' is forbidden in metadata value")
181 raise ValueError("'\0' is forbidden in metadata value")
182 return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
182 return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
183
183
184 def decodemeta(data):
184 def decodemeta(data):
185 """Return string to string dictionary from encoded version."""
185 """Return string to string dictionary from encoded version."""
186 d = {}
186 d = {}
187 for l in data.split('\0'):
187 for l in data.split('\0'):
188 if l:
188 if l:
189 key, value = l.split(':')
189 key, value = l.split(':')
190 d[key] = value
190 d[key] = value
191 return d
191 return d
192
192
193 class marker(object):
193 class marker(object):
194 """Wrap obsolete marker raw data"""
194 """Wrap obsolete marker raw data"""
195
195
196 def __init__(self, repo, data):
196 def __init__(self, repo, data):
197 # the repo argument will be used to create changectx in later version
197 # the repo argument will be used to create changectx in later version
198 self._repo = repo
198 self._repo = repo
199 self._data = data
199 self._data = data
200 self._decodedmeta = None
200 self._decodedmeta = None
201
201
202 def __hash__(self):
202 def __hash__(self):
203 return hash(self._data)
203 return hash(self._data)
204
204
205 def __eq__(self, other):
205 def __eq__(self, other):
206 if type(other) != type(self):
206 if type(other) != type(self):
207 return False
207 return False
208 return self._data == other._data
208 return self._data == other._data
209
209
210 def precnode(self):
210 def precnode(self):
211 """Precursor changeset node identifier"""
211 """Precursor changeset node identifier"""
212 return self._data[0]
212 return self._data[0]
213
213
214 def succnodes(self):
214 def succnodes(self):
215 """List of successor changesets node identifiers"""
215 """List of successor changesets node identifiers"""
216 return self._data[1]
216 return self._data[1]
217
217
218 def metadata(self):
218 def metadata(self):
219 """Decoded metadata dictionary"""
219 """Decoded metadata dictionary"""
220 if self._decodedmeta is None:
220 if self._decodedmeta is None:
221 self._decodedmeta = decodemeta(self._data[3])
221 self._decodedmeta = decodemeta(self._data[3])
222 return self._decodedmeta
222 return self._decodedmeta
223
223
224 def date(self):
224 def date(self):
225 """Creation date as (unixtime, offset)"""
225 """Creation date as (unixtime, offset)"""
226 parts = self.metadata()['date'].split(' ')
226 parts = self.metadata()['date'].split(' ')
227 return (float(parts[0]), int(parts[1]))
227 return (float(parts[0]), int(parts[1]))
228
228
229 class obsstore(object):
229 class obsstore(object):
230 """Store obsolete markers
230 """Store obsolete markers
231
231
232 Markers can be accessed with two mappings:
232 Markers can be accessed with two mappings:
233 - precursors[x] -> set(markers on precursors edges of x)
233 - precursors[x] -> set(markers on precursors edges of x)
234 - successors[x] -> set(markers on successors edges of x)
234 - successors[x] -> set(markers on successors edges of x)
235 """
235 """
236
236
237 def __init__(self, sopener):
237 def __init__(self, sopener):
238 # caches for various obsolescence related data
238 # caches for various obsolescence related data
239 self.caches = {}
239 self.caches = {}
240 self._all = []
240 self._all = []
241 # new markers to serialize
241 # new markers to serialize
242 self.precursors = {}
242 self.precursors = {}
243 self.successors = {}
243 self.successors = {}
244 self.sopener = sopener
244 self.sopener = sopener
245 data = sopener.tryread('obsstore')
245 data = sopener.tryread('obsstore')
246 if data:
246 if data:
247 self._load(_readmarkers(data))
247 self._load(_readmarkers(data))
248
248
249 def __iter__(self):
249 def __iter__(self):
250 return iter(self._all)
250 return iter(self._all)
251
251
252 def __len__(self):
252 def __len__(self):
253 return len(self._all)
253 return len(self._all)
254
254
255 def __nonzero__(self):
255 def __nonzero__(self):
256 return bool(self._all)
256 return bool(self._all)
257
257
258 def create(self, transaction, prec, succs=(), flag=0, metadata=None):
258 def create(self, transaction, prec, succs=(), flag=0, metadata=None):
259 """obsolete: add a new obsolete marker
259 """obsolete: add a new obsolete marker
260
260
261 * ensuring it is hashable
261 * ensuring it is hashable
262 * check mandatory metadata
262 * check mandatory metadata
263 * encode metadata
263 * encode metadata
264
264
265 If you are a human writing code that creates markers, you want to use the
265 If you are a human writing code that creates markers, you want to use the
266 `createmarkers` function in this module instead.
266 `createmarkers` function in this module instead.
267
267
268 return True if a new marker has been added, False if the marker
268 return True if a new marker has been added, False if the marker
269 already existed (no op).
269 already existed (no op).
270 """
270 """
271 if metadata is None:
271 if metadata is None:
272 metadata = {}
272 metadata = {}
273 if 'date' not in metadata:
273 if 'date' not in metadata:
274 metadata['date'] = "%d %d" % util.makedate()
274 metadata['date'] = "%d %d" % util.makedate()
275 if len(prec) != 20:
275 if len(prec) != 20:
276 raise ValueError(prec)
276 raise ValueError(prec)
277 for succ in succs:
277 for succ in succs:
278 if len(succ) != 20:
278 if len(succ) != 20:
279 raise ValueError(succ)
279 raise ValueError(succ)
280 if prec in succs:
281 raise ValueError(_('in-marker cycle with %s') % node.hex(prec))
280 marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
282 marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
281 return bool(self.add(transaction, [marker]))
283 return bool(self.add(transaction, [marker]))
282
284
283 def add(self, transaction, markers):
285 def add(self, transaction, markers):
284 """Add new markers to the store
286 """Add new markers to the store
285
287
286 Take care of filtering duplicates.
288 Take care of filtering duplicates.
287 Return the number of new markers.
289 Return the number of new markers.
288 if not _enabled:
290 if not _enabled:
289 raise util.Abort('obsolete feature is not enabled on this repo')
291 raise util.Abort('obsolete feature is not enabled on this repo')
290 known = set(self._all)
292 known = set(self._all)
291 new = []
293 new = []
292 for m in markers:
294 for m in markers:
293 if m not in known:
295 if m not in known:
294 known.add(m)
296 known.add(m)
295 new.append(m)
297 new.append(m)
296 if new:
298 if new:
297 f = self.sopener('obsstore', 'ab')
299 f = self.sopener('obsstore', 'ab')
298 try:
300 try:
299 # Whether the file's current position is at the beginning or at
301 # Whether the file's current position is at the beginning or at
300 # the end after opening a file for appending is implementation
302 # the end after opening a file for appending is implementation
301 # defined. So we must seek to the end before calling tell(),
303 # defined. So we must seek to the end before calling tell(),
302 # or we may get a zero offset for non-zero sized files on
304 # or we may get a zero offset for non-zero sized files on
303 # some platforms (issue3543).
305 # some platforms (issue3543).
304 f.seek(0, _SEEK_END)
306 f.seek(0, _SEEK_END)
305 offset = f.tell()
307 offset = f.tell()
306 transaction.add('obsstore', offset)
308 transaction.add('obsstore', offset)
307 # offset == 0: new file - add the version header
309 # offset == 0: new file - add the version header
308 for bytes in _encodemarkers(new, offset == 0):
310 for bytes in _encodemarkers(new, offset == 0):
309 f.write(bytes)
311 f.write(bytes)
310 finally:
312 finally:
311 # XXX: f.close() == filecache invalidation == obsstore rebuilt.
313 # XXX: f.close() == filecache invalidation == obsstore rebuilt.
312 # call 'filecacheentry.refresh()' here
314 # call 'filecacheentry.refresh()' here
313 f.close()
315 f.close()
314 self._load(new)
316 self._load(new)
315 # new markers *may* have changed several sets. invalidate the cache.
317 # new markers *may* have changed several sets. invalidate the cache.
316 self.caches.clear()
318 self.caches.clear()
317 return len(new)
319 return len(new)
318
320
319 def mergemarkers(self, transaction, data):
321 def mergemarkers(self, transaction, data):
320 markers = _readmarkers(data)
322 markers = _readmarkers(data)
321 self.add(transaction, markers)
323 self.add(transaction, markers)
322
324
323 def _load(self, markers):
325 def _load(self, markers):
324 for mark in markers:
326 for mark in markers:
325 self._all.append(mark)
327 self._all.append(mark)
326 pre, sucs = mark[:2]
328 pre, sucs = mark[:2]
327 self.successors.setdefault(pre, set()).add(mark)
329 self.successors.setdefault(pre, set()).add(mark)
328 for suc in sucs:
330 for suc in sucs:
329 self.precursors.setdefault(suc, set()).add(mark)
331 self.precursors.setdefault(suc, set()).add(mark)
330 if node.nullid in self.precursors:
332 if node.nullid in self.precursors:
331 raise util.Abort(_('bad obsolescence marker detected: '
333 raise util.Abort(_('bad obsolescence marker detected: '
332 'invalid successors nullid'))
334 'invalid successors nullid'))
333
335
334 def _encodemarkers(markers, addheader=False):
336 def _encodemarkers(markers, addheader=False):
335 # Kept separate from flushmarkers(), it will be reused for
337 # Kept separate from flushmarkers(), it will be reused for
336 # markers exchange.
338 # markers exchange.
337 if addheader:
339 if addheader:
338 yield _pack('>B', _fmversion)
340 yield _pack('>B', _fmversion)
339 for marker in markers:
341 for marker in markers:
340 yield _encodeonemarker(marker)
342 yield _encodeonemarker(marker)
341
343
342
344
343 def _encodeonemarker(marker):
345 def _encodeonemarker(marker):
344 pre, sucs, flags, metadata = marker
346 pre, sucs, flags, metadata = marker
345 nbsuc = len(sucs)
347 nbsuc = len(sucs)
346 format = _fmfixed + (_fmnode * nbsuc)
348 format = _fmfixed + (_fmnode * nbsuc)
347 data = [nbsuc, len(metadata), flags, pre]
349 data = [nbsuc, len(metadata), flags, pre]
348 data.extend(sucs)
350 data.extend(sucs)
349 return _pack(format, *data) + metadata
351 return _pack(format, *data) + metadata
350
352
351 # arbitrarily picked to fit into the 8K limit from HTTP servers
353 # arbitrarily picked to fit into the 8K limit from HTTP servers
352 # you have to take into account:
354 # you have to take into account:
353 # - the version header
355 # - the version header
354 # - the base85 encoding
356 # - the base85 encoding
355 _maxpayload = 5300
357 _maxpayload = 5300
356
358
357 def _pushkeyescape(markers):
359 def _pushkeyescape(markers):
358 """encode markers into a dict suitable for pushkey exchange
360 """encode markers into a dict suitable for pushkey exchange
359
361
360 - binary data is base85 encoded
362 - binary data is base85 encoded
361 - split in chunks smaller than 5300 bytes"""
363 - split in chunks smaller than 5300 bytes"""
362 keys = {}
364 keys = {}
363 parts = []
365 parts = []
364 currentlen = _maxpayload * 2 # ensure we create a new part
366 currentlen = _maxpayload * 2 # ensure we create a new part
365 for marker in markers:
367 for marker in markers:
366 nextdata = _encodeonemarker(marker)
368 nextdata = _encodeonemarker(marker)
367 if (len(nextdata) + currentlen > _maxpayload):
369 if (len(nextdata) + currentlen > _maxpayload):
368 currentpart = []
370 currentpart = []
369 currentlen = 0
371 currentlen = 0
370 parts.append(currentpart)
372 parts.append(currentpart)
371 currentpart.append(nextdata)
373 currentpart.append(nextdata)
372 currentlen += len(nextdata)
374 currentlen += len(nextdata)
373 for idx, part in enumerate(reversed(parts)):
375 for idx, part in enumerate(reversed(parts)):
374 data = ''.join([_pack('>B', _fmversion)] + part)
376 data = ''.join([_pack('>B', _fmversion)] + part)
375 keys['dump%i' % idx] = base85.b85encode(data)
377 keys['dump%i' % idx] = base85.b85encode(data)
376 return keys
378 return keys
377
379
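The function above splits the encoded markers so that each pushkey value stays under _maxpayload bytes before base85 encoding. Here is a generic sketch of the same greedy chunking idea (illustrative only, not the module's code):

def chunk(items, limit):
    # group byte strings so each part stays at or under `limit` bytes
    parts, current, size = [], [], 0
    for item in items:
        if current and size + len(item) > limit:
            parts.append(current)
            current, size = [], 0
        current.append(item)
        size += len(item)
    if current:
        parts.append(current)
    return parts

parts = chunk(['a' * 3000, 'b' * 3000, 'c' * 100], 5300)
print([len(''.join(p)) for p in parts])   # [3000, 3100] -- both under 5300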
378 def listmarkers(repo):
380 def listmarkers(repo):
379 """List markers over pushkey"""
381 """List markers over pushkey"""
380 if not repo.obsstore:
382 if not repo.obsstore:
381 return {}
383 return {}
382 return _pushkeyescape(repo.obsstore)
384 return _pushkeyescape(repo.obsstore)
383
385
384 def pushmarker(repo, key, old, new):
386 def pushmarker(repo, key, old, new):
385 """Push markers over pushkey"""
387 """Push markers over pushkey"""
386 if not key.startswith('dump'):
388 if not key.startswith('dump'):
387 repo.ui.warn(_('unknown key: %r') % key)
389 repo.ui.warn(_('unknown key: %r') % key)
388 return 0
390 return 0
389 if old:
391 if old:
390 repo.ui.warn(_('unexpected old value for %r') % key)
392 repo.ui.warn(_('unexpected old value for %r') % key)
391 return 0
393 return 0
392 data = base85.b85decode(new)
394 data = base85.b85decode(new)
393 lock = repo.lock()
395 lock = repo.lock()
394 try:
396 try:
395 tr = repo.transaction('pushkey: obsolete markers')
397 tr = repo.transaction('pushkey: obsolete markers')
396 try:
398 try:
397 repo.obsstore.mergemarkers(tr, data)
399 repo.obsstore.mergemarkers(tr, data)
398 tr.close()
400 tr.close()
399 return 1
401 return 1
400 finally:
402 finally:
401 tr.release()
403 tr.release()
402 finally:
404 finally:
403 lock.release()
405 lock.release()
404
406
405 def allmarkers(repo):
407 def allmarkers(repo):
406 """all obsolete markers known in a repository"""
408 """all obsolete markers known in a repository"""
407 for markerdata in repo.obsstore:
409 for markerdata in repo.obsstore:
408 yield marker(repo, markerdata)
410 yield marker(repo, markerdata)
409
411
410 def precursormarkers(ctx):
412 def precursormarkers(ctx):
411 """obsolete markers marking this changeset as a successor"""
413 """obsolete markers marking this changeset as a successor"""
412 for data in ctx._repo.obsstore.precursors.get(ctx.node(), ()):
414 for data in ctx._repo.obsstore.precursors.get(ctx.node(), ()):
413 yield marker(ctx._repo, data)
415 yield marker(ctx._repo, data)
414
416
415 def successormarkers(ctx):
417 def successormarkers(ctx):
416 """obsolete markers making this changeset obsolete"""
418 """obsolete markers making this changeset obsolete"""
417 for data in ctx._repo.obsstore.successors.get(ctx.node(), ()):
419 for data in ctx._repo.obsstore.successors.get(ctx.node(), ()):
418 yield marker(ctx._repo, data)
420 yield marker(ctx._repo, data)
419
421
420 def allsuccessors(obsstore, nodes, ignoreflags=0):
422 def allsuccessors(obsstore, nodes, ignoreflags=0):
421 """Yield node for every successor of <nodes>.
423 """Yield node for every successor of <nodes>.
422
424
423 Some successors may be unknown locally.
425 Some successors may be unknown locally.
424
426
425 This is a linear yield unsuited to detecting split changesets. It includes
427 This is a linear yield unsuited to detecting split changesets. It includes
426 initial nodes too."""
428 initial nodes too."""
427 remaining = set(nodes)
429 remaining = set(nodes)
428 seen = set(remaining)
430 seen = set(remaining)
429 while remaining:
431 while remaining:
430 current = remaining.pop()
432 current = remaining.pop()
431 yield current
433 yield current
432 for mark in obsstore.successors.get(current, ()):
434 for mark in obsstore.successors.get(current, ()):
433 # ignore marker flagged with specified flag
435 # ignore marker flagged with specified flag
434 if mark[2] & ignoreflags:
436 if mark[2] & ignoreflags:
435 continue
437 continue
436 for suc in mark[1]:
438 for suc in mark[1]:
437 if suc not in seen:
439 if suc not in seen:
438 seen.add(suc)
440 seen.add(suc)
439 remaining.add(suc)
441 remaining.add(suc)
440
442
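To see the traversal above in isolation, here is a toy walk over a hand-built successors mapping shaped like obsstore.successors, where each marker is a (precursor, successors, flags, metadata) tuple; the node names are made up.

successors = {
    'A': set([('A', ('B',), 0, '')]),
    'B': set([('B', ('C', 'D'), 0, '')]),   # B was split into C and D
}

seen = set(['A'])
remaining = set(['A'])
order = []
while remaining:
    current = remaining.pop()
    order.append(current)
    for mark in successors.get(current, ()):
        for suc in mark[1]:
            if suc not in seen:
                seen.add(suc)
                remaining.add(suc)
print(sorted(order))   # ['A', 'B', 'C', 'D'] -- the initial node is included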
441 def allprecursors(obsstore, nodes, ignoreflags=0):
443 def allprecursors(obsstore, nodes, ignoreflags=0):
442 """Yield node for every precursor of <nodes>.
444 """Yield node for every precursor of <nodes>.
443
445
444 Some precursors may be unknown locally.
446 Some precursors may be unknown locally.
445
447
446 This is a linear yield unsuited to detecting folded changesets. It includes
448 This is a linear yield unsuited to detecting folded changesets. It includes
447 initial nodes too."""
449 initial nodes too."""
448
450
449 remaining = set(nodes)
451 remaining = set(nodes)
450 seen = set(remaining)
452 seen = set(remaining)
451 while remaining:
453 while remaining:
452 current = remaining.pop()
454 current = remaining.pop()
453 yield current
455 yield current
454 for mark in obsstore.precursors.get(current, ()):
456 for mark in obsstore.precursors.get(current, ()):
455 # ignore marker flagged with specified flag
457 # ignore marker flagged with specified flag
456 if mark[2] & ignoreflags:
458 if mark[2] & ignoreflags:
457 continue
459 continue
458 suc = mark[0]
460 suc = mark[0]
459 if suc not in seen:
461 if suc not in seen:
460 seen.add(suc)
462 seen.add(suc)
461 remaining.add(suc)
463 remaining.add(suc)
462
464
463 def foreground(repo, nodes):
465 def foreground(repo, nodes):
464 """return all nodes in the "foreground" of the given nodes
466 """return all nodes in the "foreground" of the given nodes
465
467
466 The foreground of a revision is anything reachable using parent -> children
468 The foreground of a revision is anything reachable using parent -> children
467 or precursor -> successor relation. It is very similar to "descendant" but
469 or precursor -> successor relation. It is very similar to "descendant" but
468 augmented with obsolescence information.
470 augmented with obsolescence information.
469
471
470 Beware that possible obsolescence cycles may result in complex situations.
472 Beware that possible obsolescence cycles may result in complex situations.
471 """
473 """
472 repo = repo.unfiltered()
474 repo = repo.unfiltered()
473 foreground = set(repo.set('%ln::', nodes))
475 foreground = set(repo.set('%ln::', nodes))
474 if repo.obsstore:
476 if repo.obsstore:
475 # We only need this complicated logic if there is obsolescence
477 # We only need this complicated logic if there is obsolescence
476 # XXX will probably deserve an optimised revset.
478 # XXX will probably deserve an optimised revset.
477 nm = repo.changelog.nodemap
479 nm = repo.changelog.nodemap
478 plen = -1
480 plen = -1
479 # compute the whole set of successors or descendants
481 # compute the whole set of successors or descendants
480 while len(foreground) != plen:
482 while len(foreground) != plen:
481 plen = len(foreground)
483 plen = len(foreground)
482 succs = set(c.node() for c in foreground)
484 succs = set(c.node() for c in foreground)
483 mutable = [c.node() for c in foreground if c.mutable()]
485 mutable = [c.node() for c in foreground if c.mutable()]
484 succs.update(allsuccessors(repo.obsstore, mutable))
486 succs.update(allsuccessors(repo.obsstore, mutable))
485 known = (n for n in succs if n in nm)
487 known = (n for n in succs if n in nm)
486 foreground = set(repo.set('%ln::', known))
488 foreground = set(repo.set('%ln::', known))
487 return set(c.node() for c in foreground)
489 return set(c.node() for c in foreground)
488
490
489
491
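A toy fixed-point computation (illustrative, with made-up node names) shows what "foreground" means here: starting from A, the set grows through both the child relation and the successor relation until it stops changing.

children = {'Z': ['A'], 'A': [], 'A2': ['B2']}   # graph: children of each node
succs = {'A': ['A2']}                            # markers: A was rewritten as A2

fg = set(['A'])
plen = -1
while len(fg) != plen:                           # iterate until the set is stable
    plen = len(fg)
    for n in list(fg):
        fg.update(children.get(n, []))
        fg.update(succs.get(n, []))
print(sorted(fg))   # ['A', 'A2', 'B2'] -- includes the successor and its child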
490 def successorssets(repo, initialnode, cache=None):
492 def successorssets(repo, initialnode, cache=None):
491 """Return all sets of successors of the initial node
493 """Return all sets of successors of the initial node
492
494
493 The successors set of a changeset A is a group of revisions that succeed
495 The successors set of a changeset A is a group of revisions that succeed
494 A. It succeeds A as a consistent whole, each revision being only a partial
496 A. It succeeds A as a consistent whole, each revision being only a partial
495 replacement. The successors set contains non-obsolete changesets only.
497 replacement. The successors set contains non-obsolete changesets only.
496
498
497 This function returns the full list of successor sets which is why it
499 This function returns the full list of successor sets which is why it
498 returns a list of tuples and not just a single tuple. Each tuple is a valid
500 returns a list of tuples and not just a single tuple. Each tuple is a valid
499 successors set. Note that (A,) may be a valid successors set for changeset A
501 successors set. Note that (A,) may be a valid successors set for changeset A
500 (see below).
502 (see below).
501
503
502 In most cases, a changeset A will have a single element (e.g. the changeset
504 In most cases, a changeset A will have a single element (e.g. the changeset
503 A is replaced by A') in its successors set. Though, it is also common for a
505 A is replaced by A') in its successors set. Though, it is also common for a
504 changeset A to have no elements in its successor set (e.g. the changeset
506 changeset A to have no elements in its successor set (e.g. the changeset
505 has been pruned). Therefore, the returned list of successors sets will be
507 has been pruned). Therefore, the returned list of successors sets will be
506 [(A',)] or [], respectively.
508 [(A',)] or [], respectively.
507
509
508 When a changeset A is split into A' and B', however, it will result in a
510 When a changeset A is split into A' and B', however, it will result in a
509 successors set containing more than a single element, i.e. [(A',B')].
511 successors set containing more than a single element, i.e. [(A',B')].
510 Divergent changesets will result in multiple successors sets, i.e. [(A',),
512 Divergent changesets will result in multiple successors sets, i.e. [(A',),
511 (A'')].
513 (A'')].
512
514
513 If a changeset A is not obsolete, then it will conceptually have no
515 If a changeset A is not obsolete, then it will conceptually have no
514 successors set. To distinguish this from a pruned changeset, the successor
516 successors set. To distinguish this from a pruned changeset, the successor
515 set will only contain itself, i.e. [(A,)].
517 set will only contain itself, i.e. [(A,)].
516
518
517 Finally, successors unknown locally are considered to be pruned (obsoleted
519 Finally, successors unknown locally are considered to be pruned (obsoleted
518 without any successors).
520 without any successors).
519
521
520 The optional `cache` parameter is a dictionary that may contain precomputed
522 The optional `cache` parameter is a dictionary that may contain precomputed
521 successors sets. It is meant to reuse the computation of a previous call to
523 successors sets. It is meant to reuse the computation of a previous call to
522 `successorssets` when multiple calls are made at the same time. The cache
524 `successorssets` when multiple calls are made at the same time. The cache
523 dictionary is updated in place. The caller is responsible for its life
525 dictionary is updated in place. The caller is responsible for its life
524 span. Code that makes multiple calls to `successorssets` *must* use this
526 span. Code that makes multiple calls to `successorssets` *must* use this
525 cache mechanism or suffer terrible performance.
527 cache mechanism or suffer terrible performance.
526
528
527 """
529 """
528
530
529 succmarkers = repo.obsstore.successors
531 succmarkers = repo.obsstore.successors
530
532
531 # Stack of nodes we search successors sets for
533 # Stack of nodes we search successors sets for
532 toproceed = [initialnode]
534 toproceed = [initialnode]
533 # set version of the above list for fast loop detection
535 # set version of the above list for fast loop detection
534 # every element added to "toproceed" must also be added here
536 # every element added to "toproceed" must also be added here
535 stackedset = set(toproceed)
537 stackedset = set(toproceed)
536 if cache is None:
538 if cache is None:
537 cache = {}
539 cache = {}
538
540
539 # This while loop is the flattened version of a recursive search for
541 # This while loop is the flattened version of a recursive search for
540 # successors sets
542 # successors sets
541 #
543 #
542 # def successorssets(x):
544 # def successorssets(x):
543 # successors = directsuccessors(x)
545 # successors = directsuccessors(x)
544 # ss = [[]]
546 # ss = [[]]
545 # for succ in directsuccessors(x):
547 # for succ in directsuccessors(x):
546 # # product as in itertools cartesian product
548 # # product as in itertools cartesian product
547 # ss = product(ss, successorssets(succ))
549 # ss = product(ss, successorssets(succ))
548 # return ss
550 # return ss
549 #
551 #
550 # But we cannot use plain recursive calls here:
552 # But we cannot use plain recursive calls here:
551 # - that would blow the Python call stack
553 # - that would blow the Python call stack
552 # - obsolescence markers may have cycles, we need to handle them.
554 # - obsolescence markers may have cycles, we need to handle them.
553 #
555 #
554 # The `toproceed` list acts as our call stack. Every node we search
556 # The `toproceed` list acts as our call stack. Every node we search
555 # successors sets for is stacked there.
557 # successors sets for is stacked there.
556 #
558 #
557 # The `stackedset` is a set version of this stack, used to check if a node is
559 # The `stackedset` is a set version of this stack, used to check if a node is
558 # already stacked. This check is used to detect cycles and prevent infinite
560 # already stacked. This check is used to detect cycles and prevent infinite
559 # loops.
561 # loops.
560 #
562 #
561 # The successors sets of all nodes are stored in the `cache` dictionary.
563 # The successors sets of all nodes are stored in the `cache` dictionary.
562 #
564 #
563 # After this while loop ends we use the cache to return the successors sets
565 # After this while loop ends we use the cache to return the successors sets
564 # for the node requested by the caller.
566 # for the node requested by the caller.
565 while toproceed:
567 while toproceed:
566 # Every iteration tries to compute the successors sets of the topmost
568 # Every iteration tries to compute the successors sets of the topmost
567 # node of the stack: CURRENT.
569 # node of the stack: CURRENT.
568 #
570 #
569 # There are four possible outcomes:
571 # There are four possible outcomes:
570 #
572 #
571 # 1) We already know the successors sets of CURRENT:
573 # 1) We already know the successors sets of CURRENT:
572 # -> mission accomplished, pop it from the stack.
574 # -> mission accomplished, pop it from the stack.
573 # 2) Node is not obsolete:
575 # 2) Node is not obsolete:
574 # -> the node is its own successors sets. Add it to the cache.
576 # -> the node is its own successors sets. Add it to the cache.
575 # 3) We do not know successors set of direct successors of CURRENT:
577 # 3) We do not know successors set of direct successors of CURRENT:
576 # -> We add those successors to the stack.
578 # -> We add those successors to the stack.
577 # 4) We know successors sets of all direct successors of CURRENT:
579 # 4) We know successors sets of all direct successors of CURRENT:
578 # -> We can compute CURRENT successors set and add it to the
580 # -> We can compute CURRENT successors set and add it to the
579 # cache.
581 # cache.
580 #
582 #
581 current = toproceed[-1]
583 current = toproceed[-1]
582 if current in cache:
584 if current in cache:
583 # case (1): We already know the successors sets
585 # case (1): We already know the successors sets
584 stackedset.remove(toproceed.pop())
586 stackedset.remove(toproceed.pop())
585 elif current not in succmarkers:
587 elif current not in succmarkers:
586 # case (2): The node is not obsolete.
588 # case (2): The node is not obsolete.
587 if current in repo:
589 if current in repo:
588 # We have a valid last successor.
590 # We have a valid last successor.
589 cache[current] = [(current,)]
591 cache[current] = [(current,)]
590 else:
592 else:
591 # Final obsolete version is unknown locally.
593 # Final obsolete version is unknown locally.
592 # Do not count that as a valid successor.
594 # Do not count that as a valid successor.
593 cache[current] = []
595 cache[current] = []
594 else:
596 else:
595 # cases (3) and (4)
597 # cases (3) and (4)
596 #
598 #
597 # We proceed in two phases. Phase 1 aims to distinguish case (3)
599 # We proceed in two phases. Phase 1 aims to distinguish case (3)
598 # from case (4):
600 # from case (4):
599 #
601 #
600 # For each direct successor of CURRENT, we check whether its
602 # For each direct successor of CURRENT, we check whether its
601 # successors sets are known. If they are not, we stack the
603 # successors sets are known. If they are not, we stack the
602 # unknown node and proceed to the next iteration of the while
604 # unknown node and proceed to the next iteration of the while
603 # loop. (case 3)
605 # loop. (case 3)
604 #
606 #
605 # During this step, we may detect obsolescence cycles: a node
607 # During this step, we may detect obsolescence cycles: a node
606 # with unknown successors sets but already in the call stack.
608 # with unknown successors sets but already in the call stack.
607 # In such a situation, we arbitrarily set the successors sets of
609 # In such a situation, we arbitrarily set the successors sets of
608 # the node to nothing (node pruned) to break the cycle.
610 # the node to nothing (node pruned) to break the cycle.
609 #
611 #
610 # If no break was encountered we proceed to phase 2.
612 # If no break was encountered we proceed to phase 2.
611 #
613 #
612 # Phase 2 computes successors sets of CURRENT (case 4); see details
614 # Phase 2 computes successors sets of CURRENT (case 4); see details
613 # in phase 2 itself.
615 # in phase 2 itself.
614 #
616 #
615 # Note the two levels of iteration in each phase.
617 # Note the two levels of iteration in each phase.
616 # - The first one handles obsolescence markers using CURRENT as
618 # - The first one handles obsolescence markers using CURRENT as
617 # precursor (successors markers of CURRENT).
619 # precursor (successors markers of CURRENT).
618 #
620 #
619 # Having multiple entries here means divergence.
621 # Having multiple entries here means divergence.
620 #
622 #
621 # - The second one handles successors defined in each marker.
623 # - The second one handles successors defined in each marker.
622 #
624 #
623 # Having none means a pruned node, multiple successors mean a split, and
625 # Having none means a pruned node, multiple successors mean a split, and
624 # a single successor is a standard replacement.
626 # a single successor is a standard replacement.
625 #
627 #
626 for mark in sorted(succmarkers[current]):
628 for mark in sorted(succmarkers[current]):
627 for suc in mark[1]:
629 for suc in mark[1]:
628 if suc not in cache:
630 if suc not in cache:
629 if suc in stackedset:
631 if suc in stackedset:
630 # cycle breaking
632 # cycle breaking
631 cache[suc] = []
633 cache[suc] = []
632 else:
634 else:
633 # case (3) If we have not computed successors sets
635 # case (3) If we have not computed successors sets
634 # of one of those successors we add it to the
636 # of one of those successors we add it to the
635 # `toproceed` stack and stop all work for this
637 # `toproceed` stack and stop all work for this
636 # iteration.
638 # iteration.
637 toproceed.append(suc)
639 toproceed.append(suc)
638 stackedset.add(suc)
640 stackedset.add(suc)
639 break
641 break
640 else:
642 else:
641 continue
643 continue
642 break
644 break
643 else:
645 else:
644 # case (4): we know all successors sets of all direct
646 # case (4): we know all successors sets of all direct
645 # successors
647 # successors
646 #
648 #
647 # Successors set contributed by each marker depends on the
649 # Successors set contributed by each marker depends on the
648 # successors sets of all its "successors" nodes.
650 # successors sets of all its "successors" nodes.
649 #
651 #
650 # Each different marker is a divergence in the obsolescence
652 # Each different marker is a divergence in the obsolescence
651 # history. It contributes successors sets distinct from other
653 # history. It contributes successors sets distinct from other
652 # markers.
654 # markers.
653 #
655 #
654 # Within a marker, a successor may have divergent successors
656 # Within a marker, a successor may have divergent successors
655 # sets. In such a case, the marker will contribute multiple
657 # sets. In such a case, the marker will contribute multiple
656 # divergent successors sets. If multiple successors have
658 # divergent successors sets. If multiple successors have
657 # divergent successors sets, a Cartesian product is used.
659 # divergent successors sets, a Cartesian product is used.
658 #
660 #
659 # At the end we post-process successors sets to remove
661 # At the end we post-process successors sets to remove
660 # duplicated entries and successors sets that are strict subsets of
662 # duplicated entries and successors sets that are strict subsets of
661 # another one.
663 # another one.
662 succssets = []
664 succssets = []
663 for mark in sorted(succmarkers[current]):
665 for mark in sorted(succmarkers[current]):
664 # successors sets contributed by this marker
666 # successors sets contributed by this marker
665 markss = [[]]
667 markss = [[]]
666 for suc in mark[1]:
668 for suc in mark[1]:
667 # Cartesian product with previous successors
669 # Cartesian product with previous successors
668 productresult = []
670 productresult = []
669 for prefix in markss:
671 for prefix in markss:
670 for suffix in cache[suc]:
672 for suffix in cache[suc]:
671 newss = list(prefix)
673 newss = list(prefix)
672 for part in suffix:
674 for part in suffix:
673 # do not duplicate entries in the successors set;
675 # do not duplicate entries in the successors set;
674 # the first entry wins.
676 # the first entry wins.
675 if part not in newss:
677 if part not in newss:
676 newss.append(part)
678 newss.append(part)
677 productresult.append(newss)
679 productresult.append(newss)
678 markss = productresult
680 markss = productresult
679 succssets.extend(markss)
681 succssets.extend(markss)
680 # remove duplicated and subset
682 # remove duplicated and subset
681 seen = []
683 seen = []
682 final = []
684 final = []
683 candidate = sorted(((set(s), s) for s in succssets if s),
685 candidate = sorted(((set(s), s) for s in succssets if s),
684 key=lambda x: len(x[1]), reverse=True)
686 key=lambda x: len(x[1]), reverse=True)
685 for setversion, listversion in candidate:
687 for setversion, listversion in candidate:
686 for seenset in seen:
688 for seenset in seen:
687 if setversion.issubset(seenset):
689 if setversion.issubset(seenset):
688 break
690 break
689 else:
691 else:
690 final.append(listversion)
692 final.append(listversion)
691 seen.append(setversion)
693 seen.append(setversion)
692 final.reverse() # put small successors set first
694 final.reverse() # put small successors set first
693 cache[current] = final
695 cache[current] = final
694 return cache[initialnode]
696 return cache[initialnode]
695
697
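The cache contract described in the docstring above is easiest to see from the
caller's side. A minimal, illustrative sketch (not part of this changeset; the
helper name `allsuccessorssets` is invented) of several calls sharing one cache
dictionary:

# Illustrative sketch only: share a single cache dict across calls, as the
# successorssets() docstring requires for acceptable performance.
from mercurial import obsolete

def allsuccessorssets(repo, nodes):
    """Map each node to its successors sets, reusing one cache (sketch)."""
    cache = {}  # updated in place by successorssets()
    result = {}
    for node in nodes:
        result[node] = obsolete.successorssets(repo, node, cache)
    return result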
696 def _knownrevs(repo, nodes):
698 def _knownrevs(repo, nodes):
697 """yield revision numbers of known nodes passed as parameters
699 """yield revision numbers of known nodes passed as parameters
698
700
699 Unknown revisions are silently ignored."""
701 Unknown revisions are silently ignored."""
700 torev = repo.changelog.nodemap.get
702 torev = repo.changelog.nodemap.get
701 for n in nodes:
703 for n in nodes:
702 rev = torev(n)
704 rev = torev(n)
703 if rev is not None:
705 if rev is not None:
704 yield rev
706 yield rev
705
707
706 # mapping of 'set-name' -> <function to compute this set>
708 # mapping of 'set-name' -> <function to compute this set>
707 cachefuncs = {}
709 cachefuncs = {}
708 def cachefor(name):
710 def cachefor(name):
709 """Decorator to register a function as computing the cache for a set"""
711 """Decorator to register a function as computing the cache for a set"""
710 def decorator(func):
712 def decorator(func):
711 assert name not in cachefuncs
713 assert name not in cachefuncs
712 cachefuncs[name] = func
714 cachefuncs[name] = func
713 return func
715 return func
714 return decorator
716 return decorator
715
717
716 def getrevs(repo, name):
718 def getrevs(repo, name):
717 """Return the set of revisions that belong to the <name> set
719 """Return the set of revisions that belong to the <name> set
718
720
719 Such access may compute the set and cache it for future use"""
721 Such access may compute the set and cache it for future use"""
720 repo = repo.unfiltered()
722 repo = repo.unfiltered()
721 if not repo.obsstore:
723 if not repo.obsstore:
722 return ()
724 return ()
723 if name not in repo.obsstore.caches:
725 if name not in repo.obsstore.caches:
724 repo.obsstore.caches[name] = cachefuncs[name](repo)
726 repo.obsstore.caches[name] = cachefuncs[name](repo)
725 return repo.obsstore.caches[name]
727 return repo.obsstore.caches[name]
726
728
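To make the registration pattern above concrete, here is an illustrative sketch
(the set name 'orphanparents' and its computation are invented for the example,
not part of this changeset) of declaring an extra cached set with `cachefor`
and reading it back through `getrevs`:

# Illustrative sketch only: register an additional cached set.
@cachefor('orphanparents')  # invented name, for illustration
def _computeorphanparentsset(repo):
    """obsolete revisions that are parents of unstable ones (sketch)"""
    obs = getrevs(repo, 'obsolete')
    unstable = getrevs(repo, 'unstable')
    cl = repo.changelog
    return set(p for r in unstable for p in cl.parentrevs(r) if p in obs)

# Later, computed once and then served from repo.obsstore.caches:
#     revs = getrevs(repo, 'orphanparents')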
727 # To keep things simple, we need to invalidate the obsolescence caches when:
729 # To keep things simple, we need to invalidate the obsolescence caches when:
728 #
730 #
729 # - a new changeset is added
731 # - a new changeset is added
730 # - the public phase is changed
732 # - the public phase is changed
731 # - obsolescence markers are added
733 # - obsolescence markers are added
732 # - strip is used on a repo
734 # - strip is used on a repo
733 def clearobscaches(repo):
735 def clearobscaches(repo):
734 """Remove all obsolescence-related caches from a repo
736 """Remove all obsolescence-related caches from a repo
735
737
736 This removes all caches in the obsstore if the obsstore already exists on the
738 This removes all caches in the obsstore if the obsstore already exists on the
737 repo.
739 repo.
738
740
739 (We could be smarter here given the exact event that triggers the cache
741 (We could be smarter here given the exact event that triggers the cache
740 clearing)"""
742 clearing)"""
741 # only clear the cache if there is obsstore data in this repo
743 # only clear the cache if there is obsstore data in this repo
742 if 'obsstore' in repo._filecache:
744 if 'obsstore' in repo._filecache:
743 repo.obsstore.caches.clear()
745 repo.obsstore.caches.clear()
744
746
745 @cachefor('obsolete')
747 @cachefor('obsolete')
746 def _computeobsoleteset(repo):
748 def _computeobsoleteset(repo):
747 """the set of obsolete revisions"""
749 """the set of obsolete revisions"""
748 obs = set()
750 obs = set()
749 getrev = repo.changelog.nodemap.get
751 getrev = repo.changelog.nodemap.get
750 getphase = repo._phasecache.phase
752 getphase = repo._phasecache.phase
751 for n in repo.obsstore.successors:
753 for n in repo.obsstore.successors:
752 rev = getrev(n)
754 rev = getrev(n)
753 if rev is not None and getphase(repo, rev):
755 if rev is not None and getphase(repo, rev):
754 obs.add(rev)
756 obs.add(rev)
755 return obs
757 return obs
756
758
757 @cachefor('unstable')
759 @cachefor('unstable')
758 def _computeunstableset(repo):
760 def _computeunstableset(repo):
759 """the set of non obsolete revisions with obsolete parents"""
761 """the set of non obsolete revisions with obsolete parents"""
760 # revset is not efficient enough here
762 # revset is not efficient enough here
761 # we do (obsolete()::) - obsolete() by hand
763 # we do (obsolete()::) - obsolete() by hand
762 obs = getrevs(repo, 'obsolete')
764 obs = getrevs(repo, 'obsolete')
763 if not obs:
765 if not obs:
764 return set()
766 return set()
765 cl = repo.changelog
767 cl = repo.changelog
766 return set(r for r in cl.descendants(obs) if r not in obs)
768 return set(r for r in cl.descendants(obs) if r not in obs)
767
769
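For comparison, the revset mentioned in the comment above would read roughly as
follows; an illustrative sketch of the equivalent (but slower) query, not code
used by this changeset:

# Illustrative sketch only: the revset form that _computeunstableset avoids.
def unstablerevs_via_revset(repo):
    return set(repo.revs('(obsolete()::) - obsolete()'))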
768 @cachefor('suspended')
770 @cachefor('suspended')
769 def _computesuspendedset(repo):
771 def _computesuspendedset(repo):
770 """the set of obsolete parents with non obsolete descendants"""
772 """the set of obsolete parents with non obsolete descendants"""
771 suspended = repo.changelog.ancestors(getrevs(repo, 'unstable'))
773 suspended = repo.changelog.ancestors(getrevs(repo, 'unstable'))
772 return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
774 return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
773
775
774 @cachefor('extinct')
776 @cachefor('extinct')
775 def _computeextinctset(repo):
777 def _computeextinctset(repo):
776 """the set of obsolete parents without non obsolete descendants"""
778 """the set of obsolete parents without non obsolete descendants"""
777 return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
779 return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
778
780
779
781
780 @cachefor('bumped')
782 @cachefor('bumped')
781 def _computebumpedset(repo):
783 def _computebumpedset(repo):
782 """the set of revs trying to obsolete public revisions"""
784 """the set of revs trying to obsolete public revisions"""
783 bumped = set()
785 bumped = set()
784 # util function (avoid attribute lookup in the loop)
786 # util function (avoid attribute lookup in the loop)
785 phase = repo._phasecache.phase # would be faster to grab the full list
787 phase = repo._phasecache.phase # would be faster to grab the full list
786 public = phases.public
788 public = phases.public
787 cl = repo.changelog
789 cl = repo.changelog
788 torev = cl.nodemap.get
790 torev = cl.nodemap.get
789 obs = getrevs(repo, 'obsolete')
791 obs = getrevs(repo, 'obsolete')
790 for rev in repo:
792 for rev in repo:
791 # We only evaluate mutable, non-obsolete revisions
793 # We only evaluate mutable, non-obsolete revisions
792 if (public < phase(repo, rev)) and (rev not in obs):
794 if (public < phase(repo, rev)) and (rev not in obs):
793 node = cl.node(rev)
795 node = cl.node(rev)
794 # (future) A cache of precursors may be worthwhile if splits are very common
796 # (future) A cache of precursors may be worthwhile if splits are very common
795 for pnode in allprecursors(repo.obsstore, [node],
797 for pnode in allprecursors(repo.obsstore, [node],
796 ignoreflags=bumpedfix):
798 ignoreflags=bumpedfix):
797 prev = torev(pnode) # unfiltered! but so is phasecache
799 prev = torev(pnode) # unfiltered! but so is phasecache
798 if (prev is not None) and (phase(repo, prev) <= public):
800 if (prev is not None) and (phase(repo, prev) <= public):
799 # we have a public precursor
801 # we have a public precursor
800 bumped.add(rev)
802 bumped.add(rev)
801 break # Next draft!
803 break # Next draft!
802 return bumped
804 return bumped
803
805
804 @cachefor('divergent')
806 @cachefor('divergent')
805 def _computedivergentset(repo):
807 def _computedivergentset(repo):
806 """the set of revs that compete to be the final successors of some revision.
808 """the set of revs that compete to be the final successors of some revision.
807 """
809 """
808 divergent = set()
810 divergent = set()
809 obsstore = repo.obsstore
811 obsstore = repo.obsstore
810 newermap = {}
812 newermap = {}
811 for ctx in repo.set('(not public()) - obsolete()'):
813 for ctx in repo.set('(not public()) - obsolete()'):
812 mark = obsstore.precursors.get(ctx.node(), ())
814 mark = obsstore.precursors.get(ctx.node(), ())
813 toprocess = set(mark)
815 toprocess = set(mark)
814 while toprocess:
816 while toprocess:
815 prec = toprocess.pop()[0]
817 prec = toprocess.pop()[0]
816 if prec not in newermap:
818 if prec not in newermap:
817 successorssets(repo, prec, newermap)
819 successorssets(repo, prec, newermap)
818 newer = [n for n in newermap[prec] if n]
820 newer = [n for n in newermap[prec] if n]
819 if len(newer) > 1:
821 if len(newer) > 1:
820 divergent.add(ctx.rev())
822 divergent.add(ctx.rev())
821 break
823 break
822 toprocess.update(obsstore.precursors.get(prec, ()))
824 toprocess.update(obsstore.precursors.get(prec, ()))
823 return divergent
825 return divergent
824
826
825
827
826 def createmarkers(repo, relations, flag=0, metadata=None):
828 def createmarkers(repo, relations, flag=0, metadata=None):
827 """Add obsolete markers between changesets in a repo
829 """Add obsolete markers between changesets in a repo
828
830
829 <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
831 <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
830 tuples. `old` and each `new` are changectx objects. metadata is an optional
832 tuples. `old` and each `new` are changectx objects. metadata is an optional
831 dictionary containing metadata for this marker only. It is merged with the
833 dictionary containing metadata for this marker only. It is merged with the
832 global metadata specified through the `metadata` argument of this function.
834 global metadata specified through the `metadata` argument of this function.
833
835
834 Trying to obsolete a public changeset will raise an exception.
836 Trying to obsolete a public changeset will raise an exception.
835
837
836 The current user and date are used unless specified otherwise in the
838 The current user and date are used unless specified otherwise in the
837 metadata argument.
839 metadata argument.
838
840
839 This function operates within a transaction of its own, but does
841 This function operates within a transaction of its own, but does
840 not take any lock on the repo.
842 not take any lock on the repo.
841 """
843 """
842 # prepare metadata
844 # prepare metadata
843 if metadata is None:
845 if metadata is None:
844 metadata = {}
846 metadata = {}
845 if 'date' not in metadata:
847 if 'date' not in metadata:
846 metadata['date'] = '%i %i' % util.makedate()
848 metadata['date'] = '%i %i' % util.makedate()
847 if 'user' not in metadata:
849 if 'user' not in metadata:
848 metadata['user'] = repo.ui.username()
850 metadata['user'] = repo.ui.username()
849 tr = repo.transaction('add-obsolescence-marker')
851 tr = repo.transaction('add-obsolescence-marker')
850 try:
852 try:
851 for rel in relations:
853 for rel in relations:
852 prec = rel[0]
854 prec = rel[0]
853 sucs = rel[1]
855 sucs = rel[1]
854 localmetadata = metadata.copy()
856 localmetadata = metadata.copy()
855 if 2 < len(rel):
857 if 2 < len(rel):
856 localmetadata.update(rel[2])
858 localmetadata.update(rel[2])
857
859
858 if not prec.mutable():
860 if not prec.mutable():
859 raise util.Abort("cannot obsolete immutable changeset: %s"
861 raise util.Abort("cannot obsolete immutable changeset: %s"
860 % prec)
862 % prec)
861 nprec = prec.node()
863 nprec = prec.node()
862 nsucs = tuple(s.node() for s in sucs)
864 nsucs = tuple(s.node() for s in sucs)
863 if nprec in nsucs:
865 if nprec in nsucs:
864 raise util.Abort("changeset %s cannot obsolete itself" % prec)
866 raise util.Abort("changeset %s cannot obsolete itself" % prec)
865 repo.obsstore.create(tr, nprec, nsucs, flag, localmetadata)
867 repo.obsstore.create(tr, nprec, nsucs, flag, localmetadata)
866 repo.filteredrevcache.clear()
868 repo.filteredrevcache.clear()
867 tr.close()
869 tr.close()
868 finally:
870 finally:
869 tr.release()
871 tr.release()
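A minimal caller-side sketch of the API described above (illustrative only; the
wrapper name `markamended` and the 'note' metadata key are invented for the
example):

# Illustrative sketch only: record that oldctx was rewritten into newctx,
# attaching one per-marker metadata entry as allowed by createmarkers().
from mercurial import obsolete

def markamended(repo, oldctx, newctx):
    relations = [(oldctx, (newctx,), {'note': 'amended'})]
    obsolete.createmarkers(repo, relations)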
@@ -1,348 +1,355 b''
1 Criss cross merging
1 Criss cross merging
2
2
3 $ hg init criss-cross
3 $ hg init criss-cross
4 $ cd criss-cross
4 $ cd criss-cross
5 $ echo '0 base' > f1
5 $ echo '0 base' > f1
6 $ echo '0 base' > f2
6 $ echo '0 base' > f2
7 $ hg ci -Aqm '0 base'
7 $ hg ci -Aqm '0 base'
8
8
9 $ echo '1 first change' > f1
9 $ echo '1 first change' > f1
10 $ hg ci -m '1 first change f1'
10 $ hg ci -m '1 first change f1'
11
11
12 $ hg up -qr0
12 $ hg up -qr0
13 $ echo '2 first change' > f2
13 $ echo '2 first change' > f2
14 $ hg ci -qm '2 first change f2'
14 $ hg ci -qm '2 first change f2'
15
15
16 $ hg merge -qr 1
16 $ hg merge -qr 1
17 $ hg ci -m '3 merge'
17 $ hg ci -m '3 merge'
18
18
19 $ hg up -qr2
19 $ hg up -qr2
20 $ hg merge -qr1
20 $ hg merge -qr1
21 $ hg ci -qm '4 merge'
21 $ hg ci -qm '4 merge'
22
22
23 $ echo '5 second change' > f1
23 $ echo '5 second change' > f1
24 $ hg ci -m '5 second change f1'
24 $ hg ci -m '5 second change f1'
25
25
26 $ hg up -r3
26 $ hg up -r3
27 note: using 0f6b37dbe527 as ancestor of adfe50279922 and cf89f02107e5
28 alternatively, use --config merge.preferancestor=40663881a6dd
29 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
30 $ echo '6 second change' > f2
28 $ echo '6 second change' > f2
31 $ hg ci -m '6 second change f2'
29 $ hg ci -m '6 second change f2'
32
30
33 $ hg log -G
31 $ hg log -G
34 @ changeset: 6:3b08d01b0ab5
32 @ changeset: 6:3b08d01b0ab5
35 | tag: tip
33 | tag: tip
36 | parent: 3:cf89f02107e5
34 | parent: 3:cf89f02107e5
37 | user: test
35 | user: test
38 | date: Thu Jan 01 00:00:00 1970 +0000
36 | date: Thu Jan 01 00:00:00 1970 +0000
39 | summary: 6 second change f2
37 | summary: 6 second change f2
40 |
38 |
41 | o changeset: 5:adfe50279922
39 | o changeset: 5:adfe50279922
42 | | user: test
40 | | user: test
43 | | date: Thu Jan 01 00:00:00 1970 +0000
41 | | date: Thu Jan 01 00:00:00 1970 +0000
44 | | summary: 5 second change f1
42 | | summary: 5 second change f1
45 | |
43 | |
46 | o changeset: 4:7d3e55501ae6
44 | o changeset: 4:7d3e55501ae6
47 | |\ parent: 2:40663881a6dd
45 | |\ parent: 2:40663881a6dd
48 | | | parent: 1:0f6b37dbe527
46 | | | parent: 1:0f6b37dbe527
49 | | | user: test
47 | | | user: test
50 | | | date: Thu Jan 01 00:00:00 1970 +0000
48 | | | date: Thu Jan 01 00:00:00 1970 +0000
51 | | | summary: 4 merge
49 | | | summary: 4 merge
52 | | |
50 | | |
53 o---+ changeset: 3:cf89f02107e5
51 o---+ changeset: 3:cf89f02107e5
54 | | | parent: 2:40663881a6dd
52 | | | parent: 2:40663881a6dd
55 |/ / parent: 1:0f6b37dbe527
53 |/ / parent: 1:0f6b37dbe527
56 | | user: test
54 | | user: test
57 | | date: Thu Jan 01 00:00:00 1970 +0000
55 | | date: Thu Jan 01 00:00:00 1970 +0000
58 | | summary: 3 merge
56 | | summary: 3 merge
59 | |
57 | |
60 | o changeset: 2:40663881a6dd
58 | o changeset: 2:40663881a6dd
61 | | parent: 0:40494bf2444c
59 | | parent: 0:40494bf2444c
62 | | user: test
60 | | user: test
63 | | date: Thu Jan 01 00:00:00 1970 +0000
61 | | date: Thu Jan 01 00:00:00 1970 +0000
64 | | summary: 2 first change f2
62 | | summary: 2 first change f2
65 | |
63 | |
66 o | changeset: 1:0f6b37dbe527
64 o | changeset: 1:0f6b37dbe527
67 |/ user: test
65 |/ user: test
68 | date: Thu Jan 01 00:00:00 1970 +0000
66 | date: Thu Jan 01 00:00:00 1970 +0000
69 | summary: 1 first change f1
67 | summary: 1 first change f1
70 |
68 |
71 o changeset: 0:40494bf2444c
69 o changeset: 0:40494bf2444c
72 user: test
70 user: test
73 date: Thu Jan 01 00:00:00 1970 +0000
71 date: Thu Jan 01 00:00:00 1970 +0000
74 summary: 0 base
72 summary: 0 base
75
73
76
74
77 $ hg merge -v --debug --tool internal:dump 5
75 $ hg merge -v --debug --tool internal:dump 5
78 note: using 0f6b37dbe527 as ancestor of 3b08d01b0ab5 and adfe50279922
76 note: using 0f6b37dbe527 as ancestor of 3b08d01b0ab5 and adfe50279922
79 alternatively, use --config merge.preferancestor=40663881a6dd
77 alternatively, use --config merge.preferancestor=40663881a6dd
80 searching for copies back to rev 3
78 searching for copies back to rev 3
81 resolving manifests
79 resolving manifests
82 branchmerge: True, force: False, partial: False
80 branchmerge: True, force: False, partial: False
83 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
81 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
84 preserving f2 for resolve of f2
82 preserving f2 for resolve of f2
85 f1: remote is newer -> g
83 f1: remote is newer -> g
86 getting f1
84 getting f1
87 updating: f1 1/2 files (50.00%)
85 updating: f1 1/2 files (50.00%)
88 f2: versions differ -> m
86 f2: versions differ -> m
89 updating: f2 2/2 files (100.00%)
87 updating: f2 2/2 files (100.00%)
90 picked tool 'internal:dump' for f2 (binary False symlink False)
88 picked tool 'internal:dump' for f2 (binary False symlink False)
91 merging f2
89 merging f2
92 my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@40494bf2444c
90 my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@40494bf2444c
93 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
91 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
94 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
92 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
95 [1]
93 [1]
96
94
97 $ head *
95 $ head *
98 ==> f1 <==
96 ==> f1 <==
99 5 second change
97 5 second change
100
98
101 ==> f2 <==
99 ==> f2 <==
102 6 second change
100 6 second change
103
101
104 ==> f2.base <==
102 ==> f2.base <==
105 0 base
103 0 base
106
104
107 ==> f2.local <==
105 ==> f2.local <==
108 6 second change
106 6 second change
109
107
110 ==> f2.orig <==
108 ==> f2.orig <==
111 6 second change
109 6 second change
112
110
113 ==> f2.other <==
111 ==> f2.other <==
114 2 first change
112 2 first change
115
113
116 $ hg up -qC .
114 $ hg up -qC .
117 $ hg merge -v --tool internal:dump 5 --config merge.preferancestor="null 40663881 3b08d"
115 $ hg merge -v --tool internal:dump 5 --config merge.preferancestor="null 40663881 3b08d"
118 note: using 40663881a6dd as ancestor of 3b08d01b0ab5 and adfe50279922
116 note: using 40663881a6dd as ancestor of 3b08d01b0ab5 and adfe50279922
119 alternatively, use --config merge.preferancestor=0f6b37dbe527
117 alternatively, use --config merge.preferancestor=0f6b37dbe527
120 resolving manifests
118 resolving manifests
121 merging f1
119 merging f1
122 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
120 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
123 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
121 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
124 [1]
122 [1]
125
123
126 Redo merge with merge.preferancestor="*" to enable bid merge
124 Redo merge with merge.preferancestor="*" to enable bid merge
127
125
128 $ rm f*
126 $ rm f*
129 $ hg up -qC .
127 $ hg up -qC .
130 $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor="*"
128 $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor="*"
131 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
129 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
132
130
133 calculating bids for ancestor 0f6b37dbe527
131 calculating bids for ancestor 0f6b37dbe527
134 searching for copies back to rev 3
132 searching for copies back to rev 3
135 resolving manifests
133 resolving manifests
136 branchmerge: True, force: False, partial: False
134 branchmerge: True, force: False, partial: False
137 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
135 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
138 f1: remote is newer -> g
136 f1: remote is newer -> g
139 f2: versions differ -> m
137 f2: versions differ -> m
140
138
141 calculating bids for ancestor 40663881a6dd
139 calculating bids for ancestor 40663881a6dd
142 searching for copies back to rev 3
140 searching for copies back to rev 3
143 resolving manifests
141 resolving manifests
144 branchmerge: True, force: False, partial: False
142 branchmerge: True, force: False, partial: False
145 ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
143 ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
146 f2: keep -> k
144 f2: keep -> k
147 f1: versions differ -> m
145 f1: versions differ -> m
148
146
149 auction for merging merge bids
147 auction for merging merge bids
150 f1: picking 'get' action
148 f1: picking 'get' action
151 f2: picking 'keep' action
149 f2: picking 'keep' action
152 end of auction
150 end of auction
153
151
154 f1: remote is newer -> g
152 f1: remote is newer -> g
155 getting f1
153 getting f1
156 updating: f1 1/1 files (100.00%)
154 updating: f1 1/1 files (100.00%)
157 f2: keep -> k
155 f2: keep -> k
158 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
156 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
159 (branch merge, don't forget to commit)
157 (branch merge, don't forget to commit)
160
158
161 $ head *
159 $ head *
162 ==> f1 <==
160 ==> f1 <==
163 5 second change
161 5 second change
164
162
165 ==> f2 <==
163 ==> f2 <==
166 6 second change
164 6 second change
167
165
168
166
169 The other way around:
167 The other way around:
170
168
171 $ hg up -C -r5
169 $ hg up -C -r5
172 note: using 0f6b37dbe527 as ancestor of 3b08d01b0ab5 and adfe50279922
173 alternatively, use --config merge.preferancestor=40663881a6dd
174 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
170 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
175 $ hg merge -v --debug --config merge.preferancestor="*"
171 $ hg merge -v --debug --config merge.preferancestor="*"
176 note: merging adfe50279922+ and 3b08d01b0ab5 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
172 note: merging adfe50279922+ and 3b08d01b0ab5 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
177
173
178 calculating bids for ancestor 0f6b37dbe527
174 calculating bids for ancestor 0f6b37dbe527
179 searching for copies back to rev 3
175 searching for copies back to rev 3
180 resolving manifests
176 resolving manifests
181 branchmerge: True, force: False, partial: False
177 branchmerge: True, force: False, partial: False
182 ancestor: 0f6b37dbe527, local: adfe50279922+, remote: 3b08d01b0ab5
178 ancestor: 0f6b37dbe527, local: adfe50279922+, remote: 3b08d01b0ab5
183 f1: keep -> k
179 f1: keep -> k
184 f2: versions differ -> m
180 f2: versions differ -> m
185
181
186 calculating bids for ancestor 40663881a6dd
182 calculating bids for ancestor 40663881a6dd
187 searching for copies back to rev 3
183 searching for copies back to rev 3
188 resolving manifests
184 resolving manifests
189 branchmerge: True, force: False, partial: False
185 branchmerge: True, force: False, partial: False
190 ancestor: 40663881a6dd, local: adfe50279922+, remote: 3b08d01b0ab5
186 ancestor: 40663881a6dd, local: adfe50279922+, remote: 3b08d01b0ab5
191 f2: remote is newer -> g
187 f2: remote is newer -> g
192 f1: versions differ -> m
188 f1: versions differ -> m
193
189
194 auction for merging merge bids
190 auction for merging merge bids
195 f1: picking 'keep' action
191 f1: picking 'keep' action
196 f2: picking 'get' action
192 f2: picking 'get' action
197 end of auction
193 end of auction
198
194
199 f2: remote is newer -> g
195 f2: remote is newer -> g
200 getting f2
196 getting f2
201 updating: f2 1/1 files (100.00%)
197 updating: f2 1/1 files (100.00%)
202 f1: keep -> k
198 f1: keep -> k
203 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
199 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
204 (branch merge, don't forget to commit)
200 (branch merge, don't forget to commit)
205
201
206 $ head *
202 $ head *
207 ==> f1 <==
203 ==> f1 <==
208 5 second change
204 5 second change
209
205
210 ==> f2 <==
206 ==> f2 <==
211 6 second change
207 6 second change
212
208
213 Verify how the output looks and how verbose it is:
209 Verify how the output looks and how verbose it is:
214
210
215 $ hg up -qC
211 $ hg up -qC
216 $ hg merge --config merge.preferancestor="*"
212 $ hg merge --config merge.preferancestor="*"
217 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
213 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
218 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
214 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
219 (branch merge, don't forget to commit)
215 (branch merge, don't forget to commit)
220
216
221 $ hg up -qC
217 $ hg up -qC
222 $ hg merge -v --config merge.preferancestor="*"
218 $ hg merge -v --config merge.preferancestor="*"
223 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
219 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
224
220
225 calculating bids for ancestor 0f6b37dbe527
221 calculating bids for ancestor 0f6b37dbe527
226 resolving manifests
222 resolving manifests
227
223
228 calculating bids for ancestor 40663881a6dd
224 calculating bids for ancestor 40663881a6dd
229 resolving manifests
225 resolving manifests
230
226
231 auction for merging merge bids
227 auction for merging merge bids
232 f1: picking 'get' action
228 f1: picking 'get' action
233 f2: picking 'keep' action
229 f2: picking 'keep' action
234 end of auction
230 end of auction
235
231
236 getting f1
232 getting f1
237 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
233 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
238 (branch merge, don't forget to commit)
234 (branch merge, don't forget to commit)
239
235
240 $ hg up -qC
236 $ hg up -qC
241 $ hg merge -v --debug --config merge.preferancestor="*"
237 $ hg merge -v --debug --config merge.preferancestor="*"
242 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
238 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
243
239
244 calculating bids for ancestor 0f6b37dbe527
240 calculating bids for ancestor 0f6b37dbe527
245 searching for copies back to rev 3
241 searching for copies back to rev 3
246 resolving manifests
242 resolving manifests
247 branchmerge: True, force: False, partial: False
243 branchmerge: True, force: False, partial: False
248 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
244 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
249 f1: remote is newer -> g
245 f1: remote is newer -> g
250 f2: versions differ -> m
246 f2: versions differ -> m
251
247
252 calculating bids for ancestor 40663881a6dd
248 calculating bids for ancestor 40663881a6dd
253 searching for copies back to rev 3
249 searching for copies back to rev 3
254 resolving manifests
250 resolving manifests
255 branchmerge: True, force: False, partial: False
251 branchmerge: True, force: False, partial: False
256 ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
252 ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
257 f2: keep -> k
253 f2: keep -> k
258 f1: versions differ -> m
254 f1: versions differ -> m
259
255
260 auction for merging merge bids
256 auction for merging merge bids
261 f1: picking 'get' action
257 f1: picking 'get' action
262 f2: picking 'keep' action
258 f2: picking 'keep' action
263 end of auction
259 end of auction
264
260
265 f1: remote is newer -> g
261 f1: remote is newer -> g
266 getting f1
262 getting f1
267 updating: f1 1/1 files (100.00%)
263 updating: f1 1/1 files (100.00%)
268 f2: keep -> k
264 f2: keep -> k
269 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
265 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
270 (branch merge, don't forget to commit)
266 (branch merge, don't forget to commit)
271
267
272 $ cd ..
268 $ cd ..
273
269
274 http://stackoverflow.com/questions/9350005/how-do-i-specify-a-merge-base-to-use-in-a-hg-merge/9430810
270 http://stackoverflow.com/questions/9350005/how-do-i-specify-a-merge-base-to-use-in-a-hg-merge/9430810
275
271
276 $ hg init ancestor-merging
272 $ hg init ancestor-merging
277 $ cd ancestor-merging
273 $ cd ancestor-merging
278 $ echo a > x
274 $ echo a > x
279 $ hg commit -A -m a x
275 $ hg commit -A -m a x
280 $ hg update -q 0
276 $ hg update -q 0
281 $ echo b >> x
277 $ echo b >> x
282 $ hg commit -m b
278 $ hg commit -m b
283 $ hg update -q 0
279 $ hg update -q 0
284 $ echo c >> x
280 $ echo c >> x
285 $ hg commit -qm c
281 $ hg commit -qm c
286 $ hg update -q 1
282 $ hg update -q 1
287 $ hg merge -q --tool internal:local 2
283 $ hg merge -q --tool internal:local 2
288 $ echo c >> x
284 $ echo c >> x
289 $ hg commit -m bc
285 $ hg commit -m bc
290 $ hg update -q 2
286 $ hg update -q 2
291 $ hg merge -q --tool internal:local 1
287 $ hg merge -q --tool internal:local 1
292 $ echo b >> x
288 $ echo b >> x
293 $ hg commit -qm cb
289 $ hg commit -qm cb
294
290
295 $ hg merge
291 $ hg merge
296 note: using 70008a2163f6 as ancestor of 0d355fdef312 and 4b8b546a3eef
292 note: using 70008a2163f6 as ancestor of 0d355fdef312 and 4b8b546a3eef
297 alternatively, use --config merge.preferancestor=b211bbc6eb3c
293 alternatively, use --config merge.preferancestor=b211bbc6eb3c
298 merging x
294 merging x
299 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
295 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
300 (branch merge, don't forget to commit)
296 (branch merge, don't forget to commit)
301 $ cat x
297 $ cat x
302 a
298 a
303 c
299 c
304 b
300 b
305 c
301 c
306
302
307 $ hg up -qC .
303 $ hg up -qC .
308
304
309 $ hg merge --config merge.preferancestor=b211bbc6eb3c
305 $ hg merge --config merge.preferancestor=b211bbc6eb3c
310 note: using b211bbc6eb3c as ancestor of 0d355fdef312 and 4b8b546a3eef
306 note: using b211bbc6eb3c as ancestor of 0d355fdef312 and 4b8b546a3eef
311 alternatively, use --config merge.preferancestor=70008a2163f6
307 alternatively, use --config merge.preferancestor=70008a2163f6
312 merging x
308 merging x
313 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
309 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
314 (branch merge, don't forget to commit)
310 (branch merge, don't forget to commit)
315 $ cat x
311 $ cat x
316 a
312 a
317 b
313 b
318 c
314 c
319 b
315 b
320
316
321 $ hg up -qC .
317 $ hg up -qC .
322
318
323 $ hg merge -v --config merge.preferancestor="*"
319 $ hg merge -v --config merge.preferancestor="*"
324 note: merging 0d355fdef312+ and 4b8b546a3eef using bids from ancestors 70008a2163f6 and b211bbc6eb3c
320 note: merging 0d355fdef312+ and 4b8b546a3eef using bids from ancestors 70008a2163f6 and b211bbc6eb3c
325
321
326 calculating bids for ancestor 70008a2163f6
322 calculating bids for ancestor 70008a2163f6
327 resolving manifests
323 resolving manifests
328
324
329 calculating bids for ancestor b211bbc6eb3c
325 calculating bids for ancestor b211bbc6eb3c
330 resolving manifests
326 resolving manifests
331
327
332 auction for merging merge bids
328 auction for merging merge bids
333 x: multiple bids for merge action:
329 x: multiple bids for merge action:
334 versions differ -> m
330 versions differ -> m
335 versions differ -> m
331 versions differ -> m
336 x: ambiguous merge - picked m action
332 x: ambiguous merge - picked m action
337 end of auction
333 end of auction
338
334
339 merging x
335 merging x
340 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
336 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
341 (branch merge, don't forget to commit)
337 (branch merge, don't forget to commit)
342 $ cat x
338 $ cat x
343 a
339 a
344 c
340 c
345 b
341 b
346 c
342 c
347
343
344 Verify that the old context ancestor works with / despite preferancestor:
345
346 $ hg log -r 'ancestor(head())' --config merge.preferancestor=1 -T '{rev}\n'
347 1
348 $ hg log -r 'ancestor(head())' --config merge.preferancestor=2 -T '{rev}\n'
349 2
350 $ hg log -r 'ancestor(head())' --config merge.preferancestor=3 -T '{rev}\n'
351 1
352 $ hg log -r 'ancestor(head())' --config merge.preferancestor='*' -T '{rev}\n'
353 1
354
348 $ cd ..
355 $ cd ..
@@ -1,670 +1,678 b''
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [phases]
2 > [phases]
3 > # public changeset are not obsolete
3 > # public changeset are not obsolete
4 > publish=false
4 > publish=false
5 > [ui]
5 > [ui]
6 > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n"
6 > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n"
7 > EOF
7 > EOF
8 $ mkcommit() {
8 $ mkcommit() {
9 > echo "$1" > "$1"
9 > echo "$1" > "$1"
10 > hg add "$1"
10 > hg add "$1"
11 > hg ci -m "add $1"
11 > hg ci -m "add $1"
12 > }
12 > }
13 $ getid() {
13 $ getid() {
14 > hg id --debug --hidden -ir "desc('$1')"
14 > hg id --debug --hidden -ir "desc('$1')"
15 > }
15 > }
16
16
17 $ cat > debugkeys.py <<EOF
17 $ cat > debugkeys.py <<EOF
18 > def reposetup(ui, repo):
18 > def reposetup(ui, repo):
19 > class debugkeysrepo(repo.__class__):
19 > class debugkeysrepo(repo.__class__):
20 > def listkeys(self, namespace):
20 > def listkeys(self, namespace):
21 > ui.write('listkeys %s\n' % (namespace,))
21 > ui.write('listkeys %s\n' % (namespace,))
22 > return super(debugkeysrepo, self).listkeys(namespace)
22 > return super(debugkeysrepo, self).listkeys(namespace)
23 >
23 >
24 > if repo.local():
24 > if repo.local():
25 > repo.__class__ = debugkeysrepo
25 > repo.__class__ = debugkeysrepo
26 > EOF
26 > EOF
27
27
28 $ hg init tmpa
28 $ hg init tmpa
29 $ cd tmpa
29 $ cd tmpa
30 $ mkcommit kill_me
30 $ mkcommit kill_me
31
31
32 Checking that the feature is properly disabled
32 Checking that the feature is properly disabled
33
33
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
35 abort: obsolete feature is not enabled on this repo
35 abort: obsolete feature is not enabled on this repo
36 [255]
36 [255]
37
37
38 Enabling it
38 Enabling it
39
39
40 $ cat > ../obs.py << EOF
40 $ cat > ../obs.py << EOF
41 > import mercurial.obsolete
41 > import mercurial.obsolete
42 > mercurial.obsolete._enabled = True
42 > mercurial.obsolete._enabled = True
43 > EOF
43 > EOF
44 $ echo '[extensions]' >> $HGRCPATH
44 $ echo '[extensions]' >> $HGRCPATH
45 $ echo "obs=${TESTTMP}/obs.py" >> $HGRCPATH
45 $ echo "obs=${TESTTMP}/obs.py" >> $HGRCPATH
46
46
47 Killing a single changeset without replacement
47 Killing a single changeset without replacement
48
48
49 $ hg debugobsolete 0
49 $ hg debugobsolete 0
50 abort: changeset references must be full hexadecimal node identifiers
50 abort: changeset references must be full hexadecimal node identifiers
51 [255]
51 [255]
52 $ hg debugobsolete '00'
52 $ hg debugobsolete '00'
53 abort: changeset references must be full hexadecimal node identifiers
53 abort: changeset references must be full hexadecimal node identifiers
54 [255]
54 [255]
55 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
55 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
56 $ hg debugobsolete
56 $ hg debugobsolete
57 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 {'date': '0 0', 'user': 'babar'}
57 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 {'date': '0 0', 'user': 'babar'}
58
58
59 (test that mercurial is not confused)
59 (test that mercurial is not confused)
60
60
61 $ hg up null --quiet # having 0 as parent prevents it to be hidden
61 $ hg up null --quiet # having 0 as parent prevents it to be hidden
62 $ hg tip
62 $ hg tip
63 -1:000000000000 (public) [tip ]
63 -1:000000000000 (public) [tip ]
64 $ hg up --hidden tip --quiet
64 $ hg up --hidden tip --quiet
65
66 Killing a single changeset with itself should fail
67 (simple local safeguard)
68
69 $ hg debugobsolete `getid kill_me` `getid kill_me`
70 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
71 [255]
72
65 $ cd ..
73 $ cd ..
66
74
67 Killing a single changeset with replacement
75 Killing a single changeset with replacement
68
76
69 $ hg init tmpb
77 $ hg init tmpb
70 $ cd tmpb
78 $ cd tmpb
71 $ mkcommit a
79 $ mkcommit a
72 $ mkcommit b
80 $ mkcommit b
73 $ mkcommit original_c
81 $ mkcommit original_c
74 $ hg up "desc('b')"
82 $ hg up "desc('b')"
75 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
83 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
76 $ mkcommit new_c
84 $ mkcommit new_c
77 created new head
85 created new head
78 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
86 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
79 $ hg debugobsolete --flag 12 `getid original_c` `getid new_c` -d '56 12'
87 $ hg debugobsolete --flag 12 `getid original_c` `getid new_c` -d '56 12'
80 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
88 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
81 2:245bde4270cd add original_c
89 2:245bde4270cd add original_c
82 $ hg debugrevlog -cd
90 $ hg debugrevlog -cd
83 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads
91 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads
84 0 -1 -1 0 59 0 0 0 0 58 58 0 1
92 0 -1 -1 0 59 0 0 0 0 58 58 0 1
85 1 0 -1 59 118 59 59 0 0 58 116 0 1
93 1 0 -1 59 118 59 59 0 0 58 116 0 1
86 2 1 -1 118 204 59 59 59 0 76 192 0 1
94 2 1 -1 118 204 59 59 59 0 76 192 0 1
87 3 1 -1 204 271 204 204 59 0 66 258 0 2
95 3 1 -1 204 271 204 204 59 0 66 258 0 2
88 $ hg debugobsolete
96 $ hg debugobsolete
89 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
97 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
90
98
91 do it again (it reads the obsstore before adding new changesets)
99 do it again (it reads the obsstore before adding new changesets)
92
100
93 $ hg up '.^'
101 $ hg up '.^'
94 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
102 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
95 $ mkcommit new_2_c
103 $ mkcommit new_2_c
96 created new head
104 created new head
97 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
105 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
98 $ hg debugobsolete
106 $ hg debugobsolete
99 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
107 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
100 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
108 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
101
109
102 Register two markers with a missing node
110 Register two markers with a missing node
103
111
104 $ hg up '.^'
112 $ hg up '.^'
105 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
113 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
106 $ mkcommit new_3_c
114 $ mkcommit new_3_c
107 created new head
115 created new head
108 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
116 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
109 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
117 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
110 $ hg debugobsolete
118 $ hg debugobsolete
111 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
119 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
112 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
120 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
113 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
121 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
114 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
122 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
115
123
116 Refuse pathological nullid successors
124 Refuse pathological nullid successors
117 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
125 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
118 transaction abort!
126 transaction abort!
119 rollback completed
127 rollback completed
120 abort: bad obsolescence marker detected: invalid successors nullid
128 abort: bad obsolescence marker detected: invalid successors nullid
121 [255]
129 [255]
122
130
123 Check that graphlog detects that a changeset is obsolete:
131 Check that graphlog detects that a changeset is obsolete:
124
132
125 $ hg log -G
133 $ hg log -G
126 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
134 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
127 |
135 |
128 o 1:7c3bad9141dc (draft) [ ] add b
136 o 1:7c3bad9141dc (draft) [ ] add b
129 |
137 |
130 o 0:1f0dee641bb7 (draft) [ ] add a
138 o 0:1f0dee641bb7 (draft) [ ] add a
131
139
132
140
133 check that heads does not report them
141 check that heads does not report them
134
142
135 $ hg heads
143 $ hg heads
136 5:5601fb93a350 (draft) [tip ] add new_3_c
144 5:5601fb93a350 (draft) [tip ] add new_3_c
137 $ hg heads --hidden
145 $ hg heads --hidden
138 5:5601fb93a350 (draft) [tip ] add new_3_c
146 5:5601fb93a350 (draft) [tip ] add new_3_c
139 4:ca819180edb9 (draft) [ ] add new_2_c
147 4:ca819180edb9 (draft) [ ] add new_2_c
140 3:cdbce2fbb163 (draft) [ ] add new_c
148 3:cdbce2fbb163 (draft) [ ] add new_c
141 2:245bde4270cd (draft) [ ] add original_c
149 2:245bde4270cd (draft) [ ] add original_c
142
150
143
151
144 check that summary does not report them
152 check that summary does not report them
145
153
146 $ hg init ../sink
154 $ hg init ../sink
147 $ echo '[paths]' >> .hg/hgrc
155 $ echo '[paths]' >> .hg/hgrc
148 $ echo 'default=../sink' >> .hg/hgrc
156 $ echo 'default=../sink' >> .hg/hgrc
149 $ hg summary --remote
157 $ hg summary --remote
150 parent: 5:5601fb93a350 tip
158 parent: 5:5601fb93a350 tip
151 add new_3_c
159 add new_3_c
152 branch: default
160 branch: default
153 commit: (clean)
161 commit: (clean)
154 update: (current)
162 update: (current)
155 remote: 3 outgoing
163 remote: 3 outgoing
156
164
157 $ hg summary --remote --hidden
165 $ hg summary --remote --hidden
158 parent: 5:5601fb93a350 tip
166 parent: 5:5601fb93a350 tip
159 add new_3_c
167 add new_3_c
160 branch: default
168 branch: default
161 commit: (clean)
169 commit: (clean)
162 update: 3 new changesets, 4 branch heads (merge)
170 update: 3 new changesets, 4 branch heads (merge)
163 remote: 3 outgoing
171 remote: 3 outgoing
164
172
165 check that various commands work well with filtering
173 check that various commands work well with filtering
166
174
167 $ hg tip
175 $ hg tip
168 5:5601fb93a350 (draft) [tip ] add new_3_c
176 5:5601fb93a350 (draft) [tip ] add new_3_c
169 $ hg log -r 6
177 $ hg log -r 6
170 abort: unknown revision '6'!
178 abort: unknown revision '6'!
171 [255]
179 [255]
172 $ hg log -r 4
180 $ hg log -r 4
173 abort: unknown revision '4'!
181 abort: unknown revision '4'!
174 [255]
182 [255]
175
183
176 Check that public changesets are not counted as obsolete:
184 Check that public changesets are not counted as obsolete:
177
185
178 $ hg --hidden phase --public 2
186 $ hg --hidden phase --public 2
179 $ hg log -G
187 $ hg log -G
180 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
188 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
181 |
189 |
182 | o 2:245bde4270cd (public) [ ] add original_c
190 | o 2:245bde4270cd (public) [ ] add original_c
183 |/
191 |/
184 o 1:7c3bad9141dc (public) [ ] add b
192 o 1:7c3bad9141dc (public) [ ] add b
185 |
193 |
186 o 0:1f0dee641bb7 (public) [ ] add a
194 o 0:1f0dee641bb7 (public) [ ] add a
187
195
188
196
189 And that bumped changesets are detected
197 And that bumped changesets are detected
190 --------------------------------------
198 --------------------------------------
191
199
192 If we didn't filter obsolete changesets out, 3 and 4 would show up too. Also
200 If we didn't filter obsolete changesets out, 3 and 4 would show up too. Also
193 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
201 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
194 the public changeset
202 the public changeset
195
203
196 $ hg log --hidden -r 'bumped()'
204 $ hg log --hidden -r 'bumped()'
197 5:5601fb93a350 (draft) [tip ] add new_3_c
205 5:5601fb93a350 (draft) [tip ] add new_3_c
198
206
199 And that we can't push bumped changesets
207 And that we can't push bumped changesets
200
208
201 $ hg push ../tmpa -r 0 --force #(make repo related)
209 $ hg push ../tmpa -r 0 --force #(make repo related)
202 pushing to ../tmpa
210 pushing to ../tmpa
203 searching for changes
211 searching for changes
204 warning: repository is unrelated
212 warning: repository is unrelated
205 adding changesets
213 adding changesets
206 adding manifests
214 adding manifests
207 adding file changes
215 adding file changes
208 added 1 changesets with 1 changes to 1 files (+1 heads)
216 added 1 changesets with 1 changes to 1 files (+1 heads)
209 $ hg push ../tmpa
217 $ hg push ../tmpa
210 pushing to ../tmpa
218 pushing to ../tmpa
211 searching for changes
219 searching for changes
212 abort: push includes bumped changeset: 5601fb93a350!
220 abort: push includes bumped changeset: 5601fb93a350!
213 [255]
221 [255]
214
222
215 Fixing "bumped" situation
223 Fixing "bumped" situation
216 We need to create a clone of 5 and add a special marker with a flag
224 We need to create a clone of 5 and add a special marker with a flag
217
225
218 $ hg up '5^'
226 $ hg up '5^'
219 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
227 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
220 $ hg revert -ar 5
228 $ hg revert -ar 5
221 adding new_3_c
229 adding new_3_c
222 $ hg ci -m 'add n3w_3_c'
230 $ hg ci -m 'add n3w_3_c'
223 created new head
231 created new head
224 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
232 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
225 $ hg log -r 'bumped()'
233 $ hg log -r 'bumped()'
226 $ hg log -G
234 $ hg log -G
227 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
235 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
228 |
236 |
229 | o 2:245bde4270cd (public) [ ] add original_c
237 | o 2:245bde4270cd (public) [ ] add original_c
230 |/
238 |/
231 o 1:7c3bad9141dc (public) [ ] add b
239 o 1:7c3bad9141dc (public) [ ] add b
232 |
240 |
233 o 0:1f0dee641bb7 (public) [ ] add a
241 o 0:1f0dee641bb7 (public) [ ] add a
234
242
235
243
236
244
237
245
238 $ cd ..
246 $ cd ..
239
247
240 Exchange Test
248 Exchange Test
241 ============================
249 ============================
242
250
243 Destination repo does not have any data
251 Destination repo does not have any data
244 ---------------------------------------
252 ---------------------------------------
245
253
246 Simple incoming test
254 Simple incoming test
247
255
248 $ hg init tmpc
256 $ hg init tmpc
249 $ cd tmpc
257 $ cd tmpc
250 $ hg incoming ../tmpb
258 $ hg incoming ../tmpb
251 comparing with ../tmpb
259 comparing with ../tmpb
252 0:1f0dee641bb7 (public) [ ] add a
260 0:1f0dee641bb7 (public) [ ] add a
253 1:7c3bad9141dc (public) [ ] add b
261 1:7c3bad9141dc (public) [ ] add b
254 2:245bde4270cd (public) [ ] add original_c
262 2:245bde4270cd (public) [ ] add original_c
255 6:6f9641995072 (draft) [tip ] add n3w_3_c
263 6:6f9641995072 (draft) [tip ] add n3w_3_c
256
264
257 Try to pull markers
265 Try to pull markers
258 (extinct changesets are excluded but markers are pushed)
266 (extinct changesets are excluded but markers are pushed)
259
267
260 $ hg pull ../tmpb
268 $ hg pull ../tmpb
261 pulling from ../tmpb
269 pulling from ../tmpb
262 requesting all changes
270 requesting all changes
263 adding changesets
271 adding changesets
264 adding manifests
272 adding manifests
265 adding file changes
273 adding file changes
266 added 4 changesets with 4 changes to 4 files (+1 heads)
274 added 4 changesets with 4 changes to 4 files (+1 heads)
267 (run 'hg heads' to see heads, 'hg merge' to merge)
275 (run 'hg heads' to see heads, 'hg merge' to merge)
268 $ hg debugobsolete
276 $ hg debugobsolete
269 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
277 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
270 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
278 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
271 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
279 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
272 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
280 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
273 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
281 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
274
282
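Each debugobsolete line above shows a precursor node, one or more successor nodes, a flags field and a metadata dictionary. A minimal Python sketch of splitting such a display line into those parts; the field layout is assumed from the output shown here, not taken from Mercurial's internal API:

    import ast

    def parse_marker_line(line):
        # Assumed layout: precursor, successor(s), flags, then a metadata dict.
        head, _, meta = line.partition('{')
        fields = head.split()
        precursor = fields[0]
        *successors, flags = fields[1:]
        metadata = ast.literal_eval('{' + meta)
        return precursor, successors, flags, metadata

    example = ("245bde4270cd1072a27757984f9cda8ba26f08ca "
               "cdbce2fbb16313928851e97e0d85413f3f7eb77f "
               "C {'date': '56 12', 'user': 'test'}")
    print(parse_marker_line(example))
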
275 Rollback/Transaction support
283 Rollback/Transaction support
276
284
277 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
285 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
278 $ hg debugobsolete
286 $ hg debugobsolete
279 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
287 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
280 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
288 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
281 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
289 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
282 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
290 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
283 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
291 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
284 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 {'date': '1340 0', 'user': 'test'}
292 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 {'date': '1340 0', 'user': 'test'}
285 $ hg rollback -n
293 $ hg rollback -n
286 repository tip rolled back to revision 3 (undo debugobsolete)
294 repository tip rolled back to revision 3 (undo debugobsolete)
287 $ hg rollback
295 $ hg rollback
288 repository tip rolled back to revision 3 (undo debugobsolete)
296 repository tip rolled back to revision 3 (undo debugobsolete)
289 $ hg debugobsolete
297 $ hg debugobsolete
290 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
298 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
291 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
299 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
292 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
300 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
293 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
301 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
294 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
302 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
295
303
296 $ cd ..
304 $ cd ..
297
305
298 Try to push markers
306 Try to push markers
299
307
300 $ hg init tmpd
308 $ hg init tmpd
301 $ hg -R tmpb push tmpd
309 $ hg -R tmpb push tmpd
302 pushing to tmpd
310 pushing to tmpd
303 searching for changes
311 searching for changes
304 adding changesets
312 adding changesets
305 adding manifests
313 adding manifests
306 adding file changes
314 adding file changes
307 added 4 changesets with 4 changes to 4 files (+1 heads)
315 added 4 changesets with 4 changes to 4 files (+1 heads)
308 $ hg -R tmpd debugobsolete
316 $ hg -R tmpd debugobsolete
309 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
317 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
310 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
318 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
311 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
319 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
312 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
320 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
313 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
321 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
314
322
315 Check that obsolete keys are exchanged only if the source has an obsolete store
323 Check that obsolete keys are exchanged only if the source has an obsolete store
316
324
317 $ hg init empty
325 $ hg init empty
318 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
326 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
319 pushing to tmpd
327 pushing to tmpd
320 listkeys phases
328 listkeys phases
321 no changes found
329 no changes found
322 listkeys phases
330 listkeys phases
323 listkeys bookmarks
331 listkeys bookmarks
324 [1]
332 [1]
325
333
326 clone support
334 clone support
327 (markers are copied and extinct changesets are included to allow hardlinks)
335 (markers are copied and extinct changesets are included to allow hardlinks)
328
336
329 $ hg clone tmpb clone-dest
337 $ hg clone tmpb clone-dest
330 updating to branch default
338 updating to branch default
331 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
339 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
332 $ hg -R clone-dest log -G --hidden
340 $ hg -R clone-dest log -G --hidden
333 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
341 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
334 |
342 |
335 | x 5:5601fb93a350 (draft) [ ] add new_3_c
343 | x 5:5601fb93a350 (draft) [ ] add new_3_c
336 |/
344 |/
337 | x 4:ca819180edb9 (draft) [ ] add new_2_c
345 | x 4:ca819180edb9 (draft) [ ] add new_2_c
338 |/
346 |/
339 | x 3:cdbce2fbb163 (draft) [ ] add new_c
347 | x 3:cdbce2fbb163 (draft) [ ] add new_c
340 |/
348 |/
341 | o 2:245bde4270cd (public) [ ] add original_c
349 | o 2:245bde4270cd (public) [ ] add original_c
342 |/
350 |/
343 o 1:7c3bad9141dc (public) [ ] add b
351 o 1:7c3bad9141dc (public) [ ] add b
344 |
352 |
345 o 0:1f0dee641bb7 (public) [ ] add a
353 o 0:1f0dee641bb7 (public) [ ] add a
346
354
347 $ hg -R clone-dest debugobsolete
355 $ hg -R clone-dest debugobsolete
348 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
356 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
349 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
357 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
350 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
358 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
351 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
359 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
352 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
360 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
353
361
354
362
355 Destination repo has existing data
363 Destination repo has existing data
356 ---------------------------------------
364 ---------------------------------------
357
365
358 On pull
366 On pull
359
367
360 $ hg init tmpe
368 $ hg init tmpe
361 $ cd tmpe
369 $ cd tmpe
362 $ hg debugobsolete -d '1339 0' 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339
370 $ hg debugobsolete -d '1339 0' 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339
363 $ hg pull ../tmpb
371 $ hg pull ../tmpb
364 pulling from ../tmpb
372 pulling from ../tmpb
365 requesting all changes
373 requesting all changes
366 adding changesets
374 adding changesets
367 adding manifests
375 adding manifests
368 adding file changes
376 adding file changes
369 added 4 changesets with 4 changes to 4 files (+1 heads)
377 added 4 changesets with 4 changes to 4 files (+1 heads)
370 (run 'hg heads' to see heads, 'hg merge' to merge)
378 (run 'hg heads' to see heads, 'hg merge' to merge)
371 $ hg debugobsolete
379 $ hg debugobsolete
372 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
380 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
373 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
381 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
374 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
382 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
375 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
383 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
376 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
384 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
377 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
385 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
378
386
379
387
380 On push
388 On push
381
389
382 $ hg push ../tmpc
390 $ hg push ../tmpc
383 pushing to ../tmpc
391 pushing to ../tmpc
384 searching for changes
392 searching for changes
385 no changes found
393 no changes found
386 [1]
394 [1]
387 $ hg -R ../tmpc debugobsolete
395 $ hg -R ../tmpc debugobsolete
388 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
396 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C {'date': '56 12', 'user': 'test'}
389 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
397 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 {'date': '1337 0', 'user': 'test'}
390 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
398 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 {'date': '1338 0', 'user': 'test'}
391 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
399 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 {'date': '1339 0', 'user': 'test'}
392 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
400 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 {'date': '1338 0', 'user': 'test'}
393 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
401 2448244824482448244824482448244824482448 1339133913391339133913391339133913391339 0 {'date': '1339 0', 'user': 'test'}
394
402
395 detect outgoing obsolete and unstable
403 detect outgoing obsolete and unstable
396 ---------------------------------------
404 ---------------------------------------
397
405
398
406
399 $ hg log -G
407 $ hg log -G
400 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
408 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
401 |
409 |
402 | o 2:245bde4270cd (public) [ ] add original_c
410 | o 2:245bde4270cd (public) [ ] add original_c
403 |/
411 |/
404 o 1:7c3bad9141dc (public) [ ] add b
412 o 1:7c3bad9141dc (public) [ ] add b
405 |
413 |
406 o 0:1f0dee641bb7 (public) [ ] add a
414 o 0:1f0dee641bb7 (public) [ ] add a
407
415
408 $ hg up 'desc("n3w_3_c")'
416 $ hg up 'desc("n3w_3_c")'
409 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
417 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
410 $ mkcommit original_d
418 $ mkcommit original_d
411 $ mkcommit original_e
419 $ mkcommit original_e
412 $ hg debugobsolete `getid original_d` -d '0 0'
420 $ hg debugobsolete `getid original_d` -d '0 0'
413 $ hg log -r 'obsolete()'
421 $ hg log -r 'obsolete()'
414 4:94b33453f93b (draft) [ ] add original_d
422 4:94b33453f93b (draft) [ ] add original_d
415 $ hg log -G -r '::unstable()'
423 $ hg log -G -r '::unstable()'
416 @ 5:cda648ca50f5 (draft) [tip ] add original_e
424 @ 5:cda648ca50f5 (draft) [tip ] add original_e
417 |
425 |
418 x 4:94b33453f93b (draft) [ ] add original_d
426 x 4:94b33453f93b (draft) [ ] add original_d
419 |
427 |
420 o 3:6f9641995072 (draft) [ ] add n3w_3_c
428 o 3:6f9641995072 (draft) [ ] add n3w_3_c
421 |
429 |
422 o 1:7c3bad9141dc (public) [ ] add b
430 o 1:7c3bad9141dc (public) [ ] add b
423 |
431 |
424 o 0:1f0dee641bb7 (public) [ ] add a
432 o 0:1f0dee641bb7 (public) [ ] add a
425
433
426
434
427 refuse to push an obsolete changeset
435 refuse to push an obsolete changeset
428
436
429 $ hg push ../tmpc/ -r 'desc("original_d")'
437 $ hg push ../tmpc/ -r 'desc("original_d")'
430 pushing to ../tmpc/
438 pushing to ../tmpc/
431 searching for changes
439 searching for changes
432 abort: push includes obsolete changeset: 94b33453f93b!
440 abort: push includes obsolete changeset: 94b33453f93b!
433 [255]
441 [255]
434
442
435 refuse to push an unstable changeset
443 refuse to push an unstable changeset
436
444
437 $ hg push ../tmpc/
445 $ hg push ../tmpc/
438 pushing to ../tmpc/
446 pushing to ../tmpc/
439 searching for changes
447 searching for changes
440 abort: push includes unstable changeset: cda648ca50f5!
448 abort: push includes unstable changeset: cda648ca50f5!
441 [255]
449 [255]
442
450
443 Test that extinct changesets are properly detected
451 Test that extinct changesets are properly detected
444
452
445 $ hg log -r 'extinct()'
453 $ hg log -r 'extinct()'
446
454
447 Don't try to push extinct changesets
455 Don't try to push extinct changesets
448
456
449 $ hg init ../tmpf
457 $ hg init ../tmpf
450 $ hg out ../tmpf
458 $ hg out ../tmpf
451 comparing with ../tmpf
459 comparing with ../tmpf
452 searching for changes
460 searching for changes
453 0:1f0dee641bb7 (public) [ ] add a
461 0:1f0dee641bb7 (public) [ ] add a
454 1:7c3bad9141dc (public) [ ] add b
462 1:7c3bad9141dc (public) [ ] add b
455 2:245bde4270cd (public) [ ] add original_c
463 2:245bde4270cd (public) [ ] add original_c
456 3:6f9641995072 (draft) [ ] add n3w_3_c
464 3:6f9641995072 (draft) [ ] add n3w_3_c
457 4:94b33453f93b (draft) [ ] add original_d
465 4:94b33453f93b (draft) [ ] add original_d
458 5:cda648ca50f5 (draft) [tip ] add original_e
466 5:cda648ca50f5 (draft) [tip ] add original_e
459 $ hg push ../tmpf -f # -f because we push unstable too
467 $ hg push ../tmpf -f # -f because we push unstable too
460 pushing to ../tmpf
468 pushing to ../tmpf
461 searching for changes
469 searching for changes
462 adding changesets
470 adding changesets
463 adding manifests
471 adding manifests
464 adding file changes
472 adding file changes
465 added 6 changesets with 6 changes to 6 files (+1 heads)
473 added 6 changesets with 6 changes to 6 files (+1 heads)
466
474
467 no warning displayed
475 no warning displayed
468
476
469 $ hg push ../tmpf
477 $ hg push ../tmpf
470 pushing to ../tmpf
478 pushing to ../tmpf
471 searching for changes
479 searching for changes
472 no changes found
480 no changes found
473 [1]
481 [1]
474
482
475 Do not warn about a new head when the new head is a successor of a remote one
483 Do not warn about a new head when the new head is a successor of a remote one
476
484
477 $ hg log -G
485 $ hg log -G
478 @ 5:cda648ca50f5 (draft) [tip ] add original_e
486 @ 5:cda648ca50f5 (draft) [tip ] add original_e
479 |
487 |
480 x 4:94b33453f93b (draft) [ ] add original_d
488 x 4:94b33453f93b (draft) [ ] add original_d
481 |
489 |
482 o 3:6f9641995072 (draft) [ ] add n3w_3_c
490 o 3:6f9641995072 (draft) [ ] add n3w_3_c
483 |
491 |
484 | o 2:245bde4270cd (public) [ ] add original_c
492 | o 2:245bde4270cd (public) [ ] add original_c
485 |/
493 |/
486 o 1:7c3bad9141dc (public) [ ] add b
494 o 1:7c3bad9141dc (public) [ ] add b
487 |
495 |
488 o 0:1f0dee641bb7 (public) [ ] add a
496 o 0:1f0dee641bb7 (public) [ ] add a
489
497
490 $ hg up -q 'desc(n3w_3_c)'
498 $ hg up -q 'desc(n3w_3_c)'
491 $ mkcommit obsolete_e
499 $ mkcommit obsolete_e
492 created new head
500 created new head
493 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
501 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
494 $ hg outgoing ../tmpf # parasite hg outgoing testing
502 $ hg outgoing ../tmpf # parasite hg outgoing testing
495 comparing with ../tmpf
503 comparing with ../tmpf
496 searching for changes
504 searching for changes
497 6:3de5eca88c00 (draft) [tip ] add obsolete_e
505 6:3de5eca88c00 (draft) [tip ] add obsolete_e
498 $ hg push ../tmpf
506 $ hg push ../tmpf
499 pushing to ../tmpf
507 pushing to ../tmpf
500 searching for changes
508 searching for changes
501 adding changesets
509 adding changesets
502 adding manifests
510 adding manifests
503 adding file changes
511 adding file changes
504 added 1 changesets with 1 changes to 1 files (+1 heads)
512 added 1 changesets with 1 changes to 1 files (+1 heads)
505
513
506 #if serve
514 #if serve
507
515
508 check hgweb does not explode
516 check hgweb does not explode
509 ====================================
517 ====================================
510
518
511 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
519 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
512 adding changesets
520 adding changesets
513 adding manifests
521 adding manifests
514 adding file changes
522 adding file changes
515 added 62 changesets with 63 changes to 9 files (+60 heads)
523 added 62 changesets with 63 changes to 9 files (+60 heads)
516 (run 'hg heads .' to see heads, 'hg merge' to merge)
524 (run 'hg heads .' to see heads, 'hg merge' to merge)
517 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
525 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
518 > do
526 > do
519 > hg debugobsolete $node
527 > hg debugobsolete $node
520 > done
528 > done
521 $ hg up tip
529 $ hg up tip
522 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
530 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
523
531
524 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
532 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
525 $ cat hg.pid >> $DAEMON_PIDS
533 $ cat hg.pid >> $DAEMON_PIDS
526
534
527 check changelog view
535 check changelog view
528
536
529 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'shortlog/'
537 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'shortlog/'
530 200 Script output follows
538 200 Script output follows
531
539
532 check graph view
540 check graph view
533
541
534 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'graph'
542 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'graph'
535 200 Script output follows
543 200 Script output follows
536
544
537 check filelog view
545 check filelog view
538
546
539 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'log/'`hg id --debug --id`/'babar'
547 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'log/'`hg id --debug --id`/'babar'
540 200 Script output follows
548 200 Script output follows
541
549
542 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/68'
550 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/68'
543 200 Script output follows
551 200 Script output follows
544 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/67'
552 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/67'
545 404 Not Found
553 404 Not Found
546 [1]
554 [1]
547
555
548 check the web.view config option:
556 check the web.view config option:
549
557
550 $ "$TESTDIR/killdaemons.py" hg.pid
558 $ "$TESTDIR/killdaemons.py" hg.pid
551 $ cat >> .hg/hgrc << EOF
559 $ cat >> .hg/hgrc << EOF
552 > [web]
560 > [web]
553 > view=all
561 > view=all
554 > EOF
562 > EOF
555 $ wait
563 $ wait
556 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
564 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
557 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/67'
565 $ "$TESTDIR/get-with-headers.py" --headeronly localhost:$HGPORT 'rev/67'
558 200 Script output follows
566 200 Script output follows
559 $ "$TESTDIR/killdaemons.py" hg.pid
567 $ "$TESTDIR/killdaemons.py" hg.pid
560
568
561 Checking _enable=False warning if obsolete markers exist
569 Checking _enable=False warning if obsolete markers exist
562
570
563 $ echo '[extensions]' >> $HGRCPATH
571 $ echo '[extensions]' >> $HGRCPATH
564 $ echo "obs=!" >> $HGRCPATH
572 $ echo "obs=!" >> $HGRCPATH
565 $ hg log -r tip
573 $ hg log -r tip
566 obsolete feature not enabled but 68 markers found!
574 obsolete feature not enabled but 68 markers found!
567 68:c15e9edfca13 (draft) [tip ] add celestine
575 68:c15e9edfca13 (draft) [tip ] add celestine
568
576
569 reenable for later test
577 reenable for later test
570
578
571 $ echo '[extensions]' >> $HGRCPATH
579 $ echo '[extensions]' >> $HGRCPATH
572 $ echo "obs=${TESTTMP}/obs.py" >> $HGRCPATH
580 $ echo "obs=${TESTTMP}/obs.py" >> $HGRCPATH
573
581
574 #endif
582 #endif
575
583
576 Test incoming/outgoing with changesets obsoleted remotely, known locally
584 Test incoming/outgoing with changesets obsoleted remotely, known locally
577 ===============================================================================
585 ===============================================================================
578
586
579 This tests issue 3805
587 This tests issue 3805
580
588
581 $ hg init repo-issue3805
589 $ hg init repo-issue3805
582 $ cd repo-issue3805
590 $ cd repo-issue3805
583 $ echo "foo" > foo
591 $ echo "foo" > foo
584 $ hg ci -Am "A"
592 $ hg ci -Am "A"
585 adding foo
593 adding foo
586 $ hg clone . ../other-issue3805
594 $ hg clone . ../other-issue3805
587 updating to branch default
595 updating to branch default
588 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
596 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
589 $ echo "bar" >> foo
597 $ echo "bar" >> foo
590 $ hg ci --amend
598 $ hg ci --amend
591 $ cd ../other-issue3805
599 $ cd ../other-issue3805
592 $ hg log -G
600 $ hg log -G
593 @ 0:193e9254ce7e (draft) [tip ] A
601 @ 0:193e9254ce7e (draft) [tip ] A
594
602
595 $ hg log -G -R ../repo-issue3805
603 $ hg log -G -R ../repo-issue3805
596 @ 2:3816541e5485 (draft) [tip ] A
604 @ 2:3816541e5485 (draft) [tip ] A
597
605
598 $ hg incoming
606 $ hg incoming
599 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
607 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
600 searching for changes
608 searching for changes
601 2:3816541e5485 (draft) [tip ] A
609 2:3816541e5485 (draft) [tip ] A
602 $ hg incoming --bundle ../issue3805.hg
610 $ hg incoming --bundle ../issue3805.hg
603 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
611 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
604 searching for changes
612 searching for changes
605 2:3816541e5485 (draft) [tip ] A
613 2:3816541e5485 (draft) [tip ] A
606 $ hg outgoing
614 $ hg outgoing
607 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
615 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
608 searching for changes
616 searching for changes
609 no changes found
617 no changes found
610 [1]
618 [1]
611
619
612 #if serve
620 #if serve
613
621
614 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
622 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
615 $ cat hg.pid >> $DAEMON_PIDS
623 $ cat hg.pid >> $DAEMON_PIDS
616
624
617 $ hg incoming http://localhost:$HGPORT
625 $ hg incoming http://localhost:$HGPORT
618 comparing with http://localhost:$HGPORT/
626 comparing with http://localhost:$HGPORT/
619 searching for changes
627 searching for changes
620 1:3816541e5485 (public) [tip ] A
628 1:3816541e5485 (public) [tip ] A
621 $ hg outgoing http://localhost:$HGPORT
629 $ hg outgoing http://localhost:$HGPORT
622 comparing with http://localhost:$HGPORT/
630 comparing with http://localhost:$HGPORT/
623 searching for changes
631 searching for changes
624 no changes found
632 no changes found
625 [1]
633 [1]
626
634
627 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
635 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
628
636
629 #endif
637 #endif
630
638
631 This tests issue 3814
639 This tests issue 3814
632
640
633 (nothing to push but a locally hidden changeset)
641 (nothing to push but a locally hidden changeset)
634
642
635 $ cd ..
643 $ cd ..
636 $ hg init repo-issue3814
644 $ hg init repo-issue3814
637 $ cd repo-issue3805
645 $ cd repo-issue3805
638 $ hg push -r 3816541e5485 ../repo-issue3814
646 $ hg push -r 3816541e5485 ../repo-issue3814
639 pushing to ../repo-issue3814
647 pushing to ../repo-issue3814
640 searching for changes
648 searching for changes
641 adding changesets
649 adding changesets
642 adding manifests
650 adding manifests
643 adding file changes
651 adding file changes
644 added 1 changesets with 1 changes to 1 files
652 added 1 changesets with 1 changes to 1 files
645 $ hg out ../repo-issue3814
653 $ hg out ../repo-issue3814
646 comparing with ../repo-issue3814
654 comparing with ../repo-issue3814
647 searching for changes
655 searching for changes
648 no changes found
656 no changes found
649 [1]
657 [1]
650
658
651 Test that a local tag blocks a changeset from being hidden
659 Test that a local tag blocks a changeset from being hidden
652
660
653 $ hg tag -l visible -r 0 --hidden
661 $ hg tag -l visible -r 0 --hidden
654 $ hg log -G
662 $ hg log -G
655 @ 2:3816541e5485 (draft) [tip ] A
663 @ 2:3816541e5485 (draft) [tip ] A
656
664
657 x 0:193e9254ce7e (draft) [visible ] A
665 x 0:193e9254ce7e (draft) [visible ] A
658
666
659 Test that removing a local tag does not cause some commands to fail
667 Test that removing a local tag does not cause some commands to fail
660
668
661 $ hg tag -l -r tip tiptag
669 $ hg tag -l -r tip tiptag
662 $ hg tags
670 $ hg tags
663 tiptag 2:3816541e5485
671 tiptag 2:3816541e5485
664 tip 2:3816541e5485
672 tip 2:3816541e5485
665 visible 0:193e9254ce7e
673 visible 0:193e9254ce7e
666 $ hg --config extensions.strip= strip -r tip --no-backup
674 $ hg --config extensions.strip= strip -r tip --no-backup
667 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
675 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
668 $ hg tags
676 $ hg tags
669 visible 0:193e9254ce7e
677 visible 0:193e9254ce7e
670 tip 0:193e9254ce7e
678 tip 0:193e9254ce7e
@@ -1,327 +1,353 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug
17 > hg -R a debugdiscovery b --verbose --debug
18 > echo
18 > echo
19 > echo "% -- b -> a tree"
19 > echo "% -- b -> a tree"
20 > hg -R b debugdiscovery a --verbose --old
20 > hg -R b debugdiscovery a --verbose --old
21 > echo
21 > echo
22 > echo "% -- b -> a set"
22 > echo "% -- b -> a set"
23 > hg -R b debugdiscovery a --verbose --debug
23 > hg -R b debugdiscovery a --verbose --debug
24 > cd ..
24 > cd ..
25 > }
25 > }
26
26
27
27
28 Small superset:
28 Small superset:
29
29
30 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
30 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
31 > +2:f +1:a1:b1
31 > +2:f +1:a1:b1
32 > <f +4 :a2
32 > <f +4 :a2
33 > +5 :b2
33 > +5 :b2
34 > <f +3 :b3'
34 > <f +3 :b3'
35
35
36 % -- a -> b tree
36 % -- a -> b tree
37 comparing with b
37 comparing with b
38 searching for changes
38 searching for changes
39 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
39 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
40 common heads: 01241442b3c2 b5714e113bc0
40 common heads: 01241442b3c2 b5714e113bc0
41 local is subset
41 local is subset
42
42
43 % -- a -> b set
43 % -- a -> b set
44 comparing with b
44 comparing with b
45 query 1; heads
45 query 1; heads
46 searching for changes
46 searching for changes
47 all local heads known remotely
47 all local heads known remotely
48 common heads: 01241442b3c2 b5714e113bc0
48 common heads: 01241442b3c2 b5714e113bc0
49 local is subset
49 local is subset
50
50
51 % -- b -> a tree
51 % -- b -> a tree
52 comparing with a
52 comparing with a
53 searching for changes
53 searching for changes
54 unpruned common: 01241442b3c2 b5714e113bc0
54 unpruned common: 01241442b3c2 b5714e113bc0
55 common heads: 01241442b3c2 b5714e113bc0
55 common heads: 01241442b3c2 b5714e113bc0
56 remote is subset
56 remote is subset
57
57
58 % -- b -> a set
58 % -- b -> a set
59 comparing with a
59 comparing with a
60 query 1; heads
60 query 1; heads
61 searching for changes
61 searching for changes
62 all remote heads known locally
62 all remote heads known locally
63 common heads: 01241442b3c2 b5714e113bc0
63 common heads: 01241442b3c2 b5714e113bc0
64 remote is subset
64 remote is subset
65
65
66
66
67 Many new:
67 Many new:
68
68
69 $ testdesc '-ra1 -ra2' '-rb' '
69 $ testdesc '-ra1 -ra2' '-rb' '
70 > +2:f +3:a1 +3:b
70 > +2:f +3:a1 +3:b
71 > <f +30 :a2'
71 > <f +30 :a2'
72
72
73 % -- a -> b tree
73 % -- a -> b tree
74 comparing with b
74 comparing with b
75 searching for changes
75 searching for changes
76 unpruned common: bebd167eb94d
76 unpruned common: bebd167eb94d
77 common heads: bebd167eb94d
77 common heads: bebd167eb94d
78
78
79 % -- a -> b set
79 % -- a -> b set
80 comparing with b
80 comparing with b
81 query 1; heads
81 query 1; heads
82 searching for changes
82 searching for changes
83 taking initial sample
83 taking initial sample
84 searching: 2 queries
84 searching: 2 queries
85 query 2; still undecided: 29, sample size is: 29
85 query 2; still undecided: 29, sample size is: 29
86 2 total queries
86 2 total queries
87 common heads: bebd167eb94d
87 common heads: bebd167eb94d
88
88
89 % -- b -> a tree
89 % -- b -> a tree
90 comparing with a
90 comparing with a
91 searching for changes
91 searching for changes
92 unpruned common: 66f7d451a68b bebd167eb94d
92 unpruned common: 66f7d451a68b bebd167eb94d
93 common heads: bebd167eb94d
93 common heads: bebd167eb94d
94
94
95 % -- b -> a set
95 % -- b -> a set
96 comparing with a
96 comparing with a
97 query 1; heads
97 query 1; heads
98 searching for changes
98 searching for changes
99 taking initial sample
99 taking initial sample
100 searching: 2 queries
100 searching: 2 queries
101 query 2; still undecided: 2, sample size is: 2
101 query 2; still undecided: 2, sample size is: 2
102 2 total queries
102 2 total queries
103 common heads: bebd167eb94d
103 common heads: bebd167eb94d
104
104
105
105
106 Both sides many new with stub:
106 Both sides many new with stub:
107
107
108 $ testdesc '-ra1 -ra2' '-rb' '
108 $ testdesc '-ra1 -ra2' '-rb' '
109 > +2:f +2:a1 +30 :b
109 > +2:f +2:a1 +30 :b
110 > <f +30 :a2'
110 > <f +30 :a2'
111
111
112 % -- a -> b tree
112 % -- a -> b tree
113 comparing with b
113 comparing with b
114 searching for changes
114 searching for changes
115 unpruned common: 2dc09a01254d
115 unpruned common: 2dc09a01254d
116 common heads: 2dc09a01254d
116 common heads: 2dc09a01254d
117
117
118 % -- a -> b set
118 % -- a -> b set
119 comparing with b
119 comparing with b
120 query 1; heads
120 query 1; heads
121 searching for changes
121 searching for changes
122 taking initial sample
122 taking initial sample
123 searching: 2 queries
123 searching: 2 queries
124 query 2; still undecided: 29, sample size is: 29
124 query 2; still undecided: 29, sample size is: 29
125 2 total queries
125 2 total queries
126 common heads: 2dc09a01254d
126 common heads: 2dc09a01254d
127
127
128 % -- b -> a tree
128 % -- b -> a tree
129 comparing with a
129 comparing with a
130 searching for changes
130 searching for changes
131 unpruned common: 2dc09a01254d 66f7d451a68b
131 unpruned common: 2dc09a01254d 66f7d451a68b
132 common heads: 2dc09a01254d
132 common heads: 2dc09a01254d
133
133
134 % -- b -> a set
134 % -- b -> a set
135 comparing with a
135 comparing with a
136 query 1; heads
136 query 1; heads
137 searching for changes
137 searching for changes
138 taking initial sample
138 taking initial sample
139 searching: 2 queries
139 searching: 2 queries
140 query 2; still undecided: 29, sample size is: 29
140 query 2; still undecided: 29, sample size is: 29
141 2 total queries
141 2 total queries
142 common heads: 2dc09a01254d
142 common heads: 2dc09a01254d
143
143
144
144
145 Both many new:
145 Both many new:
146
146
147 $ testdesc '-ra' '-rb' '
147 $ testdesc '-ra' '-rb' '
148 > +2:f +30 :b
148 > +2:f +30 :b
149 > <f +30 :a'
149 > <f +30 :a'
150
150
151 % -- a -> b tree
151 % -- a -> b tree
152 comparing with b
152 comparing with b
153 searching for changes
153 searching for changes
154 unpruned common: 66f7d451a68b
154 unpruned common: 66f7d451a68b
155 common heads: 66f7d451a68b
155 common heads: 66f7d451a68b
156
156
157 % -- a -> b set
157 % -- a -> b set
158 comparing with b
158 comparing with b
159 query 1; heads
159 query 1; heads
160 searching for changes
160 searching for changes
161 taking quick initial sample
161 taking quick initial sample
162 searching: 2 queries
162 searching: 2 queries
163 query 2; still undecided: 31, sample size is: 31
163 query 2; still undecided: 31, sample size is: 31
164 2 total queries
164 2 total queries
165 common heads: 66f7d451a68b
165 common heads: 66f7d451a68b
166
166
167 % -- b -> a tree
167 % -- b -> a tree
168 comparing with a
168 comparing with a
169 searching for changes
169 searching for changes
170 unpruned common: 66f7d451a68b
170 unpruned common: 66f7d451a68b
171 common heads: 66f7d451a68b
171 common heads: 66f7d451a68b
172
172
173 % -- b -> a set
173 % -- b -> a set
174 comparing with a
174 comparing with a
175 query 1; heads
175 query 1; heads
176 searching for changes
176 searching for changes
177 taking quick initial sample
177 taking quick initial sample
178 searching: 2 queries
178 searching: 2 queries
179 query 2; still undecided: 31, sample size is: 31
179 query 2; still undecided: 31, sample size is: 31
180 2 total queries
180 2 total queries
181 common heads: 66f7d451a68b
181 common heads: 66f7d451a68b
182
182
183
183
184 Both many new skewed:
184 Both many new skewed:
185
185
186 $ testdesc '-ra' '-rb' '
186 $ testdesc '-ra' '-rb' '
187 > +2:f +30 :b
187 > +2:f +30 :b
188 > <f +50 :a'
188 > <f +50 :a'
189
189
190 % -- a -> b tree
190 % -- a -> b tree
191 comparing with b
191 comparing with b
192 searching for changes
192 searching for changes
193 unpruned common: 66f7d451a68b
193 unpruned common: 66f7d451a68b
194 common heads: 66f7d451a68b
194 common heads: 66f7d451a68b
195
195
196 % -- a -> b set
196 % -- a -> b set
197 comparing with b
197 comparing with b
198 query 1; heads
198 query 1; heads
199 searching for changes
199 searching for changes
200 taking quick initial sample
200 taking quick initial sample
201 searching: 2 queries
201 searching: 2 queries
202 query 2; still undecided: 51, sample size is: 51
202 query 2; still undecided: 51, sample size is: 51
203 2 total queries
203 2 total queries
204 common heads: 66f7d451a68b
204 common heads: 66f7d451a68b
205
205
206 % -- b -> a tree
206 % -- b -> a tree
207 comparing with a
207 comparing with a
208 searching for changes
208 searching for changes
209 unpruned common: 66f7d451a68b
209 unpruned common: 66f7d451a68b
210 common heads: 66f7d451a68b
210 common heads: 66f7d451a68b
211
211
212 % -- b -> a set
212 % -- b -> a set
213 comparing with a
213 comparing with a
214 query 1; heads
214 query 1; heads
215 searching for changes
215 searching for changes
216 taking quick initial sample
216 taking quick initial sample
217 searching: 2 queries
217 searching: 2 queries
218 query 2; still undecided: 31, sample size is: 31
218 query 2; still undecided: 31, sample size is: 31
219 2 total queries
219 2 total queries
220 common heads: 66f7d451a68b
220 common heads: 66f7d451a68b
221
221
222
222
223 Both many new on top of long history:
223 Both many new on top of long history:
224
224
225 $ testdesc '-ra' '-rb' '
225 $ testdesc '-ra' '-rb' '
226 > +1000:f +30 :b
226 > +1000:f +30 :b
227 > <f +50 :a'
227 > <f +50 :a'
228
228
229 % -- a -> b tree
229 % -- a -> b tree
230 comparing with b
230 comparing with b
231 searching for changes
231 searching for changes
232 unpruned common: 7ead0cba2838
232 unpruned common: 7ead0cba2838
233 common heads: 7ead0cba2838
233 common heads: 7ead0cba2838
234
234
235 % -- a -> b set
235 % -- a -> b set
236 comparing with b
236 comparing with b
237 query 1; heads
237 query 1; heads
238 searching for changes
238 searching for changes
239 taking quick initial sample
239 taking quick initial sample
240 searching: 2 queries
240 searching: 2 queries
241 query 2; still undecided: 1049, sample size is: 11
241 query 2; still undecided: 1049, sample size is: 11
242 sampling from both directions
242 sampling from both directions
243 searching: 3 queries
243 searching: 3 queries
244 query 3; still undecided: 31, sample size is: 31
244 query 3; still undecided: 31, sample size is: 31
245 3 total queries
245 3 total queries
246 common heads: 7ead0cba2838
246 common heads: 7ead0cba2838
247
247
248 % -- b -> a tree
248 % -- b -> a tree
249 comparing with a
249 comparing with a
250 searching for changes
250 searching for changes
251 unpruned common: 7ead0cba2838
251 unpruned common: 7ead0cba2838
252 common heads: 7ead0cba2838
252 common heads: 7ead0cba2838
253
253
254 % -- b -> a set
254 % -- b -> a set
255 comparing with a
255 comparing with a
256 query 1; heads
256 query 1; heads
257 searching for changes
257 searching for changes
258 taking quick initial sample
258 taking quick initial sample
259 searching: 2 queries
259 searching: 2 queries
260 query 2; still undecided: 1029, sample size is: 11
260 query 2; still undecided: 1029, sample size is: 11
261 sampling from both directions
261 sampling from both directions
262 searching: 3 queries
262 searching: 3 queries
263 query 3; still undecided: 15, sample size is: 15
263 query 3; still undecided: 15, sample size is: 15
264 3 total queries
264 3 total queries
265 common heads: 7ead0cba2838
265 common heads: 7ead0cba2838
266
266
267
267
268 One with >200 heads, which used to use up all of the sample:
268 One with >200 heads, which used to use up all of the sample:
269
269
270 $ hg init manyheads
270 $ hg init manyheads
271 $ cd manyheads
271 $ cd manyheads
272 $ echo "+300:r @a" >dagdesc
272 $ echo "+300:r @a" >dagdesc
273 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
273 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
274 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
274 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
275 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
275 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
276 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
276 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
277 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
277 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
278 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
278 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
279 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
279 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
280 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
280 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
281 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
281 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
282 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
282 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
283 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
283 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
284 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
284 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
285 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
285 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
286 $ echo "@b *r+3" >>dagdesc # one more head
286 $ echo "@b *r+3" >>dagdesc # one more head
287 $ hg debugbuilddag <dagdesc
287 $ hg debugbuilddag <dagdesc
288 reading DAG from stdin
288 reading DAG from stdin
289
289
290 $ hg heads -t --template . | wc -c
290 $ hg heads -t --template . | wc -c
291 \s*261 (re)
291 \s*261 (re)
292
292
293 $ hg clone -b a . a
293 $ hg clone -b a . a
294 adding changesets
294 adding changesets
295 adding manifests
295 adding manifests
296 adding file changes
296 adding file changes
297 added 1340 changesets with 0 changes to 0 files (+259 heads)
297 added 1340 changesets with 0 changes to 0 files (+259 heads)
298 updating to branch a
298 updating to branch a
299 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
299 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
300 $ hg clone -b b . b
300 $ hg clone -b b . b
301 adding changesets
301 adding changesets
302 adding manifests
302 adding manifests
303 adding file changes
303 adding file changes
304 added 304 changesets with 0 changes to 0 files
304 added 304 changesets with 0 changes to 0 files
305 updating to branch b
305 updating to branch b
306 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
306 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
307
307
308 $ hg -R a debugdiscovery b --debug --verbose
308 $ hg -R a debugdiscovery b --debug --verbose
309 comparing with b
309 comparing with b
310 query 1; heads
310 query 1; heads
311 searching for changes
311 searching for changes
312 taking quick initial sample
312 taking quick initial sample
313 searching: 2 queries
313 searching: 2 queries
314 query 2; still undecided: 1080, sample size is: 260
314 query 2; still undecided: 1080, sample size is: 260
315 sampling from both directions
315 sampling from both directions
316 searching: 3 queries
316 searching: 3 queries
317 query 3; still undecided: 820, sample size is: 260
317 query 3; still undecided: 820, sample size is: 260
318 sampling from both directions
318 sampling from both directions
319 searching: 4 queries
319 searching: 4 queries
320 query 4; still undecided: 560, sample size is: 260
320 query 4; still undecided: 560, sample size is: 260
321 sampling from both directions
321 sampling from both directions
322 searching: 5 queries
322 searching: 5 queries
323 query 5; still undecided: 300, sample size is: 200
323 query 5; still undecided: 300, sample size is: 200
324 5 total queries
324 5 total queries
325 common heads: 3ee37d65064a
325 common heads: 3ee37d65064a
326
326
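The run above shows discovery capping each round's sample once the undecided set grows larger than the sample size (for example "query 5; still undecided: 300, sample size is: 200"). A generic sketch of size-capped random sampling, assuming only what the trace shows; the real discovery code also samples from both directions, which this sketch does not model:

    import random

    def take_sample(undecided, cap=200):
        # Size-capped random sample of still-undecided nodes; the cap value
        # here is illustrative, not Mercurial's actual constant.
        undecided = list(undecided)
        if len(undecided) <= cap:
            return set(undecided)
        return set(random.sample(undecided, cap))

    print(len(take_sample(range(300))))  # 200, as in the last query above
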
327 Test actual protocol when pulling one new head in addition to common heads
328
329 $ hg clone -U b c
330 $ hg -R c id -ir tip
331 513314ca8b3a
332 $ hg -R c up -qr default
333 $ touch c/f
334 $ hg -R c ci -Aqm "extra head"
335 $ hg -R c id -i
336 e64a39e7da8b
337
338 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
339 $ cat hg.pid >> $DAEMON_PIDS
340
341 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
342 comparing with http://localhost:$HGPORT/
343 searching for changes
344 e64a39e7da8b
345
346 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
347 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
348 "GET /?cmd=capabilities HTTP/1.1" 200 -
349 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db
350 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477
351 $ cat errors.log
352
327 $ cd ..
353 $ cd ..
@@ -1,502 +1,534 b''
1 #require killdaemons
1 #require killdaemons
2
2
3 Tests discovery against servers without getbundle support:
3 Tests discovery against servers without getbundle support:
4
4
5 $ CAP=getbundle
5 $ CAP=getbundle
6 $ . "$TESTDIR/notcapable"
6 $ . "$TESTDIR/notcapable"
7 $ cat >> $HGRCPATH <<EOF
7 $ cat >> $HGRCPATH <<EOF
8 > [ui]
8 > [ui]
9 > logtemplate="{rev} {node|short}: {desc} {branches}\n"
9 > logtemplate="{rev} {node|short}: {desc} {branches}\n"
10 > EOF
10 > EOF
11
11
12 Set up HTTP server control:
12 Set up HTTP server control:
13
13
14 $ remote=http://localhost:$HGPORT/
14 $ remote=http://localhost:$HGPORT/
15 $ export remote
15 $ export remote
16 $ tstart() {
16 $ tstart() {
17 > echo '[web]' > $1/.hg/hgrc
17 > echo '[web]' > $1/.hg/hgrc
18 > echo 'push_ssl = false' >> $1/.hg/hgrc
18 > echo 'push_ssl = false' >> $1/.hg/hgrc
19 > echo 'allow_push = *' >> $1/.hg/hgrc
19 > echo 'allow_push = *' >> $1/.hg/hgrc
20 > hg serve -R $1 -p $HGPORT -d --pid-file=hg.pid -E errors.log
20 > hg serve -R $1 -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
21 > cat hg.pid >> $DAEMON_PIDS
21 > cat hg.pid >> $DAEMON_PIDS
22 > }
22 > }
23 $ tstop() {
23 $ tstop() {
24 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
24 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
25 > [ "$1" ] && cut -d' ' -f6- access.log && cat errors.log
26 > rm access.log errors.log
25 > }
27 > }
26
28
27 Both are empty:
29 Both are empty:
28
30
29 $ hg init empty1
31 $ hg init empty1
30 $ hg init empty2
32 $ hg init empty2
31 $ tstart empty2
33 $ tstart empty2
32 $ hg incoming -R empty1 $remote
34 $ hg incoming -R empty1 $remote
33 comparing with http://localhost:$HGPORT/
35 comparing with http://localhost:$HGPORT/
34 no changes found
36 no changes found
35 [1]
37 [1]
36 $ hg outgoing -R empty1 $remote
38 $ hg outgoing -R empty1 $remote
37 comparing with http://localhost:$HGPORT/
39 comparing with http://localhost:$HGPORT/
38 no changes found
40 no changes found
39 [1]
41 [1]
40 $ hg pull -R empty1 $remote
42 $ hg pull -R empty1 $remote
41 pulling from http://localhost:$HGPORT/
43 pulling from http://localhost:$HGPORT/
42 no changes found
44 no changes found
43 $ hg push -R empty1 $remote
45 $ hg push -R empty1 $remote
44 pushing to http://localhost:$HGPORT/
46 pushing to http://localhost:$HGPORT/
45 no changes found
47 no changes found
46 [1]
48 [1]
47 $ tstop
49 $ tstop
48
50
49 Base repo:
51 Base repo:
50
52
51 $ hg init main
53 $ hg init main
52 $ cd main
54 $ cd main
53 $ hg debugbuilddag -mo '+2:tbase @name1 +3:thead1 <tbase @name2 +4:thead2 @both /thead1 +2:tmaintip'
55 $ hg debugbuilddag -mo '+2:tbase @name1 +3:thead1 <tbase @name2 +4:thead2 @both /thead1 +2:tmaintip'
54 $ hg log -G
56 $ hg log -G
55 o 11 a19bfa7e7328: r11 both
57 o 11 a19bfa7e7328: r11 both
56 |
58 |
57 o 10 8b6bad1512e1: r10 both
59 o 10 8b6bad1512e1: r10 both
58 |
60 |
59 o 9 025829e08038: r9 both
61 o 9 025829e08038: r9 both
60 |\
62 |\
61 | o 8 d8f638ac69e9: r8 name2
63 | o 8 d8f638ac69e9: r8 name2
62 | |
64 | |
63 | o 7 b6b4d315a2ac: r7 name2
65 | o 7 b6b4d315a2ac: r7 name2
64 | |
66 | |
65 | o 6 6c6f5d5f3c11: r6 name2
67 | o 6 6c6f5d5f3c11: r6 name2
66 | |
68 | |
67 | o 5 70314b29987d: r5 name2
69 | o 5 70314b29987d: r5 name2
68 | |
70 | |
69 o | 4 e71dbbc70e03: r4 name1
71 o | 4 e71dbbc70e03: r4 name1
70 | |
72 | |
71 o | 3 2c8d5d5ec612: r3 name1
73 o | 3 2c8d5d5ec612: r3 name1
72 | |
74 | |
73 o | 2 a7892891da29: r2 name1
75 o | 2 a7892891da29: r2 name1
74 |/
76 |/
75 o 1 0019a3b924fd: r1
77 o 1 0019a3b924fd: r1
76 |
78 |
77 o 0 d57206cc072a: r0
79 o 0 d57206cc072a: r0
78
80
79 $ cd ..
81 $ cd ..
80 $ tstart main
82 $ tstart main
81
83
82 Full clone:
84 Full clone:
83
85
84 $ hg clone main full
86 $ hg clone main full
85 updating to branch default
87 updating to branch default
86 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
88 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
87 $ cd full
89 $ cd full
88 $ hg incoming $remote
90 $ hg incoming $remote
89 comparing with http://localhost:$HGPORT/
91 comparing with http://localhost:$HGPORT/
90 searching for changes
92 searching for changes
91 no changes found
93 no changes found
92 [1]
94 [1]
93 $ hg outgoing $remote
95 $ hg outgoing $remote
94 comparing with http://localhost:$HGPORT/
96 comparing with http://localhost:$HGPORT/
95 searching for changes
97 searching for changes
96 no changes found
98 no changes found
97 [1]
99 [1]
98 $ hg pull $remote
100 $ hg pull $remote
99 pulling from http://localhost:$HGPORT/
101 pulling from http://localhost:$HGPORT/
100 searching for changes
102 searching for changes
101 no changes found
103 no changes found
102 $ hg push $remote
104 $ hg push $remote
103 pushing to http://localhost:$HGPORT/
105 pushing to http://localhost:$HGPORT/
104 searching for changes
106 searching for changes
105 no changes found
107 no changes found
106 [1]
108 [1]
107 $ cd ..
109 $ cd ..
108
110
109 Local is empty:
111 Local is empty:
110
112
111 $ cd empty1
113 $ cd empty1
112 $ hg incoming $remote
114 $ hg incoming $remote
113 comparing with http://localhost:$HGPORT/
115 comparing with http://localhost:$HGPORT/
114 0 d57206cc072a: r0
116 0 d57206cc072a: r0
115 1 0019a3b924fd: r1
117 1 0019a3b924fd: r1
116 2 a7892891da29: r2 name1
118 2 a7892891da29: r2 name1
117 3 2c8d5d5ec612: r3 name1
119 3 2c8d5d5ec612: r3 name1
118 4 e71dbbc70e03: r4 name1
120 4 e71dbbc70e03: r4 name1
119 5 70314b29987d: r5 name2
121 5 70314b29987d: r5 name2
120 6 6c6f5d5f3c11: r6 name2
122 6 6c6f5d5f3c11: r6 name2
121 7 b6b4d315a2ac: r7 name2
123 7 b6b4d315a2ac: r7 name2
122 8 d8f638ac69e9: r8 name2
124 8 d8f638ac69e9: r8 name2
123 9 025829e08038: r9 both
125 9 025829e08038: r9 both
124 10 8b6bad1512e1: r10 both
126 10 8b6bad1512e1: r10 both
125 11 a19bfa7e7328: r11 both
127 11 a19bfa7e7328: r11 both
126 $ hg outgoing $remote
128 $ hg outgoing $remote
127 comparing with http://localhost:$HGPORT/
129 comparing with http://localhost:$HGPORT/
128 no changes found
130 no changes found
129 [1]
131 [1]
130 $ hg push $remote
132 $ hg push $remote
131 pushing to http://localhost:$HGPORT/
133 pushing to http://localhost:$HGPORT/
132 no changes found
134 no changes found
133 [1]
135 [1]
134 $ hg pull $remote
136 $ hg pull $remote
135 pulling from http://localhost:$HGPORT/
137 pulling from http://localhost:$HGPORT/
136 requesting all changes
138 requesting all changes
137 adding changesets
139 adding changesets
138 adding manifests
140 adding manifests
139 adding file changes
141 adding file changes
140 added 12 changesets with 24 changes to 2 files
142 added 12 changesets with 24 changes to 2 files
141 (run 'hg update' to get a working copy)
143 (run 'hg update' to get a working copy)
142 $ hg incoming $remote
144 $ hg incoming $remote
143 comparing with http://localhost:$HGPORT/
145 comparing with http://localhost:$HGPORT/
144 searching for changes
146 searching for changes
145 no changes found
147 no changes found
146 [1]
148 [1]
147 $ cd ..
149 $ cd ..
148
150
149 Local is subset:
151 Local is subset:
150
152
151 $ hg clone main subset --rev name2 ; cd subset
153 $ hg clone main subset --rev name2 ; cd subset
152 adding changesets
154 adding changesets
153 adding manifests
155 adding manifests
154 adding file changes
156 adding file changes
155 added 6 changesets with 12 changes to 2 files
157 added 6 changesets with 12 changes to 2 files
156 updating to branch name2
158 updating to branch name2
157 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
159 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
158 $ hg incoming $remote
160 $ hg incoming $remote
159 comparing with http://localhost:$HGPORT/
161 comparing with http://localhost:$HGPORT/
160 searching for changes
162 searching for changes
161 6 a7892891da29: r2 name1
163 6 a7892891da29: r2 name1
162 7 2c8d5d5ec612: r3 name1
164 7 2c8d5d5ec612: r3 name1
163 8 e71dbbc70e03: r4 name1
165 8 e71dbbc70e03: r4 name1
164 9 025829e08038: r9 both
166 9 025829e08038: r9 both
165 10 8b6bad1512e1: r10 both
167 10 8b6bad1512e1: r10 both
166 11 a19bfa7e7328: r11 both
168 11 a19bfa7e7328: r11 both
167 $ hg outgoing $remote
169 $ hg outgoing $remote
168 comparing with http://localhost:$HGPORT/
170 comparing with http://localhost:$HGPORT/
169 searching for changes
171 searching for changes
170 no changes found
172 no changes found
171 [1]
173 [1]
172 $ hg push $remote
174 $ hg push $remote
173 pushing to http://localhost:$HGPORT/
175 pushing to http://localhost:$HGPORT/
174 searching for changes
176 searching for changes
175 no changes found
177 no changes found
176 [1]
178 [1]
177 $ hg pull $remote
179 $ hg pull $remote
178 pulling from http://localhost:$HGPORT/
180 pulling from http://localhost:$HGPORT/
179 searching for changes
181 searching for changes
180 adding changesets
182 adding changesets
181 adding manifests
183 adding manifests
182 adding file changes
184 adding file changes
183 added 6 changesets with 12 changes to 2 files
185 added 6 changesets with 12 changes to 2 files
184 (run 'hg update' to get a working copy)
186 (run 'hg update' to get a working copy)
185 $ hg incoming $remote
187 $ hg incoming $remote
186 comparing with http://localhost:$HGPORT/
188 comparing with http://localhost:$HGPORT/
187 searching for changes
189 searching for changes
188 no changes found
190 no changes found
189 [1]
191 [1]
190 $ cd ..
192 $ cd ..
193 $ tstop
191
194
192 Remote is empty:
195 Remote is empty:
193
196
194 $ tstop ; tstart empty2
197 $ tstart empty2
195 $ cd main
198 $ cd main
196 $ hg incoming $remote
199 $ hg incoming $remote
197 comparing with http://localhost:$HGPORT/
200 comparing with http://localhost:$HGPORT/
198 searching for changes
201 searching for changes
199 no changes found
202 no changes found
200 [1]
203 [1]
201 $ hg outgoing $remote
204 $ hg outgoing $remote
202 comparing with http://localhost:$HGPORT/
205 comparing with http://localhost:$HGPORT/
203 searching for changes
206 searching for changes
204 0 d57206cc072a: r0
207 0 d57206cc072a: r0
205 1 0019a3b924fd: r1
208 1 0019a3b924fd: r1
206 2 a7892891da29: r2 name1
209 2 a7892891da29: r2 name1
207 3 2c8d5d5ec612: r3 name1
210 3 2c8d5d5ec612: r3 name1
208 4 e71dbbc70e03: r4 name1
211 4 e71dbbc70e03: r4 name1
209 5 70314b29987d: r5 name2
212 5 70314b29987d: r5 name2
210 6 6c6f5d5f3c11: r6 name2
213 6 6c6f5d5f3c11: r6 name2
211 7 b6b4d315a2ac: r7 name2
214 7 b6b4d315a2ac: r7 name2
212 8 d8f638ac69e9: r8 name2
215 8 d8f638ac69e9: r8 name2
213 9 025829e08038: r9 both
216 9 025829e08038: r9 both
214 10 8b6bad1512e1: r10 both
217 10 8b6bad1512e1: r10 both
215 11 a19bfa7e7328: r11 both
218 11 a19bfa7e7328: r11 both
216 $ hg pull $remote
219 $ hg pull $remote
217 pulling from http://localhost:$HGPORT/
220 pulling from http://localhost:$HGPORT/
218 searching for changes
221 searching for changes
219 no changes found
222 no changes found
220 $ hg push $remote
223 $ hg push $remote
221 pushing to http://localhost:$HGPORT/
224 pushing to http://localhost:$HGPORT/
222 searching for changes
225 searching for changes
223 remote: adding changesets
226 remote: adding changesets
224 remote: adding manifests
227 remote: adding manifests
225 remote: adding file changes
228 remote: adding file changes
226 remote: added 12 changesets with 24 changes to 2 files
229 remote: added 12 changesets with 24 changes to 2 files
227 $ hg outgoing $remote
230 $ hg outgoing $remote
228 comparing with http://localhost:$HGPORT/
231 comparing with http://localhost:$HGPORT/
229 searching for changes
232 searching for changes
230 no changes found
233 no changes found
231 [1]
234 [1]
232 $ cd ..
235 $ cd ..
236 $ tstop
233
237
234 Local is superset:
238 Local is superset:
235
239
236 $ tstop
237 $ hg clone main subset2 --rev name2
240 $ hg clone main subset2 --rev name2
238 adding changesets
241 adding changesets
239 adding manifests
242 adding manifests
240 adding file changes
243 adding file changes
241 added 6 changesets with 12 changes to 2 files
244 added 6 changesets with 12 changes to 2 files
242 updating to branch name2
245 updating to branch name2
243 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
246 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
244 $ tstart subset2
247 $ tstart subset2
245 $ cd main
248 $ cd main
246 $ hg incoming $remote
249 $ hg incoming $remote
247 comparing with http://localhost:$HGPORT/
250 comparing with http://localhost:$HGPORT/
248 searching for changes
251 searching for changes
249 no changes found
252 no changes found
250 [1]
253 [1]
251 $ hg outgoing $remote
254 $ hg outgoing $remote
252 comparing with http://localhost:$HGPORT/
255 comparing with http://localhost:$HGPORT/
253 searching for changes
256 searching for changes
254 2 a7892891da29: r2 name1
257 2 a7892891da29: r2 name1
255 3 2c8d5d5ec612: r3 name1
258 3 2c8d5d5ec612: r3 name1
256 4 e71dbbc70e03: r4 name1
259 4 e71dbbc70e03: r4 name1
257 9 025829e08038: r9 both
260 9 025829e08038: r9 both
258 10 8b6bad1512e1: r10 both
261 10 8b6bad1512e1: r10 both
259 11 a19bfa7e7328: r11 both
262 11 a19bfa7e7328: r11 both
260 $ hg pull $remote
263 $ hg pull $remote
261 pulling from http://localhost:$HGPORT/
264 pulling from http://localhost:$HGPORT/
262 searching for changes
265 searching for changes
263 no changes found
266 no changes found
264 $ hg push $remote
267 $ hg push $remote
265 pushing to http://localhost:$HGPORT/
268 pushing to http://localhost:$HGPORT/
266 searching for changes
269 searching for changes
267 abort: push creates new remote branches: both, name1!
270 abort: push creates new remote branches: both, name1!
268 (use 'hg push --new-branch' to create new remote branches)
271 (use 'hg push --new-branch' to create new remote branches)
269 [255]
272 [255]
270 $ hg push $remote --new-branch
273 $ hg push $remote --new-branch
271 pushing to http://localhost:$HGPORT/
274 pushing to http://localhost:$HGPORT/
272 searching for changes
275 searching for changes
273 remote: adding changesets
276 remote: adding changesets
274 remote: adding manifests
277 remote: adding manifests
275 remote: adding file changes
278 remote: adding file changes
276 remote: added 6 changesets with 12 changes to 2 files
279 remote: added 6 changesets with 12 changes to 2 files
277 $ hg outgoing $remote
280 $ hg outgoing $remote
278 comparing with http://localhost:$HGPORT/
281 comparing with http://localhost:$HGPORT/
279 searching for changes
282 searching for changes
280 no changes found
283 no changes found
281 [1]
284 [1]
282 $ cd ..
285 $ cd ..
286 $ tstop
283
287
284 Partial pull:
288 Partial pull:
285
289
286 $ tstop ; tstart main
290 $ tstart main
287 $ hg clone $remote partial --rev name2
291 $ hg clone $remote partial --rev name2
288 adding changesets
292 adding changesets
289 adding manifests
293 adding manifests
290 adding file changes
294 adding file changes
291 added 6 changesets with 12 changes to 2 files
295 added 6 changesets with 12 changes to 2 files
292 updating to branch name2
296 updating to branch name2
293 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
297 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
294 $ cd partial
298 $ cd partial
295 $ hg incoming $remote
299 $ hg incoming $remote
296 comparing with http://localhost:$HGPORT/
300 comparing with http://localhost:$HGPORT/
297 searching for changes
301 searching for changes
298 6 a7892891da29: r2 name1
302 6 a7892891da29: r2 name1
299 7 2c8d5d5ec612: r3 name1
303 7 2c8d5d5ec612: r3 name1
300 8 e71dbbc70e03: r4 name1
304 8 e71dbbc70e03: r4 name1
301 9 025829e08038: r9 both
305 9 025829e08038: r9 both
302 10 8b6bad1512e1: r10 both
306 10 8b6bad1512e1: r10 both
303 11 a19bfa7e7328: r11 both
307 11 a19bfa7e7328: r11 both
304 $ hg incoming $remote --rev name1
308 $ hg incoming $remote --rev name1
305 comparing with http://localhost:$HGPORT/
309 comparing with http://localhost:$HGPORT/
306 searching for changes
310 searching for changes
307 6 a7892891da29: r2 name1
311 6 a7892891da29: r2 name1
308 7 2c8d5d5ec612: r3 name1
312 7 2c8d5d5ec612: r3 name1
309 8 e71dbbc70e03: r4 name1
313 8 e71dbbc70e03: r4 name1
310 $ hg pull $remote --rev name1
314 $ hg pull $remote --rev name1
311 pulling from http://localhost:$HGPORT/
315 pulling from http://localhost:$HGPORT/
312 searching for changes
316 searching for changes
313 adding changesets
317 adding changesets
314 adding manifests
318 adding manifests
315 adding file changes
319 adding file changes
316 added 3 changesets with 6 changes to 2 files (+1 heads)
320 added 3 changesets with 6 changes to 2 files (+1 heads)
317 (run 'hg heads' to see heads)
321 (run 'hg heads' to see heads)
318 $ hg incoming $remote
322 $ hg incoming $remote
319 comparing with http://localhost:$HGPORT/
323 comparing with http://localhost:$HGPORT/
320 searching for changes
324 searching for changes
321 9 025829e08038: r9 both
325 9 025829e08038: r9 both
322 10 8b6bad1512e1: r10 both
326 10 8b6bad1512e1: r10 both
323 11 a19bfa7e7328: r11 both
327 11 a19bfa7e7328: r11 both
324 $ cd ..
328 $ cd ..
329 $ tstop
325
330
326 Both have new stuff in new named branches:
331 Both have new stuff in new named branches:
327
332
328 $ tstop
329 $ hg clone main repo1a --rev name1 -q
333 $ hg clone main repo1a --rev name1 -q
330 $ hg clone repo1a repo1b -q
334 $ hg clone repo1a repo1b -q
331 $ hg clone main repo2a --rev name2 -q
335 $ hg clone main repo2a --rev name2 -q
332 $ hg clone repo2a repo2b -q
336 $ hg clone repo2a repo2b -q
333 $ tstart repo1a
337 $ tstart repo1a
334
338
335 $ cd repo2a
339 $ cd repo2a
336 $ hg incoming $remote
340 $ hg incoming $remote
337 comparing with http://localhost:$HGPORT/
341 comparing with http://localhost:$HGPORT/
338 searching for changes
342 searching for changes
339 6 a7892891da29: r2 name1
343 6 a7892891da29: r2 name1
340 7 2c8d5d5ec612: r3 name1
344 7 2c8d5d5ec612: r3 name1
341 8 e71dbbc70e03: r4 name1
345 8 e71dbbc70e03: r4 name1
342 $ hg outgoing $remote
346 $ hg outgoing $remote
343 comparing with http://localhost:$HGPORT/
347 comparing with http://localhost:$HGPORT/
344 searching for changes
348 searching for changes
345 2 70314b29987d: r5 name2
349 2 70314b29987d: r5 name2
346 3 6c6f5d5f3c11: r6 name2
350 3 6c6f5d5f3c11: r6 name2
347 4 b6b4d315a2ac: r7 name2
351 4 b6b4d315a2ac: r7 name2
348 5 d8f638ac69e9: r8 name2
352 5 d8f638ac69e9: r8 name2
349 $ hg push $remote --new-branch
353 $ hg push $remote --new-branch
350 pushing to http://localhost:$HGPORT/
354 pushing to http://localhost:$HGPORT/
351 searching for changes
355 searching for changes
352 remote: adding changesets
356 remote: adding changesets
353 remote: adding manifests
357 remote: adding manifests
354 remote: adding file changes
358 remote: adding file changes
355 remote: added 4 changesets with 8 changes to 2 files (+1 heads)
359 remote: added 4 changesets with 8 changes to 2 files (+1 heads)
356 $ hg pull $remote
360 $ hg pull $remote
357 pulling from http://localhost:$HGPORT/
361 pulling from http://localhost:$HGPORT/
358 searching for changes
362 searching for changes
359 adding changesets
363 adding changesets
360 adding manifests
364 adding manifests
361 adding file changes
365 adding file changes
362 added 3 changesets with 6 changes to 2 files (+1 heads)
366 added 3 changesets with 6 changes to 2 files (+1 heads)
363 (run 'hg heads' to see heads)
367 (run 'hg heads' to see heads)
364 $ hg incoming $remote
368 $ hg incoming $remote
365 comparing with http://localhost:$HGPORT/
369 comparing with http://localhost:$HGPORT/
366 searching for changes
370 searching for changes
367 no changes found
371 no changes found
368 [1]
372 [1]
369 $ hg outgoing $remote
373 $ hg outgoing $remote
370 comparing with http://localhost:$HGPORT/
374 comparing with http://localhost:$HGPORT/
371 searching for changes
375 searching for changes
372 no changes found
376 no changes found
373 [1]
377 [1]
374 $ cd ..
378 $ cd ..
379 $ tstop
375
380
376 $ tstop ; tstart repo1b
381 $ tstart repo1b
377 $ cd repo2b
382 $ cd repo2b
378 $ hg incoming $remote
383 $ hg incoming $remote
379 comparing with http://localhost:$HGPORT/
384 comparing with http://localhost:$HGPORT/
380 searching for changes
385 searching for changes
381 6 a7892891da29: r2 name1
386 6 a7892891da29: r2 name1
382 7 2c8d5d5ec612: r3 name1
387 7 2c8d5d5ec612: r3 name1
383 8 e71dbbc70e03: r4 name1
388 8 e71dbbc70e03: r4 name1
384 $ hg outgoing $remote
389 $ hg outgoing $remote
385 comparing with http://localhost:$HGPORT/
390 comparing with http://localhost:$HGPORT/
386 searching for changes
391 searching for changes
387 2 70314b29987d: r5 name2
392 2 70314b29987d: r5 name2
388 3 6c6f5d5f3c11: r6 name2
393 3 6c6f5d5f3c11: r6 name2
389 4 b6b4d315a2ac: r7 name2
394 4 b6b4d315a2ac: r7 name2
390 5 d8f638ac69e9: r8 name2
395 5 d8f638ac69e9: r8 name2
391 $ hg pull $remote
396 $ hg pull $remote
392 pulling from http://localhost:$HGPORT/
397 pulling from http://localhost:$HGPORT/
393 searching for changes
398 searching for changes
394 adding changesets
399 adding changesets
395 adding manifests
400 adding manifests
396 adding file changes
401 adding file changes
397 added 3 changesets with 6 changes to 2 files (+1 heads)
402 added 3 changesets with 6 changes to 2 files (+1 heads)
398 (run 'hg heads' to see heads)
403 (run 'hg heads' to see heads)
399 $ hg push $remote --new-branch
404 $ hg push $remote --new-branch
400 pushing to http://localhost:$HGPORT/
405 pushing to http://localhost:$HGPORT/
401 searching for changes
406 searching for changes
402 remote: adding changesets
407 remote: adding changesets
403 remote: adding manifests
408 remote: adding manifests
404 remote: adding file changes
409 remote: adding file changes
405 remote: added 4 changesets with 8 changes to 2 files (+1 heads)
410 remote: added 4 changesets with 8 changes to 2 files (+1 heads)
406 $ hg incoming $remote
411 $ hg incoming $remote
407 comparing with http://localhost:$HGPORT/
412 comparing with http://localhost:$HGPORT/
408 searching for changes
413 searching for changes
409 no changes found
414 no changes found
410 [1]
415 [1]
411 $ hg outgoing $remote
416 $ hg outgoing $remote
412 comparing with http://localhost:$HGPORT/
417 comparing with http://localhost:$HGPORT/
413 searching for changes
418 searching for changes
414 no changes found
419 no changes found
415 [1]
420 [1]
416 $ cd ..
421 $ cd ..
422 $ tstop
417
423
418 Both have new stuff in existing named branches:
424 Both have new stuff in existing named branches:
419
425
420 $ tstop
421 $ rm -r repo1a repo1b repo2a repo2b
426 $ rm -r repo1a repo1b repo2a repo2b
422 $ hg clone main repo1a --rev 3 --rev 8 -q
427 $ hg clone main repo1a --rev 3 --rev 8 -q
423 $ hg clone repo1a repo1b -q
428 $ hg clone repo1a repo1b -q
424 $ hg clone main repo2a --rev 4 --rev 7 -q
429 $ hg clone main repo2a --rev 4 --rev 7 -q
425 $ hg clone repo2a repo2b -q
430 $ hg clone repo2a repo2b -q
426 $ tstart repo1a
431 $ tstart repo1a
427
432
428 $ cd repo2a
433 $ cd repo2a
429 $ hg incoming $remote
434 $ hg incoming $remote
430 comparing with http://localhost:$HGPORT/
435 comparing with http://localhost:$HGPORT/
431 searching for changes
436 searching for changes
432 8 d8f638ac69e9: r8 name2
437 8 d8f638ac69e9: r8 name2
433 $ hg outgoing $remote
438 $ hg outgoing $remote
434 comparing with http://localhost:$HGPORT/
439 comparing with http://localhost:$HGPORT/
435 searching for changes
440 searching for changes
436 4 e71dbbc70e03: r4 name1
441 4 e71dbbc70e03: r4 name1
437 $ hg push $remote --new-branch
442 $ hg push $remote --new-branch
438 pushing to http://localhost:$HGPORT/
443 pushing to http://localhost:$HGPORT/
439 searching for changes
444 searching for changes
440 remote: adding changesets
445 remote: adding changesets
441 remote: adding manifests
446 remote: adding manifests
442 remote: adding file changes
447 remote: adding file changes
443 remote: added 1 changesets with 2 changes to 2 files
448 remote: added 1 changesets with 2 changes to 2 files
444 $ hg pull $remote
449 $ hg pull $remote
445 pulling from http://localhost:$HGPORT/
450 pulling from http://localhost:$HGPORT/
446 searching for changes
451 searching for changes
447 adding changesets
452 adding changesets
448 adding manifests
453 adding manifests
449 adding file changes
454 adding file changes
450 added 1 changesets with 2 changes to 2 files
455 added 1 changesets with 2 changes to 2 files
451 (run 'hg update' to get a working copy)
456 (run 'hg update' to get a working copy)
452 $ hg incoming $remote
457 $ hg incoming $remote
453 comparing with http://localhost:$HGPORT/
458 comparing with http://localhost:$HGPORT/
454 searching for changes
459 searching for changes
455 no changes found
460 no changes found
456 [1]
461 [1]
457 $ hg outgoing $remote
462 $ hg outgoing $remote
458 comparing with http://localhost:$HGPORT/
463 comparing with http://localhost:$HGPORT/
459 searching for changes
464 searching for changes
460 no changes found
465 no changes found
461 [1]
466 [1]
462 $ cd ..
467 $ cd ..
468 $ tstop
463
469
464 $ tstop ; tstart repo1b
470 $ tstart repo1b
465 $ cd repo2b
471 $ cd repo2b
466 $ hg incoming $remote
472 $ hg incoming $remote
467 comparing with http://localhost:$HGPORT/
473 comparing with http://localhost:$HGPORT/
468 searching for changes
474 searching for changes
469 8 d8f638ac69e9: r8 name2
475 8 d8f638ac69e9: r8 name2
470 $ hg outgoing $remote
476 $ hg outgoing $remote
471 comparing with http://localhost:$HGPORT/
477 comparing with http://localhost:$HGPORT/
472 searching for changes
478 searching for changes
473 4 e71dbbc70e03: r4 name1
479 4 e71dbbc70e03: r4 name1
474 $ hg pull $remote
480 $ hg pull $remote
475 pulling from http://localhost:$HGPORT/
481 pulling from http://localhost:$HGPORT/
476 searching for changes
482 searching for changes
477 adding changesets
483 adding changesets
478 adding manifests
484 adding manifests
479 adding file changes
485 adding file changes
480 added 1 changesets with 2 changes to 2 files
486 added 1 changesets with 2 changes to 2 files
481 (run 'hg update' to get a working copy)
487 (run 'hg update' to get a working copy)
482 $ hg push $remote --new-branch
488 $ hg push $remote --new-branch
483 pushing to http://localhost:$HGPORT/
489 pushing to http://localhost:$HGPORT/
484 searching for changes
490 searching for changes
485 remote: adding changesets
491 remote: adding changesets
486 remote: adding manifests
492 remote: adding manifests
487 remote: adding file changes
493 remote: adding file changes
488 remote: added 1 changesets with 2 changes to 2 files
494 remote: added 1 changesets with 2 changes to 2 files
489 $ hg incoming $remote
495 $ hg incoming $remote
490 comparing with http://localhost:$HGPORT/
496 comparing with http://localhost:$HGPORT/
491 searching for changes
497 searching for changes
492 no changes found
498 no changes found
493 [1]
499 [1]
494 $ hg outgoing $remote
500 $ hg outgoing $remote
495 comparing with http://localhost:$HGPORT/
501 comparing with http://localhost:$HGPORT/
496 searching for changes
502 searching for changes
497 no changes found
503 no changes found
498 [1]
504 [1]
499 $ cd ..
505 $ cd ..
500
506 $ tstop show
501 $ tstop
507 "GET /?cmd=capabilities HTTP/1.1" 200 -
502
508 "GET /?cmd=heads HTTP/1.1" 200 -
509 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961
510 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785
511 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961
512 "GET /?cmd=capabilities HTTP/1.1" 200 -
513 "GET /?cmd=heads HTTP/1.1" 200 -
514 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961
515 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785
516 "GET /?cmd=capabilities HTTP/1.1" 200 -
517 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
518 "GET /?cmd=heads HTTP/1.1" 200 -
519 "GET /?cmd=branches HTTP/1.1" 200 - x-hgarg-1:nodes=d8f638ac69e9ae8dea4f09f11d696546a912d961
520 "GET /?cmd=between HTTP/1.1" 200 - x-hgarg-1:pairs=d8f638ac69e9ae8dea4f09f11d696546a912d961-d57206cc072a18317c1e381fb60aa31bd3401785
521 "GET /?cmd=changegroupsubset HTTP/1.1" 200 - x-hgarg-1:bases=d8f638ac69e9ae8dea4f09f11d696546a912d961&heads=d8f638ac69e9ae8dea4f09f11d696546a912d961+2c8d5d5ec612be65cdfdeac78b7662ab1696324a
522 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
523 "GET /?cmd=capabilities HTTP/1.1" 200 -
524 "GET /?cmd=heads HTTP/1.1" 200 -
525 "GET /?cmd=branchmap HTTP/1.1" 200 -
526 "GET /?cmd=branchmap HTTP/1.1" 200 -
527 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
528 "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+1827a5bb63e602382eb89dd58f2ac9f3b007ad91
529 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases
530 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks
531 "GET /?cmd=capabilities HTTP/1.1" 200 -
532 "GET /?cmd=heads HTTP/1.1" 200 -
533 "GET /?cmd=capabilities HTTP/1.1" 200 -
534 "GET /?cmd=heads HTTP/1.1" 200 -