##// END OF EJS Templates
filtering: rename filters to their antonyms...
Kevin Bullock -
r18382:f3b21beb default
parent child Browse files
Show More
@@ -1,1364 +1,1364
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, short, hex, bin
8 from node import nullid, nullrev, short, hex, bin
9 from i18n import _
9 from i18n import _
10 import ancestor, mdiff, error, util, scmutil, subrepo, patch, encoding, phases
10 import ancestor, mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 import copies
11 import copies
12 import match as matchmod
12 import match as matchmod
13 import os, errno, stat
13 import os, errno, stat
14 import obsolete as obsmod
14 import obsolete as obsmod
15 import repoview
15 import repoview
16
16
17 propertycache = util.propertycache
17 propertycache = util.propertycache
18
18
class changectx(object):
    """A changecontext object makes access to data related to a particular
    changeset convenient."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""
        if changeid == '':
            changeid = '.'
        self._repo = repo

        # integer revision number
        if isinstance(changeid, int):
            try:
                self._node = repo.changelog.node(changeid)
            except IndexError:
                raise error.RepoLookupError(
                    _("unknown revision '%s'") % changeid)
            self._rev = changeid
            return
        if isinstance(changeid, long):
            changeid = str(changeid)
        # '.' means the first parent of the working directory
        if changeid == '.':
            self._node = repo.dirstate.p1()
            self._rev = repo.changelog.rev(self._node)
            return
        if changeid == 'null':
            self._node = nullid
            self._rev = nullrev
            return
        if changeid == 'tip':
            self._rev = len(repo.changelog) - 1
            self._node = repo.changelog.node(self._rev)
            return
        # a 20-byte string is treated as a binary node id
        if len(changeid) == 20:
            try:
                self._node = changeid
                self._rev = repo.changelog.rev(changeid)
                return
            except LookupError:
                pass

        # a decimal string, possibly negative (counting from tip)
        try:
            r = int(changeid)
            if str(r) != changeid:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l:
                raise ValueError
            self._rev = r
            self._node = repo.changelog.node(r)
            return
        except (ValueError, OverflowError):
            pass

        # a 40-character string is treated as a full hex node id
        if len(changeid) == 40:
            try:
                self._node = bin(changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except (TypeError, LookupError):
                pass

        # bookmark, tag, or branch name, in that order of precedence
        if changeid in repo._bookmarks:
            self._node = repo._bookmarks[changeid]
            self._rev = repo.changelog.rev(self._node)
            return
        if changeid in repo._tagscache.tags:
            self._node = repo._tagscache.tags[changeid]
            self._rev = repo.changelog.rev(self._node)
            return
        try:
            self._node = repo.branchtip(changeid)
            self._rev = repo.changelog.rev(self._node)
            return
        except error.RepoLookupError:
            pass

        # finally, try an unambiguous hex prefix of a node id
        self._node = repo.changelog._partialmatch(changeid)
        if self._node is not None:
            self._rev = repo.changelog.rev(self._node)
            return

        # lookup failed
        # check if it might have come from damaged dirstate
        #
        # XXX we could avoid the unfiltered if we had a recognizable exception
        # for filtered changeset access
        if changeid in repo.unfiltered().dirstate.parents():
            raise error.Abort(_("working directory has unknown parent '%s'!")
                              % short(changeid))
        try:
            if len(changeid) == 20:
                changeid = hex(changeid)
        except TypeError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)

    def __str__(self):
        return short(self.node())

    def __int__(self):
        return self.rev()

    def __repr__(self):
        return "<changectx %s>" % str(self)

    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            # _rev may not be set yet if __init__ failed partway
            return id(self)

    def __eq__(self, other):
        try:
            return self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __nonzero__(self):
        # the null revision is falsy
        return self._rev != nullrev

    @propertycache
    def _changeset(self):
        # raw changelog entry tuple: (manifest, user, date, files, desc, extra)
        return self._repo.changelog.read(self.rev())

    @propertycache
    def _manifest(self):
        return self._repo.manifest.read(self._changeset[0])

    @propertycache
    def _manifestdelta(self):
        return self._repo.manifest.readdelta(self._changeset[0])

    @propertycache
    def _parents(self):
        p = self._repo.changelog.parentrevs(self._rev)
        if p[1] == nullrev:
            # drop the null second parent so len() reflects real parents
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    @propertycache
    def substate(self):
        return subrepo.state(self, self._repo.ui)

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        # iterate file names in sorted order
        for f in sorted(self._manifest):
            yield f

    def changeset(self):
        return self._changeset
    def manifest(self):
        return self._manifest
    def manifestnode(self):
        return self._changeset[0]

    def rev(self):
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        return hex(self._node)
    def user(self):
        return self._changeset[1]
    def date(self):
        return self._changeset[2]
    def files(self):
        return self._changeset[3]
    def description(self):
        return self._changeset[4]
    def branch(self):
        return encoding.tolocal(self._changeset[5].get("branch"))
    def closesbranch(self):
        return 'close' in self._changeset[5]
    def extra(self):
        return self._changeset[5]
    def tags(self):
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def phasestr(self):
        return phases.phasenames[self.phase()]
    def mutable(self):
        return self.phase() > phases.public
    def hidden(self):
        # a revision is hidden when the 'visible' filter excludes it
        return self._rev in repoview.filterrevs(self._repo, 'visible')

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        if len(self._parents) == 2:
            return self._parents[1]
        # no second parent: return the null changectx
        return changectx(self._repo, -1)

    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        for a in self._repo.changelog.ancestors([self._rev]):
            yield changectx(self._repo, a)

    def descendants(self):
        for d in self._repo.changelog.descendants([self._rev]):
            yield changectx(self._repo, d)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def unstable(self):
        """True if the changeset is not obsolete but it's ancestor are"""
        return self.rev() in obsmod.getrevs(self._repo, 'unstable')

    def bumped(self):
        """True if the changeset try to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'bumped')

    def divergent(self):
        """Is a successors of a changeset with multiple possible successors set

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'divergent')

    def troubled(self):
        """True if the changeset is either unstable, bumped or divergent"""
        return self.unstable() or self.bumped() or self.divergent()

    def troubles(self):
        """return the list of troubles affecting this changesets.

        Troubles are returned as strings. possible values are:
        - unstable,
        - bumped,
        - divergent.
        """
        troubles = []
        if self.unstable():
            troubles.append('unstable')
        if self.bumped():
            troubles.append('bumped')
        if self.divergent():
            troubles.append('divergent')
        return troubles

    def _fileinfo(self, path):
        """return (filenode, flags) for path, raising LookupError if absent

        Prefers whatever manifest data is already cached to avoid a full
        manifest read.
        """
        if '_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.LookupError(self._node, path,
                                        _('not found in manifest'))
        if '_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        node, flag = self._repo.manifest.find(self._changeset[0], path)
        if not node:
            raise error.LookupError(self._node, path,
                                    _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)

    def ancestor(self, c2):
        """
        return the ancestor context of self and c2
        """
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        n = self._repo.changelog.ancestor(self._node, n2)
        return changectx(self._repo, n)

    def descendant(self, other):
        """True if other is descendant of this changeset"""
        return self._repo.changelog.descendant(self._rev, other._rev)

    def walk(self, match):
        """yield names of files in this changeset matched by 'match',
        reporting bad names via match.bad()"""
        fset = set(match.files())
        # for dirstate.walk, files=['.'] means "walk the whole tree".
        # follow that here, too
        fset.discard('.')
        for fn in self:
            if fn in fset:
                # specified pattern is the exact name
                fset.remove(fn)
            if match(fn):
                yield fn
        for fn in sorted(fset):
            if fn in self._dirs:
                # specified pattern is a directory
                continue
            if match.bad(fn, _('no such file in rev %s') % self) and match(fn):
                yield fn

    def sub(self, path):
        return subrepo.subrepo(self, path)

    def match(self, pats=[], include=None, exclude=None, default='glob'):
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.auditor, ctx=self)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None and not isinstance(ctx2, changectx):
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, opts)
        return patch.diff(self._repo, ctx2.node(), self.node(),
                          match=match, opts=diffopts)

    @propertycache
    def _dirs(self):
        # set of every directory (at every depth) containing a tracked file
        dirs = set()
        for f in self._manifest:
            pos = f.rfind('/')
            while pos != -1:
                f = f[:pos]
                if f in dirs:
                    break # dirs already contains this and above
                dirs.add(f)
                pos = f.rfind('/')
        return dirs

    def dirs(self):
        return self._dirs

    def dirty(self):
        # a committed changeset is never dirty
        return False
393
393
394 class filectx(object):
394 class filectx(object):
395 """A filecontext object makes access to data related to a particular
395 """A filecontext object makes access to data related to a particular
396 filerevision convenient."""
396 filerevision convenient."""
397 def __init__(self, repo, path, changeid=None, fileid=None,
397 def __init__(self, repo, path, changeid=None, fileid=None,
398 filelog=None, changectx=None):
398 filelog=None, changectx=None):
399 """changeid can be a changeset revision, node, or tag.
399 """changeid can be a changeset revision, node, or tag.
400 fileid can be a file revision or node."""
400 fileid can be a file revision or node."""
401 self._repo = repo
401 self._repo = repo
402 self._path = path
402 self._path = path
403
403
404 assert (changeid is not None
404 assert (changeid is not None
405 or fileid is not None
405 or fileid is not None
406 or changectx is not None), \
406 or changectx is not None), \
407 ("bad args: changeid=%r, fileid=%r, changectx=%r"
407 ("bad args: changeid=%r, fileid=%r, changectx=%r"
408 % (changeid, fileid, changectx))
408 % (changeid, fileid, changectx))
409
409
410 if filelog:
410 if filelog:
411 self._filelog = filelog
411 self._filelog = filelog
412
412
413 if changeid is not None:
413 if changeid is not None:
414 self._changeid = changeid
414 self._changeid = changeid
415 if changectx is not None:
415 if changectx is not None:
416 self._changectx = changectx
416 self._changectx = changectx
417 if fileid is not None:
417 if fileid is not None:
418 self._fileid = fileid
418 self._fileid = fileid
419
419
420 @propertycache
420 @propertycache
421 def _changectx(self):
421 def _changectx(self):
422 try:
422 try:
423 return changectx(self._repo, self._changeid)
423 return changectx(self._repo, self._changeid)
424 except error.RepoLookupError:
424 except error.RepoLookupError:
425 # Linkrev may point to any revision in the repository. When the
425 # Linkrev may point to any revision in the repository. When the
426 # repository is filtered this may lead to `filectx` trying to build
426 # repository is filtered this may lead to `filectx` trying to build
427 # `changectx` for filtered revision. In such case we fallback to
427 # `changectx` for filtered revision. In such case we fallback to
428 # creating `changectx` on the unfiltered version of the reposition.
428 # creating `changectx` on the unfiltered version of the reposition.
429 # This fallback should not be an issue because`changectx` from
429 # This fallback should not be an issue because`changectx` from
430 # `filectx` are not used in complexe operation that care about
430 # `filectx` are not used in complexe operation that care about
431 # filtering.
431 # filtering.
432 #
432 #
433 # This fallback is a cheap and dirty fix that prevent several
433 # This fallback is a cheap and dirty fix that prevent several
434 # crash. It does not ensure the behavior is correct. However the
434 # crash. It does not ensure the behavior is correct. However the
435 # behavior was not correct before filtering either and "incorrect
435 # behavior was not correct before filtering either and "incorrect
436 # behavior" is seen as better as "crash"
436 # behavior" is seen as better as "crash"
437 #
437 #
438 # Linkrevs have several serious troubles with filtering that are
438 # Linkrevs have several serious troubles with filtering that are
439 # complicated to solve. Proper handling of the issue here should be
439 # complicated to solve. Proper handling of the issue here should be
440 # considered when solving linkrev issue are on the table.
440 # considered when solving linkrev issue are on the table.
441 return changectx(self._repo.unfiltered(), self._changeid)
441 return changectx(self._repo.unfiltered(), self._changeid)
442
442
443 @propertycache
443 @propertycache
444 def _filelog(self):
444 def _filelog(self):
445 return self._repo.file(self._path)
445 return self._repo.file(self._path)
446
446
447 @propertycache
447 @propertycache
448 def _changeid(self):
448 def _changeid(self):
449 if '_changectx' in self.__dict__:
449 if '_changectx' in self.__dict__:
450 return self._changectx.rev()
450 return self._changectx.rev()
451 else:
451 else:
452 return self._filelog.linkrev(self._filerev)
452 return self._filelog.linkrev(self._filerev)
453
453
454 @propertycache
454 @propertycache
455 def _filenode(self):
455 def _filenode(self):
456 if '_fileid' in self.__dict__:
456 if '_fileid' in self.__dict__:
457 return self._filelog.lookup(self._fileid)
457 return self._filelog.lookup(self._fileid)
458 else:
458 else:
459 return self._changectx.filenode(self._path)
459 return self._changectx.filenode(self._path)
460
460
461 @propertycache
461 @propertycache
462 def _filerev(self):
462 def _filerev(self):
463 return self._filelog.rev(self._filenode)
463 return self._filelog.rev(self._filenode)
464
464
465 @propertycache
465 @propertycache
466 def _repopath(self):
466 def _repopath(self):
467 return self._path
467 return self._path
468
468
469 def __nonzero__(self):
469 def __nonzero__(self):
470 try:
470 try:
471 self._filenode
471 self._filenode
472 return True
472 return True
473 except error.LookupError:
473 except error.LookupError:
474 # file is missing
474 # file is missing
475 return False
475 return False
476
476
477 def __str__(self):
477 def __str__(self):
478 return "%s@%s" % (self.path(), short(self.node()))
478 return "%s@%s" % (self.path(), short(self.node()))
479
479
480 def __repr__(self):
480 def __repr__(self):
481 return "<filectx %s>" % str(self)
481 return "<filectx %s>" % str(self)
482
482
483 def __hash__(self):
483 def __hash__(self):
484 try:
484 try:
485 return hash((self._path, self._filenode))
485 return hash((self._path, self._filenode))
486 except AttributeError:
486 except AttributeError:
487 return id(self)
487 return id(self)
488
488
489 def __eq__(self, other):
489 def __eq__(self, other):
490 try:
490 try:
491 return (self._path == other._path
491 return (self._path == other._path
492 and self._filenode == other._filenode)
492 and self._filenode == other._filenode)
493 except AttributeError:
493 except AttributeError:
494 return False
494 return False
495
495
496 def __ne__(self, other):
496 def __ne__(self, other):
497 return not (self == other)
497 return not (self == other)
498
498
499 def filectx(self, fileid):
499 def filectx(self, fileid):
500 '''opens an arbitrary revision of the file without
500 '''opens an arbitrary revision of the file without
501 opening a new filelog'''
501 opening a new filelog'''
502 return filectx(self._repo, self._path, fileid=fileid,
502 return filectx(self._repo, self._path, fileid=fileid,
503 filelog=self._filelog)
503 filelog=self._filelog)
504
504
505 def filerev(self):
505 def filerev(self):
506 return self._filerev
506 return self._filerev
507 def filenode(self):
507 def filenode(self):
508 return self._filenode
508 return self._filenode
509 def flags(self):
509 def flags(self):
510 return self._changectx.flags(self._path)
510 return self._changectx.flags(self._path)
511 def filelog(self):
511 def filelog(self):
512 return self._filelog
512 return self._filelog
513
513
514 def rev(self):
514 def rev(self):
515 if '_changectx' in self.__dict__:
515 if '_changectx' in self.__dict__:
516 return self._changectx.rev()
516 return self._changectx.rev()
517 if '_changeid' in self.__dict__:
517 if '_changeid' in self.__dict__:
518 return self._changectx.rev()
518 return self._changectx.rev()
519 return self._filelog.linkrev(self._filerev)
519 return self._filelog.linkrev(self._filerev)
520
520
521 def linkrev(self):
521 def linkrev(self):
522 return self._filelog.linkrev(self._filerev)
522 return self._filelog.linkrev(self._filerev)
523 def node(self):
523 def node(self):
524 return self._changectx.node()
524 return self._changectx.node()
525 def hex(self):
525 def hex(self):
526 return hex(self.node())
526 return hex(self.node())
527 def user(self):
527 def user(self):
528 return self._changectx.user()
528 return self._changectx.user()
529 def date(self):
529 def date(self):
530 return self._changectx.date()
530 return self._changectx.date()
531 def files(self):
531 def files(self):
532 return self._changectx.files()
532 return self._changectx.files()
533 def description(self):
533 def description(self):
534 return self._changectx.description()
534 return self._changectx.description()
535 def branch(self):
535 def branch(self):
536 return self._changectx.branch()
536 return self._changectx.branch()
537 def extra(self):
537 def extra(self):
538 return self._changectx.extra()
538 return self._changectx.extra()
539 def phase(self):
539 def phase(self):
540 return self._changectx.phase()
540 return self._changectx.phase()
541 def phasestr(self):
541 def phasestr(self):
542 return self._changectx.phasestr()
542 return self._changectx.phasestr()
543 def manifest(self):
543 def manifest(self):
544 return self._changectx.manifest()
544 return self._changectx.manifest()
545 def changectx(self):
545 def changectx(self):
546 return self._changectx
546 return self._changectx
547
547
548 def data(self):
548 def data(self):
549 return self._filelog.read(self._filenode)
549 return self._filelog.read(self._filenode)
550 def path(self):
550 def path(self):
551 return self._path
551 return self._path
552 def size(self):
552 def size(self):
553 return self._filelog.size(self._filerev)
553 return self._filelog.size(self._filerev)
554
554
555 def isbinary(self):
555 def isbinary(self):
556 try:
556 try:
557 return util.binary(self.data())
557 return util.binary(self.data())
558 except IOError:
558 except IOError:
559 return False
559 return False
560
560
def cmp(self, fctx):
    """compare with other file context

    returns True if different than fctx.
    """
    # Decide whether a full content comparison is worthwhile.  The
    # original expression is
    #   (fctx._filerev is None and (pats or size-4 match)) or exact size
    # unrolled here with the same short-circuit evaluation order.
    maybe_equal = False
    if fctx._filerev is None:
        if self._repo._encodefilterpats:
            # filters may change sizes arbitrarily; must read the data
            maybe_equal = True
        elif self.size() - 4 == fctx.size():
            # if file data starts with '\1\n', empty metadata block is
            # prepended, which adds 4 bytes to filelog.size().
            maybe_equal = True
    if not maybe_equal:
        maybe_equal = self.size() == fctx.size()

    if maybe_equal:
        return self._filelog.cmp(self._filenode, fctx.data())

    # sizes differ, so the contents must differ
    return True
575
575
def renamed(self):
    """check if file was actually renamed in this changeset revision

    If rename logged in file revision, we report copy for changeset only
    if file revisions linkrev points back to the changeset in question
    or both changeset parents contain different file revisions.
    """
    rename = self._filelog.renamed(self._filenode)
    if not rename:
        # no copy recorded at the filelog level
        return rename

    if self.rev() == self.linkrev():
        # this filelog revision was introduced by this very changeset:
        # the recorded copy is genuine
        return rename

    name = self.path()
    fnode = self._filenode
    for p in self._changectx.parents():
        try:
            if fnode == p.filenode(name):
                # identical revision already present in a parent, so
                # nothing was renamed in this changeset
                return None
        except error.LookupError:
            # file does not exist in this parent; keep looking
            pass
    return rename
600
600
def parents(self):
    """Return filectx objects for this revision's filelog parents.

    Null parents are dropped.  When the filelog records a copy, the
    first parent is replaced by the copy source (whose filelog must be
    looked up by path, hence the None placeholder).
    """
    path = self._path
    flog = self._filelog
    candidates = [(path, node, flog)
                  for node in flog.parents(self._filenode)]

    rename = flog.renamed(self._filenode)
    if rename:
        candidates[0] = (rename[0], rename[1], None)

    return [filectx(self._repo, p, fileid=n, filelog=l)
            for p, n, l in candidates if n != nullid]
612
612
def p1(self):
    """Return the first parent file context."""
    return self.parents()[0]
615
615
def p2(self):
    """Return the second parent file context.

    When there is no second parent, synthesize a null revision of the
    same file instead of returning None.
    """
    ps = self.parents()
    if len(ps) == 2:
        return ps[1]
    return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
621
621
def children(self):
    """Return filectx objects for the filelog children of this revision.

    Children reached through a rename are not found ("hard for
    renames", as the original comment put it).
    """
    kids = self._filelog.children(self._filenode)
    return [filectx(self._repo, self._path, fileid=node,
                    filelog=self._filelog)
            for node in kids]
627
627
def annotate(self, follow=False, linenumber=None, diffopts=None):
    '''returns a list of tuples of (ctx, line) for each line
    in the file, where ctx is the filectx of the node where
    that line was last changed.
    This returns tuples of ((ctx, linenumber), line) for each line,
    if "linenumber" parameter is NOT "None".
    In such tuples, linenumber means one at the first appearance
    in the managed file.
    To reduce annotation cost,
    this returns fixed value(False is used) as linenumber,
    if "linenumber" parameter is "False".'''

    # The three decorators pair each line of `text` with its origin
    # marker; which one is used depends on the `linenumber` argument.
    def decorate_compat(text, rev):
        return ([rev] * len(text.splitlines()), text)

    def without_linenumber(text, rev):
        return ([(rev, False)] * len(text.splitlines()), text)

    def with_linenumber(text, rev):
        size = len(text.splitlines())
        return ([(rev, i) for i in xrange(1, size + 1)], text)

    # linenumber is None -> bare rev markers; truthy -> real line
    # numbers; falsy (but not None) -> (rev, False) placeholders.
    decorate = (((linenumber is None) and decorate_compat) or
                (linenumber and with_linenumber) or
                without_linenumber)

    def pair(parent, child):
        # Propagate annotations from parent to child across the
        # unchanged ('=') diff blocks; everything else stays the
        # child's own.
        blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
                                 refine=True)
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                child[0][b1:b2] = parent[0][a1:a2]
        return child

    getlog = util.lrucachefunc(lambda x: self._repo.file(x))
    def getctx(path, fileid):
        log = path == self._path and self._filelog or getlog(path)
        return filectx(self._repo, path, fileid=fileid, filelog=log)
    getctx = util.lrucachefunc(getctx)

    def parents(f):
        # we want to reuse filectx objects as much as possible
        p = f._path
        if f._filerev is None: # working dir
            pl = [(n.path(), n.filerev()) for n in f.parents()]
        else:
            pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]

        if follow:
            r = f.renamed()
            if r:
                # follow the copy source instead of the first parent
                pl[0] = (r[0], getlog(r[0]).rev(r[1]))

        return [getctx(p, n) for p, n in pl if n != nullrev]

    # use linkrev to find the first changeset where self appeared
    if self.rev() != self.linkrev():
        base = self.filectx(self.filerev())
    else:
        base = self

    # This algorithm would prefer to be recursive, but Python is a
    # bit recursion-hostile. Instead we do an iterative
    # depth-first search.

    # hist maps each processed filectx to its (annotations, text)
    # pair; needed counts how many children still want each entry so
    # it can be freed as soon as the last child has consumed it.
    visit = [base]
    hist = {}
    pcache = {}
    needed = {base: 1}
    while visit:
        f = visit[-1]
        if f not in pcache:
            pcache[f] = parents(f)

        ready = True
        pl = pcache[f]
        for p in pl:
            if p not in hist:
                ready = False
                visit.append(p)
                needed[p] = needed.get(p, 0) + 1
        if ready:
            visit.pop()
            curr = decorate(f.data(), f)
            for p in pl:
                curr = pair(hist[p], curr)
                if needed[p] == 1:
                    # last consumer: release the parent's annotation
                    del hist[p]
                else:
                    needed[p] -= 1

            hist[f] = curr
            pcache[f] = []

    return zip(hist[base][0], hist[base][1].splitlines(True))
725
725
def ancestor(self, fc2, actx):
    """
    find the common ancestor file context, if any, of self, and fc2

    actx must be the changectx of the common ancestor
    of self's and fc2's respective changesets.
    """

    # the easy case: no (relevant) renames
    if fc2.path() == self.path() and self.path() in actx:
        return actx[self.path()]

    # the next easiest cases: unambiguous predecessor (name trumps
    # history)
    if self.path() in actx and fc2.path() not in actx:
        return actx[self.path()]
    if fc2.path() in actx and self.path() not in actx:
        return actx[fc2.path()]

    # prime the ancestor cache for the working directory
    # (working-directory files have no filelog node of their own, so
    # their parents are seeded under the (path, None) vertex)
    acache = {}
    for c in (self, fc2):
        if c._filerev is None:
            pl = [(n.path(), n.filenode()) for n in c.parents()]
            acache[(c._path, None)] = pl

    # cache of filelogs keyed by path, pre-seeded with both endpoints
    flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
    def parents(vertex):
        # parent function for the generic ancestor algorithm; a vertex
        # is a (path, filenode) pair, and renames add an extra edge
        if vertex in acache:
            return acache[vertex]
        f, n = vertex
        if f not in flcache:
            flcache[f] = self._repo.file(f)
        fl = flcache[f]
        pl = [(f, p) for p in fl.parents(n) if p != nullid]
        re = fl.renamed(n)
        if re:
            pl.append(re)
        acache[vertex] = pl
        return pl

    a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
    v = ancestor.ancestor(a, b, parents)
    if v:
        f, n = v
        return filectx(self._repo, f, fileid=n, filelog=flcache[f])

    return None
774
774
def ancestors(self, followfirst=False):
    """Yield ancestor file contexts, highest (rev, node) key first.

    With followfirst=True only first parents are followed.
    """
    # pending maps (rev, node) -> filectx; popping max(pending) yields
    # ancestors in decreasing (rev, node) order
    pending = {}
    current = self
    cut = 1 if followfirst else None
    while True:
        for parent in current.parents()[:cut]:
            pending[(parent.rev(), parent.node())] = parent
        if not pending:
            break
        current = pending.pop(max(pending))
        yield current
786
786
def copies(self, c2):
    """Return the path-copy dict toward context c2, caching per target.

    Results are memoized on self._copycache keyed by str(c2), so
    repeated queries against the same target are computed once.
    """
    if not util.safehasattr(self, "_copycache"):
        self._copycache = {}
    key = str(c2)
    if key not in self._copycache:
        self._copycache[key] = copies.pathcopies(c2)
    return self._copycache[key]
794
794
class workingctx(changectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
    or None to use the repository status.
    """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        # the working directory has no revision number or node yet
        self._rev = None
        self._node = None
        self._text = text
        # date/user fall back to the propertycaches below when not given
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            # changes follows localrepo.status() layout:
            # (modified, added, removed, deleted, unknown, ignored, clean)
            self._status = list(changes[:4])
            self._unknown = changes[4]
            self._ignored = changes[5]
            self._clean = changes[6]
        else:
            self._unknown = None
            self._ignored = None
            self._clean = None

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __str__(self):
        # working directory renders as "<p1>+"
        return str(self._parents[0]) + "+"

    def __repr__(self):
        return "<workingctx %s>" % str(self)

    def __nonzero__(self):
        return True

    def __contains__(self, key):
        # tracked means dirstate state is not '?' (unknown) or 'r' (removed)
        return self._repo.dirstate[key] not in "?r"

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get

        if len(self._parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = self._parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = self._parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the working directory"""

        man = self._parents[0].manifest().copy()
        if len(self._parents) > 1:
            man2 = self.p2().manifest()
            def getman(f):
                if f in man:
                    return man
                return man2
        else:
            getman = lambda f: man

        copied = self._repo.dirstate.copies()
        ff = self._flagfunc
        modified, added, removed, deleted = self._status
        for i, l in (("a", added), ("m", modified)):
            for f in l:
                orig = copied.get(f, f)
                # tag the node with 'a' (added) or 'm' (modified)
                man[f] = getman(orig).get(orig, nullid) + i
                try:
                    man.set(f, ff(f))
                except OSError:
                    pass

        for f in deleted + removed:
            if f in man:
                del man[f]

        return man

    def __iter__(self):
        d = self._repo.dirstate
        for f in d:
            if d[f] != 'r':
                yield f

    @propertycache
    def _status(self):
        return self._repo.status()[:4]

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        return util.makedate()

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            # drop a null second parent
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def status(self, ignored=False, clean=False, unknown=False):
        """Explicit status query
        Unless this method is used to query the working copy status, the
        _status property will implicitly read the status using its default
        arguments."""
        stat = self._repo.status(ignored=ignored, clean=clean, unknown=unknown)
        self._unknown = self._ignored = self._clean = None
        if unknown:
            self._unknown = stat[4]
        if ignored:
            self._ignored = stat[5]
        if clean:
            self._clean = stat[6]
        self._status = stat[:4]
        return stat

    def manifest(self):
        return self._manifest
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status[0] + self._status[1] + self._status[2])

    def modified(self):
        return self._status[0]
    def added(self):
        return self._status[1]
    def removed(self):
        return self._status[2]
    def deleted(self):
        return self._status[3]
    def unknown(self):
        assert self._unknown is not None # must call status first
        return self._unknown
    def ignored(self):
        assert self._ignored is not None # must call status first
        return self._ignored
    def clean(self):
        assert self._clean is not None # must call status first
        return self._clean
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def tags(self):
        # the working directory carries the union of its parents' tags
        t = []
        for p in self.parents():
            t.extend(p.tags())
        return t

    def bookmarks(self):
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        # the working directory cannot have children
        return []

    def flags(self, path):
        if '_manifest' in self.__dict__:
            # manifest already computed: it knows the flags
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def ancestor(self, c2):
        """return the ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
                                               True, False))

    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty():
                return True
        # check current working dir
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))

    def add(self, list, prefix=""):
        """Schedule the given files for tracking; return rejected names."""
        join = lambda f: os.path.join(prefix, f)
        wlock = self._repo.wlock()
        ui, ds = self._repo.ui, self._repo.dirstate
        try:
            rejected = []
            for f in list:
                scmutil.checkportable(ui, join(f))
                p = self._repo.wjoin(f)
                try:
                    st = os.lstat(p)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % join(f))
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    # warn, but do not reject, very large files
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, join(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % join(f))
                    rejected.append(p)
                elif ds[f] in 'amn':
                    ui.warn(_("%s already tracked!\n") % join(f))
                elif ds[f] == 'r':
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected
        finally:
            wlock.release()

    def forget(self, files, prefix=""):
        """Stop tracking the given files; return names not tracked."""
        join = lambda f: os.path.join(prefix, f)
        wlock = self._repo.wlock()
        try:
            rejected = []
            for f in files:
                if f not in self._repo.dirstate:
                    self._repo.ui.warn(_("%s not tracked!\n") % join(f))
                    rejected.append(f)
                elif self._repo.dirstate[f] != 'a':
                    self._repo.dirstate.remove(f)
                else:
                    self._repo.dirstate.drop(f)
            return rejected
        finally:
            wlock.release()

    def ancestors(self):
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def undelete(self, list):
        """Restore files marked removed from their parent revision."""
        pctxs = self.parents()
        wlock = self._repo.wlock()
        try:
            for f in list:
                if self._repo.dirstate[f] != 'r':
                    self._repo.ui.warn(_("%s not removed!\n") % f)
                else:
                    # take the file from whichever parent has it
                    fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
                    t = fctx.data()
                    self._repo.wwrite(f, t, fctx.flags())
                    self._repo.dirstate.normal(f)
        finally:
            wlock.release()

    def copy(self, source, dest):
        """Record in the dirstate that dest was copied from source."""
        p = self._repo.wjoin(dest)
        if not os.path.lexists(p):
            self._repo.ui.warn(_("%s does not exist!\n") % dest)
        elif not (os.path.isfile(p) or os.path.islink(p)):
            self._repo.ui.warn(_("copy failed: %s is not a file or a "
                                 "symbolic link\n") % dest)
        else:
            wlock = self._repo.wlock()
            try:
                if self._repo.dirstate[dest] in '?r':
                    self._repo.dirstate.add(dest)
                self._repo.dirstate.copy(source, dest)
            finally:
                wlock.release()

    def dirs(self):
        return set(self._repo.dirstate.dirs())
1143
1143
1144 class workingfilectx(filectx):
1144 class workingfilectx(filectx):
1145 """A workingfilectx object makes access to data related to a particular
1145 """A workingfilectx object makes access to data related to a particular
1146 file in the working directory convenient."""
1146 file in the working directory convenient."""
1147 def __init__(self, repo, path, filelog=None, workingctx=None):
1147 def __init__(self, repo, path, filelog=None, workingctx=None):
1148 """changeid can be a changeset revision, node, or tag.
1148 """changeid can be a changeset revision, node, or tag.
1149 fileid can be a file revision or node."""
1149 fileid can be a file revision or node."""
1150 self._repo = repo
1150 self._repo = repo
1151 self._path = path
1151 self._path = path
1152 self._changeid = None
1152 self._changeid = None
1153 self._filerev = self._filenode = None
1153 self._filerev = self._filenode = None
1154
1154
1155 if filelog:
1155 if filelog:
1156 self._filelog = filelog
1156 self._filelog = filelog
1157 if workingctx:
1157 if workingctx:
1158 self._changectx = workingctx
1158 self._changectx = workingctx
1159
1159
1160 @propertycache
1160 @propertycache
1161 def _changectx(self):
1161 def _changectx(self):
1162 return workingctx(self._repo)
1162 return workingctx(self._repo)
1163
1163
1164 def __nonzero__(self):
1164 def __nonzero__(self):
1165 return True
1165 return True
1166
1166
1167 def __str__(self):
1167 def __str__(self):
1168 return "%s@%s" % (self.path(), self._changectx)
1168 return "%s@%s" % (self.path(), self._changectx)
1169
1169
1170 def __repr__(self):
1170 def __repr__(self):
1171 return "<workingfilectx %s>" % str(self)
1171 return "<workingfilectx %s>" % str(self)
1172
1172
1173 def data(self):
1173 def data(self):
1174 return self._repo.wread(self._path)
1174 return self._repo.wread(self._path)
1175 def renamed(self):
1175 def renamed(self):
1176 rp = self._repo.dirstate.copied(self._path)
1176 rp = self._repo.dirstate.copied(self._path)
1177 if not rp:
1177 if not rp:
1178 return None
1178 return None
1179 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1179 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1180
1180
1181 def parents(self):
1181 def parents(self):
1182 '''return parent filectxs, following copies if necessary'''
1182 '''return parent filectxs, following copies if necessary'''
1183 def filenode(ctx, path):
1183 def filenode(ctx, path):
1184 return ctx._manifest.get(path, nullid)
1184 return ctx._manifest.get(path, nullid)
1185
1185
1186 path = self._path
1186 path = self._path
1187 fl = self._filelog
1187 fl = self._filelog
1188 pcl = self._changectx._parents
1188 pcl = self._changectx._parents
1189 renamed = self.renamed()
1189 renamed = self.renamed()
1190
1190
1191 if renamed:
1191 if renamed:
1192 pl = [renamed + (None,)]
1192 pl = [renamed + (None,)]
1193 else:
1193 else:
1194 pl = [(path, filenode(pcl[0], path), fl)]
1194 pl = [(path, filenode(pcl[0], path), fl)]
1195
1195
1196 for pc in pcl[1:]:
1196 for pc in pcl[1:]:
1197 pl.append((path, filenode(pc, path), fl))
1197 pl.append((path, filenode(pc, path), fl))
1198
1198
1199 return [filectx(self._repo, p, fileid=n, filelog=l)
1199 return [filectx(self._repo, p, fileid=n, filelog=l)
1200 for p, n, l in pl if n != nullid]
1200 for p, n, l in pl if n != nullid]
1201
1201
1202 def children(self):
1202 def children(self):
1203 return []
1203 return []
1204
1204
1205 def size(self):
1205 def size(self):
1206 return os.lstat(self._repo.wjoin(self._path)).st_size
1206 return os.lstat(self._repo.wjoin(self._path)).st_size
1207 def date(self):
1207 def date(self):
1208 t, tz = self._changectx.date()
1208 t, tz = self._changectx.date()
1209 try:
1209 try:
1210 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
1210 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
1211 except OSError, err:
1211 except OSError, err:
1212 if err.errno != errno.ENOENT:
1212 if err.errno != errno.ENOENT:
1213 raise
1213 raise
1214 return (t, tz)
1214 return (t, tz)
1215
1215
1216 def cmp(self, fctx):
1216 def cmp(self, fctx):
1217 """compare with other file context
1217 """compare with other file context
1218
1218
1219 returns True if different than fctx.
1219 returns True if different than fctx.
1220 """
1220 """
1221 # fctx should be a filectx (not a workingfilectx)
1221 # fctx should be a filectx (not a workingfilectx)
1222 # invert comparison to reuse the same code path
1222 # invert comparison to reuse the same code path
1223 return fctx.cmp(self)
1223 return fctx.cmp(self)
1224
1224
class memctx(object):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism. 'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn raises an
    IOError. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """
    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        self._date = date and util.parsedate(date) or util.makedate()
        self._user = user
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        # status slots: modified, added, removed, deleted, unknown,
        # ignored, clean. Seven entries are required: ignored() and
        # clean() below read _status[5] and _status[6], which used to
        # raise IndexError with the previous five-entry list.
        self._status = [files, [], [], [], [], [], []]
        self._filectxfn = filectxfn

        self._extra = extra and extra.copy() or {}
        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

    def __str__(self):
        return str(self._parents[0]) + "+"

    def __int__(self):
        return self._rev

    def __nonzero__(self):
        return True

    def __getitem__(self, key):
        return self.filectx(key)

    def p1(self):
        return self._parents[0]
    def p2(self):
        return self._parents[1]

    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        # every touched file is recorded in the "modified" slot
        return self.modified()
    def modified(self):
        return self._status[0]
    def added(self):
        return self._status[1]
    def removed(self):
        return self._status[2]
    def deleted(self):
        return self._status[3]
    def unknown(self):
        return self._status[4]
    def ignored(self):
        return self._status[5]
    def clean(self):
        return self._status[6]
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def extra(self):
        return self._extra
    def flags(self, f):
        return self[f].flags()

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)
1328
1328
class memfilectx(object):
    """memfilectx represents an in-memory file to commit.

    See memctx for more details.
    """
    def __init__(self, path, data, islink=False, isexec=False, copied=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        self._path = path
        self._data = data
        # flags string: 'l' marks a symlink, 'x' an executable, in that order
        flags = ''
        if islink:
            flags += 'l'
        if isexec:
            flags += 'x'
        self._flags = flags
        self._copied = None
        if copied:
            self._copied = (copied, nullid)

    def __nonzero__(self):
        return True

    def __str__(self):
        # NOTE(review): self._changectx is never assigned on memfilectx, so
        # this looks like it would raise AttributeError — confirm callers.
        return "%s@%s" % (self.path(), self._changectx)

    def path(self):
        return self._path

    def data(self):
        return self._data

    def flags(self):
        return self._flags

    def isexec(self):
        return 'x' in self._flags

    def islink(self):
        return 'l' in self._flags

    def renamed(self):
        # (source, nullid) when created with a copy source, else None
        return self._copied
@@ -1,339 +1,339
1 # discovery.py - protocol changeset discovery functions
1 # discovery.py - protocol changeset discovery functions
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
10 import util, setdiscovery, treediscovery, phases, obsolete, bookmarks
11 import branchmap
11 import branchmap
12
12
def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, anyincoming, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "anyincoming" is testable as a boolean indicating if any nodes are missing
    locally. If remote does not support getbundle, this actually is a list of
    roots of the nodes that would be incoming, to be supplied to
    changegroupsubset. No code except for pull should be relying on this fact
    any longer.
    "heads" is either the supplied heads, or else the remote's heads.

    If you pass heads and they are all known locally, the response lists just
    these heads in "common" and in "heads".

    Please use findcommonoutgoing to compute the set of outgoing nodes to give
    extensions a good hook into outgoing.
    """

    # old servers without getbundle use the tree-walking protocol
    if not remote.capable('getbundle'):
        return treediscovery.findcommonincoming(repo, remote, heads, force)

    if heads:
        # fast path: if every requested head is already known locally
        # there is nothing to discover
        nodemap = repo.changelog.nodemap
        knownlocally = True
        for h in heads:
            if nodemap.get(h) is None:
                knownlocally = False
                break
        if knownlocally:
            return (heads, False, heads)

    common, anyinc, srvheads = setdiscovery.findcommonheads(
        repo.ui, repo, remote, abortwhenunrelated=not force)
    return (list(common), anyinc, heads or list(srvheads))
49
49
class outgoing(object):
    '''Represents the set of nodes present in a local repo but not in a
    (possibly) remote one.

    Members:

    missing is a list of all nodes present in local but not in remote.
    common is a list of all nodes shared between the two repos.
    excluded is the list of missing changeset that shouldn't be sent remotely.
    missingheads is the list of heads of missing.
    commonheads is the list of heads of common.

    The sets are computed on demand from the heads, unless provided upfront
    by discovery.'''

    def __init__(self, revlog, commonheads, missingheads):
        self.commonheads = commonheads
        self.missingheads = missingheads
        self._revlog = revlog
        # full sets are expensive; filled lazily by _computecommonmissing
        self._common = None
        self._missing = None
        self.excluded = []

    def _computecommonmissing(self):
        # a single revlog walk populates both cached sets
        self._common, self._missing = self._revlog.findcommonmissing(
            self.commonheads, self.missingheads)

    @util.propertycache
    def common(self):
        if self._common is None:
            self._computecommonmissing()
        return self._common

    @util.propertycache
    def missing(self):
        if self._missing is None:
            self._computecommonmissing()
        return self._missing
89
89
def findcommonoutgoing(repo, other, onlyheads=None, force=False,
                       commoninc=None, portable=False):
    '''Return an outgoing instance to identify the nodes present in repo but
    not in other.

    If onlyheads is given, only nodes ancestral to nodes in onlyheads
    (inclusive) are included. If you already know the local repo's heads,
    passing them in onlyheads is faster than letting them be recomputed here.

    If commoninc is given, it must be the result of a prior call to
    findcommonincoming(repo, other, force) to avoid recomputing it here.

    If portable is given, compute more conservative common and missingheads,
    to make bundles created from the instance more portable.'''
    # declare an empty outgoing object to be filled later
    og = outgoing(repo.changelog, None, None)

    # get common set if not provided
    if commoninc is None:
        commoninc = findcommonincoming(repo, other, force=force)
    og.commonheads, _any, _hds = commoninc

    # compute outgoing: secret phases or obsolescence markers may require
    # excluding some of the missing changesets from the push
    mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
    if not mayexclude:
        # nothing can be excluded, so every local head is pushable
        og.missingheads = onlyheads or repo.heads()
    elif onlyheads is None:
        # use visible heads as it should be cached
        og.missingheads = repo.filtered("served").heads()
        og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
    else:
        # compute common, missing and exclude secret stuff
        sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
        og._common, allmissing = sets
        og._missing = missing = []
        og.excluded = excluded = []
        for node in allmissing:
            ctx = repo[node]
            if ctx.phase() >= phases.secret or ctx.extinct():
                excluded.append(node)
            else:
                missing.append(node)
        if len(missing) == len(allmissing):
            # nothing was excluded, the requested heads still stand
            missingheads = onlyheads
        else: # update missing heads
            missingheads = phases.newheads(repo, onlyheads, excluded)
        og.missingheads = missingheads
    if portable:
        # recompute common and missingheads as if -r<rev> had been given for
        # each head of missing, and --base <rev> for each head of the proper
        # ancestors of missing
        og._computecommonmissing()
        cl = repo.changelog
        missingrevs = set(cl.rev(n) for n in og._missing)
        og._common = set(cl.ancestors(missingrevs)) - missingrevs
        commonheads = set(og.commonheads)
        og.missingheads = [h for h in og.missingheads if h not in commonheads]

    return og
149
149
def _headssummary(repo, remote, outgoing):
    """compute a summary of branch and heads status before and after push

    return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping

    - branch: the branch name
    - remoteheads: the list of remote heads known locally
                   None if the branch is new
    - newheads: the new remote heads (known locally) with outgoing pushed
    - unsyncedheads: the list of remote heads unknown locally.
    """
    cl = repo.changelog
    headssum = {}
    # A. Create set of branches involved in the push.
    branches = set(repo[n].branch() for n in outgoing.missing)
    remotemap = remote.branchmap()
    newbranches = branches - set(remotemap)
    branches.difference_update(newbranches)

    # B. register remote heads, split into the ones we know locally
    # and the ones we have never seen (unsynced)
    remotebranches = set()
    for branch, heads in remote.branchmap().iteritems():
        remotebranches.add(branch)
        known = []
        unsynced = []
        for h in heads:
            if h in cl.nodemap:
                known.append(h)
            else:
                unsynced.append(h)
        # list(known) is a separate copy: slot 1 is mutated in step E
        headssum[branch] = (known, list(known), unsynced)
    # C. add new branch data
    missingctx = list(repo[n] for n in outgoing.missing)
    touchedbranches = set()
    for ctx in missingctx:
        branch = ctx.branch()
        touchedbranches.add(branch)
        if branch not in headssum:
            # None marks a branch that does not yet exist remotely
            headssum[branch] = (None, [], [])

    # D. drop data about untouched branches:
    for branch in remotebranches - touchedbranches:
        del headssum[branch]

    # E. Update newmap with outgoing changes.
    # This will possibly add new heads and remove existing ones.
    newmap = branchmap.branchcache((branch, heads[1])
                                   for branch, heads in headssum.iteritems()
                                   if heads[0] is not None)
    newmap.update(repo, (ctx.rev() for ctx in missingctx))
    for branch, newheads in newmap.iteritems():
        headssum[branch][1][:] = newheads
    return headssum
203
203
204 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
204 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
205 """Compute branchmapsummary for repo without branchmap support"""
205 """Compute branchmapsummary for repo without branchmap support"""
206
206
207 cl = repo.changelog
207 cl = repo.changelog
208 # 1-4b. old servers: Check for new topological heads.
208 # 1-4b. old servers: Check for new topological heads.
209 # Construct {old,new}map with branch = None (topological branch).
209 # Construct {old,new}map with branch = None (topological branch).
210 # (code based on update)
210 # (code based on update)
211 oldheads = set(h for h in remoteheads if h in cl.nodemap)
211 oldheads = set(h for h in remoteheads if h in cl.nodemap)
212 # all nodes in outgoing.missing are children of either:
212 # all nodes in outgoing.missing are children of either:
213 # - an element of oldheads
213 # - an element of oldheads
214 # - another element of outgoing.missing
214 # - another element of outgoing.missing
215 # - nullrev
215 # - nullrev
216 # This explains why the new head are very simple to compute.
216 # This explains why the new head are very simple to compute.
217 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
217 r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
218 newheads = list(c.node() for c in r)
218 newheads = list(c.node() for c in r)
219 unsynced = inc and set([None]) or set()
219 unsynced = inc and set([None]) or set()
220 return {None: (oldheads, newheads, unsynced)}
220 return {None: (oldheads, newheads, unsynced)}
221
221
def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
    """Check that a push won't add any outgoing head

    raise Abort error and display ui message as needed.

    repo: local repository being pushed from
    remote: peer being pushed to
    outgoing: discovery.outgoing object describing what will be pushed
    remoteheads: list of the remote's current head nodes
    newbranch: True when the user passed --new-branch
    inc: True when incoming (unsynced) remote changes were detected
    """
    # Check for each named branch if we're creating new remote heads.
    # To be a remote head after push, node must be either:
    # - unknown locally
    # - a local outgoing head descended from update
    # - a remote head that's known locally and not
    #   ancestral to an outgoing head
    if remoteheads == [nullid]:
        # remote is empty, nothing to check.
        return

    # headssum maps branch name -> (oldheads, newheads, unsynced);
    # the branchmap-capable path gives per-branch data, the fallback
    # lumps everything under the None key.
    if remote.capable('branchmap'):
        headssum = _headssummary(repo, remote, outgoing)
    else:
        headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
    # a branch whose old-heads slot is None does not yet exist remotely
    newbranches = [branch for branch, heads in headssum.iteritems()
                   if heads[0] is None]
    # 1. Check for new branches on the remote.
    if newbranches and not newbranch: # new branch requires --new-branch
        branchnames = ', '.join(sorted(newbranches))
        raise util.Abort(_("push creates new remote branches: %s!")
                         % branchnames,
                         hint=_("use 'hg push --new-branch' to create"
                                " new remote branches"))

    # 2. Compute bookmarked heads: heads that are bookmarked both locally
    # and remotely are legitimate updates, so we don't warn about them.
    localbookmarks = repo._bookmarks
    remotebookmarks = remote.listkeys('bookmarks')
    bookmarkedheads = set()
    for bm in localbookmarks:
        rnode = remotebookmarks.get(bm)
        if rnode and rnode in repo:
            lctx, rctx = repo[bm], repo[rnode]
            # only accept bookmark moves that are valid fast-forwards
            if bookmarks.validdest(repo, rctx, lctx):
                bookmarkedheads.add(lctx.node())

    # 3. Check for new heads.
    # If there are more heads after the push than before, a suitable
    # error message, depending on unsynced status, is displayed.
    error = None
    unsynced = False
    allmissing = set(outgoing.missing)
    # everything the remote will have after the push: its current common
    # set plus what we are about to send
    allfuturecommon = set(c.node() for c in repo.set('%ld', outgoing.common))
    allfuturecommon.update(allmissing)
    for branch, heads in sorted(headssum.iteritems()):
        if heads[0] is None:
            # Maybe we should abort if we push more that one head
            # for new branches ?
            continue
        candidate_newhs = set(heads[1])
        # add unsynced data
        oldhs = set(heads[0])
        oldhs.update(heads[2])
        candidate_newhs.update(heads[2])
        dhs = None # delta heads, the new heads on branch
        discardedheads = set()
        if repo.obsstore:
            # remove future heads which are actually obsolete by another
            # pushed element:
            #
            # XXX as above, There are several cases this case does not handle
            # XXX properly
            #
            # (1) if <nh> is public, it won't be affected by obsolete marker
            #     and a new is created
            #
            # (2) if the new heads have ancestors which are not obsolete and
            #     not ancestors of any other heads we will have a new head too.
            #
            # This two case will be easy to handle for know changeset but much
            # more tricky for unsynced changes.
            newhs = set()
            for nh in candidate_newhs:
                if nh in repo and repo[nh].phase() <= phases.public:
                    # public changesets cannot be made obsolete
                    newhs.add(nh)
                else:
                    # discard a candidate head when one of its successors
                    # is already part of the remote's future state
                    for suc in obsolete.allsuccessors(repo.obsstore, [nh]):
                        if suc != nh and suc in allfuturecommon:
                            discardedheads.add(nh)
                            break
                    else:
                        newhs.add(nh)
        else:
            newhs = candidate_newhs
        if [h for h in heads[2] if h not in discardedheads]:
            # some unsynced remote heads survived the obsolescence filter
            unsynced = True
        if len(newhs) > len(oldhs):
            # strip updates to existing remote heads from the new heads list
            dhs = sorted(newhs - bookmarkedheads - oldhs)
        if dhs:
            if error is None:
                # only report the first offending branch/head in the abort
                if branch not in ('default', None):
                    error = _("push creates new remote head %s "
                              "on branch '%s'!") % (short(dhs[0]), branch)
                else:
                    error = _("push creates new remote head %s!"
                              ) % short(dhs[0])
                if heads[2]: # unsynced
                    hint = _("you should pull and merge or "
                             "use push -f to force")
                else:
                    hint = _("did you forget to merge? "
                             "use push -f to force")
            if branch is not None:
                repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
            for h in dhs:
                repo.ui.note(_("new remote head %s\n") % short(h))
    if error:
        # hint is always assigned whenever error is set above
        raise util.Abort(error, hint=hint)

    # 6. Check for unsynced changes on involved branches.
    if unsynced:
        repo.ui.warn(_("note: unsynced remote changes!\n"))
@@ -1,646 +1,646
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 from node import hex, nullid
11 from node import hex, nullid
12 import localrepo, bundlerepo, httppeer, sshpeer, statichttprepo, bookmarks
12 import localrepo, bundlerepo, httppeer, sshpeer, statichttprepo, bookmarks
13 import lock, util, extensions, error, node, scmutil, phases, url
13 import lock, util, extensions, error, node, scmutil, phases, url
14 import cmdutil, discovery
14 import cmdutil, discovery
15 import merge as mergemod
15 import merge as mergemod
16 import verify as verifymod
16 import verify as verifymod
17 import errno, os, shutil
17 import errno, os, shutil
18
18
def _local(path):
    '''return the repo module handling a local path: bundlerepo when the
    path points at a plain file (a bundle), localrepo otherwise'''
    localpath = util.expandpath(util.urllocalpath(path))
    if os.path.isfile(localpath):
        return bundlerepo
    return localrepo
22
22
def addbranchrevs(lrepo, other, branches, revs):
    """Expand branch names into concrete revisions to pull/clone.

    ``branches`` is the (hashbranch, branches) pair produced by
    parseurl(). Returns (revs, checkoutrev): the requested revisions
    extended with the nodes the peer's branchmap lists for each named
    branch, and the revision to check out (or None).
    """
    peer = other.peer() # a courtesy to callers using a localrepo for other
    hashbranch, branches = branches
    if not hashbranch and not branches:
        # nothing branch-related requested; pass revs through unchanged
        return revs or None, revs and revs[0] or None
    revs = revs and list(revs) or []
    if not peer.capable('branchmap'):
        if branches:
            raise util.Abort(_("remote branch lookup not supported"))
        # without branchmap, treat the fragment as a plain revision
        revs.append(hashbranch)
        return revs, revs[0]
    branchmap = peer.branchmap()

    def primary(branch):
        # Append the branch's nodes to ``revs`` (mutated in place);
        # return True when the branch exists in the peer's branchmap.
        if branch == '.':
            if not lrepo:
                raise util.Abort(_("dirstate branch not accessible"))
            branch = lrepo.dirstate.branch()
        if branch in branchmap:
            revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
            return True
        else:
            return False

    for branch in branches:
        if not primary(branch):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        # the fragment may name a branch or a bare revision hash
        if not primary(hashbranch):
            revs.append(hashbranch)
    return revs, revs[0]
54
54
def parseurl(path, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''
    u = util.url(path)
    branch = u.fragment or None
    if u.fragment:
        # strip the fragment so the returned URL is bare
        u.fragment = None
    return str(u), (branch, branches or [])
64
64
# Map of URL scheme -> module (or callable, as for 'file', which picks
# bundlerepo vs localrepo based on the path) that instantiates the
# repo/peer for that scheme.
schemes = {
    'bundle': bundlerepo,
    'file': _local,
    'http': httppeer,
    'https': httppeer,
    'ssh': sshpeer,
    'static-http': statichttprepo,
}
73
73
def _peerlookup(path):
    '''return the scheme handler (module) responsible for *path*'''
    parsed = util.url(path)
    handler = schemes.get(parsed.scheme or 'file') or schemes['file']
    try:
        # some entries are callables taking the path ('file'), others
        # are plain modules; fall back to the latter interpretation
        return handler(path)
    except TypeError:
        return handler
82
82
def islocal(repo):
    '''return true if repo or path is local'''
    if not isinstance(repo, str):
        return repo.local()
    try:
        return _peerlookup(repo).islocal(repo)
    except AttributeError:
        # handler for this scheme has no islocal(): treat as remote
        return False
91
91
def openpath(ui, path):
    '''open path with open if local, url.open if remote'''
    if not islocal(path):
        return url.open(ui, path)
    return util.posixfile(util.urllocalpath(path), 'rb')
98
98
def _peerorrepo(ui, path, create=False):
    """return a repository object for the specified path"""
    repoobj = _peerlookup(path).instance(ui, path, create)
    # prefer the object's own ui when it carries one
    ui = getattr(repoobj, "ui", ui)
    # give every loaded extension a chance to wrap/configure the repo
    for extname, extmod in extensions.extensions():
        reposetup = getattr(extmod, 'reposetup', None)
        if reposetup:
            reposetup(ui, repoobj)
    return repoobj
108
108
def repository(ui, path='', create=False):
    """return a repository object for the specified path"""
    obj = _peerorrepo(ui, path, create)
    repo = obj.local()
    if repo:
        # hand out the view restricted to visible (non-hidden) changesets
        return repo.filtered('visible')
    raise util.Abort(_("repository '%s' is not local") %
                     (path or obj.url()))
117
117
def peer(uiorrepo, opts, path, create=False):
    '''return a repository peer for the specified path'''
    return _peerorrepo(remoteui(uiorrepo, opts), path, create).peer()
122
122
def defaultdest(source):
    '''return default destination of clone if none is given'''
    srcpath = util.url(source).path
    return os.path.basename(os.path.normpath(srcpath))
126
126
def share(ui, source, dest=None, update=True):
    '''create a shared repository

    Creates ``dest`` (defaulting to the source's basename) as a
    repository whose store is shared with ``source`` via a
    .hg/sharedpath pointer, then optionally updates its working
    directory.
    '''

    if not islocal(source):
        raise util.Abort(_('can only share local repositories'))

    if not dest:
        dest = defaultdest(source)
    else:
        dest = ui.expandpath(dest)

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource)
        srcrepo = repository(ui, source)
        rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
    else:
        # a repo object was passed in directly
        srcrepo = source.local()
        origsource = source = srcrepo.url()
        checkout = None

    sharedpath = srcrepo.sharedpath # if our source is already sharing

    root = os.path.realpath(dest)
    roothg = os.path.join(root, '.hg')

    if os.path.exists(roothg):
        raise util.Abort(_('destination already exists'))

    if not os.path.isdir(root):
        os.mkdir(root)
    util.makedir(roothg, notindexed=True)

    # carry over the source's requirements and mark the repo as shared
    requirements = ''
    try:
        requirements = srcrepo.opener.read('requires')
    except IOError, inst:
        if inst.errno != errno.ENOENT:
            raise

    requirements += 'shared\n'
    util.writefile(os.path.join(roothg, 'requires'), requirements)
    util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)

    r = repository(ui, root)

    default = srcrepo.ui.config('paths', 'default')
    if not default:
        # set default to source for being able to clone subrepos
        default = os.path.abspath(util.urllocalpath(origsource))
    fp = r.opener("hgrc", "w", text=True)
    fp.write("[paths]\n")
    fp.write("default = %s\n" % default)
    fp.close()
    r.ui.setconfig('paths', 'default', default)

    if update:
        r.ui.status(_("updating working directory\n"))
        if update is not True:
            # caller supplied an explicit revision to update to
            checkout = update
        # fall back through default and tip until a lookup succeeds
        for test in (checkout, 'default', 'tip'):
            if test is None:
                continue
            try:
                uprev = r.lookup(test)
                break
            except error.RepoLookupError:
                continue
        _update(r, uprev)
196
196
def copystore(ui, srcrepo, destpath):
    '''copy files from store of srcrepo in destpath

    returns destlock
    '''
    # destlock is taken while copying revlog data and handed back to the
    # caller, who is responsible for releasing it
    destlock = None
    try:
        hardlink = None
        num = 0
        srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
        for f in srcrepo.store.copylist():
            # a publishing repo's phase data is all-public; skip it so
            # the destination starts with default (public) phases
            if srcpublishing and f.endswith('phaseroots'):
                continue
            src = os.path.join(srcrepo.sharedpath, f)
            dst = os.path.join(destpath, f)
            dstbase = os.path.dirname(dst)
            if dstbase and not os.path.exists(dstbase):
                os.mkdir(dstbase)
            if os.path.exists(src):
                if dst.endswith('data'):
                    # lock to avoid premature writing to the target
                    destlock = lock.lock(os.path.join(dstbase, "lock"))
                # copyfiles decides once whether hardlinking works and
                # keeps using that decision for subsequent files
                hardlink, n = util.copyfiles(src, dst, hardlink)
                num += n
        if hardlink:
            ui.debug("linked %d files\n" % num)
        else:
            ui.debug("copied %d files\n" % num)
        return destlock
    except: # re-raises
        release(destlock)
        raise
229
229
230 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
230 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
231 update=True, stream=False, branch=None):
231 update=True, stream=False, branch=None):
232 """Make a copy of an existing repository.
232 """Make a copy of an existing repository.
233
233
234 Create a copy of an existing repository in a new directory. The
234 Create a copy of an existing repository in a new directory. The
235 source and destination are URLs, as passed to the repository
235 source and destination are URLs, as passed to the repository
236 function. Returns a pair of repository peers, the source and
236 function. Returns a pair of repository peers, the source and
237 newly created destination.
237 newly created destination.
238
238
239 The location of the source is added to the new repository's
239 The location of the source is added to the new repository's
240 .hg/hgrc file, as the default to be used for future pulls and
240 .hg/hgrc file, as the default to be used for future pulls and
241 pushes.
241 pushes.
242
242
243 If an exception is raised, the partly cloned/updated destination
243 If an exception is raised, the partly cloned/updated destination
244 repository will be deleted.
244 repository will be deleted.
245
245
246 Arguments:
246 Arguments:
247
247
248 source: repository object or URL
248 source: repository object or URL
249
249
250 dest: URL of destination repository to create (defaults to base
250 dest: URL of destination repository to create (defaults to base
251 name of source repository)
251 name of source repository)
252
252
253 pull: always pull from source repository, even in local case
253 pull: always pull from source repository, even in local case
254
254
255 stream: stream raw data uncompressed from repository (fast over
255 stream: stream raw data uncompressed from repository (fast over
256 LAN, slow over WAN)
256 LAN, slow over WAN)
257
257
258 rev: revision to clone up to (implies pull=True)
258 rev: revision to clone up to (implies pull=True)
259
259
260 update: update working directory after clone completes, if
260 update: update working directory after clone completes, if
261 destination is local repository (True means update to default rev,
261 destination is local repository (True means update to default rev,
262 anything else is treated as a revision)
262 anything else is treated as a revision)
263
263
264 branch: branches to clone
264 branch: branches to clone
265 """
265 """
266
266
267 if isinstance(source, str):
267 if isinstance(source, str):
268 origsource = ui.expandpath(source)
268 origsource = ui.expandpath(source)
269 source, branch = parseurl(origsource, branch)
269 source, branch = parseurl(origsource, branch)
270 srcpeer = peer(ui, peeropts, source)
270 srcpeer = peer(ui, peeropts, source)
271 else:
271 else:
272 srcpeer = source.peer() # in case we were called with a localrepo
272 srcpeer = source.peer() # in case we were called with a localrepo
273 branch = (None, branch or [])
273 branch = (None, branch or [])
274 origsource = source = srcpeer.url()
274 origsource = source = srcpeer.url()
275 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
275 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
276
276
277 if dest is None:
277 if dest is None:
278 dest = defaultdest(source)
278 dest = defaultdest(source)
279 ui.status(_("destination directory: %s\n") % dest)
279 ui.status(_("destination directory: %s\n") % dest)
280 else:
280 else:
281 dest = ui.expandpath(dest)
281 dest = ui.expandpath(dest)
282
282
283 dest = util.urllocalpath(dest)
283 dest = util.urllocalpath(dest)
284 source = util.urllocalpath(source)
284 source = util.urllocalpath(source)
285
285
286 if not dest:
286 if not dest:
287 raise util.Abort(_("empty destination path is not valid"))
287 raise util.Abort(_("empty destination path is not valid"))
288 if os.path.exists(dest):
288 if os.path.exists(dest):
289 if not os.path.isdir(dest):
289 if not os.path.isdir(dest):
290 raise util.Abort(_("destination '%s' already exists") % dest)
290 raise util.Abort(_("destination '%s' already exists") % dest)
291 elif os.listdir(dest):
291 elif os.listdir(dest):
292 raise util.Abort(_("destination '%s' is not empty") % dest)
292 raise util.Abort(_("destination '%s' is not empty") % dest)
293
293
294 class DirCleanup(object):
294 class DirCleanup(object):
295 def __init__(self, dir_):
295 def __init__(self, dir_):
296 self.rmtree = shutil.rmtree
296 self.rmtree = shutil.rmtree
297 self.dir_ = dir_
297 self.dir_ = dir_
298 def close(self):
298 def close(self):
299 self.dir_ = None
299 self.dir_ = None
300 def cleanup(self):
300 def cleanup(self):
301 if self.dir_:
301 if self.dir_:
302 self.rmtree(self.dir_, True)
302 self.rmtree(self.dir_, True)
303
303
304 srclock = destlock = dircleanup = None
304 srclock = destlock = dircleanup = None
305 srcrepo = srcpeer.local()
305 srcrepo = srcpeer.local()
306 try:
306 try:
307 abspath = origsource
307 abspath = origsource
308 if islocal(origsource):
308 if islocal(origsource):
309 abspath = os.path.abspath(util.urllocalpath(origsource))
309 abspath = os.path.abspath(util.urllocalpath(origsource))
310
310
311 if islocal(dest):
311 if islocal(dest):
312 dircleanup = DirCleanup(dest)
312 dircleanup = DirCleanup(dest)
313
313
314 copy = False
314 copy = False
315 if (srcrepo and srcrepo.cancopy() and islocal(dest)
315 if (srcrepo and srcrepo.cancopy() and islocal(dest)
316 and not phases.hassecret(srcrepo)):
316 and not phases.hassecret(srcrepo)):
317 copy = not pull and not rev
317 copy = not pull and not rev
318
318
319 if copy:
319 if copy:
320 try:
320 try:
321 # we use a lock here because if we race with commit, we
321 # we use a lock here because if we race with commit, we
322 # can end up with extra data in the cloned revlogs that's
322 # can end up with extra data in the cloned revlogs that's
323 # not pointed to by changesets, thus causing verify to
323 # not pointed to by changesets, thus causing verify to
324 # fail
324 # fail
325 srclock = srcrepo.lock(wait=False)
325 srclock = srcrepo.lock(wait=False)
326 except error.LockError:
326 except error.LockError:
327 copy = False
327 copy = False
328
328
329 if copy:
329 if copy:
330 srcrepo.hook('preoutgoing', throw=True, source='clone')
330 srcrepo.hook('preoutgoing', throw=True, source='clone')
331 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
331 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
332 if not os.path.exists(dest):
332 if not os.path.exists(dest):
333 os.mkdir(dest)
333 os.mkdir(dest)
334 else:
334 else:
335 # only clean up directories we create ourselves
335 # only clean up directories we create ourselves
336 dircleanup.dir_ = hgdir
336 dircleanup.dir_ = hgdir
337 try:
337 try:
338 destpath = hgdir
338 destpath = hgdir
339 util.makedir(destpath, notindexed=True)
339 util.makedir(destpath, notindexed=True)
340 except OSError, inst:
340 except OSError, inst:
341 if inst.errno == errno.EEXIST:
341 if inst.errno == errno.EEXIST:
342 dircleanup.close()
342 dircleanup.close()
343 raise util.Abort(_("destination '%s' already exists")
343 raise util.Abort(_("destination '%s' already exists")
344 % dest)
344 % dest)
345 raise
345 raise
346
346
347 destlock = copystore(ui, srcrepo, destpath)
347 destlock = copystore(ui, srcrepo, destpath)
348
348
349 # Recomputing branch cache might be slow on big repos,
349 # Recomputing branch cache might be slow on big repos,
350 # so just copy it
350 # so just copy it
351 dstcachedir = os.path.join(destpath, 'cache')
351 dstcachedir = os.path.join(destpath, 'cache')
352 srcbranchcache = srcrepo.sjoin('cache/branchheads')
352 srcbranchcache = srcrepo.sjoin('cache/branchheads')
353 dstbranchcache = os.path.join(dstcachedir, 'branchheads')
353 dstbranchcache = os.path.join(dstcachedir, 'branchheads')
354 if os.path.exists(srcbranchcache):
354 if os.path.exists(srcbranchcache):
355 if not os.path.exists(dstcachedir):
355 if not os.path.exists(dstcachedir):
356 os.mkdir(dstcachedir)
356 os.mkdir(dstcachedir)
357 util.copyfile(srcbranchcache, dstbranchcache)
357 util.copyfile(srcbranchcache, dstbranchcache)
358
358
359 # we need to re-init the repo after manually copying the data
359 # we need to re-init the repo after manually copying the data
360 # into it
360 # into it
361 destpeer = peer(srcrepo, peeropts, dest)
361 destpeer = peer(srcrepo, peeropts, dest)
362 srcrepo.hook('outgoing', source='clone',
362 srcrepo.hook('outgoing', source='clone',
363 node=node.hex(node.nullid))
363 node=node.hex(node.nullid))
364 else:
364 else:
365 try:
365 try:
366 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
366 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
367 # only pass ui when no srcrepo
367 # only pass ui when no srcrepo
368 except OSError, inst:
368 except OSError, inst:
369 if inst.errno == errno.EEXIST:
369 if inst.errno == errno.EEXIST:
370 dircleanup.close()
370 dircleanup.close()
371 raise util.Abort(_("destination '%s' already exists")
371 raise util.Abort(_("destination '%s' already exists")
372 % dest)
372 % dest)
373 raise
373 raise
374
374
375 revs = None
375 revs = None
376 if rev:
376 if rev:
377 if not srcpeer.capable('lookup'):
377 if not srcpeer.capable('lookup'):
378 raise util.Abort(_("src repository does not support "
378 raise util.Abort(_("src repository does not support "
379 "revision lookup and so doesn't "
379 "revision lookup and so doesn't "
380 "support clone by revision"))
380 "support clone by revision"))
381 revs = [srcpeer.lookup(r) for r in rev]
381 revs = [srcpeer.lookup(r) for r in rev]
382 checkout = revs[0]
382 checkout = revs[0]
383 if destpeer.local():
383 if destpeer.local():
384 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
384 destpeer.local().clone(srcpeer, heads=revs, stream=stream)
385 elif srcrepo:
385 elif srcrepo:
386 srcrepo.push(destpeer, revs=revs)
386 srcrepo.push(destpeer, revs=revs)
387 else:
387 else:
388 raise util.Abort(_("clone from remote to remote not supported"))
388 raise util.Abort(_("clone from remote to remote not supported"))
389
389
390 if dircleanup:
390 if dircleanup:
391 dircleanup.close()
391 dircleanup.close()
392
392
393 # clone all bookmarks except divergent ones
393 # clone all bookmarks except divergent ones
394 destrepo = destpeer.local()
394 destrepo = destpeer.local()
395 if destrepo and srcpeer.capable("pushkey"):
395 if destrepo and srcpeer.capable("pushkey"):
396 rb = srcpeer.listkeys('bookmarks')
396 rb = srcpeer.listkeys('bookmarks')
397 marks = destrepo._bookmarks
397 marks = destrepo._bookmarks
398 for k, n in rb.iteritems():
398 for k, n in rb.iteritems():
399 try:
399 try:
400 m = destrepo.lookup(n)
400 m = destrepo.lookup(n)
401 marks[k] = m
401 marks[k] = m
402 except error.RepoLookupError:
402 except error.RepoLookupError:
403 pass
403 pass
404 if rb:
404 if rb:
405 marks.write()
405 marks.write()
406 elif srcrepo and destpeer.capable("pushkey"):
406 elif srcrepo and destpeer.capable("pushkey"):
407 for k, n in srcrepo._bookmarks.iteritems():
407 for k, n in srcrepo._bookmarks.iteritems():
408 destpeer.pushkey('bookmarks', k, '', hex(n))
408 destpeer.pushkey('bookmarks', k, '', hex(n))
409
409
410 if destrepo:
410 if destrepo:
411 fp = destrepo.opener("hgrc", "w", text=True)
411 fp = destrepo.opener("hgrc", "w", text=True)
412 fp.write("[paths]\n")
412 fp.write("[paths]\n")
413 u = util.url(abspath)
413 u = util.url(abspath)
414 u.passwd = None
414 u.passwd = None
415 defaulturl = str(u)
415 defaulturl = str(u)
416 fp.write("default = %s\n" % defaulturl)
416 fp.write("default = %s\n" % defaulturl)
417 fp.close()
417 fp.close()
418
418
419 destrepo.ui.setconfig('paths', 'default', defaulturl)
419 destrepo.ui.setconfig('paths', 'default', defaulturl)
420
420
421 if update:
421 if update:
422 if update is not True:
422 if update is not True:
423 checkout = srcpeer.lookup(update)
423 checkout = srcpeer.lookup(update)
424 uprev = None
424 uprev = None
425 status = None
425 status = None
426 if checkout is not None:
426 if checkout is not None:
427 try:
427 try:
428 uprev = destrepo.lookup(checkout)
428 uprev = destrepo.lookup(checkout)
429 except error.RepoLookupError:
429 except error.RepoLookupError:
430 pass
430 pass
431 if uprev is None:
431 if uprev is None:
432 try:
432 try:
433 uprev = destrepo._bookmarks['@']
433 uprev = destrepo._bookmarks['@']
434 update = '@'
434 update = '@'
435 bn = destrepo[uprev].branch()
435 bn = destrepo[uprev].branch()
436 if bn == 'default':
436 if bn == 'default':
437 status = _("updating to bookmark @\n")
437 status = _("updating to bookmark @\n")
438 else:
438 else:
439 status = _("updating to bookmark @ on branch %s\n"
439 status = _("updating to bookmark @ on branch %s\n"
440 % bn)
440 % bn)
441 except KeyError:
441 except KeyError:
442 try:
442 try:
443 uprev = destrepo.branchtip('default')
443 uprev = destrepo.branchtip('default')
444 except error.RepoLookupError:
444 except error.RepoLookupError:
445 uprev = destrepo.lookup('tip')
445 uprev = destrepo.lookup('tip')
446 if not status:
446 if not status:
447 bn = destrepo[uprev].branch()
447 bn = destrepo[uprev].branch()
448 status = _("updating to branch %s\n") % bn
448 status = _("updating to branch %s\n") % bn
449 destrepo.ui.status(status)
449 destrepo.ui.status(status)
450 _update(destrepo, uprev)
450 _update(destrepo, uprev)
451 if update in destrepo._bookmarks:
451 if update in destrepo._bookmarks:
452 bookmarks.setcurrent(destrepo, update)
452 bookmarks.setcurrent(destrepo, update)
453
453
454 return srcpeer, destpeer
454 return srcpeer, destpeer
455 finally:
455 finally:
456 release(srclock, destlock)
456 release(srclock, destlock)
457 if dircleanup is not None:
457 if dircleanup is not None:
458 dircleanup.cleanup()
458 dircleanup.cleanup()
459 if srcpeer is not None:
459 if srcpeer is not None:
460 srcpeer.close()
460 srcpeer.close()
461
461
462 def _showstats(repo, stats):
462 def _showstats(repo, stats):
463 repo.ui.status(_("%d files updated, %d files merged, "
463 repo.ui.status(_("%d files updated, %d files merged, "
464 "%d files removed, %d files unresolved\n") % stats)
464 "%d files removed, %d files unresolved\n") % stats)
465
465
466 def updaterepo(repo, node, overwrite):
466 def updaterepo(repo, node, overwrite):
467 """Update the working directory to node.
467 """Update the working directory to node.
468
468
469 When overwrite is set, changes are clobbered, merged else
469 When overwrite is set, changes are clobbered, merged else
470
470
471 returns stats (see pydoc mercurial.merge.applyupdates)"""
471 returns stats (see pydoc mercurial.merge.applyupdates)"""
472 return mergemod.update(repo, node, False, overwrite, None)
472 return mergemod.update(repo, node, False, overwrite, None)
473
473
474 def update(repo, node):
474 def update(repo, node):
475 """update the working directory to node, merging linear changes"""
475 """update the working directory to node, merging linear changes"""
476 stats = updaterepo(repo, node, False)
476 stats = updaterepo(repo, node, False)
477 _showstats(repo, stats)
477 _showstats(repo, stats)
478 if stats[3]:
478 if stats[3]:
479 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
479 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
480 return stats[3] > 0
480 return stats[3] > 0
481
481
482 # naming conflict in clone()
482 # naming conflict in clone()
483 _update = update
483 _update = update
484
484
485 def clean(repo, node, show_stats=True):
485 def clean(repo, node, show_stats=True):
486 """forcibly switch the working directory to node, clobbering changes"""
486 """forcibly switch the working directory to node, clobbering changes"""
487 stats = updaterepo(repo, node, True)
487 stats = updaterepo(repo, node, True)
488 if show_stats:
488 if show_stats:
489 _showstats(repo, stats)
489 _showstats(repo, stats)
490 return stats[3] > 0
490 return stats[3] > 0
491
491
492 def merge(repo, node, force=None, remind=True):
492 def merge(repo, node, force=None, remind=True):
493 """Branch merge with node, resolving changes. Return true if any
493 """Branch merge with node, resolving changes. Return true if any
494 unresolved conflicts."""
494 unresolved conflicts."""
495 stats = mergemod.update(repo, node, True, force, False)
495 stats = mergemod.update(repo, node, True, force, False)
496 _showstats(repo, stats)
496 _showstats(repo, stats)
497 if stats[3]:
497 if stats[3]:
498 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
498 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
499 "or 'hg update -C .' to abandon\n"))
499 "or 'hg update -C .' to abandon\n"))
500 elif remind:
500 elif remind:
501 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
501 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
502 return stats[3] > 0
502 return stats[3] > 0
503
503
504 def _incoming(displaychlist, subreporecurse, ui, repo, source,
504 def _incoming(displaychlist, subreporecurse, ui, repo, source,
505 opts, buffered=False):
505 opts, buffered=False):
506 """
506 """
507 Helper for incoming / gincoming.
507 Helper for incoming / gincoming.
508 displaychlist gets called with
508 displaychlist gets called with
509 (remoterepo, incomingchangesetlist, displayer) parameters,
509 (remoterepo, incomingchangesetlist, displayer) parameters,
510 and is supposed to contain only code that can't be unified.
510 and is supposed to contain only code that can't be unified.
511 """
511 """
512 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
512 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
513 other = peer(repo, opts, source)
513 other = peer(repo, opts, source)
514 ui.status(_('comparing with %s\n') % util.hidepassword(source))
514 ui.status(_('comparing with %s\n') % util.hidepassword(source))
515 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
515 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
516
516
517 if revs:
517 if revs:
518 revs = [other.lookup(rev) for rev in revs]
518 revs = [other.lookup(rev) for rev in revs]
519 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
519 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
520 revs, opts["bundle"], opts["force"])
520 revs, opts["bundle"], opts["force"])
521 try:
521 try:
522 if not chlist:
522 if not chlist:
523 ui.status(_("no changes found\n"))
523 ui.status(_("no changes found\n"))
524 return subreporecurse()
524 return subreporecurse()
525
525
526 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
526 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
527
527
528 # XXX once graphlog extension makes it into core,
528 # XXX once graphlog extension makes it into core,
529 # should be replaced by a if graph/else
529 # should be replaced by a if graph/else
530 displaychlist(other, chlist, displayer)
530 displaychlist(other, chlist, displayer)
531
531
532 displayer.close()
532 displayer.close()
533 finally:
533 finally:
534 cleanupfn()
534 cleanupfn()
535 subreporecurse()
535 subreporecurse()
536 return 0 # exit code is zero since we found incoming changes
536 return 0 # exit code is zero since we found incoming changes
537
537
538 def incoming(ui, repo, source, opts):
538 def incoming(ui, repo, source, opts):
539 def subreporecurse():
539 def subreporecurse():
540 ret = 1
540 ret = 1
541 if opts.get('subrepos'):
541 if opts.get('subrepos'):
542 ctx = repo[None]
542 ctx = repo[None]
543 for subpath in sorted(ctx.substate):
543 for subpath in sorted(ctx.substate):
544 sub = ctx.sub(subpath)
544 sub = ctx.sub(subpath)
545 ret = min(ret, sub.incoming(ui, source, opts))
545 ret = min(ret, sub.incoming(ui, source, opts))
546 return ret
546 return ret
547
547
548 def display(other, chlist, displayer):
548 def display(other, chlist, displayer):
549 limit = cmdutil.loglimit(opts)
549 limit = cmdutil.loglimit(opts)
550 if opts.get('newest_first'):
550 if opts.get('newest_first'):
551 chlist.reverse()
551 chlist.reverse()
552 count = 0
552 count = 0
553 for n in chlist:
553 for n in chlist:
554 if limit is not None and count >= limit:
554 if limit is not None and count >= limit:
555 break
555 break
556 parents = [p for p in other.changelog.parents(n) if p != nullid]
556 parents = [p for p in other.changelog.parents(n) if p != nullid]
557 if opts.get('no_merges') and len(parents) == 2:
557 if opts.get('no_merges') and len(parents) == 2:
558 continue
558 continue
559 count += 1
559 count += 1
560 displayer.show(other[n])
560 displayer.show(other[n])
561 return _incoming(display, subreporecurse, ui, repo, source, opts)
561 return _incoming(display, subreporecurse, ui, repo, source, opts)
562
562
563 def _outgoing(ui, repo, dest, opts):
563 def _outgoing(ui, repo, dest, opts):
564 dest = ui.expandpath(dest or 'default-push', dest or 'default')
564 dest = ui.expandpath(dest or 'default-push', dest or 'default')
565 dest, branches = parseurl(dest, opts.get('branch'))
565 dest, branches = parseurl(dest, opts.get('branch'))
566 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
566 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
567 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
567 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
568 if revs:
568 if revs:
569 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
569 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
570
570
571 other = peer(repo, opts, dest)
571 other = peer(repo, opts, dest)
572 outgoing = discovery.findcommonoutgoing(repo, other, revs,
572 outgoing = discovery.findcommonoutgoing(repo, other, revs,
573 force=opts.get('force'))
573 force=opts.get('force'))
574 o = outgoing.missing
574 o = outgoing.missing
575 if not o:
575 if not o:
576 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
576 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
577 return None
577 return None
578 return o
578 return o
579
579
580 def outgoing(ui, repo, dest, opts):
580 def outgoing(ui, repo, dest, opts):
581 def recurse():
581 def recurse():
582 ret = 1
582 ret = 1
583 if opts.get('subrepos'):
583 if opts.get('subrepos'):
584 ctx = repo[None]
584 ctx = repo[None]
585 for subpath in sorted(ctx.substate):
585 for subpath in sorted(ctx.substate):
586 sub = ctx.sub(subpath)
586 sub = ctx.sub(subpath)
587 ret = min(ret, sub.outgoing(ui, dest, opts))
587 ret = min(ret, sub.outgoing(ui, dest, opts))
588 return ret
588 return ret
589
589
590 limit = cmdutil.loglimit(opts)
590 limit = cmdutil.loglimit(opts)
591 o = _outgoing(ui, repo, dest, opts)
591 o = _outgoing(ui, repo, dest, opts)
592 if o is None:
592 if o is None:
593 return recurse()
593 return recurse()
594
594
595 if opts.get('newest_first'):
595 if opts.get('newest_first'):
596 o.reverse()
596 o.reverse()
597 displayer = cmdutil.show_changeset(ui, repo, opts)
597 displayer = cmdutil.show_changeset(ui, repo, opts)
598 count = 0
598 count = 0
599 for n in o:
599 for n in o:
600 if limit is not None and count >= limit:
600 if limit is not None and count >= limit:
601 break
601 break
602 parents = [p for p in repo.changelog.parents(n) if p != nullid]
602 parents = [p for p in repo.changelog.parents(n) if p != nullid]
603 if opts.get('no_merges') and len(parents) == 2:
603 if opts.get('no_merges') and len(parents) == 2:
604 continue
604 continue
605 count += 1
605 count += 1
606 displayer.show(repo[n])
606 displayer.show(repo[n])
607 displayer.close()
607 displayer.close()
608 recurse()
608 recurse()
609 return 0 # exit code is zero since we found outgoing changes
609 return 0 # exit code is zero since we found outgoing changes
610
610
611 def revert(repo, node, choose):
611 def revert(repo, node, choose):
612 """revert changes to revision in node without updating dirstate"""
612 """revert changes to revision in node without updating dirstate"""
613 return mergemod.update(repo, node, False, True, choose)[3] > 0
613 return mergemod.update(repo, node, False, True, choose)[3] > 0
614
614
615 def verify(repo):
615 def verify(repo):
616 """verify the consistency of a repository"""
616 """verify the consistency of a repository"""
617 return verifymod.verify(repo)
617 return verifymod.verify(repo)
618
618
619 def remoteui(src, opts):
619 def remoteui(src, opts):
620 'build a remote ui from ui or repo and opts'
620 'build a remote ui from ui or repo and opts'
621 if util.safehasattr(src, 'baseui'): # looks like a repository
621 if util.safehasattr(src, 'baseui'): # looks like a repository
622 dst = src.baseui.copy() # drop repo-specific config
622 dst = src.baseui.copy() # drop repo-specific config
623 src = src.ui # copy target options from repo
623 src = src.ui # copy target options from repo
624 else: # assume it's a global ui object
624 else: # assume it's a global ui object
625 dst = src.copy() # keep all global options
625 dst = src.copy() # keep all global options
626
626
627 # copy ssh-specific options
627 # copy ssh-specific options
628 for o in 'ssh', 'remotecmd':
628 for o in 'ssh', 'remotecmd':
629 v = opts.get(o) or src.config('ui', o)
629 v = opts.get(o) or src.config('ui', o)
630 if v:
630 if v:
631 dst.setconfig("ui", o, v)
631 dst.setconfig("ui", o, v)
632
632
633 # copy bundle-specific options
633 # copy bundle-specific options
634 r = src.config('bundle', 'mainreporoot')
634 r = src.config('bundle', 'mainreporoot')
635 if r:
635 if r:
636 dst.setconfig('bundle', 'mainreporoot', r)
636 dst.setconfig('bundle', 'mainreporoot', r)
637
637
638 # copy selected local settings to the remote ui
638 # copy selected local settings to the remote ui
639 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
639 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
640 for key, val in src.configitems(sect):
640 for key, val in src.configitems(sect):
641 dst.setconfig(sect, key, val)
641 dst.setconfig(sect, key, val)
642 v = src.config('web', 'cacerts')
642 v = src.config('web', 'cacerts')
643 if v:
643 if v:
644 dst.setconfig('web', 'cacerts', util.expandpath(v))
644 dst.setconfig('web', 'cacerts', util.expandpath(v))
645
645
646 return dst
646 return dst
@@ -1,2578 +1,2578
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import hex, nullid, short
7 from node import hex, nullid, short
8 from i18n import _
8 from i18n import _
9 import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
9 import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import lock, transaction, store, encoding, base85
11 import lock, transaction, store, encoding, base85
12 import scmutil, util, extensions, hook, error, revset
12 import scmutil, util, extensions, hook, error, revset
13 import match as matchmod
13 import match as matchmod
14 import merge as mergemod
14 import merge as mergemod
15 import tags as tagsmod
15 import tags as tagsmod
16 from lock import release
16 from lock import release
17 import weakref, errno, os, time, inspect
17 import weakref, errno, os, time, inspect
18 import branchmap
18 import branchmap
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20 filecache = scmutil.filecache
20 filecache = scmutil.filecache
21
21
22 class repofilecache(filecache):
22 class repofilecache(filecache):
23 """All filecache usage on repo are done for logic that should be unfiltered
23 """All filecache usage on repo are done for logic that should be unfiltered
24 """
24 """
25
25
26 def __get__(self, repo, type=None):
26 def __get__(self, repo, type=None):
27 return super(repofilecache, self).__get__(repo.unfiltered(), type)
27 return super(repofilecache, self).__get__(repo.unfiltered(), type)
28 def __set__(self, repo, value):
28 def __set__(self, repo, value):
29 return super(repofilecache, self).__set__(repo.unfiltered(), value)
29 return super(repofilecache, self).__set__(repo.unfiltered(), value)
30 def __delete__(self, repo):
30 def __delete__(self, repo):
31 return super(repofilecache, self).__delete__(repo.unfiltered())
31 return super(repofilecache, self).__delete__(repo.unfiltered())
32
32
33 class storecache(repofilecache):
33 class storecache(repofilecache):
34 """filecache for files in the store"""
34 """filecache for files in the store"""
35 def join(self, obj, fname):
35 def join(self, obj, fname):
36 return obj.sjoin(fname)
36 return obj.sjoin(fname)
37
37
38 class unfilteredpropertycache(propertycache):
38 class unfilteredpropertycache(propertycache):
39 """propertycache that apply to unfiltered repo only"""
39 """propertycache that apply to unfiltered repo only"""
40
40
41 def __get__(self, repo, type=None):
41 def __get__(self, repo, type=None):
42 return super(unfilteredpropertycache, self).__get__(repo.unfiltered())
42 return super(unfilteredpropertycache, self).__get__(repo.unfiltered())
43
43
44 class filteredpropertycache(propertycache):
44 class filteredpropertycache(propertycache):
45 """propertycache that must take filtering in account"""
45 """propertycache that must take filtering in account"""
46
46
47 def cachevalue(self, obj, value):
47 def cachevalue(self, obj, value):
48 object.__setattr__(obj, self.name, value)
48 object.__setattr__(obj, self.name, value)
49
49
50
50
51 def hasunfilteredcache(repo, name):
51 def hasunfilteredcache(repo, name):
52 """check if an repo and a unfilteredproperty cached value for <name>"""
52 """check if an repo and a unfilteredproperty cached value for <name>"""
53 return name in vars(repo.unfiltered())
53 return name in vars(repo.unfiltered())
54
54
55 def unfilteredmethod(orig):
55 def unfilteredmethod(orig):
56 """decorate method that always need to be run on unfiltered version"""
56 """decorate method that always need to be run on unfiltered version"""
57 def wrapper(repo, *args, **kwargs):
57 def wrapper(repo, *args, **kwargs):
58 return orig(repo.unfiltered(), *args, **kwargs)
58 return orig(repo.unfiltered(), *args, **kwargs)
59 return wrapper
59 return wrapper
60
60
61 MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
61 MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
62 LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
62 LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
63
63
64 class localpeer(peer.peerrepository):
64 class localpeer(peer.peerrepository):
65 '''peer for a local repo; reflects only the most recent API'''
65 '''peer for a local repo; reflects only the most recent API'''
66
66
67 def __init__(self, repo, caps=MODERNCAPS):
67 def __init__(self, repo, caps=MODERNCAPS):
68 peer.peerrepository.__init__(self)
68 peer.peerrepository.__init__(self)
69 self._repo = repo.filtered('unserved')
69 self._repo = repo.filtered('served')
70 self.ui = repo.ui
70 self.ui = repo.ui
71 self._caps = repo._restrictcapabilities(caps)
71 self._caps = repo._restrictcapabilities(caps)
72 self.requirements = repo.requirements
72 self.requirements = repo.requirements
73 self.supportedformats = repo.supportedformats
73 self.supportedformats = repo.supportedformats
74
74
75 def close(self):
75 def close(self):
76 self._repo.close()
76 self._repo.close()
77
77
78 def _capabilities(self):
78 def _capabilities(self):
79 return self._caps
79 return self._caps
80
80
81 def local(self):
81 def local(self):
82 return self._repo
82 return self._repo
83
83
84 def canpush(self):
84 def canpush(self):
85 return True
85 return True
86
86
87 def url(self):
87 def url(self):
88 return self._repo.url()
88 return self._repo.url()
89
89
90 def lookup(self, key):
90 def lookup(self, key):
91 return self._repo.lookup(key)
91 return self._repo.lookup(key)
92
92
93 def branchmap(self):
93 def branchmap(self):
94 return self._repo.branchmap()
94 return self._repo.branchmap()
95
95
96 def heads(self):
96 def heads(self):
97 return self._repo.heads()
97 return self._repo.heads()
98
98
99 def known(self, nodes):
99 def known(self, nodes):
100 return self._repo.known(nodes)
100 return self._repo.known(nodes)
101
101
102 def getbundle(self, source, heads=None, common=None):
102 def getbundle(self, source, heads=None, common=None):
103 return self._repo.getbundle(source, heads=heads, common=common)
103 return self._repo.getbundle(source, heads=heads, common=common)
104
104
105 # TODO We might want to move the next two calls into legacypeer and add
105 # TODO We might want to move the next two calls into legacypeer and add
106 # unbundle instead.
106 # unbundle instead.
107
107
108 def lock(self):
108 def lock(self):
109 return self._repo.lock()
109 return self._repo.lock()
110
110
111 def addchangegroup(self, cg, source, url):
111 def addchangegroup(self, cg, source, url):
112 return self._repo.addchangegroup(cg, source, url)
112 return self._repo.addchangegroup(cg, source, url)
113
113
114 def pushkey(self, namespace, key, old, new):
114 def pushkey(self, namespace, key, old, new):
115 return self._repo.pushkey(namespace, key, old, new)
115 return self._repo.pushkey(namespace, key, old, new)
116
116
117 def listkeys(self, namespace):
117 def listkeys(self, namespace):
118 return self._repo.listkeys(namespace)
118 return self._repo.listkeys(namespace)
119
119
120 def debugwireargs(self, one, two, three=None, four=None, five=None):
120 def debugwireargs(self, one, two, three=None, four=None, five=None):
121 '''used to test argument passing over the wire'''
121 '''used to test argument passing over the wire'''
122 return "%s %s %s %s %s" % (one, two, three, four, five)
122 return "%s %s %s %s %s" % (one, two, three, four, five)
123
123
124 class locallegacypeer(localpeer):
124 class locallegacypeer(localpeer):
125 '''peer extension which implements legacy methods too; used for tests with
125 '''peer extension which implements legacy methods too; used for tests with
126 restricted capabilities'''
126 restricted capabilities'''
127
127
128 def __init__(self, repo):
128 def __init__(self, repo):
129 localpeer.__init__(self, repo, caps=LEGACYCAPS)
129 localpeer.__init__(self, repo, caps=LEGACYCAPS)
130
130
131 def branches(self, nodes):
131 def branches(self, nodes):
132 return self._repo.branches(nodes)
132 return self._repo.branches(nodes)
133
133
134 def between(self, pairs):
134 def between(self, pairs):
135 return self._repo.between(pairs)
135 return self._repo.between(pairs)
136
136
137 def changegroup(self, basenodes, source):
137 def changegroup(self, basenodes, source):
138 return self._repo.changegroup(basenodes, source)
138 return self._repo.changegroup(basenodes, source)
139
139
140 def changegroupsubset(self, bases, heads, source):
140 def changegroupsubset(self, bases, heads, source):
141 return self._repo.changegroupsubset(bases, heads, source)
141 return self._repo.changegroupsubset(bases, heads, source)
142
142
143 class localrepository(object):
143 class localrepository(object):
144
144
145 supportedformats = set(('revlogv1', 'generaldelta'))
145 supportedformats = set(('revlogv1', 'generaldelta'))
146 supported = supportedformats | set(('store', 'fncache', 'shared',
146 supported = supportedformats | set(('store', 'fncache', 'shared',
147 'dotencode'))
147 'dotencode'))
148 openerreqs = set(('revlogv1', 'generaldelta'))
148 openerreqs = set(('revlogv1', 'generaldelta'))
149 requirements = ['revlogv1']
149 requirements = ['revlogv1']
150 filtername = None
150 filtername = None
151
151
152 def _baserequirements(self, create):
152 def _baserequirements(self, create):
153 return self.requirements[:]
153 return self.requirements[:]
154
154
155 def __init__(self, baseui, path=None, create=False):
155 def __init__(self, baseui, path=None, create=False):
156 self.wvfs = scmutil.vfs(path, expand=True)
156 self.wvfs = scmutil.vfs(path, expand=True)
157 self.wopener = self.wvfs
157 self.wopener = self.wvfs
158 self.root = self.wvfs.base
158 self.root = self.wvfs.base
159 self.path = self.wvfs.join(".hg")
159 self.path = self.wvfs.join(".hg")
160 self.origroot = path
160 self.origroot = path
161 self.auditor = scmutil.pathauditor(self.root, self._checknested)
161 self.auditor = scmutil.pathauditor(self.root, self._checknested)
162 self.vfs = scmutil.vfs(self.path)
162 self.vfs = scmutil.vfs(self.path)
163 self.opener = self.vfs
163 self.opener = self.vfs
164 self.baseui = baseui
164 self.baseui = baseui
165 self.ui = baseui.copy()
165 self.ui = baseui.copy()
166 # A list of callback to shape the phase if no data were found.
166 # A list of callback to shape the phase if no data were found.
167 # Callback are in the form: func(repo, roots) --> processed root.
167 # Callback are in the form: func(repo, roots) --> processed root.
168 # This list it to be filled by extension during repo setup
168 # This list it to be filled by extension during repo setup
169 self._phasedefaults = []
169 self._phasedefaults = []
170 try:
170 try:
171 self.ui.readconfig(self.join("hgrc"), self.root)
171 self.ui.readconfig(self.join("hgrc"), self.root)
172 extensions.loadall(self.ui)
172 extensions.loadall(self.ui)
173 except IOError:
173 except IOError:
174 pass
174 pass
175
175
176 if not self.vfs.isdir():
176 if not self.vfs.isdir():
177 if create:
177 if create:
178 if not self.wvfs.exists():
178 if not self.wvfs.exists():
179 self.wvfs.makedirs()
179 self.wvfs.makedirs()
180 self.vfs.makedir(notindexed=True)
180 self.vfs.makedir(notindexed=True)
181 requirements = self._baserequirements(create)
181 requirements = self._baserequirements(create)
182 if self.ui.configbool('format', 'usestore', True):
182 if self.ui.configbool('format', 'usestore', True):
183 self.vfs.mkdir("store")
183 self.vfs.mkdir("store")
184 requirements.append("store")
184 requirements.append("store")
185 if self.ui.configbool('format', 'usefncache', True):
185 if self.ui.configbool('format', 'usefncache', True):
186 requirements.append("fncache")
186 requirements.append("fncache")
187 if self.ui.configbool('format', 'dotencode', True):
187 if self.ui.configbool('format', 'dotencode', True):
188 requirements.append('dotencode')
188 requirements.append('dotencode')
189 # create an invalid changelog
189 # create an invalid changelog
190 self.vfs.append(
190 self.vfs.append(
191 "00changelog.i",
191 "00changelog.i",
192 '\0\0\0\2' # represents revlogv2
192 '\0\0\0\2' # represents revlogv2
193 ' dummy changelog to prevent using the old repo layout'
193 ' dummy changelog to prevent using the old repo layout'
194 )
194 )
195 if self.ui.configbool('format', 'generaldelta', False):
195 if self.ui.configbool('format', 'generaldelta', False):
196 requirements.append("generaldelta")
196 requirements.append("generaldelta")
197 requirements = set(requirements)
197 requirements = set(requirements)
198 else:
198 else:
199 raise error.RepoError(_("repository %s not found") % path)
199 raise error.RepoError(_("repository %s not found") % path)
200 elif create:
200 elif create:
201 raise error.RepoError(_("repository %s already exists") % path)
201 raise error.RepoError(_("repository %s already exists") % path)
202 else:
202 else:
203 try:
203 try:
204 requirements = scmutil.readrequires(self.vfs, self.supported)
204 requirements = scmutil.readrequires(self.vfs, self.supported)
205 except IOError, inst:
205 except IOError, inst:
206 if inst.errno != errno.ENOENT:
206 if inst.errno != errno.ENOENT:
207 raise
207 raise
208 requirements = set()
208 requirements = set()
209
209
210 self.sharedpath = self.path
210 self.sharedpath = self.path
211 try:
211 try:
212 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
212 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
213 if not os.path.exists(s):
213 if not os.path.exists(s):
214 raise error.RepoError(
214 raise error.RepoError(
215 _('.hg/sharedpath points to nonexistent directory %s') % s)
215 _('.hg/sharedpath points to nonexistent directory %s') % s)
216 self.sharedpath = s
216 self.sharedpath = s
217 except IOError, inst:
217 except IOError, inst:
218 if inst.errno != errno.ENOENT:
218 if inst.errno != errno.ENOENT:
219 raise
219 raise
220
220
221 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
221 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
222 self.spath = self.store.path
222 self.spath = self.store.path
223 self.svfs = self.store.vfs
223 self.svfs = self.store.vfs
224 self.sopener = self.svfs
224 self.sopener = self.svfs
225 self.sjoin = self.store.join
225 self.sjoin = self.store.join
226 self.vfs.createmode = self.store.createmode
226 self.vfs.createmode = self.store.createmode
227 self._applyrequirements(requirements)
227 self._applyrequirements(requirements)
228 if create:
228 if create:
229 self._writerequirements()
229 self._writerequirements()
230
230
231
231
232 self._branchcaches = {}
232 self._branchcaches = {}
233 self.filterpats = {}
233 self.filterpats = {}
234 self._datafilters = {}
234 self._datafilters = {}
235 self._transref = self._lockref = self._wlockref = None
235 self._transref = self._lockref = self._wlockref = None
236
236
237 # A cache for various files under .hg/ that tracks file changes,
237 # A cache for various files under .hg/ that tracks file changes,
238 # (used by the filecache decorator)
238 # (used by the filecache decorator)
239 #
239 #
240 # Maps a property name to its util.filecacheentry
240 # Maps a property name to its util.filecacheentry
241 self._filecache = {}
241 self._filecache = {}
242
242
243 # hold sets of revision to be filtered
243 # hold sets of revision to be filtered
244 # should be cleared when something might have changed the filter value:
244 # should be cleared when something might have changed the filter value:
245 # - new changesets,
245 # - new changesets,
246 # - phase change,
246 # - phase change,
247 # - new obsolescence marker,
247 # - new obsolescence marker,
248 # - working directory parent change,
248 # - working directory parent change,
249 # - bookmark changes
249 # - bookmark changes
250 self.filteredrevcache = {}
250 self.filteredrevcache = {}
251
251
252 def close(self):
252 def close(self):
253 pass
253 pass
254
254
255 def _restrictcapabilities(self, caps):
255 def _restrictcapabilities(self, caps):
256 return caps
256 return caps
257
257
258 def _applyrequirements(self, requirements):
258 def _applyrequirements(self, requirements):
259 self.requirements = requirements
259 self.requirements = requirements
260 self.sopener.options = dict((r, 1) for r in requirements
260 self.sopener.options = dict((r, 1) for r in requirements
261 if r in self.openerreqs)
261 if r in self.openerreqs)
262
262
263 def _writerequirements(self):
263 def _writerequirements(self):
264 reqfile = self.opener("requires", "w")
264 reqfile = self.opener("requires", "w")
265 for r in sorted(self.requirements):
265 for r in sorted(self.requirements):
266 reqfile.write("%s\n" % r)
266 reqfile.write("%s\n" % r)
267 reqfile.close()
267 reqfile.close()
268
268
269 def _checknested(self, path):
269 def _checknested(self, path):
270 """Determine if path is a legal nested repository."""
270 """Determine if path is a legal nested repository."""
271 if not path.startswith(self.root):
271 if not path.startswith(self.root):
272 return False
272 return False
273 subpath = path[len(self.root) + 1:]
273 subpath = path[len(self.root) + 1:]
274 normsubpath = util.pconvert(subpath)
274 normsubpath = util.pconvert(subpath)
275
275
276 # XXX: Checking against the current working copy is wrong in
276 # XXX: Checking against the current working copy is wrong in
277 # the sense that it can reject things like
277 # the sense that it can reject things like
278 #
278 #
279 # $ hg cat -r 10 sub/x.txt
279 # $ hg cat -r 10 sub/x.txt
280 #
280 #
281 # if sub/ is no longer a subrepository in the working copy
281 # if sub/ is no longer a subrepository in the working copy
282 # parent revision.
282 # parent revision.
283 #
283 #
284 # However, it can of course also allow things that would have
284 # However, it can of course also allow things that would have
285 # been rejected before, such as the above cat command if sub/
285 # been rejected before, such as the above cat command if sub/
286 # is a subrepository now, but was a normal directory before.
286 # is a subrepository now, but was a normal directory before.
287 # The old path auditor would have rejected by mistake since it
287 # The old path auditor would have rejected by mistake since it
288 # panics when it sees sub/.hg/.
288 # panics when it sees sub/.hg/.
289 #
289 #
290 # All in all, checking against the working copy seems sensible
290 # All in all, checking against the working copy seems sensible
291 # since we want to prevent access to nested repositories on
291 # since we want to prevent access to nested repositories on
292 # the filesystem *now*.
292 # the filesystem *now*.
293 ctx = self[None]
293 ctx = self[None]
294 parts = util.splitpath(subpath)
294 parts = util.splitpath(subpath)
295 while parts:
295 while parts:
296 prefix = '/'.join(parts)
296 prefix = '/'.join(parts)
297 if prefix in ctx.substate:
297 if prefix in ctx.substate:
298 if prefix == normsubpath:
298 if prefix == normsubpath:
299 return True
299 return True
300 else:
300 else:
301 sub = ctx.sub(prefix)
301 sub = ctx.sub(prefix)
302 return sub.checknested(subpath[len(prefix) + 1:])
302 return sub.checknested(subpath[len(prefix) + 1:])
303 else:
303 else:
304 parts.pop()
304 parts.pop()
305 return False
305 return False
306
306
307 def peer(self):
307 def peer(self):
308 return localpeer(self) # not cached to avoid reference cycle
308 return localpeer(self) # not cached to avoid reference cycle
309
309
310 def unfiltered(self):
310 def unfiltered(self):
311 """Return unfiltered version of the repository
311 """Return unfiltered version of the repository
312
312
313 Intended to be ovewritten by filtered repo."""
313 Intended to be ovewritten by filtered repo."""
314 return self
314 return self
315
315
316 def filtered(self, name):
316 def filtered(self, name):
317 """Return a filtered version of a repository"""
317 """Return a filtered version of a repository"""
318 # build a new class with the mixin and the current class
318 # build a new class with the mixin and the current class
319 # (possibily subclass of the repo)
319 # (possibily subclass of the repo)
320 class proxycls(repoview.repoview, self.unfiltered().__class__):
320 class proxycls(repoview.repoview, self.unfiltered().__class__):
321 pass
321 pass
322 return proxycls(self, name)
322 return proxycls(self, name)
323
323
324 @repofilecache('bookmarks')
324 @repofilecache('bookmarks')
325 def _bookmarks(self):
325 def _bookmarks(self):
326 return bookmarks.bmstore(self)
326 return bookmarks.bmstore(self)
327
327
328 @repofilecache('bookmarks.current')
328 @repofilecache('bookmarks.current')
329 def _bookmarkcurrent(self):
329 def _bookmarkcurrent(self):
330 return bookmarks.readcurrent(self)
330 return bookmarks.readcurrent(self)
331
331
332 def bookmarkheads(self, bookmark):
332 def bookmarkheads(self, bookmark):
333 name = bookmark.split('@', 1)[0]
333 name = bookmark.split('@', 1)[0]
334 heads = []
334 heads = []
335 for mark, n in self._bookmarks.iteritems():
335 for mark, n in self._bookmarks.iteritems():
336 if mark.split('@', 1)[0] == name:
336 if mark.split('@', 1)[0] == name:
337 heads.append(n)
337 heads.append(n)
338 return heads
338 return heads
339
339
340 @storecache('phaseroots')
340 @storecache('phaseroots')
341 def _phasecache(self):
341 def _phasecache(self):
342 return phases.phasecache(self, self._phasedefaults)
342 return phases.phasecache(self, self._phasedefaults)
343
343
344 @storecache('obsstore')
344 @storecache('obsstore')
345 def obsstore(self):
345 def obsstore(self):
346 store = obsolete.obsstore(self.sopener)
346 store = obsolete.obsstore(self.sopener)
347 if store and not obsolete._enabled:
347 if store and not obsolete._enabled:
348 # message is rare enough to not be translated
348 # message is rare enough to not be translated
349 msg = 'obsolete feature not enabled but %i markers found!\n'
349 msg = 'obsolete feature not enabled but %i markers found!\n'
350 self.ui.warn(msg % len(list(store)))
350 self.ui.warn(msg % len(list(store)))
351 return store
351 return store
352
352
353 @storecache('00changelog.i')
353 @storecache('00changelog.i')
354 def changelog(self):
354 def changelog(self):
355 c = changelog.changelog(self.sopener)
355 c = changelog.changelog(self.sopener)
356 if 'HG_PENDING' in os.environ:
356 if 'HG_PENDING' in os.environ:
357 p = os.environ['HG_PENDING']
357 p = os.environ['HG_PENDING']
358 if p.startswith(self.root):
358 if p.startswith(self.root):
359 c.readpending('00changelog.i.a')
359 c.readpending('00changelog.i.a')
360 return c
360 return c
361
361
362 @storecache('00manifest.i')
362 @storecache('00manifest.i')
363 def manifest(self):
363 def manifest(self):
364 return manifest.manifest(self.sopener)
364 return manifest.manifest(self.sopener)
365
365
366 @repofilecache('dirstate')
366 @repofilecache('dirstate')
367 def dirstate(self):
367 def dirstate(self):
368 warned = [0]
368 warned = [0]
369 def validate(node):
369 def validate(node):
370 try:
370 try:
371 self.changelog.rev(node)
371 self.changelog.rev(node)
372 return node
372 return node
373 except error.LookupError:
373 except error.LookupError:
374 if not warned[0]:
374 if not warned[0]:
375 warned[0] = True
375 warned[0] = True
376 self.ui.warn(_("warning: ignoring unknown"
376 self.ui.warn(_("warning: ignoring unknown"
377 " working parent %s!\n") % short(node))
377 " working parent %s!\n") % short(node))
378 return nullid
378 return nullid
379
379
380 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
380 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
381
381
382 def __getitem__(self, changeid):
382 def __getitem__(self, changeid):
383 if changeid is None:
383 if changeid is None:
384 return context.workingctx(self)
384 return context.workingctx(self)
385 return context.changectx(self, changeid)
385 return context.changectx(self, changeid)
386
386
387 def __contains__(self, changeid):
387 def __contains__(self, changeid):
388 try:
388 try:
389 return bool(self.lookup(changeid))
389 return bool(self.lookup(changeid))
390 except error.RepoLookupError:
390 except error.RepoLookupError:
391 return False
391 return False
392
392
393 def __nonzero__(self):
393 def __nonzero__(self):
394 return True
394 return True
395
395
396 def __len__(self):
396 def __len__(self):
397 return len(self.changelog)
397 return len(self.changelog)
398
398
399 def __iter__(self):
399 def __iter__(self):
400 return iter(self.changelog)
400 return iter(self.changelog)
401
401
402 def revs(self, expr, *args):
402 def revs(self, expr, *args):
403 '''Return a list of revisions matching the given revset'''
403 '''Return a list of revisions matching the given revset'''
404 expr = revset.formatspec(expr, *args)
404 expr = revset.formatspec(expr, *args)
405 m = revset.match(None, expr)
405 m = revset.match(None, expr)
406 return [r for r in m(self, list(self))]
406 return [r for r in m(self, list(self))]
407
407
408 def set(self, expr, *args):
408 def set(self, expr, *args):
409 '''
409 '''
410 Yield a context for each matching revision, after doing arg
410 Yield a context for each matching revision, after doing arg
411 replacement via revset.formatspec
411 replacement via revset.formatspec
412 '''
412 '''
413 for r in self.revs(expr, *args):
413 for r in self.revs(expr, *args):
414 yield self[r]
414 yield self[r]
415
415
416 def url(self):
416 def url(self):
417 return 'file:' + self.root
417 return 'file:' + self.root
418
418
419 def hook(self, name, throw=False, **args):
419 def hook(self, name, throw=False, **args):
420 return hook.hook(self.ui, self, name, throw, **args)
420 return hook.hook(self.ui, self, name, throw, **args)
421
421
422 @unfilteredmethod
422 @unfilteredmethod
423 def _tag(self, names, node, message, local, user, date, extra={}):
423 def _tag(self, names, node, message, local, user, date, extra={}):
424 if isinstance(names, str):
424 if isinstance(names, str):
425 names = (names,)
425 names = (names,)
426
426
427 branches = self.branchmap()
427 branches = self.branchmap()
428 for name in names:
428 for name in names:
429 self.hook('pretag', throw=True, node=hex(node), tag=name,
429 self.hook('pretag', throw=True, node=hex(node), tag=name,
430 local=local)
430 local=local)
431 if name in branches:
431 if name in branches:
432 self.ui.warn(_("warning: tag %s conflicts with existing"
432 self.ui.warn(_("warning: tag %s conflicts with existing"
433 " branch name\n") % name)
433 " branch name\n") % name)
434
434
435 def writetags(fp, names, munge, prevtags):
435 def writetags(fp, names, munge, prevtags):
436 fp.seek(0, 2)
436 fp.seek(0, 2)
437 if prevtags and prevtags[-1] != '\n':
437 if prevtags and prevtags[-1] != '\n':
438 fp.write('\n')
438 fp.write('\n')
439 for name in names:
439 for name in names:
440 m = munge and munge(name) or name
440 m = munge and munge(name) or name
441 if (self._tagscache.tagtypes and
441 if (self._tagscache.tagtypes and
442 name in self._tagscache.tagtypes):
442 name in self._tagscache.tagtypes):
443 old = self.tags().get(name, nullid)
443 old = self.tags().get(name, nullid)
444 fp.write('%s %s\n' % (hex(old), m))
444 fp.write('%s %s\n' % (hex(old), m))
445 fp.write('%s %s\n' % (hex(node), m))
445 fp.write('%s %s\n' % (hex(node), m))
446 fp.close()
446 fp.close()
447
447
448 prevtags = ''
448 prevtags = ''
449 if local:
449 if local:
450 try:
450 try:
451 fp = self.opener('localtags', 'r+')
451 fp = self.opener('localtags', 'r+')
452 except IOError:
452 except IOError:
453 fp = self.opener('localtags', 'a')
453 fp = self.opener('localtags', 'a')
454 else:
454 else:
455 prevtags = fp.read()
455 prevtags = fp.read()
456
456
457 # local tags are stored in the current charset
457 # local tags are stored in the current charset
458 writetags(fp, names, None, prevtags)
458 writetags(fp, names, None, prevtags)
459 for name in names:
459 for name in names:
460 self.hook('tag', node=hex(node), tag=name, local=local)
460 self.hook('tag', node=hex(node), tag=name, local=local)
461 return
461 return
462
462
463 try:
463 try:
464 fp = self.wfile('.hgtags', 'rb+')
464 fp = self.wfile('.hgtags', 'rb+')
465 except IOError, e:
465 except IOError, e:
466 if e.errno != errno.ENOENT:
466 if e.errno != errno.ENOENT:
467 raise
467 raise
468 fp = self.wfile('.hgtags', 'ab')
468 fp = self.wfile('.hgtags', 'ab')
469 else:
469 else:
470 prevtags = fp.read()
470 prevtags = fp.read()
471
471
472 # committed tags are stored in UTF-8
472 # committed tags are stored in UTF-8
473 writetags(fp, names, encoding.fromlocal, prevtags)
473 writetags(fp, names, encoding.fromlocal, prevtags)
474
474
475 fp.close()
475 fp.close()
476
476
477 self.invalidatecaches()
477 self.invalidatecaches()
478
478
479 if '.hgtags' not in self.dirstate:
479 if '.hgtags' not in self.dirstate:
480 self[None].add(['.hgtags'])
480 self[None].add(['.hgtags'])
481
481
482 m = matchmod.exact(self.root, '', ['.hgtags'])
482 m = matchmod.exact(self.root, '', ['.hgtags'])
483 tagnode = self.commit(message, user, date, extra=extra, match=m)
483 tagnode = self.commit(message, user, date, extra=extra, match=m)
484
484
485 for name in names:
485 for name in names:
486 self.hook('tag', node=hex(node), tag=name, local=local)
486 self.hook('tag', node=hex(node), tag=name, local=local)
487
487
488 return tagnode
488 return tagnode
489
489
490 def tag(self, names, node, message, local, user, date):
490 def tag(self, names, node, message, local, user, date):
491 '''tag a revision with one or more symbolic names.
491 '''tag a revision with one or more symbolic names.
492
492
493 names is a list of strings or, when adding a single tag, names may be a
493 names is a list of strings or, when adding a single tag, names may be a
494 string.
494 string.
495
495
496 if local is True, the tags are stored in a per-repository file.
496 if local is True, the tags are stored in a per-repository file.
497 otherwise, they are stored in the .hgtags file, and a new
497 otherwise, they are stored in the .hgtags file, and a new
498 changeset is committed with the change.
498 changeset is committed with the change.
499
499
500 keyword arguments:
500 keyword arguments:
501
501
502 local: whether to store tags in non-version-controlled file
502 local: whether to store tags in non-version-controlled file
503 (default False)
503 (default False)
504
504
505 message: commit message to use if committing
505 message: commit message to use if committing
506
506
507 user: name of user to use if committing
507 user: name of user to use if committing
508
508
509 date: date tuple to use if committing'''
509 date: date tuple to use if committing'''
510
510
511 if not local:
511 if not local:
512 for x in self.status()[:5]:
512 for x in self.status()[:5]:
513 if '.hgtags' in x:
513 if '.hgtags' in x:
514 raise util.Abort(_('working copy of .hgtags is changed '
514 raise util.Abort(_('working copy of .hgtags is changed '
515 '(please commit .hgtags manually)'))
515 '(please commit .hgtags manually)'))
516
516
517 self.tags() # instantiate the cache
517 self.tags() # instantiate the cache
518 self._tag(names, node, message, local, user, date)
518 self._tag(names, node, message, local, user, date)
519
519
520 @filteredpropertycache
520 @filteredpropertycache
521 def _tagscache(self):
521 def _tagscache(self):
522 '''Returns a tagscache object that contains various tags related
522 '''Returns a tagscache object that contains various tags related
523 caches.'''
523 caches.'''
524
524
525 # This simplifies its cache management by having one decorated
525 # This simplifies its cache management by having one decorated
526 # function (this one) and the rest simply fetch things from it.
526 # function (this one) and the rest simply fetch things from it.
527 class tagscache(object):
527 class tagscache(object):
528 def __init__(self):
528 def __init__(self):
529 # These two define the set of tags for this repository. tags
529 # These two define the set of tags for this repository. tags
530 # maps tag name to node; tagtypes maps tag name to 'global' or
530 # maps tag name to node; tagtypes maps tag name to 'global' or
531 # 'local'. (Global tags are defined by .hgtags across all
531 # 'local'. (Global tags are defined by .hgtags across all
532 # heads, and local tags are defined in .hg/localtags.)
532 # heads, and local tags are defined in .hg/localtags.)
533 # They constitute the in-memory cache of tags.
533 # They constitute the in-memory cache of tags.
534 self.tags = self.tagtypes = None
534 self.tags = self.tagtypes = None
535
535
536 self.nodetagscache = self.tagslist = None
536 self.nodetagscache = self.tagslist = None
537
537
538 cache = tagscache()
538 cache = tagscache()
539 cache.tags, cache.tagtypes = self._findtags()
539 cache.tags, cache.tagtypes = self._findtags()
540
540
541 return cache
541 return cache
542
542
543 def tags(self):
543 def tags(self):
544 '''return a mapping of tag to node'''
544 '''return a mapping of tag to node'''
545 t = {}
545 t = {}
546 if self.changelog.filteredrevs:
546 if self.changelog.filteredrevs:
547 tags, tt = self._findtags()
547 tags, tt = self._findtags()
548 else:
548 else:
549 tags = self._tagscache.tags
549 tags = self._tagscache.tags
550 for k, v in tags.iteritems():
550 for k, v in tags.iteritems():
551 try:
551 try:
552 # ignore tags to unknown nodes
552 # ignore tags to unknown nodes
553 self.changelog.rev(v)
553 self.changelog.rev(v)
554 t[k] = v
554 t[k] = v
555 except (error.LookupError, ValueError):
555 except (error.LookupError, ValueError):
556 pass
556 pass
557 return t
557 return t
558
558
559 def _findtags(self):
559 def _findtags(self):
560 '''Do the hard work of finding tags. Return a pair of dicts
560 '''Do the hard work of finding tags. Return a pair of dicts
561 (tags, tagtypes) where tags maps tag name to node, and tagtypes
561 (tags, tagtypes) where tags maps tag name to node, and tagtypes
562 maps tag name to a string like \'global\' or \'local\'.
562 maps tag name to a string like \'global\' or \'local\'.
563 Subclasses or extensions are free to add their own tags, but
563 Subclasses or extensions are free to add their own tags, but
564 should be aware that the returned dicts will be retained for the
564 should be aware that the returned dicts will be retained for the
565 duration of the localrepo object.'''
565 duration of the localrepo object.'''
566
566
567 # XXX what tagtype should subclasses/extensions use? Currently
567 # XXX what tagtype should subclasses/extensions use? Currently
568 # mq and bookmarks add tags, but do not set the tagtype at all.
568 # mq and bookmarks add tags, but do not set the tagtype at all.
569 # Should each extension invent its own tag type? Should there
569 # Should each extension invent its own tag type? Should there
570 # be one tagtype for all such "virtual" tags? Or is the status
570 # be one tagtype for all such "virtual" tags? Or is the status
571 # quo fine?
571 # quo fine?
572
572
573 alltags = {} # map tag name to (node, hist)
573 alltags = {} # map tag name to (node, hist)
574 tagtypes = {}
574 tagtypes = {}
575
575
576 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
576 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
577 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
577 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
578
578
579 # Build the return dicts. Have to re-encode tag names because
579 # Build the return dicts. Have to re-encode tag names because
580 # the tags module always uses UTF-8 (in order not to lose info
580 # the tags module always uses UTF-8 (in order not to lose info
581 # writing to the cache), but the rest of Mercurial wants them in
581 # writing to the cache), but the rest of Mercurial wants them in
582 # local encoding.
582 # local encoding.
583 tags = {}
583 tags = {}
584 for (name, (node, hist)) in alltags.iteritems():
584 for (name, (node, hist)) in alltags.iteritems():
585 if node != nullid:
585 if node != nullid:
586 tags[encoding.tolocal(name)] = node
586 tags[encoding.tolocal(name)] = node
587 tags['tip'] = self.changelog.tip()
587 tags['tip'] = self.changelog.tip()
588 tagtypes = dict([(encoding.tolocal(name), value)
588 tagtypes = dict([(encoding.tolocal(name), value)
589 for (name, value) in tagtypes.iteritems()])
589 for (name, value) in tagtypes.iteritems()])
590 return (tags, tagtypes)
590 return (tags, tagtypes)
591
591
592 def tagtype(self, tagname):
592 def tagtype(self, tagname):
593 '''
593 '''
594 return the type of the given tag. result can be:
594 return the type of the given tag. result can be:
595
595
596 'local' : a local tag
596 'local' : a local tag
597 'global' : a global tag
597 'global' : a global tag
598 None : tag does not exist
598 None : tag does not exist
599 '''
599 '''
600
600
601 return self._tagscache.tagtypes.get(tagname)
601 return self._tagscache.tagtypes.get(tagname)
602
602
603 def tagslist(self):
603 def tagslist(self):
604 '''return a list of tags ordered by revision'''
604 '''return a list of tags ordered by revision'''
605 if not self._tagscache.tagslist:
605 if not self._tagscache.tagslist:
606 l = []
606 l = []
607 for t, n in self.tags().iteritems():
607 for t, n in self.tags().iteritems():
608 r = self.changelog.rev(n)
608 r = self.changelog.rev(n)
609 l.append((r, t, n))
609 l.append((r, t, n))
610 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
610 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
611
611
612 return self._tagscache.tagslist
612 return self._tagscache.tagslist
613
613
614 def nodetags(self, node):
614 def nodetags(self, node):
615 '''return the tags associated with a node'''
615 '''return the tags associated with a node'''
616 if not self._tagscache.nodetagscache:
616 if not self._tagscache.nodetagscache:
617 nodetagscache = {}
617 nodetagscache = {}
618 for t, n in self._tagscache.tags.iteritems():
618 for t, n in self._tagscache.tags.iteritems():
619 nodetagscache.setdefault(n, []).append(t)
619 nodetagscache.setdefault(n, []).append(t)
620 for tags in nodetagscache.itervalues():
620 for tags in nodetagscache.itervalues():
621 tags.sort()
621 tags.sort()
622 self._tagscache.nodetagscache = nodetagscache
622 self._tagscache.nodetagscache = nodetagscache
623 return self._tagscache.nodetagscache.get(node, [])
623 return self._tagscache.nodetagscache.get(node, [])
624
624
625 def nodebookmarks(self, node):
625 def nodebookmarks(self, node):
626 marks = []
626 marks = []
627 for bookmark, n in self._bookmarks.iteritems():
627 for bookmark, n in self._bookmarks.iteritems():
628 if n == node:
628 if n == node:
629 marks.append(bookmark)
629 marks.append(bookmark)
630 return sorted(marks)
630 return sorted(marks)
631
631
632 def branchmap(self):
632 def branchmap(self):
633 '''returns a dictionary {branch: [branchheads]}'''
633 '''returns a dictionary {branch: [branchheads]}'''
634 branchmap.updatecache(self)
634 branchmap.updatecache(self)
635 return self._branchcaches[self.filtername]
635 return self._branchcaches[self.filtername]
636
636
637
637
638 def _branchtip(self, heads):
638 def _branchtip(self, heads):
639 '''return the tipmost branch head in heads'''
639 '''return the tipmost branch head in heads'''
640 tip = heads[-1]
640 tip = heads[-1]
641 for h in reversed(heads):
641 for h in reversed(heads):
642 if not self[h].closesbranch():
642 if not self[h].closesbranch():
643 tip = h
643 tip = h
644 break
644 break
645 return tip
645 return tip
646
646
647 def branchtip(self, branch):
647 def branchtip(self, branch):
648 '''return the tip node for a given branch'''
648 '''return the tip node for a given branch'''
649 if branch not in self.branchmap():
649 if branch not in self.branchmap():
650 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
650 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
651 return self._branchtip(self.branchmap()[branch])
651 return self._branchtip(self.branchmap()[branch])
652
652
653 def branchtags(self):
653 def branchtags(self):
654 '''return a dict where branch names map to the tipmost head of
654 '''return a dict where branch names map to the tipmost head of
655 the branch, open heads come before closed'''
655 the branch, open heads come before closed'''
656 bt = {}
656 bt = {}
657 for bn, heads in self.branchmap().iteritems():
657 for bn, heads in self.branchmap().iteritems():
658 bt[bn] = self._branchtip(heads)
658 bt[bn] = self._branchtip(heads)
659 return bt
659 return bt
660
660
661 def lookup(self, key):
661 def lookup(self, key):
662 return self[key].node()
662 return self[key].node()
663
663
664 def lookupbranch(self, key, remote=None):
664 def lookupbranch(self, key, remote=None):
665 repo = remote or self
665 repo = remote or self
666 if key in repo.branchmap():
666 if key in repo.branchmap():
667 return key
667 return key
668
668
669 repo = (remote and remote.local()) and remote or self
669 repo = (remote and remote.local()) and remote or self
670 return repo[key].branch()
670 return repo[key].branch()
671
671
672 def known(self, nodes):
672 def known(self, nodes):
673 nm = self.changelog.nodemap
673 nm = self.changelog.nodemap
674 pc = self._phasecache
674 pc = self._phasecache
675 result = []
675 result = []
676 for n in nodes:
676 for n in nodes:
677 r = nm.get(n)
677 r = nm.get(n)
678 resp = not (r is None or pc.phase(self, r) >= phases.secret)
678 resp = not (r is None or pc.phase(self, r) >= phases.secret)
679 result.append(resp)
679 result.append(resp)
680 return result
680 return result
681
681
682 def local(self):
682 def local(self):
683 return self
683 return self
684
684
685 def cancopy(self):
685 def cancopy(self):
686 return self.local() # so statichttprepo's override of local() works
686 return self.local() # so statichttprepo's override of local() works
687
687
688 def join(self, f):
688 def join(self, f):
689 return os.path.join(self.path, f)
689 return os.path.join(self.path, f)
690
690
691 def wjoin(self, f):
691 def wjoin(self, f):
692 return os.path.join(self.root, f)
692 return os.path.join(self.root, f)
693
693
694 def file(self, f):
694 def file(self, f):
695 if f[0] == '/':
695 if f[0] == '/':
696 f = f[1:]
696 f = f[1:]
697 return filelog.filelog(self.sopener, f)
697 return filelog.filelog(self.sopener, f)
698
698
699 def changectx(self, changeid):
699 def changectx(self, changeid):
700 return self[changeid]
700 return self[changeid]
701
701
702 def parents(self, changeid=None):
702 def parents(self, changeid=None):
703 '''get list of changectxs for parents of changeid'''
703 '''get list of changectxs for parents of changeid'''
704 return self[changeid].parents()
704 return self[changeid].parents()
705
705
706 def setparents(self, p1, p2=nullid):
706 def setparents(self, p1, p2=nullid):
707 copies = self.dirstate.setparents(p1, p2)
707 copies = self.dirstate.setparents(p1, p2)
708 if copies:
708 if copies:
709 # Adjust copy records, the dirstate cannot do it, it
709 # Adjust copy records, the dirstate cannot do it, it
710 # requires access to parents manifests. Preserve them
710 # requires access to parents manifests. Preserve them
711 # only for entries added to first parent.
711 # only for entries added to first parent.
712 pctx = self[p1]
712 pctx = self[p1]
713 for f in copies:
713 for f in copies:
714 if f not in pctx and copies[f] in pctx:
714 if f not in pctx and copies[f] in pctx:
715 self.dirstate.copy(copies[f], f)
715 self.dirstate.copy(copies[f], f)
716
716
717 def filectx(self, path, changeid=None, fileid=None):
717 def filectx(self, path, changeid=None, fileid=None):
718 """changeid can be a changeset revision, node, or tag.
718 """changeid can be a changeset revision, node, or tag.
719 fileid can be a file revision or node."""
719 fileid can be a file revision or node."""
720 return context.filectx(self, path, changeid, fileid)
720 return context.filectx(self, path, changeid, fileid)
721
721
722 def getcwd(self):
722 def getcwd(self):
723 return self.dirstate.getcwd()
723 return self.dirstate.getcwd()
724
724
725 def pathto(self, f, cwd=None):
725 def pathto(self, f, cwd=None):
726 return self.dirstate.pathto(f, cwd)
726 return self.dirstate.pathto(f, cwd)
727
727
728 def wfile(self, f, mode='r'):
728 def wfile(self, f, mode='r'):
729 return self.wopener(f, mode)
729 return self.wopener(f, mode)
730
730
731 def _link(self, f):
731 def _link(self, f):
732 return os.path.islink(self.wjoin(f))
732 return os.path.islink(self.wjoin(f))
733
733
734 def _loadfilter(self, filter):
734 def _loadfilter(self, filter):
735 if filter not in self.filterpats:
735 if filter not in self.filterpats:
736 l = []
736 l = []
737 for pat, cmd in self.ui.configitems(filter):
737 for pat, cmd in self.ui.configitems(filter):
738 if cmd == '!':
738 if cmd == '!':
739 continue
739 continue
740 mf = matchmod.match(self.root, '', [pat])
740 mf = matchmod.match(self.root, '', [pat])
741 fn = None
741 fn = None
742 params = cmd
742 params = cmd
743 for name, filterfn in self._datafilters.iteritems():
743 for name, filterfn in self._datafilters.iteritems():
744 if cmd.startswith(name):
744 if cmd.startswith(name):
745 fn = filterfn
745 fn = filterfn
746 params = cmd[len(name):].lstrip()
746 params = cmd[len(name):].lstrip()
747 break
747 break
748 if not fn:
748 if not fn:
749 fn = lambda s, c, **kwargs: util.filter(s, c)
749 fn = lambda s, c, **kwargs: util.filter(s, c)
750 # Wrap old filters not supporting keyword arguments
750 # Wrap old filters not supporting keyword arguments
751 if not inspect.getargspec(fn)[2]:
751 if not inspect.getargspec(fn)[2]:
752 oldfn = fn
752 oldfn = fn
753 fn = lambda s, c, **kwargs: oldfn(s, c)
753 fn = lambda s, c, **kwargs: oldfn(s, c)
754 l.append((mf, fn, params))
754 l.append((mf, fn, params))
755 self.filterpats[filter] = l
755 self.filterpats[filter] = l
756 return self.filterpats[filter]
756 return self.filterpats[filter]
757
757
758 def _filter(self, filterpats, filename, data):
758 def _filter(self, filterpats, filename, data):
759 for mf, fn, cmd in filterpats:
759 for mf, fn, cmd in filterpats:
760 if mf(filename):
760 if mf(filename):
761 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
761 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
762 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
762 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
763 break
763 break
764
764
765 return data
765 return data
766
766
767 @unfilteredpropertycache
767 @unfilteredpropertycache
768 def _encodefilterpats(self):
768 def _encodefilterpats(self):
769 return self._loadfilter('encode')
769 return self._loadfilter('encode')
770
770
771 @unfilteredpropertycache
771 @unfilteredpropertycache
772 def _decodefilterpats(self):
772 def _decodefilterpats(self):
773 return self._loadfilter('decode')
773 return self._loadfilter('decode')
774
774
775 def adddatafilter(self, name, filter):
775 def adddatafilter(self, name, filter):
776 self._datafilters[name] = filter
776 self._datafilters[name] = filter
777
777
778 def wread(self, filename):
778 def wread(self, filename):
779 if self._link(filename):
779 if self._link(filename):
780 data = os.readlink(self.wjoin(filename))
780 data = os.readlink(self.wjoin(filename))
781 else:
781 else:
782 data = self.wopener.read(filename)
782 data = self.wopener.read(filename)
783 return self._filter(self._encodefilterpats, filename, data)
783 return self._filter(self._encodefilterpats, filename, data)
784
784
785 def wwrite(self, filename, data, flags):
785 def wwrite(self, filename, data, flags):
786 data = self._filter(self._decodefilterpats, filename, data)
786 data = self._filter(self._decodefilterpats, filename, data)
787 if 'l' in flags:
787 if 'l' in flags:
788 self.wopener.symlink(data, filename)
788 self.wopener.symlink(data, filename)
789 else:
789 else:
790 self.wopener.write(filename, data)
790 self.wopener.write(filename, data)
791 if 'x' in flags:
791 if 'x' in flags:
792 util.setflags(self.wjoin(filename), False, True)
792 util.setflags(self.wjoin(filename), False, True)
793
793
794 def wwritedata(self, filename, data):
794 def wwritedata(self, filename, data):
795 return self._filter(self._decodefilterpats, filename, data)
795 return self._filter(self._decodefilterpats, filename, data)
796
796
    def transaction(self, desc):
        """Open a new store transaction, or nest into the running one.

        Refuses to start while an abandoned journal from an interrupted
        transaction exists (the user must run 'hg recover' first).
        Otherwise writes the journal backup files and returns a
        transaction object that renames them to their undo.* names on
        successful close.  Only a weak reference to the transaction is
        retained, so callers must hold the returned object.
        """
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            # nested transaction: reuse the outer one
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(
                _("abandoned transaction found - run hg recover"))

        self._writejournal(desc)
        # on success, journal.* backups become the undo.* rollback data
        renames = [(x, undoname(x)) for x in self._journalfiles()]

        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        self._transref = weakref.ref(tr)
        return tr
816
816
817 def _journalfiles(self):
817 def _journalfiles(self):
818 return (self.sjoin('journal'), self.join('journal.dirstate'),
818 return (self.sjoin('journal'), self.join('journal.dirstate'),
819 self.join('journal.branch'), self.join('journal.desc'),
819 self.join('journal.branch'), self.join('journal.desc'),
820 self.join('journal.bookmarks'),
820 self.join('journal.bookmarks'),
821 self.sjoin('journal.phaseroots'))
821 self.sjoin('journal.phaseroots'))
822
822
823 def undofiles(self):
823 def undofiles(self):
824 return [undoname(x) for x in self._journalfiles()]
824 return [undoname(x) for x in self._journalfiles()]
825
825
    def _writejournal(self, desc):
        """Back up dirstate, branch, bookmarks and phaseroots into
        journal.* files so an aborted transaction (or a later rollback)
        can restore them."""
        self.opener.write("journal.dirstate",
                          self.opener.tryread("dirstate"))
        self.opener.write("journal.branch",
                          encoding.fromlocal(self.dirstate.branch()))
        # line 1: changelog length before the transaction
        # line 2: human-readable transaction description
        self.opener.write("journal.desc",
                          "%d\n%s\n" % (len(self), desc))
        self.opener.write("journal.bookmarks",
                          self.opener.tryread("bookmarks"))
        self.sopener.write("journal.phaseroots",
                           self.sopener.tryread("phaseroots"))
837
837
    def recover(self):
        """Roll back an interrupted transaction, if any.

        Takes the store lock for the duration.  Returns True when a
        journal was found and rolled back, False when there was nothing
        to recover.
        """
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"),
                                     self.ui.warn)
                # caches may describe state that no longer exists
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
852
852
    def rollback(self, dryrun=False, force=False):
        """Undo the last transaction recorded in the undo.* files.

        Takes both the working-directory lock and the store lock, then
        delegates to _rollback().  Returns 1 (and warns) when no
        rollback information is available.
        """
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                return self._rollback(dryrun, force)
            else:
                self.ui.warn(_("no rollback information available\n"))
                return 1
        finally:
            release(lock, wlock)
865
865
    @unfilteredmethod # Until we get smarter cache management
    def _rollback(self, dryrun, force):
        """Implementation of rollback(): restore the state saved in the
        undo.* files.

        Unless *force* is set, refuses to roll back a commit when the
        working directory parent is no longer tip (data could be lost).
        Restores bookmarks, phaseroots, and — when the dirstate parent
        was stripped — the dirstate and named branch as well.
        Returns 0 on success.
        """
        ui = self.ui
        try:
            # undo.desc: "<old changelog length>\n<desc>[\n<detail>]"
            args = self.opener.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            # undo.desc missing or unreadable: proceed, but we cannot
            # tell what kind of transaction is being undone
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        # capture dirstate parents before the changelog shrinks
        parents = self.dirstate.parents()
        self.destroying()
        transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
        if os.path.exists(self.join('undo.bookmarks')):
            util.rename(self.join('undo.bookmarks'),
                        self.join('bookmarks'))
        if os.path.exists(self.sjoin('undo.phaseroots')):
            util.rename(self.sjoin('undo.phaseroots'),
                        self.sjoin('phaseroots'))
        self.invalidate()

        # did the rollback strip the working directory's parent(s)?
        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            util.rename(self.join('undo.dirstate'), self.join('dirstate'))
            try:
                branch = self.opener.read('undo.branch')
                self.dirstate.setbranch(encoding.tolocal(branch))
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0
933
933
    def invalidatecaches(self):
        """Drop the tags, branch and volatile-set caches so they are
        recomputed on next access."""

        if '_tagscache' in vars(self):
            # can't use delattr on proxy
            del self.__dict__['_tagscache']

        # branch caches are stored on the unfiltered repository only
        self.unfiltered()._branchcaches.clear()
        self.invalidatevolatilesets()
942
942
    def invalidatevolatilesets(self):
        """Drop caches that depend on the set of visible/obsolete
        revisions: per-filter revision caches and obsolescence caches."""
        self.filteredrevcache.clear()
        obsolete.clearobscaches(self)
946
946
    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This is different from dirstate.invalidate() in that it doesn't
        always reread the dirstate. Use dirstate.invalidate() if you
        want to explicitly read the dirstate again (i.e. restoring it to
        a previous known good state).'''
        if hasunfilteredcache(self, 'dirstate'):
            # drop the dirstate's own file caches first...
            for k in self.dirstate._filecache:
                try:
                    delattr(self.dirstate, k)
                except AttributeError:
                    pass
            # ...then the cached dirstate property itself
            delattr(self.unfiltered(), 'dirstate')
963
963
    def invalidate(self):
        """Drop every filecache-backed property (except the dirstate,
        which has its own path via invalidatedirstate()) plus the
        in-memory caches, forcing a reload from disk on next access."""
        unfiltered = self.unfiltered() # all filecaches are stored on unfiltered
        for k in self._filecache:
            # dirstate is invalidated separately in invalidatedirstate()
            if k == 'dirstate':
                continue

            try:
                delattr(unfiltered, k)
            except AttributeError:
                # property was never loaded; nothing to drop
                pass
        self.invalidatecaches()
976
976
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock file *lockname* and return the lock object.

        First tries a non-blocking acquire.  If the lock is held
        elsewhere and *wait* is true, warns the user (naming the holder)
        and retries with the configured ui.timeout (default 600s);
        if *wait* is false, re-raises LockHeld.  *releasefn* runs on
        release, *acquirefn* immediately after acquisition.
        """
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
991
991
992 def _afterlock(self, callback):
992 def _afterlock(self, callback):
993 """add a callback to the current repository lock.
993 """add a callback to the current repository lock.
994
994
995 The callback will be executed on lock release."""
995 The callback will be executed on lock release."""
996 l = self._lockref and self._lockref()
996 l = self._lockref and self._lockref()
997 if l:
997 if l:
998 l.postrelease.append(callback)
998 l.postrelease.append(callback)
999 else:
999 else:
1000 callback()
1000 callback()
1001
1001
    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.)'''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            # already locked by us: bump the hold count and reuse it
            l.lock()
            return l

        def unlock():
            # flush everything written under the lock, then refresh the
            # filecache entries so stale state is not reused
            self.store.write()
            if hasunfilteredcache(self, '_phasecache'):
                self._phasecache.write()
            for k, ce in self._filecache.items():
                if k == 'dirstate' or k not in self.__dict__:
                    continue
                ce.refresh()

        l = self._lock(self.sjoin("lock"), wait, unlock,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
1024
1024
    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            # already locked by us: bump the hold count and reuse it
            l.lock()
            return l

        def unlock():
            # persist the dirstate and refresh its cache entry on release
            self.dirstate.write()
            self._filecache['dirstate'].refresh()

        l = self._lock(self.join("wlock"), wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
1043
1043
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        Resolves copy/rename metadata and the parent filenodes, then
        adds the new revision to the file's filelog.  Appends the file
        name to *changelist* when its content (or, on a merge, its
        flags) changed.  Returns the resulting filenode, or the reused
        first-parent node when nothing changed.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3 rev1 changes file foo
            # \ / rev2 renames foo to bar and changes it
            # \- 2 -/ rev3 should have bar with all changes and
            # should record that bar descends from
            # bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3 rev4 reverts the content change from rev2
            # \ / merging rev3 and rev4 should use bar@rev2
            # \- 2 --- 4 as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(" %s: searching for copy revision for %s\n" %
                              (fname, cfname))
                for ancestor in self[None].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                # copy source untraceable: commit without copy metadata
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestor = flog.ancestor(fparent1, fparent2)
            if fparentancestor == fparent1:
                fparent1, fparent2 = fparent2, nullid
            elif fparentancestor == fparent2:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1
1123
1123
    @unfilteredmethod
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the new changeset's node, or None when there is nothing
        to commit.  NOTE: the mutable default for 'extra' is only safe
        because it is never modified in place here.
        """

        def fail(f, msg):
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = matchmod.always(self.root, '')

        if not force:
            # record visited directories and reject unknown files early
            vdirs = []
            match.dir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if (not force and merge and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            # changes: (modified, added, removed, deleted, unknown,
            #           ignored, clean) status lists
            changes = self.status(match=match, clean=force)
            if force:
                changes[0].extend(changes[6]) # mq may commit unchanged files

            # check subrepos
            subs = []
            commitsubs = set()
            newstate = wctx.substate.copy()
            # only manage subrepos and .hgsubstate if .hgsub is present
            if '.hgsub' in wctx:
                # we'll decide whether to track this ourselves, thanks
                if '.hgsubstate' in changes[0]:
                    changes[0].remove('.hgsubstate')
                if '.hgsubstate' in changes[2]:
                    changes[2].remove('.hgsubstate')

                # compare current state to last committed state
                # build new substate based on last committed state
                oldstate = wctx.p1().substate
                for s in sorted(newstate.keys()):
                    if not match(s):
                        # ignore working copy, use old state if present
                        if s in oldstate:
                            newstate[s] = oldstate[s]
                            continue
                        if not force:
                            raise util.Abort(
                                _("commit with new subrepo %s excluded") % s)
                    if wctx.sub(s).dirty(True):
                        if not self.ui.configbool('ui', 'commitsubrepos'):
                            raise util.Abort(
                                _("uncommitted changes in subrepo %s") % s,
                                hint=_("use --subrepos for recursive commit"))
                        subs.append(s)
                        commitsubs.add(s)
                    else:
                        bs = wctx.sub(s).basestate()
                        newstate[s] = (newstate[s][0], bs, newstate[s][2])
                        if oldstate.get(s, (None, None, None))[1] != bs:
                            subs.append(s)

                # check for removed subrepos
                for p in wctx.parents():
                    r = [s for s in p.substate if s not in newstate]
                    subs += [s for s in r if match(s)]
                if subs:
                    if (not match('.hgsub') and
                        '.hgsub' in (wctx.modified() + wctx.added())):
                        raise util.Abort(
                            _("can't commit subrepos without .hgsub"))
                    changes[0].insert(0, '.hgsubstate')

            elif '.hgsub' in changes[2]:
                # clean up .hgsubstate when .hgsub is removed
                if ('.hgsubstate' in wctx and
                    '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
                    changes[2].insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(changes[0] + changes[1] + changes[2])

                for f in match.files():
                    f = self.dirstate.normalize(f)
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in changes[3]: # missing
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            # nothing changed (and not a branch-close / new-branch commit)
            if (not force and not extra.get("close") and not merge
                and not (changes[0] or changes[1] or changes[2])
                and wctx.branch() == wctx.p1().branch()):
                return None

            if merge and changes[3]:
                raise util.Abort(_("cannot commit merge with missing files"))

            ms = mergemod.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg help resolve)"))

            cctx = context.workingctx(self, text, user, date, extra, changes)
            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # commit subs and write new state
            if subs:
                for s in sorted(commitsubs):
                    sub = wctx.sub(s)
                    self.ui.status(_('committing subrepository %s\n') %
                                   subrepo.subrelpath(sub))
                    sr = sub.commit(cctx._text, user, date)
                    newstate[s] = (newstate[s][0], sr)
                subrepo.writestate(self, newstate)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook). Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfn = self.savecommitmessage(cctx._text)

            p1, p2 = self.dirstate.parents()
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            try:
                self.hook("precommit", throw=True, parent1=hookp1,
                          parent2=hookp2)
                ret = self.commitctx(cctx, True)
            except: # re-raises
                if edited:
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update bookmarks, dirstate and mergestate
            bookmarks.update(self, [p1, p2], ret)
            for f in changes[0] + changes[1]:
                self.dirstate.normal(f)
            for f in changes[2]:
                self.dirstate.drop(f)
            self.dirstate.setparents(ret)
            ms.reset()
        finally:
            wlock.release()

        # fire the commit hook after the wlock is released
        def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
            self.hook("commit", node=node, parent1=parent1, parent2=parent2)
        self._afterlock(commithook)
        return ret
1294
1294
1295 @unfilteredmethod
1295 @unfilteredmethod
1296 def commitctx(self, ctx, error=False):
1296 def commitctx(self, ctx, error=False):
1297 """Add a new revision to current repository.
1297 """Add a new revision to current repository.
1298 Revision information is passed via the context argument.
1298 Revision information is passed via the context argument.
1299 """
1299 """
1300
1300
1301 tr = lock = None
1301 tr = lock = None
1302 removed = list(ctx.removed())
1302 removed = list(ctx.removed())
1303 p1, p2 = ctx.p1(), ctx.p2()
1303 p1, p2 = ctx.p1(), ctx.p2()
1304 user = ctx.user()
1304 user = ctx.user()
1305
1305
1306 lock = self.lock()
1306 lock = self.lock()
1307 try:
1307 try:
1308 tr = self.transaction("commit")
1308 tr = self.transaction("commit")
1309 trp = weakref.proxy(tr)
1309 trp = weakref.proxy(tr)
1310
1310
1311 if ctx.files():
1311 if ctx.files():
1312 m1 = p1.manifest().copy()
1312 m1 = p1.manifest().copy()
1313 m2 = p2.manifest()
1313 m2 = p2.manifest()
1314
1314
1315 # check in files
1315 # check in files
1316 new = {}
1316 new = {}
1317 changed = []
1317 changed = []
1318 linkrev = len(self)
1318 linkrev = len(self)
1319 for f in sorted(ctx.modified() + ctx.added()):
1319 for f in sorted(ctx.modified() + ctx.added()):
1320 self.ui.note(f + "\n")
1320 self.ui.note(f + "\n")
1321 try:
1321 try:
1322 fctx = ctx[f]
1322 fctx = ctx[f]
1323 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1323 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1324 changed)
1324 changed)
1325 m1.set(f, fctx.flags())
1325 m1.set(f, fctx.flags())
1326 except OSError, inst:
1326 except OSError, inst:
1327 self.ui.warn(_("trouble committing %s!\n") % f)
1327 self.ui.warn(_("trouble committing %s!\n") % f)
1328 raise
1328 raise
1329 except IOError, inst:
1329 except IOError, inst:
1330 errcode = getattr(inst, 'errno', errno.ENOENT)
1330 errcode = getattr(inst, 'errno', errno.ENOENT)
1331 if error or errcode and errcode != errno.ENOENT:
1331 if error or errcode and errcode != errno.ENOENT:
1332 self.ui.warn(_("trouble committing %s!\n") % f)
1332 self.ui.warn(_("trouble committing %s!\n") % f)
1333 raise
1333 raise
1334 else:
1334 else:
1335 removed.append(f)
1335 removed.append(f)
1336
1336
1337 # update manifest
1337 # update manifest
1338 m1.update(new)
1338 m1.update(new)
1339 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1339 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1340 drop = [f for f in removed if f in m1]
1340 drop = [f for f in removed if f in m1]
1341 for f in drop:
1341 for f in drop:
1342 del m1[f]
1342 del m1[f]
1343 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1343 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1344 p2.manifestnode(), (new, drop))
1344 p2.manifestnode(), (new, drop))
1345 files = changed + removed
1345 files = changed + removed
1346 else:
1346 else:
1347 mn = p1.manifestnode()
1347 mn = p1.manifestnode()
1348 files = []
1348 files = []
1349
1349
1350 # update changelog
1350 # update changelog
1351 self.changelog.delayupdate()
1351 self.changelog.delayupdate()
1352 n = self.changelog.add(mn, files, ctx.description(),
1352 n = self.changelog.add(mn, files, ctx.description(),
1353 trp, p1.node(), p2.node(),
1353 trp, p1.node(), p2.node(),
1354 user, ctx.date(), ctx.extra().copy())
1354 user, ctx.date(), ctx.extra().copy())
1355 p = lambda: self.changelog.writepending() and self.root or ""
1355 p = lambda: self.changelog.writepending() and self.root or ""
1356 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1356 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1357 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1357 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1358 parent2=xp2, pending=p)
1358 parent2=xp2, pending=p)
1359 self.changelog.finalize(trp)
1359 self.changelog.finalize(trp)
1360 # set the new commit is proper phase
1360 # set the new commit is proper phase
1361 targetphase = phases.newcommitphase(self.ui)
1361 targetphase = phases.newcommitphase(self.ui)
1362 if targetphase:
1362 if targetphase:
1363 # retract boundary do not alter parent changeset.
1363 # retract boundary do not alter parent changeset.
1364 # if a parent have higher the resulting phase will
1364 # if a parent have higher the resulting phase will
1365 # be compliant anyway
1365 # be compliant anyway
1366 #
1366 #
1367 # if minimal phase was 0 we don't need to retract anything
1367 # if minimal phase was 0 we don't need to retract anything
1368 phases.retractboundary(self, targetphase, [n])
1368 phases.retractboundary(self, targetphase, [n])
1369 tr.close()
1369 tr.close()
1370 branchmap.updatecache(self)
1370 branchmap.updatecache(self)
1371 return n
1371 return n
1372 finally:
1372 finally:
1373 if tr:
1373 if tr:
1374 tr.release()
1374 tr.release()
1375 lock.release()
1375 lock.release()
1376
1376
1377 @unfilteredmethod
1377 @unfilteredmethod
1378 def destroying(self):
1378 def destroying(self):
1379 '''Inform the repository that nodes are about to be destroyed.
1379 '''Inform the repository that nodes are about to be destroyed.
1380 Intended for use by strip and rollback, so there's a common
1380 Intended for use by strip and rollback, so there's a common
1381 place for anything that has to be done before destroying history.
1381 place for anything that has to be done before destroying history.
1382
1382
1383 This is mostly useful for saving state that is in memory and waiting
1383 This is mostly useful for saving state that is in memory and waiting
1384 to be flushed when the current lock is released. Because a call to
1384 to be flushed when the current lock is released. Because a call to
1385 destroyed is imminent, the repo will be invalidated causing those
1385 destroyed is imminent, the repo will be invalidated causing those
1386 changes to stay in memory (waiting for the next unlock), or vanish
1386 changes to stay in memory (waiting for the next unlock), or vanish
1387 completely.
1387 completely.
1388 '''
1388 '''
1389 # It simplifies the logic around updating the branchheads cache if we
1389 # It simplifies the logic around updating the branchheads cache if we
1390 # only have to consider the effect of the stripped revisions and not
1390 # only have to consider the effect of the stripped revisions and not
1391 # revisions missing because the cache is out-of-date.
1391 # revisions missing because the cache is out-of-date.
1392 branchmap.updatecache(self)
1392 branchmap.updatecache(self)
1393
1393
1394 # When using the same lock to commit and strip, the phasecache is left
1394 # When using the same lock to commit and strip, the phasecache is left
1395 # dirty after committing. Then when we strip, the repo is invalidated,
1395 # dirty after committing. Then when we strip, the repo is invalidated,
1396 # causing those changes to disappear.
1396 # causing those changes to disappear.
1397 if '_phasecache' in vars(self):
1397 if '_phasecache' in vars(self):
1398 self._phasecache.write()
1398 self._phasecache.write()
1399
1399
1400 @unfilteredmethod
1400 @unfilteredmethod
1401 def destroyed(self, newheadnodes=None):
1401 def destroyed(self, newheadnodes=None):
1402 '''Inform the repository that nodes have been destroyed.
1402 '''Inform the repository that nodes have been destroyed.
1403 Intended for use by strip and rollback, so there's a common
1403 Intended for use by strip and rollback, so there's a common
1404 place for anything that has to be done after destroying history.
1404 place for anything that has to be done after destroying history.
1405
1405
1406 If you know the branchheadcache was uptodate before nodes were removed
1406 If you know the branchheadcache was uptodate before nodes were removed
1407 and you also know the set of candidate new heads that may have resulted
1407 and you also know the set of candidate new heads that may have resulted
1408 from the destruction, you can set newheadnodes. This will enable the
1408 from the destruction, you can set newheadnodes. This will enable the
1409 code to update the branchheads cache, rather than having future code
1409 code to update the branchheads cache, rather than having future code
1410 decide it's invalid and regenerating it from scratch.
1410 decide it's invalid and regenerating it from scratch.
1411 '''
1411 '''
1412 # When one tries to:
1412 # When one tries to:
1413 # 1) destroy nodes thus calling this method (e.g. strip)
1413 # 1) destroy nodes thus calling this method (e.g. strip)
1414 # 2) use phasecache somewhere (e.g. commit)
1414 # 2) use phasecache somewhere (e.g. commit)
1415 #
1415 #
1416 # then 2) will fail because the phasecache contains nodes that were
1416 # then 2) will fail because the phasecache contains nodes that were
1417 # removed. We can either remove phasecache from the filecache,
1417 # removed. We can either remove phasecache from the filecache,
1418 # causing it to reload next time it is accessed, or simply filter
1418 # causing it to reload next time it is accessed, or simply filter
1419 # the removed nodes now and write the updated cache.
1419 # the removed nodes now and write the updated cache.
1420 if '_phasecache' in self._filecache:
1420 if '_phasecache' in self._filecache:
1421 self._phasecache.filterunknown(self)
1421 self._phasecache.filterunknown(self)
1422 self._phasecache.write()
1422 self._phasecache.write()
1423
1423
1424 # If we have info, newheadnodes, on how to update the branch cache, do
1424 # If we have info, newheadnodes, on how to update the branch cache, do
1425 # it, Otherwise, since nodes were destroyed, the cache is stale and this
1425 # it, Otherwise, since nodes were destroyed, the cache is stale and this
1426 # will be caught the next time it is read.
1426 # will be caught the next time it is read.
1427 if newheadnodes:
1427 if newheadnodes:
1428 cl = self.changelog
1428 cl = self.changelog
1429 revgen = (cl.rev(node) for node in newheadnodes
1429 revgen = (cl.rev(node) for node in newheadnodes
1430 if cl.hasnode(node))
1430 if cl.hasnode(node))
1431 cache = self._branchcaches[None]
1431 cache = self._branchcaches[None]
1432 cache.update(self, revgen)
1432 cache.update(self, revgen)
1433 cache.write(self)
1433 cache.write(self)
1434
1434
1435 # Ensure the persistent tag cache is updated. Doing it now
1435 # Ensure the persistent tag cache is updated. Doing it now
1436 # means that the tag cache only has to worry about destroyed
1436 # means that the tag cache only has to worry about destroyed
1437 # heads immediately after a strip/rollback. That in turn
1437 # heads immediately after a strip/rollback. That in turn
1438 # guarantees that "cachetip == currenttip" (comparing both rev
1438 # guarantees that "cachetip == currenttip" (comparing both rev
1439 # and node) always means no nodes have been added or destroyed.
1439 # and node) always means no nodes have been added or destroyed.
1440
1440
1441 # XXX this is suboptimal when qrefresh'ing: we strip the current
1441 # XXX this is suboptimal when qrefresh'ing: we strip the current
1442 # head, refresh the tag cache, then immediately add a new head.
1442 # head, refresh the tag cache, then immediately add a new head.
1443 # But I think doing it this way is necessary for the "instant
1443 # But I think doing it this way is necessary for the "instant
1444 # tag cache retrieval" case to work.
1444 # tag cache retrieval" case to work.
1445 self.invalidate()
1445 self.invalidate()
1446
1446
1447 def walk(self, match, node=None):
1447 def walk(self, match, node=None):
1448 '''
1448 '''
1449 walk recursively through the directory tree or a given
1449 walk recursively through the directory tree or a given
1450 changeset, finding all files matched by the match
1450 changeset, finding all files matched by the match
1451 function
1451 function
1452 '''
1452 '''
1453 return self[node].walk(match)
1453 return self[node].walk(match)
1454
1454
1455 def status(self, node1='.', node2=None, match=None,
1455 def status(self, node1='.', node2=None, match=None,
1456 ignored=False, clean=False, unknown=False,
1456 ignored=False, clean=False, unknown=False,
1457 listsubrepos=False):
1457 listsubrepos=False):
1458 """return status of files between two nodes or node and working
1458 """return status of files between two nodes or node and working
1459 directory.
1459 directory.
1460
1460
1461 If node1 is None, use the first dirstate parent instead.
1461 If node1 is None, use the first dirstate parent instead.
1462 If node2 is None, compare node1 with working directory.
1462 If node2 is None, compare node1 with working directory.
1463 """
1463 """
1464
1464
1465 def mfmatches(ctx):
1465 def mfmatches(ctx):
1466 mf = ctx.manifest().copy()
1466 mf = ctx.manifest().copy()
1467 if match.always():
1467 if match.always():
1468 return mf
1468 return mf
1469 for fn in mf.keys():
1469 for fn in mf.keys():
1470 if not match(fn):
1470 if not match(fn):
1471 del mf[fn]
1471 del mf[fn]
1472 return mf
1472 return mf
1473
1473
1474 if isinstance(node1, context.changectx):
1474 if isinstance(node1, context.changectx):
1475 ctx1 = node1
1475 ctx1 = node1
1476 else:
1476 else:
1477 ctx1 = self[node1]
1477 ctx1 = self[node1]
1478 if isinstance(node2, context.changectx):
1478 if isinstance(node2, context.changectx):
1479 ctx2 = node2
1479 ctx2 = node2
1480 else:
1480 else:
1481 ctx2 = self[node2]
1481 ctx2 = self[node2]
1482
1482
1483 working = ctx2.rev() is None
1483 working = ctx2.rev() is None
1484 parentworking = working and ctx1 == self['.']
1484 parentworking = working and ctx1 == self['.']
1485 match = match or matchmod.always(self.root, self.getcwd())
1485 match = match or matchmod.always(self.root, self.getcwd())
1486 listignored, listclean, listunknown = ignored, clean, unknown
1486 listignored, listclean, listunknown = ignored, clean, unknown
1487
1487
1488 # load earliest manifest first for caching reasons
1488 # load earliest manifest first for caching reasons
1489 if not working and ctx2.rev() < ctx1.rev():
1489 if not working and ctx2.rev() < ctx1.rev():
1490 ctx2.manifest()
1490 ctx2.manifest()
1491
1491
1492 if not parentworking:
1492 if not parentworking:
1493 def bad(f, msg):
1493 def bad(f, msg):
1494 # 'f' may be a directory pattern from 'match.files()',
1494 # 'f' may be a directory pattern from 'match.files()',
1495 # so 'f not in ctx1' is not enough
1495 # so 'f not in ctx1' is not enough
1496 if f not in ctx1 and f not in ctx1.dirs():
1496 if f not in ctx1 and f not in ctx1.dirs():
1497 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1497 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1498 match.bad = bad
1498 match.bad = bad
1499
1499
1500 if working: # we need to scan the working dir
1500 if working: # we need to scan the working dir
1501 subrepos = []
1501 subrepos = []
1502 if '.hgsub' in self.dirstate:
1502 if '.hgsub' in self.dirstate:
1503 subrepos = sorted(ctx2.substate)
1503 subrepos = sorted(ctx2.substate)
1504 s = self.dirstate.status(match, subrepos, listignored,
1504 s = self.dirstate.status(match, subrepos, listignored,
1505 listclean, listunknown)
1505 listclean, listunknown)
1506 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1506 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1507
1507
1508 # check for any possibly clean files
1508 # check for any possibly clean files
1509 if parentworking and cmp:
1509 if parentworking and cmp:
1510 fixup = []
1510 fixup = []
1511 # do a full compare of any files that might have changed
1511 # do a full compare of any files that might have changed
1512 for f in sorted(cmp):
1512 for f in sorted(cmp):
1513 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1513 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1514 or ctx1[f].cmp(ctx2[f])):
1514 or ctx1[f].cmp(ctx2[f])):
1515 modified.append(f)
1515 modified.append(f)
1516 else:
1516 else:
1517 fixup.append(f)
1517 fixup.append(f)
1518
1518
1519 # update dirstate for files that are actually clean
1519 # update dirstate for files that are actually clean
1520 if fixup:
1520 if fixup:
1521 if listclean:
1521 if listclean:
1522 clean += fixup
1522 clean += fixup
1523
1523
1524 try:
1524 try:
1525 # updating the dirstate is optional
1525 # updating the dirstate is optional
1526 # so we don't wait on the lock
1526 # so we don't wait on the lock
1527 wlock = self.wlock(False)
1527 wlock = self.wlock(False)
1528 try:
1528 try:
1529 for f in fixup:
1529 for f in fixup:
1530 self.dirstate.normal(f)
1530 self.dirstate.normal(f)
1531 finally:
1531 finally:
1532 wlock.release()
1532 wlock.release()
1533 except error.LockError:
1533 except error.LockError:
1534 pass
1534 pass
1535
1535
1536 if not parentworking:
1536 if not parentworking:
1537 mf1 = mfmatches(ctx1)
1537 mf1 = mfmatches(ctx1)
1538 if working:
1538 if working:
1539 # we are comparing working dir against non-parent
1539 # we are comparing working dir against non-parent
1540 # generate a pseudo-manifest for the working dir
1540 # generate a pseudo-manifest for the working dir
1541 mf2 = mfmatches(self['.'])
1541 mf2 = mfmatches(self['.'])
1542 for f in cmp + modified + added:
1542 for f in cmp + modified + added:
1543 mf2[f] = None
1543 mf2[f] = None
1544 mf2.set(f, ctx2.flags(f))
1544 mf2.set(f, ctx2.flags(f))
1545 for f in removed:
1545 for f in removed:
1546 if f in mf2:
1546 if f in mf2:
1547 del mf2[f]
1547 del mf2[f]
1548 else:
1548 else:
1549 # we are comparing two revisions
1549 # we are comparing two revisions
1550 deleted, unknown, ignored = [], [], []
1550 deleted, unknown, ignored = [], [], []
1551 mf2 = mfmatches(ctx2)
1551 mf2 = mfmatches(ctx2)
1552
1552
1553 modified, added, clean = [], [], []
1553 modified, added, clean = [], [], []
1554 withflags = mf1.withflags() | mf2.withflags()
1554 withflags = mf1.withflags() | mf2.withflags()
1555 for fn in mf2:
1555 for fn in mf2:
1556 if fn in mf1:
1556 if fn in mf1:
1557 if (fn not in deleted and
1557 if (fn not in deleted and
1558 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1558 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1559 (mf1[fn] != mf2[fn] and
1559 (mf1[fn] != mf2[fn] and
1560 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1560 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1561 modified.append(fn)
1561 modified.append(fn)
1562 elif listclean:
1562 elif listclean:
1563 clean.append(fn)
1563 clean.append(fn)
1564 del mf1[fn]
1564 del mf1[fn]
1565 elif fn not in deleted:
1565 elif fn not in deleted:
1566 added.append(fn)
1566 added.append(fn)
1567 removed = mf1.keys()
1567 removed = mf1.keys()
1568
1568
1569 if working and modified and not self.dirstate._checklink:
1569 if working and modified and not self.dirstate._checklink:
1570 # Symlink placeholders may get non-symlink-like contents
1570 # Symlink placeholders may get non-symlink-like contents
1571 # via user error or dereferencing by NFS or Samba servers,
1571 # via user error or dereferencing by NFS or Samba servers,
1572 # so we filter out any placeholders that don't look like a
1572 # so we filter out any placeholders that don't look like a
1573 # symlink
1573 # symlink
1574 sane = []
1574 sane = []
1575 for f in modified:
1575 for f in modified:
1576 if ctx2.flags(f) == 'l':
1576 if ctx2.flags(f) == 'l':
1577 d = ctx2[f].data()
1577 d = ctx2[f].data()
1578 if len(d) >= 1024 or '\n' in d or util.binary(d):
1578 if len(d) >= 1024 or '\n' in d or util.binary(d):
1579 self.ui.debug('ignoring suspect symlink placeholder'
1579 self.ui.debug('ignoring suspect symlink placeholder'
1580 ' "%s"\n' % f)
1580 ' "%s"\n' % f)
1581 continue
1581 continue
1582 sane.append(f)
1582 sane.append(f)
1583 modified = sane
1583 modified = sane
1584
1584
1585 r = modified, added, removed, deleted, unknown, ignored, clean
1585 r = modified, added, removed, deleted, unknown, ignored, clean
1586
1586
1587 if listsubrepos:
1587 if listsubrepos:
1588 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1588 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1589 if working:
1589 if working:
1590 rev2 = None
1590 rev2 = None
1591 else:
1591 else:
1592 rev2 = ctx2.substate[subpath][1]
1592 rev2 = ctx2.substate[subpath][1]
1593 try:
1593 try:
1594 submatch = matchmod.narrowmatcher(subpath, match)
1594 submatch = matchmod.narrowmatcher(subpath, match)
1595 s = sub.status(rev2, match=submatch, ignored=listignored,
1595 s = sub.status(rev2, match=submatch, ignored=listignored,
1596 clean=listclean, unknown=listunknown,
1596 clean=listclean, unknown=listunknown,
1597 listsubrepos=True)
1597 listsubrepos=True)
1598 for rfiles, sfiles in zip(r, s):
1598 for rfiles, sfiles in zip(r, s):
1599 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1599 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1600 except error.LookupError:
1600 except error.LookupError:
1601 self.ui.status(_("skipping missing subrepository: %s\n")
1601 self.ui.status(_("skipping missing subrepository: %s\n")
1602 % subpath)
1602 % subpath)
1603
1603
1604 for l in r:
1604 for l in r:
1605 l.sort()
1605 l.sort()
1606 return r
1606 return r
1607
1607
1608 def heads(self, start=None):
1608 def heads(self, start=None):
1609 heads = self.changelog.heads(start)
1609 heads = self.changelog.heads(start)
1610 # sort the output in rev descending order
1610 # sort the output in rev descending order
1611 return sorted(heads, key=self.changelog.rev, reverse=True)
1611 return sorted(heads, key=self.changelog.rev, reverse=True)
1612
1612
1613 def branchheads(self, branch=None, start=None, closed=False):
1613 def branchheads(self, branch=None, start=None, closed=False):
1614 '''return a (possibly filtered) list of heads for the given branch
1614 '''return a (possibly filtered) list of heads for the given branch
1615
1615
1616 Heads are returned in topological order, from newest to oldest.
1616 Heads are returned in topological order, from newest to oldest.
1617 If branch is None, use the dirstate branch.
1617 If branch is None, use the dirstate branch.
1618 If start is not None, return only heads reachable from start.
1618 If start is not None, return only heads reachable from start.
1619 If closed is True, return heads that are marked as closed as well.
1619 If closed is True, return heads that are marked as closed as well.
1620 '''
1620 '''
1621 if branch is None:
1621 if branch is None:
1622 branch = self[None].branch()
1622 branch = self[None].branch()
1623 branches = self.branchmap()
1623 branches = self.branchmap()
1624 if branch not in branches:
1624 if branch not in branches:
1625 return []
1625 return []
1626 # the cache returns heads ordered lowest to highest
1626 # the cache returns heads ordered lowest to highest
1627 bheads = list(reversed(branches[branch]))
1627 bheads = list(reversed(branches[branch]))
1628 if start is not None:
1628 if start is not None:
1629 # filter out the heads that cannot be reached from startrev
1629 # filter out the heads that cannot be reached from startrev
1630 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1630 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1631 bheads = [h for h in bheads if h in fbheads]
1631 bheads = [h for h in bheads if h in fbheads]
1632 if not closed:
1632 if not closed:
1633 bheads = [h for h in bheads if not self[h].closesbranch()]
1633 bheads = [h for h in bheads if not self[h].closesbranch()]
1634 return bheads
1634 return bheads
1635
1635
1636 def branches(self, nodes):
1636 def branches(self, nodes):
1637 if not nodes:
1637 if not nodes:
1638 nodes = [self.changelog.tip()]
1638 nodes = [self.changelog.tip()]
1639 b = []
1639 b = []
1640 for n in nodes:
1640 for n in nodes:
1641 t = n
1641 t = n
1642 while True:
1642 while True:
1643 p = self.changelog.parents(n)
1643 p = self.changelog.parents(n)
1644 if p[1] != nullid or p[0] == nullid:
1644 if p[1] != nullid or p[0] == nullid:
1645 b.append((t, n, p[0], p[1]))
1645 b.append((t, n, p[0], p[1]))
1646 break
1646 break
1647 n = p[0]
1647 n = p[0]
1648 return b
1648 return b
1649
1649
1650 def between(self, pairs):
1650 def between(self, pairs):
1651 r = []
1651 r = []
1652
1652
1653 for top, bottom in pairs:
1653 for top, bottom in pairs:
1654 n, l, i = top, [], 0
1654 n, l, i = top, [], 0
1655 f = 1
1655 f = 1
1656
1656
1657 while n != bottom and n != nullid:
1657 while n != bottom and n != nullid:
1658 p = self.changelog.parents(n)[0]
1658 p = self.changelog.parents(n)[0]
1659 if i == f:
1659 if i == f:
1660 l.append(n)
1660 l.append(n)
1661 f = f * 2
1661 f = f * 2
1662 n = p
1662 n = p
1663 i += 1
1663 i += 1
1664
1664
1665 r.append(l)
1665 r.append(l)
1666
1666
1667 return r
1667 return r
1668
1668
1669 def pull(self, remote, heads=None, force=False):
1669 def pull(self, remote, heads=None, force=False):
1670 # don't open transaction for nothing or you break future useful
1670 # don't open transaction for nothing or you break future useful
1671 # rollback call
1671 # rollback call
1672 tr = None
1672 tr = None
1673 trname = 'pull\n' + util.hidepassword(remote.url())
1673 trname = 'pull\n' + util.hidepassword(remote.url())
1674 lock = self.lock()
1674 lock = self.lock()
1675 try:
1675 try:
1676 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1676 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1677 force=force)
1677 force=force)
1678 common, fetch, rheads = tmp
1678 common, fetch, rheads = tmp
1679 if not fetch:
1679 if not fetch:
1680 self.ui.status(_("no changes found\n"))
1680 self.ui.status(_("no changes found\n"))
1681 added = []
1681 added = []
1682 result = 0
1682 result = 0
1683 else:
1683 else:
1684 tr = self.transaction(trname)
1684 tr = self.transaction(trname)
1685 if heads is None and list(common) == [nullid]:
1685 if heads is None and list(common) == [nullid]:
1686 self.ui.status(_("requesting all changes\n"))
1686 self.ui.status(_("requesting all changes\n"))
1687 elif heads is None and remote.capable('changegroupsubset'):
1687 elif heads is None and remote.capable('changegroupsubset'):
1688 # issue1320, avoid a race if remote changed after discovery
1688 # issue1320, avoid a race if remote changed after discovery
1689 heads = rheads
1689 heads = rheads
1690
1690
1691 if remote.capable('getbundle'):
1691 if remote.capable('getbundle'):
1692 cg = remote.getbundle('pull', common=common,
1692 cg = remote.getbundle('pull', common=common,
1693 heads=heads or rheads)
1693 heads=heads or rheads)
1694 elif heads is None:
1694 elif heads is None:
1695 cg = remote.changegroup(fetch, 'pull')
1695 cg = remote.changegroup(fetch, 'pull')
1696 elif not remote.capable('changegroupsubset'):
1696 elif not remote.capable('changegroupsubset'):
1697 raise util.Abort(_("partial pull cannot be done because "
1697 raise util.Abort(_("partial pull cannot be done because "
1698 "other repository doesn't support "
1698 "other repository doesn't support "
1699 "changegroupsubset."))
1699 "changegroupsubset."))
1700 else:
1700 else:
1701 cg = remote.changegroupsubset(fetch, heads, 'pull')
1701 cg = remote.changegroupsubset(fetch, heads, 'pull')
1702 clstart = len(self.changelog)
1702 clstart = len(self.changelog)
1703 result = self.addchangegroup(cg, 'pull', remote.url())
1703 result = self.addchangegroup(cg, 'pull', remote.url())
1704 clend = len(self.changelog)
1704 clend = len(self.changelog)
1705 added = [self.changelog.node(r) for r in xrange(clstart, clend)]
1705 added = [self.changelog.node(r) for r in xrange(clstart, clend)]
1706
1706
1707 # compute target subset
1707 # compute target subset
1708 if heads is None:
1708 if heads is None:
1709 # We pulled every thing possible
1709 # We pulled every thing possible
1710 # sync on everything common
1710 # sync on everything common
1711 subset = common + added
1711 subset = common + added
1712 else:
1712 else:
1713 # We pulled a specific subset
1713 # We pulled a specific subset
1714 # sync on this subset
1714 # sync on this subset
1715 subset = heads
1715 subset = heads
1716
1716
1717 # Get remote phases data from remote
1717 # Get remote phases data from remote
1718 remotephases = remote.listkeys('phases')
1718 remotephases = remote.listkeys('phases')
1719 publishing = bool(remotephases.get('publishing', False))
1719 publishing = bool(remotephases.get('publishing', False))
1720 if remotephases and not publishing:
1720 if remotephases and not publishing:
1721 # remote is new and unpublishing
1721 # remote is new and unpublishing
1722 pheads, _dr = phases.analyzeremotephases(self, subset,
1722 pheads, _dr = phases.analyzeremotephases(self, subset,
1723 remotephases)
1723 remotephases)
1724 phases.advanceboundary(self, phases.public, pheads)
1724 phases.advanceboundary(self, phases.public, pheads)
1725 phases.advanceboundary(self, phases.draft, subset)
1725 phases.advanceboundary(self, phases.draft, subset)
1726 else:
1726 else:
1727 # Remote is old or publishing all common changesets
1727 # Remote is old or publishing all common changesets
1728 # should be seen as public
1728 # should be seen as public
1729 phases.advanceboundary(self, phases.public, subset)
1729 phases.advanceboundary(self, phases.public, subset)
1730
1730
1731 if obsolete._enabled:
1731 if obsolete._enabled:
1732 self.ui.debug('fetching remote obsolete markers\n')
1732 self.ui.debug('fetching remote obsolete markers\n')
1733 remoteobs = remote.listkeys('obsolete')
1733 remoteobs = remote.listkeys('obsolete')
1734 if 'dump0' in remoteobs:
1734 if 'dump0' in remoteobs:
1735 if tr is None:
1735 if tr is None:
1736 tr = self.transaction(trname)
1736 tr = self.transaction(trname)
1737 for key in sorted(remoteobs, reverse=True):
1737 for key in sorted(remoteobs, reverse=True):
1738 if key.startswith('dump'):
1738 if key.startswith('dump'):
1739 data = base85.b85decode(remoteobs[key])
1739 data = base85.b85decode(remoteobs[key])
1740 self.obsstore.mergemarkers(tr, data)
1740 self.obsstore.mergemarkers(tr, data)
1741 self.invalidatevolatilesets()
1741 self.invalidatevolatilesets()
1742 if tr is not None:
1742 if tr is not None:
1743 tr.close()
1743 tr.close()
1744 finally:
1744 finally:
1745 if tr is not None:
1745 if tr is not None:
1746 tr.release()
1746 tr.release()
1747 lock.release()
1747 lock.release()
1748
1748
1749 return result
1749 return result
1750
1750
1751 def checkpush(self, force, revs):
1751 def checkpush(self, force, revs):
1752 """Extensions can override this function if additional checks have
1752 """Extensions can override this function if additional checks have
1753 to be performed before pushing, or call it if they override push
1753 to be performed before pushing, or call it if they override push
1754 command.
1754 command.
1755 """
1755 """
1756 pass
1756 pass
1757
1757
1758 def push(self, remote, force=False, revs=None, newbranch=False):
1758 def push(self, remote, force=False, revs=None, newbranch=False):
1759 '''Push outgoing changesets (limited by revs) from the current
1759 '''Push outgoing changesets (limited by revs) from the current
1760 repository to remote. Return an integer:
1760 repository to remote. Return an integer:
1761 - None means nothing to push
1761 - None means nothing to push
1762 - 0 means HTTP error
1762 - 0 means HTTP error
1763 - 1 means we pushed and remote head count is unchanged *or*
1763 - 1 means we pushed and remote head count is unchanged *or*
1764 we have outgoing changesets but refused to push
1764 we have outgoing changesets but refused to push
1765 - other values as described by addchangegroup()
1765 - other values as described by addchangegroup()
1766 '''
1766 '''
1767 # there are two ways to push to remote repo:
1767 # there are two ways to push to remote repo:
1768 #
1768 #
1769 # addchangegroup assumes local user can lock remote
1769 # addchangegroup assumes local user can lock remote
1770 # repo (local filesystem, old ssh servers).
1770 # repo (local filesystem, old ssh servers).
1771 #
1771 #
1772 # unbundle assumes local user cannot lock remote repo (new ssh
1772 # unbundle assumes local user cannot lock remote repo (new ssh
1773 # servers, http servers).
1773 # servers, http servers).
1774
1774
1775 if not remote.canpush():
1775 if not remote.canpush():
1776 raise util.Abort(_("destination does not support push"))
1776 raise util.Abort(_("destination does not support push"))
1777 unfi = self.unfiltered()
1777 unfi = self.unfiltered()
1778 # get local lock as we might write phase data
1778 # get local lock as we might write phase data
1779 locallock = self.lock()
1779 locallock = self.lock()
1780 try:
1780 try:
1781 self.checkpush(force, revs)
1781 self.checkpush(force, revs)
1782 lock = None
1782 lock = None
1783 unbundle = remote.capable('unbundle')
1783 unbundle = remote.capable('unbundle')
1784 if not unbundle:
1784 if not unbundle:
1785 lock = remote.lock()
1785 lock = remote.lock()
1786 try:
1786 try:
1787 # discovery
1787 # discovery
1788 fci = discovery.findcommonincoming
1788 fci = discovery.findcommonincoming
1789 commoninc = fci(unfi, remote, force=force)
1789 commoninc = fci(unfi, remote, force=force)
1790 common, inc, remoteheads = commoninc
1790 common, inc, remoteheads = commoninc
1791 fco = discovery.findcommonoutgoing
1791 fco = discovery.findcommonoutgoing
1792 outgoing = fco(unfi, remote, onlyheads=revs,
1792 outgoing = fco(unfi, remote, onlyheads=revs,
1793 commoninc=commoninc, force=force)
1793 commoninc=commoninc, force=force)
1794
1794
1795
1795
1796 if not outgoing.missing:
1796 if not outgoing.missing:
1797 # nothing to push
1797 # nothing to push
1798 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
1798 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
1799 ret = None
1799 ret = None
1800 else:
1800 else:
1801 # something to push
1801 # something to push
1802 if not force:
1802 if not force:
1803 # if self.obsstore == False --> no obsolete
1803 # if self.obsstore == False --> no obsolete
1804 # then, save the iteration
1804 # then, save the iteration
1805 if unfi.obsstore:
1805 if unfi.obsstore:
1806 # this message are here for 80 char limit reason
1806 # this message are here for 80 char limit reason
1807 mso = _("push includes obsolete changeset: %s!")
1807 mso = _("push includes obsolete changeset: %s!")
1808 mst = "push includes %s changeset: %s!"
1808 mst = "push includes %s changeset: %s!"
1809 # plain versions for i18n tool to detect them
1809 # plain versions for i18n tool to detect them
1810 _("push includes unstable changeset: %s!")
1810 _("push includes unstable changeset: %s!")
1811 _("push includes bumped changeset: %s!")
1811 _("push includes bumped changeset: %s!")
1812 _("push includes divergent changeset: %s!")
1812 _("push includes divergent changeset: %s!")
1813 # If we are to push if there is at least one
1813 # If we are to push if there is at least one
1814 # obsolete or unstable changeset in missing, at
1814 # obsolete or unstable changeset in missing, at
1815 # least one of the missinghead will be obsolete or
1815 # least one of the missinghead will be obsolete or
1816 # unstable. So checking heads only is ok
1816 # unstable. So checking heads only is ok
1817 for node in outgoing.missingheads:
1817 for node in outgoing.missingheads:
1818 ctx = unfi[node]
1818 ctx = unfi[node]
1819 if ctx.obsolete():
1819 if ctx.obsolete():
1820 raise util.Abort(mso % ctx)
1820 raise util.Abort(mso % ctx)
1821 elif ctx.troubled():
1821 elif ctx.troubled():
1822 raise util.Abort(_(mst)
1822 raise util.Abort(_(mst)
1823 % (ctx.troubles()[0],
1823 % (ctx.troubles()[0],
1824 ctx))
1824 ctx))
1825 discovery.checkheads(unfi, remote, outgoing,
1825 discovery.checkheads(unfi, remote, outgoing,
1826 remoteheads, newbranch,
1826 remoteheads, newbranch,
1827 bool(inc))
1827 bool(inc))
1828
1828
1829 # create a changegroup from local
1829 # create a changegroup from local
1830 if revs is None and not outgoing.excluded:
1830 if revs is None and not outgoing.excluded:
1831 # push everything,
1831 # push everything,
1832 # use the fast path, no race possible on push
1832 # use the fast path, no race possible on push
1833 cg = self._changegroup(outgoing.missing, 'push')
1833 cg = self._changegroup(outgoing.missing, 'push')
1834 else:
1834 else:
1835 cg = self.getlocalbundle('push', outgoing)
1835 cg = self.getlocalbundle('push', outgoing)
1836
1836
1837 # apply changegroup to remote
1837 # apply changegroup to remote
1838 if unbundle:
1838 if unbundle:
1839 # local repo finds heads on server, finds out what
1839 # local repo finds heads on server, finds out what
1840 # revs it must push. once revs transferred, if server
1840 # revs it must push. once revs transferred, if server
1841 # finds it has different heads (someone else won
1841 # finds it has different heads (someone else won
1842 # commit/push race), server aborts.
1842 # commit/push race), server aborts.
1843 if force:
1843 if force:
1844 remoteheads = ['force']
1844 remoteheads = ['force']
1845 # ssh: return remote's addchangegroup()
1845 # ssh: return remote's addchangegroup()
1846 # http: return remote's addchangegroup() or 0 for error
1846 # http: return remote's addchangegroup() or 0 for error
1847 ret = remote.unbundle(cg, remoteheads, 'push')
1847 ret = remote.unbundle(cg, remoteheads, 'push')
1848 else:
1848 else:
1849 # we return an integer indicating remote head count
1849 # we return an integer indicating remote head count
1850 # change
1850 # change
1851 ret = remote.addchangegroup(cg, 'push', self.url())
1851 ret = remote.addchangegroup(cg, 'push', self.url())
1852
1852
1853 if ret:
1853 if ret:
1854 # push succeed, synchronize target of the push
1854 # push succeed, synchronize target of the push
1855 cheads = outgoing.missingheads
1855 cheads = outgoing.missingheads
1856 elif revs is None:
1856 elif revs is None:
1857 # All out push fails. synchronize all common
1857 # All out push fails. synchronize all common
1858 cheads = outgoing.commonheads
1858 cheads = outgoing.commonheads
1859 else:
1859 else:
1860 # I want cheads = heads(::missingheads and ::commonheads)
1860 # I want cheads = heads(::missingheads and ::commonheads)
1861 # (missingheads is revs with secret changeset filtered out)
1861 # (missingheads is revs with secret changeset filtered out)
1862 #
1862 #
1863 # This can be expressed as:
1863 # This can be expressed as:
1864 # cheads = ( (missingheads and ::commonheads)
1864 # cheads = ( (missingheads and ::commonheads)
1865 # + (commonheads and ::missingheads))"
1865 # + (commonheads and ::missingheads))"
1866 # )
1866 # )
1867 #
1867 #
1868 # while trying to push we already computed the following:
1868 # while trying to push we already computed the following:
1869 # common = (::commonheads)
1869 # common = (::commonheads)
1870 # missing = ((commonheads::missingheads) - commonheads)
1870 # missing = ((commonheads::missingheads) - commonheads)
1871 #
1871 #
1872 # We can pick:
1872 # We can pick:
1873 # * missingheads part of common (::commonheads)
1873 # * missingheads part of common (::commonheads)
1874 common = set(outgoing.common)
1874 common = set(outgoing.common)
1875 cheads = [node for node in revs if node in common]
1875 cheads = [node for node in revs if node in common]
1876 # and
1876 # and
1877 # * commonheads parents on missing
1877 # * commonheads parents on missing
1878 revset = unfi.set('%ln and parents(roots(%ln))',
1878 revset = unfi.set('%ln and parents(roots(%ln))',
1879 outgoing.commonheads,
1879 outgoing.commonheads,
1880 outgoing.missing)
1880 outgoing.missing)
1881 cheads.extend(c.node() for c in revset)
1881 cheads.extend(c.node() for c in revset)
1882 # even when we don't push, exchanging phase data is useful
1882 # even when we don't push, exchanging phase data is useful
1883 remotephases = remote.listkeys('phases')
1883 remotephases = remote.listkeys('phases')
1884 if not remotephases: # old server or public only repo
1884 if not remotephases: # old server or public only repo
1885 phases.advanceboundary(self, phases.public, cheads)
1885 phases.advanceboundary(self, phases.public, cheads)
1886 # don't push any phase data as there is nothing to push
1886 # don't push any phase data as there is nothing to push
1887 else:
1887 else:
1888 ana = phases.analyzeremotephases(self, cheads, remotephases)
1888 ana = phases.analyzeremotephases(self, cheads, remotephases)
1889 pheads, droots = ana
1889 pheads, droots = ana
1890 ### Apply remote phase on local
1890 ### Apply remote phase on local
1891 if remotephases.get('publishing', False):
1891 if remotephases.get('publishing', False):
1892 phases.advanceboundary(self, phases.public, cheads)
1892 phases.advanceboundary(self, phases.public, cheads)
1893 else: # publish = False
1893 else: # publish = False
1894 phases.advanceboundary(self, phases.public, pheads)
1894 phases.advanceboundary(self, phases.public, pheads)
1895 phases.advanceboundary(self, phases.draft, cheads)
1895 phases.advanceboundary(self, phases.draft, cheads)
1896 ### Apply local phase on remote
1896 ### Apply local phase on remote
1897
1897
1898 # Get the list of all revs draft on remote by public here.
1898 # Get the list of all revs draft on remote by public here.
1899 # XXX Beware that revset break if droots is not strictly
1899 # XXX Beware that revset break if droots is not strictly
1900 # XXX root we may want to ensure it is but it is costly
1900 # XXX root we may want to ensure it is but it is costly
1901 outdated = unfi.set('heads((%ln::%ln) and public())',
1901 outdated = unfi.set('heads((%ln::%ln) and public())',
1902 droots, cheads)
1902 droots, cheads)
1903 for newremotehead in outdated:
1903 for newremotehead in outdated:
1904 r = remote.pushkey('phases',
1904 r = remote.pushkey('phases',
1905 newremotehead.hex(),
1905 newremotehead.hex(),
1906 str(phases.draft),
1906 str(phases.draft),
1907 str(phases.public))
1907 str(phases.public))
1908 if not r:
1908 if not r:
1909 self.ui.warn(_('updating %s to public failed!\n')
1909 self.ui.warn(_('updating %s to public failed!\n')
1910 % newremotehead)
1910 % newremotehead)
1911 self.ui.debug('try to push obsolete markers to remote\n')
1911 self.ui.debug('try to push obsolete markers to remote\n')
1912 if (obsolete._enabled and self.obsstore and
1912 if (obsolete._enabled and self.obsstore and
1913 'obsolete' in remote.listkeys('namespaces')):
1913 'obsolete' in remote.listkeys('namespaces')):
1914 rslts = []
1914 rslts = []
1915 remotedata = self.listkeys('obsolete')
1915 remotedata = self.listkeys('obsolete')
1916 for key in sorted(remotedata, reverse=True):
1916 for key in sorted(remotedata, reverse=True):
1917 # reverse sort to ensure we end with dump0
1917 # reverse sort to ensure we end with dump0
1918 data = remotedata[key]
1918 data = remotedata[key]
1919 rslts.append(remote.pushkey('obsolete', key, '', data))
1919 rslts.append(remote.pushkey('obsolete', key, '', data))
1920 if [r for r in rslts if not r]:
1920 if [r for r in rslts if not r]:
1921 msg = _('failed to push some obsolete markers!\n')
1921 msg = _('failed to push some obsolete markers!\n')
1922 self.ui.warn(msg)
1922 self.ui.warn(msg)
1923 finally:
1923 finally:
1924 if lock is not None:
1924 if lock is not None:
1925 lock.release()
1925 lock.release()
1926 finally:
1926 finally:
1927 locallock.release()
1927 locallock.release()
1928
1928
1929 self.ui.debug("checking for updated bookmarks\n")
1929 self.ui.debug("checking for updated bookmarks\n")
1930 rb = remote.listkeys('bookmarks')
1930 rb = remote.listkeys('bookmarks')
1931 for k in rb.keys():
1931 for k in rb.keys():
1932 if k in unfi._bookmarks:
1932 if k in unfi._bookmarks:
1933 nr, nl = rb[k], hex(self._bookmarks[k])
1933 nr, nl = rb[k], hex(self._bookmarks[k])
1934 if nr in unfi:
1934 if nr in unfi:
1935 cr = unfi[nr]
1935 cr = unfi[nr]
1936 cl = unfi[nl]
1936 cl = unfi[nl]
1937 if bookmarks.validdest(unfi, cr, cl):
1937 if bookmarks.validdest(unfi, cr, cl):
1938 r = remote.pushkey('bookmarks', k, nr, nl)
1938 r = remote.pushkey('bookmarks', k, nr, nl)
1939 if r:
1939 if r:
1940 self.ui.status(_("updating bookmark %s\n") % k)
1940 self.ui.status(_("updating bookmark %s\n") % k)
1941 else:
1941 else:
1942 self.ui.warn(_('updating bookmark %s'
1942 self.ui.warn(_('updating bookmark %s'
1943 ' failed!\n') % k)
1943 ' failed!\n') % k)
1944
1944
1945 return ret
1945 return ret
1946
1946
1947 def changegroupinfo(self, nodes, source):
1947 def changegroupinfo(self, nodes, source):
1948 if self.ui.verbose or source == 'bundle':
1948 if self.ui.verbose or source == 'bundle':
1949 self.ui.status(_("%d changesets found\n") % len(nodes))
1949 self.ui.status(_("%d changesets found\n") % len(nodes))
1950 if self.ui.debugflag:
1950 if self.ui.debugflag:
1951 self.ui.debug("list of changesets:\n")
1951 self.ui.debug("list of changesets:\n")
1952 for node in nodes:
1952 for node in nodes:
1953 self.ui.debug("%s\n" % hex(node))
1953 self.ui.debug("%s\n" % hex(node))
1954
1954
1955 def changegroupsubset(self, bases, heads, source):
1955 def changegroupsubset(self, bases, heads, source):
1956 """Compute a changegroup consisting of all the nodes that are
1956 """Compute a changegroup consisting of all the nodes that are
1957 descendants of any of the bases and ancestors of any of the heads.
1957 descendants of any of the bases and ancestors of any of the heads.
1958 Return a chunkbuffer object whose read() method will return
1958 Return a chunkbuffer object whose read() method will return
1959 successive changegroup chunks.
1959 successive changegroup chunks.
1960
1960
1961 It is fairly complex as determining which filenodes and which
1961 It is fairly complex as determining which filenodes and which
1962 manifest nodes need to be included for the changeset to be complete
1962 manifest nodes need to be included for the changeset to be complete
1963 is non-trivial.
1963 is non-trivial.
1964
1964
1965 Another wrinkle is doing the reverse, figuring out which changeset in
1965 Another wrinkle is doing the reverse, figuring out which changeset in
1966 the changegroup a particular filenode or manifestnode belongs to.
1966 the changegroup a particular filenode or manifestnode belongs to.
1967 """
1967 """
1968 cl = self.changelog
1968 cl = self.changelog
1969 if not bases:
1969 if not bases:
1970 bases = [nullid]
1970 bases = [nullid]
1971 csets, bases, heads = cl.nodesbetween(bases, heads)
1971 csets, bases, heads = cl.nodesbetween(bases, heads)
1972 # We assume that all ancestors of bases are known
1972 # We assume that all ancestors of bases are known
1973 common = cl.ancestors([cl.rev(n) for n in bases])
1973 common = cl.ancestors([cl.rev(n) for n in bases])
1974 return self._changegroupsubset(common, csets, heads, source)
1974 return self._changegroupsubset(common, csets, heads, source)
1975
1975
1976 def getlocalbundle(self, source, outgoing):
1976 def getlocalbundle(self, source, outgoing):
1977 """Like getbundle, but taking a discovery.outgoing as an argument.
1977 """Like getbundle, but taking a discovery.outgoing as an argument.
1978
1978
1979 This is only implemented for local repos and reuses potentially
1979 This is only implemented for local repos and reuses potentially
1980 precomputed sets in outgoing."""
1980 precomputed sets in outgoing."""
1981 if not outgoing.missing:
1981 if not outgoing.missing:
1982 return None
1982 return None
1983 return self._changegroupsubset(outgoing.common,
1983 return self._changegroupsubset(outgoing.common,
1984 outgoing.missing,
1984 outgoing.missing,
1985 outgoing.missingheads,
1985 outgoing.missingheads,
1986 source)
1986 source)
1987
1987
1988 def getbundle(self, source, heads=None, common=None):
1988 def getbundle(self, source, heads=None, common=None):
1989 """Like changegroupsubset, but returns the set difference between the
1989 """Like changegroupsubset, but returns the set difference between the
1990 ancestors of heads and the ancestors common.
1990 ancestors of heads and the ancestors common.
1991
1991
1992 If heads is None, use the local heads. If common is None, use [nullid].
1992 If heads is None, use the local heads. If common is None, use [nullid].
1993
1993
1994 The nodes in common might not all be known locally due to the way the
1994 The nodes in common might not all be known locally due to the way the
1995 current discovery protocol works.
1995 current discovery protocol works.
1996 """
1996 """
1997 cl = self.changelog
1997 cl = self.changelog
1998 if common:
1998 if common:
1999 hasnode = cl.hasnode
1999 hasnode = cl.hasnode
2000 common = [n for n in common if hasnode(n)]
2000 common = [n for n in common if hasnode(n)]
2001 else:
2001 else:
2002 common = [nullid]
2002 common = [nullid]
2003 if not heads:
2003 if not heads:
2004 heads = cl.heads()
2004 heads = cl.heads()
2005 return self.getlocalbundle(source,
2005 return self.getlocalbundle(source,
2006 discovery.outgoing(cl, common, heads))
2006 discovery.outgoing(cl, common, heads))
2007
2007
2008 @unfilteredmethod
2008 @unfilteredmethod
2009 def _changegroupsubset(self, commonrevs, csets, heads, source):
2009 def _changegroupsubset(self, commonrevs, csets, heads, source):
2010
2010
2011 cl = self.changelog
2011 cl = self.changelog
2012 mf = self.manifest
2012 mf = self.manifest
2013 mfs = {} # needed manifests
2013 mfs = {} # needed manifests
2014 fnodes = {} # needed file nodes
2014 fnodes = {} # needed file nodes
2015 changedfiles = set()
2015 changedfiles = set()
2016 fstate = ['', {}]
2016 fstate = ['', {}]
2017 count = [0, 0]
2017 count = [0, 0]
2018
2018
2019 # can we go through the fast path ?
2019 # can we go through the fast path ?
2020 heads.sort()
2020 heads.sort()
2021 if heads == sorted(self.heads()):
2021 if heads == sorted(self.heads()):
2022 return self._changegroup(csets, source)
2022 return self._changegroup(csets, source)
2023
2023
2024 # slow path
2024 # slow path
2025 self.hook('preoutgoing', throw=True, source=source)
2025 self.hook('preoutgoing', throw=True, source=source)
2026 self.changegroupinfo(csets, source)
2026 self.changegroupinfo(csets, source)
2027
2027
2028 # filter any nodes that claim to be part of the known set
2028 # filter any nodes that claim to be part of the known set
2029 def prune(revlog, missing):
2029 def prune(revlog, missing):
2030 rr, rl = revlog.rev, revlog.linkrev
2030 rr, rl = revlog.rev, revlog.linkrev
2031 return [n for n in missing
2031 return [n for n in missing
2032 if rl(rr(n)) not in commonrevs]
2032 if rl(rr(n)) not in commonrevs]
2033
2033
2034 progress = self.ui.progress
2034 progress = self.ui.progress
2035 _bundling = _('bundling')
2035 _bundling = _('bundling')
2036 _changesets = _('changesets')
2036 _changesets = _('changesets')
2037 _manifests = _('manifests')
2037 _manifests = _('manifests')
2038 _files = _('files')
2038 _files = _('files')
2039
2039
2040 def lookup(revlog, x):
2040 def lookup(revlog, x):
2041 if revlog == cl:
2041 if revlog == cl:
2042 c = cl.read(x)
2042 c = cl.read(x)
2043 changedfiles.update(c[3])
2043 changedfiles.update(c[3])
2044 mfs.setdefault(c[0], x)
2044 mfs.setdefault(c[0], x)
2045 count[0] += 1
2045 count[0] += 1
2046 progress(_bundling, count[0],
2046 progress(_bundling, count[0],
2047 unit=_changesets, total=count[1])
2047 unit=_changesets, total=count[1])
2048 return x
2048 return x
2049 elif revlog == mf:
2049 elif revlog == mf:
2050 clnode = mfs[x]
2050 clnode = mfs[x]
2051 mdata = mf.readfast(x)
2051 mdata = mf.readfast(x)
2052 for f, n in mdata.iteritems():
2052 for f, n in mdata.iteritems():
2053 if f in changedfiles:
2053 if f in changedfiles:
2054 fnodes[f].setdefault(n, clnode)
2054 fnodes[f].setdefault(n, clnode)
2055 count[0] += 1
2055 count[0] += 1
2056 progress(_bundling, count[0],
2056 progress(_bundling, count[0],
2057 unit=_manifests, total=count[1])
2057 unit=_manifests, total=count[1])
2058 return clnode
2058 return clnode
2059 else:
2059 else:
2060 progress(_bundling, count[0], item=fstate[0],
2060 progress(_bundling, count[0], item=fstate[0],
2061 unit=_files, total=count[1])
2061 unit=_files, total=count[1])
2062 return fstate[1][x]
2062 return fstate[1][x]
2063
2063
2064 bundler = changegroup.bundle10(lookup)
2064 bundler = changegroup.bundle10(lookup)
2065 reorder = self.ui.config('bundle', 'reorder', 'auto')
2065 reorder = self.ui.config('bundle', 'reorder', 'auto')
2066 if reorder == 'auto':
2066 if reorder == 'auto':
2067 reorder = None
2067 reorder = None
2068 else:
2068 else:
2069 reorder = util.parsebool(reorder)
2069 reorder = util.parsebool(reorder)
2070
2070
2071 def gengroup():
2071 def gengroup():
2072 # Create a changenode group generator that will call our functions
2072 # Create a changenode group generator that will call our functions
2073 # back to lookup the owning changenode and collect information.
2073 # back to lookup the owning changenode and collect information.
2074 count[:] = [0, len(csets)]
2074 count[:] = [0, len(csets)]
2075 for chunk in cl.group(csets, bundler, reorder=reorder):
2075 for chunk in cl.group(csets, bundler, reorder=reorder):
2076 yield chunk
2076 yield chunk
2077 progress(_bundling, None)
2077 progress(_bundling, None)
2078
2078
2079 # Create a generator for the manifestnodes that calls our lookup
2079 # Create a generator for the manifestnodes that calls our lookup
2080 # and data collection functions back.
2080 # and data collection functions back.
2081 for f in changedfiles:
2081 for f in changedfiles:
2082 fnodes[f] = {}
2082 fnodes[f] = {}
2083 count[:] = [0, len(mfs)]
2083 count[:] = [0, len(mfs)]
2084 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
2084 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
2085 yield chunk
2085 yield chunk
2086 progress(_bundling, None)
2086 progress(_bundling, None)
2087
2087
2088 mfs.clear()
2088 mfs.clear()
2089
2089
2090 # Go through all our files in order sorted by name.
2090 # Go through all our files in order sorted by name.
2091 count[:] = [0, len(changedfiles)]
2091 count[:] = [0, len(changedfiles)]
2092 for fname in sorted(changedfiles):
2092 for fname in sorted(changedfiles):
2093 filerevlog = self.file(fname)
2093 filerevlog = self.file(fname)
2094 if not len(filerevlog):
2094 if not len(filerevlog):
2095 raise util.Abort(_("empty or missing revlog for %s")
2095 raise util.Abort(_("empty or missing revlog for %s")
2096 % fname)
2096 % fname)
2097 fstate[0] = fname
2097 fstate[0] = fname
2098 fstate[1] = fnodes.pop(fname, {})
2098 fstate[1] = fnodes.pop(fname, {})
2099
2099
2100 nodelist = prune(filerevlog, fstate[1])
2100 nodelist = prune(filerevlog, fstate[1])
2101 if nodelist:
2101 if nodelist:
2102 count[0] += 1
2102 count[0] += 1
2103 yield bundler.fileheader(fname)
2103 yield bundler.fileheader(fname)
2104 for chunk in filerevlog.group(nodelist, bundler, reorder):
2104 for chunk in filerevlog.group(nodelist, bundler, reorder):
2105 yield chunk
2105 yield chunk
2106
2106
2107 # Signal that no more groups are left.
2107 # Signal that no more groups are left.
2108 yield bundler.close()
2108 yield bundler.close()
2109 progress(_bundling, None)
2109 progress(_bundling, None)
2110
2110
2111 if csets:
2111 if csets:
2112 self.hook('outgoing', node=hex(csets[0]), source=source)
2112 self.hook('outgoing', node=hex(csets[0]), source=source)
2113
2113
2114 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2114 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2115
2115
2116 def changegroup(self, basenodes, source):
2116 def changegroup(self, basenodes, source):
2117 # to avoid a race we use changegroupsubset() (issue1320)
2117 # to avoid a race we use changegroupsubset() (issue1320)
2118 return self.changegroupsubset(basenodes, self.heads(), source)
2118 return self.changegroupsubset(basenodes, self.heads(), source)
2119
2119
2120 @unfilteredmethod
2120 @unfilteredmethod
2121 def _changegroup(self, nodes, source):
2121 def _changegroup(self, nodes, source):
2122 """Compute the changegroup of all nodes that we have that a recipient
2122 """Compute the changegroup of all nodes that we have that a recipient
2123 doesn't. Return a chunkbuffer object whose read() method will return
2123 doesn't. Return a chunkbuffer object whose read() method will return
2124 successive changegroup chunks.
2124 successive changegroup chunks.
2125
2125
2126 This is much easier than the previous function as we can assume that
2126 This is much easier than the previous function as we can assume that
2127 the recipient has any changenode we aren't sending them.
2127 the recipient has any changenode we aren't sending them.
2128
2128
2129 nodes is the set of nodes to send"""
2129 nodes is the set of nodes to send"""
2130
2130
2131 cl = self.changelog
2131 cl = self.changelog
2132 mf = self.manifest
2132 mf = self.manifest
2133 mfs = {}
2133 mfs = {}
2134 changedfiles = set()
2134 changedfiles = set()
2135 fstate = ['']
2135 fstate = ['']
2136 count = [0, 0]
2136 count = [0, 0]
2137
2137
2138 self.hook('preoutgoing', throw=True, source=source)
2138 self.hook('preoutgoing', throw=True, source=source)
2139 self.changegroupinfo(nodes, source)
2139 self.changegroupinfo(nodes, source)
2140
2140
2141 revset = set([cl.rev(n) for n in nodes])
2141 revset = set([cl.rev(n) for n in nodes])
2142
2142
2143 def gennodelst(log):
2143 def gennodelst(log):
2144 ln, llr = log.node, log.linkrev
2144 ln, llr = log.node, log.linkrev
2145 return [ln(r) for r in log if llr(r) in revset]
2145 return [ln(r) for r in log if llr(r) in revset]
2146
2146
2147 progress = self.ui.progress
2147 progress = self.ui.progress
2148 _bundling = _('bundling')
2148 _bundling = _('bundling')
2149 _changesets = _('changesets')
2149 _changesets = _('changesets')
2150 _manifests = _('manifests')
2150 _manifests = _('manifests')
2151 _files = _('files')
2151 _files = _('files')
2152
2152
2153 def lookup(revlog, x):
2153 def lookup(revlog, x):
2154 if revlog == cl:
2154 if revlog == cl:
2155 c = cl.read(x)
2155 c = cl.read(x)
2156 changedfiles.update(c[3])
2156 changedfiles.update(c[3])
2157 mfs.setdefault(c[0], x)
2157 mfs.setdefault(c[0], x)
2158 count[0] += 1
2158 count[0] += 1
2159 progress(_bundling, count[0],
2159 progress(_bundling, count[0],
2160 unit=_changesets, total=count[1])
2160 unit=_changesets, total=count[1])
2161 return x
2161 return x
2162 elif revlog == mf:
2162 elif revlog == mf:
2163 count[0] += 1
2163 count[0] += 1
2164 progress(_bundling, count[0],
2164 progress(_bundling, count[0],
2165 unit=_manifests, total=count[1])
2165 unit=_manifests, total=count[1])
2166 return cl.node(revlog.linkrev(revlog.rev(x)))
2166 return cl.node(revlog.linkrev(revlog.rev(x)))
2167 else:
2167 else:
2168 progress(_bundling, count[0], item=fstate[0],
2168 progress(_bundling, count[0], item=fstate[0],
2169 total=count[1], unit=_files)
2169 total=count[1], unit=_files)
2170 return cl.node(revlog.linkrev(revlog.rev(x)))
2170 return cl.node(revlog.linkrev(revlog.rev(x)))
2171
2171
2172 bundler = changegroup.bundle10(lookup)
2172 bundler = changegroup.bundle10(lookup)
2173 reorder = self.ui.config('bundle', 'reorder', 'auto')
2173 reorder = self.ui.config('bundle', 'reorder', 'auto')
2174 if reorder == 'auto':
2174 if reorder == 'auto':
2175 reorder = None
2175 reorder = None
2176 else:
2176 else:
2177 reorder = util.parsebool(reorder)
2177 reorder = util.parsebool(reorder)
2178
2178
2179 def gengroup():
2179 def gengroup():
2180 '''yield a sequence of changegroup chunks (strings)'''
2180 '''yield a sequence of changegroup chunks (strings)'''
2181 # construct a list of all changed files
2181 # construct a list of all changed files
2182
2182
2183 count[:] = [0, len(nodes)]
2183 count[:] = [0, len(nodes)]
2184 for chunk in cl.group(nodes, bundler, reorder=reorder):
2184 for chunk in cl.group(nodes, bundler, reorder=reorder):
2185 yield chunk
2185 yield chunk
2186 progress(_bundling, None)
2186 progress(_bundling, None)
2187
2187
2188 count[:] = [0, len(mfs)]
2188 count[:] = [0, len(mfs)]
2189 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
2189 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
2190 yield chunk
2190 yield chunk
2191 progress(_bundling, None)
2191 progress(_bundling, None)
2192
2192
2193 count[:] = [0, len(changedfiles)]
2193 count[:] = [0, len(changedfiles)]
2194 for fname in sorted(changedfiles):
2194 for fname in sorted(changedfiles):
2195 filerevlog = self.file(fname)
2195 filerevlog = self.file(fname)
2196 if not len(filerevlog):
2196 if not len(filerevlog):
2197 raise util.Abort(_("empty or missing revlog for %s")
2197 raise util.Abort(_("empty or missing revlog for %s")
2198 % fname)
2198 % fname)
2199 fstate[0] = fname
2199 fstate[0] = fname
2200 nodelist = gennodelst(filerevlog)
2200 nodelist = gennodelst(filerevlog)
2201 if nodelist:
2201 if nodelist:
2202 count[0] += 1
2202 count[0] += 1
2203 yield bundler.fileheader(fname)
2203 yield bundler.fileheader(fname)
2204 for chunk in filerevlog.group(nodelist, bundler, reorder):
2204 for chunk in filerevlog.group(nodelist, bundler, reorder):
2205 yield chunk
2205 yield chunk
2206 yield bundler.close()
2206 yield bundler.close()
2207 progress(_bundling, None)
2207 progress(_bundling, None)
2208
2208
2209 if nodes:
2209 if nodes:
2210 self.hook('outgoing', node=hex(nodes[0]), source=source)
2210 self.hook('outgoing', node=hex(nodes[0]), source=source)
2211
2211
2212 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2212 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2213
2213
2214 @unfilteredmethod
2214 @unfilteredmethod
2215 def addchangegroup(self, source, srctype, url, emptyok=False):
2215 def addchangegroup(self, source, srctype, url, emptyok=False):
2216 """Add the changegroup returned by source.read() to this repo.
2216 """Add the changegroup returned by source.read() to this repo.
2217 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2217 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2218 the URL of the repo where this changegroup is coming from.
2218 the URL of the repo where this changegroup is coming from.
2219
2219
2220 Return an integer summarizing the change to this repo:
2220 Return an integer summarizing the change to this repo:
2221 - nothing changed or no source: 0
2221 - nothing changed or no source: 0
2222 - more heads than before: 1+added heads (2..n)
2222 - more heads than before: 1+added heads (2..n)
2223 - fewer heads than before: -1-removed heads (-2..-n)
2223 - fewer heads than before: -1-removed heads (-2..-n)
2224 - number of heads stays the same: 1
2224 - number of heads stays the same: 1
2225 """
2225 """
2226 def csmap(x):
2226 def csmap(x):
2227 self.ui.debug("add changeset %s\n" % short(x))
2227 self.ui.debug("add changeset %s\n" % short(x))
2228 return len(cl)
2228 return len(cl)
2229
2229
2230 def revmap(x):
2230 def revmap(x):
2231 return cl.rev(x)
2231 return cl.rev(x)
2232
2232
2233 if not source:
2233 if not source:
2234 return 0
2234 return 0
2235
2235
2236 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2236 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2237
2237
2238 changesets = files = revisions = 0
2238 changesets = files = revisions = 0
2239 efiles = set()
2239 efiles = set()
2240
2240
2241 # write changelog data to temp files so concurrent readers will not see
2241 # write changelog data to temp files so concurrent readers will not see
2242 # inconsistent view
2242 # inconsistent view
2243 cl = self.changelog
2243 cl = self.changelog
2244 cl.delayupdate()
2244 cl.delayupdate()
2245 oldheads = cl.heads()
2245 oldheads = cl.heads()
2246
2246
2247 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2247 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2248 try:
2248 try:
2249 trp = weakref.proxy(tr)
2249 trp = weakref.proxy(tr)
2250 # pull off the changeset group
2250 # pull off the changeset group
2251 self.ui.status(_("adding changesets\n"))
2251 self.ui.status(_("adding changesets\n"))
2252 clstart = len(cl)
2252 clstart = len(cl)
2253 class prog(object):
2253 class prog(object):
2254 step = _('changesets')
2254 step = _('changesets')
2255 count = 1
2255 count = 1
2256 ui = self.ui
2256 ui = self.ui
2257 total = None
2257 total = None
2258 def __call__(self):
2258 def __call__(self):
2259 self.ui.progress(self.step, self.count, unit=_('chunks'),
2259 self.ui.progress(self.step, self.count, unit=_('chunks'),
2260 total=self.total)
2260 total=self.total)
2261 self.count += 1
2261 self.count += 1
2262 pr = prog()
2262 pr = prog()
2263 source.callback = pr
2263 source.callback = pr
2264
2264
2265 source.changelogheader()
2265 source.changelogheader()
2266 srccontent = cl.addgroup(source, csmap, trp)
2266 srccontent = cl.addgroup(source, csmap, trp)
2267 if not (srccontent or emptyok):
2267 if not (srccontent or emptyok):
2268 raise util.Abort(_("received changelog group is empty"))
2268 raise util.Abort(_("received changelog group is empty"))
2269 clend = len(cl)
2269 clend = len(cl)
2270 changesets = clend - clstart
2270 changesets = clend - clstart
2271 for c in xrange(clstart, clend):
2271 for c in xrange(clstart, clend):
2272 efiles.update(self[c].files())
2272 efiles.update(self[c].files())
2273 efiles = len(efiles)
2273 efiles = len(efiles)
2274 self.ui.progress(_('changesets'), None)
2274 self.ui.progress(_('changesets'), None)
2275
2275
2276 # pull off the manifest group
2276 # pull off the manifest group
2277 self.ui.status(_("adding manifests\n"))
2277 self.ui.status(_("adding manifests\n"))
2278 pr.step = _('manifests')
2278 pr.step = _('manifests')
2279 pr.count = 1
2279 pr.count = 1
2280 pr.total = changesets # manifests <= changesets
2280 pr.total = changesets # manifests <= changesets
2281 # no need to check for empty manifest group here:
2281 # no need to check for empty manifest group here:
2282 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2282 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2283 # no new manifest will be created and the manifest group will
2283 # no new manifest will be created and the manifest group will
2284 # be empty during the pull
2284 # be empty during the pull
2285 source.manifestheader()
2285 source.manifestheader()
2286 self.manifest.addgroup(source, revmap, trp)
2286 self.manifest.addgroup(source, revmap, trp)
2287 self.ui.progress(_('manifests'), None)
2287 self.ui.progress(_('manifests'), None)
2288
2288
2289 needfiles = {}
2289 needfiles = {}
2290 if self.ui.configbool('server', 'validate', default=False):
2290 if self.ui.configbool('server', 'validate', default=False):
2291 # validate incoming csets have their manifests
2291 # validate incoming csets have their manifests
2292 for cset in xrange(clstart, clend):
2292 for cset in xrange(clstart, clend):
2293 mfest = self.changelog.read(self.changelog.node(cset))[0]
2293 mfest = self.changelog.read(self.changelog.node(cset))[0]
2294 mfest = self.manifest.readdelta(mfest)
2294 mfest = self.manifest.readdelta(mfest)
2295 # store file nodes we must see
2295 # store file nodes we must see
2296 for f, n in mfest.iteritems():
2296 for f, n in mfest.iteritems():
2297 needfiles.setdefault(f, set()).add(n)
2297 needfiles.setdefault(f, set()).add(n)
2298
2298
2299 # process the files
2299 # process the files
2300 self.ui.status(_("adding file changes\n"))
2300 self.ui.status(_("adding file changes\n"))
2301 pr.step = _('files')
2301 pr.step = _('files')
2302 pr.count = 1
2302 pr.count = 1
2303 pr.total = efiles
2303 pr.total = efiles
2304 source.callback = None
2304 source.callback = None
2305
2305
2306 while True:
2306 while True:
2307 chunkdata = source.filelogheader()
2307 chunkdata = source.filelogheader()
2308 if not chunkdata:
2308 if not chunkdata:
2309 break
2309 break
2310 f = chunkdata["filename"]
2310 f = chunkdata["filename"]
2311 self.ui.debug("adding %s revisions\n" % f)
2311 self.ui.debug("adding %s revisions\n" % f)
2312 pr()
2312 pr()
2313 fl = self.file(f)
2313 fl = self.file(f)
2314 o = len(fl)
2314 o = len(fl)
2315 if not fl.addgroup(source, revmap, trp):
2315 if not fl.addgroup(source, revmap, trp):
2316 raise util.Abort(_("received file revlog group is empty"))
2316 raise util.Abort(_("received file revlog group is empty"))
2317 revisions += len(fl) - o
2317 revisions += len(fl) - o
2318 files += 1
2318 files += 1
2319 if f in needfiles:
2319 if f in needfiles:
2320 needs = needfiles[f]
2320 needs = needfiles[f]
2321 for new in xrange(o, len(fl)):
2321 for new in xrange(o, len(fl)):
2322 n = fl.node(new)
2322 n = fl.node(new)
2323 if n in needs:
2323 if n in needs:
2324 needs.remove(n)
2324 needs.remove(n)
2325 if not needs:
2325 if not needs:
2326 del needfiles[f]
2326 del needfiles[f]
2327 self.ui.progress(_('files'), None)
2327 self.ui.progress(_('files'), None)
2328
2328
2329 for f, needs in needfiles.iteritems():
2329 for f, needs in needfiles.iteritems():
2330 fl = self.file(f)
2330 fl = self.file(f)
2331 for n in needs:
2331 for n in needs:
2332 try:
2332 try:
2333 fl.rev(n)
2333 fl.rev(n)
2334 except error.LookupError:
2334 except error.LookupError:
2335 raise util.Abort(
2335 raise util.Abort(
2336 _('missing file data for %s:%s - run hg verify') %
2336 _('missing file data for %s:%s - run hg verify') %
2337 (f, hex(n)))
2337 (f, hex(n)))
2338
2338
2339 dh = 0
2339 dh = 0
2340 if oldheads:
2340 if oldheads:
2341 heads = cl.heads()
2341 heads = cl.heads()
2342 dh = len(heads) - len(oldheads)
2342 dh = len(heads) - len(oldheads)
2343 for h in heads:
2343 for h in heads:
2344 if h not in oldheads and self[h].closesbranch():
2344 if h not in oldheads and self[h].closesbranch():
2345 dh -= 1
2345 dh -= 1
2346 htext = ""
2346 htext = ""
2347 if dh:
2347 if dh:
2348 htext = _(" (%+d heads)") % dh
2348 htext = _(" (%+d heads)") % dh
2349
2349
2350 self.ui.status(_("added %d changesets"
2350 self.ui.status(_("added %d changesets"
2351 " with %d changes to %d files%s\n")
2351 " with %d changes to %d files%s\n")
2352 % (changesets, revisions, files, htext))
2352 % (changesets, revisions, files, htext))
2353 self.invalidatevolatilesets()
2353 self.invalidatevolatilesets()
2354
2354
2355 if changesets > 0:
2355 if changesets > 0:
2356 p = lambda: cl.writepending() and self.root or ""
2356 p = lambda: cl.writepending() and self.root or ""
2357 self.hook('pretxnchangegroup', throw=True,
2357 self.hook('pretxnchangegroup', throw=True,
2358 node=hex(cl.node(clstart)), source=srctype,
2358 node=hex(cl.node(clstart)), source=srctype,
2359 url=url, pending=p)
2359 url=url, pending=p)
2360
2360
2361 added = [cl.node(r) for r in xrange(clstart, clend)]
2361 added = [cl.node(r) for r in xrange(clstart, clend)]
2362 publishing = self.ui.configbool('phases', 'publish', True)
2362 publishing = self.ui.configbool('phases', 'publish', True)
2363 if srctype == 'push':
2363 if srctype == 'push':
2364 # Old server can not push the boundary themself.
2364 # Old server can not push the boundary themself.
2365 # New server won't push the boundary if changeset already
2365 # New server won't push the boundary if changeset already
2366 # existed locally as secrete
2366 # existed locally as secrete
2367 #
2367 #
2368 # We should not use added here but the list of all change in
2368 # We should not use added here but the list of all change in
2369 # the bundle
2369 # the bundle
2370 if publishing:
2370 if publishing:
2371 phases.advanceboundary(self, phases.public, srccontent)
2371 phases.advanceboundary(self, phases.public, srccontent)
2372 else:
2372 else:
2373 phases.advanceboundary(self, phases.draft, srccontent)
2373 phases.advanceboundary(self, phases.draft, srccontent)
2374 phases.retractboundary(self, phases.draft, added)
2374 phases.retractboundary(self, phases.draft, added)
2375 elif srctype != 'strip':
2375 elif srctype != 'strip':
2376 # publishing only alter behavior during push
2376 # publishing only alter behavior during push
2377 #
2377 #
2378 # strip should not touch boundary at all
2378 # strip should not touch boundary at all
2379 phases.retractboundary(self, phases.draft, added)
2379 phases.retractboundary(self, phases.draft, added)
2380
2380
2381 # make changelog see real files again
2381 # make changelog see real files again
2382 cl.finalize(trp)
2382 cl.finalize(trp)
2383
2383
2384 tr.close()
2384 tr.close()
2385
2385
2386 if changesets > 0:
2386 if changesets > 0:
2387 if srctype != 'strip':
2387 if srctype != 'strip':
2388 # During strip, branchcache is invalid but coming call to
2388 # During strip, branchcache is invalid but coming call to
2389 # `destroyed` will repair it.
2389 # `destroyed` will repair it.
2390 # In other case we can safely update cache on disk.
2390 # In other case we can safely update cache on disk.
2391 branchmap.updatecache(self)
2391 branchmap.updatecache(self)
2392 def runhooks():
2392 def runhooks():
2393 # forcefully update the on-disk branch cache
2393 # forcefully update the on-disk branch cache
2394 self.ui.debug("updating the branch cache\n")
2394 self.ui.debug("updating the branch cache\n")
2395 self.hook("changegroup", node=hex(cl.node(clstart)),
2395 self.hook("changegroup", node=hex(cl.node(clstart)),
2396 source=srctype, url=url)
2396 source=srctype, url=url)
2397
2397
2398 for n in added:
2398 for n in added:
2399 self.hook("incoming", node=hex(n), source=srctype,
2399 self.hook("incoming", node=hex(n), source=srctype,
2400 url=url)
2400 url=url)
2401 self._afterlock(runhooks)
2401 self._afterlock(runhooks)
2402
2402
2403 finally:
2403 finally:
2404 tr.release()
2404 tr.release()
2405 # never return 0 here:
2405 # never return 0 here:
2406 if dh < 0:
2406 if dh < 0:
2407 return dh - 1
2407 return dh - 1
2408 else:
2408 else:
2409 return dh + 1
2409 return dh + 1
2410
2410
2411 def stream_in(self, remote, requirements):
2411 def stream_in(self, remote, requirements):
2412 lock = self.lock()
2412 lock = self.lock()
2413 try:
2413 try:
2414 # Save remote branchmap. We will use it later
2414 # Save remote branchmap. We will use it later
2415 # to speed up branchcache creation
2415 # to speed up branchcache creation
2416 rbranchmap = None
2416 rbranchmap = None
2417 if remote.capable("branchmap"):
2417 if remote.capable("branchmap"):
2418 rbranchmap = remote.branchmap()
2418 rbranchmap = remote.branchmap()
2419
2419
2420 fp = remote.stream_out()
2420 fp = remote.stream_out()
2421 l = fp.readline()
2421 l = fp.readline()
2422 try:
2422 try:
2423 resp = int(l)
2423 resp = int(l)
2424 except ValueError:
2424 except ValueError:
2425 raise error.ResponseError(
2425 raise error.ResponseError(
2426 _('unexpected response from remote server:'), l)
2426 _('unexpected response from remote server:'), l)
2427 if resp == 1:
2427 if resp == 1:
2428 raise util.Abort(_('operation forbidden by server'))
2428 raise util.Abort(_('operation forbidden by server'))
2429 elif resp == 2:
2429 elif resp == 2:
2430 raise util.Abort(_('locking the remote repository failed'))
2430 raise util.Abort(_('locking the remote repository failed'))
2431 elif resp != 0:
2431 elif resp != 0:
2432 raise util.Abort(_('the server sent an unknown error code'))
2432 raise util.Abort(_('the server sent an unknown error code'))
2433 self.ui.status(_('streaming all changes\n'))
2433 self.ui.status(_('streaming all changes\n'))
2434 l = fp.readline()
2434 l = fp.readline()
2435 try:
2435 try:
2436 total_files, total_bytes = map(int, l.split(' ', 1))
2436 total_files, total_bytes = map(int, l.split(' ', 1))
2437 except (ValueError, TypeError):
2437 except (ValueError, TypeError):
2438 raise error.ResponseError(
2438 raise error.ResponseError(
2439 _('unexpected response from remote server:'), l)
2439 _('unexpected response from remote server:'), l)
2440 self.ui.status(_('%d files to transfer, %s of data\n') %
2440 self.ui.status(_('%d files to transfer, %s of data\n') %
2441 (total_files, util.bytecount(total_bytes)))
2441 (total_files, util.bytecount(total_bytes)))
2442 handled_bytes = 0
2442 handled_bytes = 0
2443 self.ui.progress(_('clone'), 0, total=total_bytes)
2443 self.ui.progress(_('clone'), 0, total=total_bytes)
2444 start = time.time()
2444 start = time.time()
2445 for i in xrange(total_files):
2445 for i in xrange(total_files):
2446 # XXX doesn't support '\n' or '\r' in filenames
2446 # XXX doesn't support '\n' or '\r' in filenames
2447 l = fp.readline()
2447 l = fp.readline()
2448 try:
2448 try:
2449 name, size = l.split('\0', 1)
2449 name, size = l.split('\0', 1)
2450 size = int(size)
2450 size = int(size)
2451 except (ValueError, TypeError):
2451 except (ValueError, TypeError):
2452 raise error.ResponseError(
2452 raise error.ResponseError(
2453 _('unexpected response from remote server:'), l)
2453 _('unexpected response from remote server:'), l)
2454 if self.ui.debugflag:
2454 if self.ui.debugflag:
2455 self.ui.debug('adding %s (%s)\n' %
2455 self.ui.debug('adding %s (%s)\n' %
2456 (name, util.bytecount(size)))
2456 (name, util.bytecount(size)))
2457 # for backwards compat, name was partially encoded
2457 # for backwards compat, name was partially encoded
2458 ofp = self.sopener(store.decodedir(name), 'w')
2458 ofp = self.sopener(store.decodedir(name), 'w')
2459 for chunk in util.filechunkiter(fp, limit=size):
2459 for chunk in util.filechunkiter(fp, limit=size):
2460 handled_bytes += len(chunk)
2460 handled_bytes += len(chunk)
2461 self.ui.progress(_('clone'), handled_bytes,
2461 self.ui.progress(_('clone'), handled_bytes,
2462 total=total_bytes)
2462 total=total_bytes)
2463 ofp.write(chunk)
2463 ofp.write(chunk)
2464 ofp.close()
2464 ofp.close()
2465 elapsed = time.time() - start
2465 elapsed = time.time() - start
2466 if elapsed <= 0:
2466 if elapsed <= 0:
2467 elapsed = 0.001
2467 elapsed = 0.001
2468 self.ui.progress(_('clone'), None)
2468 self.ui.progress(_('clone'), None)
2469 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2469 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2470 (util.bytecount(total_bytes), elapsed,
2470 (util.bytecount(total_bytes), elapsed,
2471 util.bytecount(total_bytes / elapsed)))
2471 util.bytecount(total_bytes / elapsed)))
2472
2472
2473 # new requirements = old non-format requirements +
2473 # new requirements = old non-format requirements +
2474 # new format-related
2474 # new format-related
2475 # requirements from the streamed-in repository
2475 # requirements from the streamed-in repository
2476 requirements.update(set(self.requirements) - self.supportedformats)
2476 requirements.update(set(self.requirements) - self.supportedformats)
2477 self._applyrequirements(requirements)
2477 self._applyrequirements(requirements)
2478 self._writerequirements()
2478 self._writerequirements()
2479
2479
2480 if rbranchmap:
2480 if rbranchmap:
2481 rbheads = []
2481 rbheads = []
2482 for bheads in rbranchmap.itervalues():
2482 for bheads in rbranchmap.itervalues():
2483 rbheads.extend(bheads)
2483 rbheads.extend(bheads)
2484
2484
2485 if rbheads:
2485 if rbheads:
2486 rtiprev = max((int(self.changelog.rev(node))
2486 rtiprev = max((int(self.changelog.rev(node))
2487 for node in rbheads))
2487 for node in rbheads))
2488 cache = branchmap.branchcache(rbranchmap,
2488 cache = branchmap.branchcache(rbranchmap,
2489 self[rtiprev].node(),
2489 self[rtiprev].node(),
2490 rtiprev)
2490 rtiprev)
2491 self._branchcaches[None] = cache
2491 self._branchcaches[None] = cache
2492 cache.write(self.unfiltered())
2492 cache.write(self.unfiltered())
2493 self.invalidate()
2493 self.invalidate()
2494 return len(self.heads()) + 1
2494 return len(self.heads()) + 1
2495 finally:
2495 finally:
2496 lock.release()
2496 lock.release()
2497
2497
2498 def clone(self, remote, heads=[], stream=False):
2498 def clone(self, remote, heads=[], stream=False):
2499 '''clone remote repository.
2499 '''clone remote repository.
2500
2500
2501 keyword arguments:
2501 keyword arguments:
2502 heads: list of revs to clone (forces use of pull)
2502 heads: list of revs to clone (forces use of pull)
2503 stream: use streaming clone if possible'''
2503 stream: use streaming clone if possible'''
2504
2504
2505 # now, all clients that can request uncompressed clones can
2505 # now, all clients that can request uncompressed clones can
2506 # read repo formats supported by all servers that can serve
2506 # read repo formats supported by all servers that can serve
2507 # them.
2507 # them.
2508
2508
2509 # if revlog format changes, client will have to check version
2509 # if revlog format changes, client will have to check version
2510 # and format flags on "stream" capability, and use
2510 # and format flags on "stream" capability, and use
2511 # uncompressed only if compatible.
2511 # uncompressed only if compatible.
2512
2512
2513 if not stream:
2513 if not stream:
2514 # if the server explicitly prefers to stream (for fast LANs)
2514 # if the server explicitly prefers to stream (for fast LANs)
2515 stream = remote.capable('stream-preferred')
2515 stream = remote.capable('stream-preferred')
2516
2516
2517 if stream and not heads:
2517 if stream and not heads:
2518 # 'stream' means remote revlog format is revlogv1 only
2518 # 'stream' means remote revlog format is revlogv1 only
2519 if remote.capable('stream'):
2519 if remote.capable('stream'):
2520 return self.stream_in(remote, set(('revlogv1',)))
2520 return self.stream_in(remote, set(('revlogv1',)))
2521 # otherwise, 'streamreqs' contains the remote revlog format
2521 # otherwise, 'streamreqs' contains the remote revlog format
2522 streamreqs = remote.capable('streamreqs')
2522 streamreqs = remote.capable('streamreqs')
2523 if streamreqs:
2523 if streamreqs:
2524 streamreqs = set(streamreqs.split(','))
2524 streamreqs = set(streamreqs.split(','))
2525 # if we support it, stream in and adjust our requirements
2525 # if we support it, stream in and adjust our requirements
2526 if not streamreqs - self.supportedformats:
2526 if not streamreqs - self.supportedformats:
2527 return self.stream_in(remote, streamreqs)
2527 return self.stream_in(remote, streamreqs)
2528 return self.pull(remote, heads)
2528 return self.pull(remote, heads)
2529
2529
2530 def pushkey(self, namespace, key, old, new):
2530 def pushkey(self, namespace, key, old, new):
2531 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2531 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2532 old=old, new=new)
2532 old=old, new=new)
2533 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2533 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2534 ret = pushkey.push(self, namespace, key, old, new)
2534 ret = pushkey.push(self, namespace, key, old, new)
2535 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2535 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2536 ret=ret)
2536 ret=ret)
2537 return ret
2537 return ret
2538
2538
2539 def listkeys(self, namespace):
2539 def listkeys(self, namespace):
2540 self.hook('prelistkeys', throw=True, namespace=namespace)
2540 self.hook('prelistkeys', throw=True, namespace=namespace)
2541 self.ui.debug('listing keys for "%s"\n' % namespace)
2541 self.ui.debug('listing keys for "%s"\n' % namespace)
2542 values = pushkey.list(self, namespace)
2542 values = pushkey.list(self, namespace)
2543 self.hook('listkeys', namespace=namespace, values=values)
2543 self.hook('listkeys', namespace=namespace, values=values)
2544 return values
2544 return values
2545
2545
2546 def debugwireargs(self, one, two, three=None, four=None, five=None):
2546 def debugwireargs(self, one, two, three=None, four=None, five=None):
2547 '''used to test argument passing over the wire'''
2547 '''used to test argument passing over the wire'''
2548 return "%s %s %s %s %s" % (one, two, three, four, five)
2548 return "%s %s %s %s %s" % (one, two, three, four, five)
2549
2549
2550 def savecommitmessage(self, text):
2550 def savecommitmessage(self, text):
2551 fp = self.opener('last-message.txt', 'wb')
2551 fp = self.opener('last-message.txt', 'wb')
2552 try:
2552 try:
2553 fp.write(text)
2553 fp.write(text)
2554 finally:
2554 finally:
2555 fp.close()
2555 fp.close()
2556 return self.pathto(fp.name[len(self.root) + 1:])
2556 return self.pathto(fp.name[len(self.root) + 1:])
2557
2557
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback renaming each (src, dest) pair in ``files``.

    The pairs are snapshotted immediately so later mutation of ``files``
    by the caller does not affect the callback."""
    renamefiles = map(tuple, files)
    def a():
        for src, dest in renamefiles:
            try:
                util.rename(src, dest)
            except OSError: # journal file does not yet exist
                pass
    return a
2568
2568
def undoname(fn):
    """Map a journal file path to the corresponding undo file path.

    Only the first 'journal' occurrence in the basename is rewritten;
    the basename is required (asserted) to start with 'journal'."""
    directory, basename = os.path.split(fn)
    assert basename.startswith('journal')
    undobase = basename.replace('journal', 'undo', 1)
    return os.path.join(directory, undobase)
2573
2573
def instance(ui, path, create):
    """Repository factory entry point: open (or create) a local repo.

    ``path`` may be a URL; it is converted to a local filesystem path."""
    return localrepository(ui, util.urllocalpath(path), create)
2576
2576
def islocal(path):
    """Repository-type predicate: this module always handles local repos."""
    return True
@@ -1,194 +1,194
1 # repoview.py - Filtered view of a localrepo object
1 # repoview.py - Filtered view of a localrepo object
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import copy
9 import copy
10 import phases
10 import phases
11 import util
11 import util
12 import obsolete, bookmarks, revset
12 import obsolete, bookmarks, revset
13
13
14
14
def hideablerevs(repo):
    """Return the revisions that are candidates for hiding.

    Currently the obsolete revisions.  Kept as a standalone function so
    extensions can wrap it."""
    return obsolete.getrevs(repo, 'obsolete')
20
20
def computehidden(repo):
    """Compute the set of revisions filtered out by the 'visible' view.

    A hideable revision stays visible if it has a visible descendant, is
    an ancestor of a working directory parent, or is an ancestor of a
    bookmarked revision."""
    assert not repo.changelog.filteredrevs
    hideable = hideablerevs(repo)
    if not hideable:
        return frozenset()
    cl = repo.changelog
    firsthideable = min(hideable)
    revs = cl.revs(start=firsthideable)
    # non-hideable children pin their hideable ancestors as visible
    blockers = [r for r in revset._children(repo, revs, hideable)
                if r not in hideable]
    # working directory parents must stay visible
    for par in repo[None].parents():
        blockers.append(par.rev())
    # bookmarked revisions must stay visible
    for bm in bookmarks.listbookmarks(repo).values():
        blockers.append(repo[bm].rev())
    blocked = cl.ancestors(blockers, inclusive=True)
    return frozenset(r for r in hideable if r not in blocked)
40
40
def computeunserved(repo):
    """Compute the set of revisions filtered out by the 'served' view.

    Secret and hidden changesets should not pretend to be here: the
    result is the hidden set ('visible' filter) plus all secret-phase
    revisions."""
    assert not repo.changelog.filteredrevs
    # fast path in simple case to avoid impact of non optimised code
    hiddens = filterrevs(repo, 'visible')
    if not phases.hassecret(repo):
        # no secret changesets: the served filter equals the visible one
        return hiddens
    cl = repo.changelog
    secret = phases.secret
    getphase = repo._phasecache.phase
    # only revisions at or above the first secret root can be secret
    first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret])
    revs = cl.revs(start=first)
    secrets = set(r for r in revs if getphase(repo, r) >= secret)
    return frozenset(hiddens | secrets)
59
59
def computemutable(repo):
    """Compute the set of revisions filtered out by the 'immutable' view.

    Filters every revision in a non-public phase.  (The previous docstring
    was copy-pasted from computeunserved and described the wrong filter.)"""
    assert not repo.changelog.filteredrevs
    # fast check to avoid revset call on huge repo: any non-public
    # phase root at all?
    if util.any(repo._phasecache.phaseroots[1:]):
        getphase = repo._phasecache.phase
        # 'base' already filters everything from the first mutable rev on,
        # so only those candidates need a phase check
        maymutable = filterrevs(repo, 'base')
        return frozenset(r for r in maymutable if getphase(repo, r))
    return frozenset()
71
71
def computeimpactable(repo):
    """Compute the set of revisions filtered out by the 'base' view:
    everything that could be impacted by a mutable revision.

    The mutable filter can still be invalidated when:

    - hidden changesets are garbage collected,
    - the public phase is moved backward,
    - something changes in the filtering (this could be fixed).

    This filters out every mutable changeset and every public changeset
    that may be impacted by something happening to a mutable revision,
    by filtering everything with a revision number equal to or higher
    than the first mutable changeset."""
    assert not repo.changelog.filteredrevs
    cl = repo.changelog
    # lowest revision among the roots of each non-public phase
    mutablestarts = [min(cl.rev(r) for r in roots)
                     for roots in repo._phasecache.phaseroots[1:]
                     if roots]
    # default to len(cl) (empty result) when no mutable root exists
    firstmutable = min([len(cl)] + mutablestarts)
    return frozenset(xrange(firstmutable, len(cl)))
94
94
# function to compute filtered set: filter name -> computing function
filtertable = {'visible': computehidden,
               'served': computeunserved,
               'immutable': computemutable,
               'base': computeimpactable}

### Nearest subset relation
# Nearest subset of filter X is a filter Y so that:
# * Y is included in X,
# * X - Y is as small as possible.
# This creates an ordering used for branchmap purposes.
# The ordering may be partial.
#
# Each filter below is the nearest subset of the one preceding it, so the
# mapping is derived from the chain itself.
_filterchain = (None, 'visible', 'served', 'immutable', 'base')
subsettable = dict(zip(_filterchain, _filterchain[1:]))
110
110
def filterrevs(repo, filtername):
    """Return the set of revisions filtered out under ``filtername``.

    Results are memoized in repo.filteredrevcache; on a miss the filter's
    compute function runs against the unfiltered repository."""
    cache = repo.filteredrevcache
    if filtername not in cache:
        compute = filtertable[filtername]
        cache[filtername] = compute(repo.unfiltered())
    return cache[filtername]
117
117
118 class repoview(object):
118 class repoview(object):
119 """Provide a read/write view of a repo through a filtered changelog
119 """Provide a read/write view of a repo through a filtered changelog
120
120
121 This object is used to access a filtered version of a repository without
121 This object is used to access a filtered version of a repository without
122 altering the original repository object itself. We can not alter the
122 altering the original repository object itself. We can not alter the
123 original object for two main reasons:
123 original object for two main reasons:
124 - It prevents the use of a repo with multiple filters at the same time. In
124 - It prevents the use of a repo with multiple filters at the same time. In
125 particular when multiple threads are involved.
125 particular when multiple threads are involved.
126 - It makes scope of the filtering harder to control.
126 - It makes scope of the filtering harder to control.
127
127
128 This object behaves very closely to the original repository. All attribute
128 This object behaves very closely to the original repository. All attribute
129 operations are done on the original repository:
129 operations are done on the original repository:
130 - An access to `repoview.someattr` actually returns `repo.someattr`,
130 - An access to `repoview.someattr` actually returns `repo.someattr`,
131 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
131 - A write to `repoview.someattr` actually sets value of `repo.someattr`,
132 - A deletion of `repoview.someattr` actually drops `someattr`
132 - A deletion of `repoview.someattr` actually drops `someattr`
133 from `repo.__dict__`.
133 from `repo.__dict__`.
134
134
135 The only exception is the `changelog` property. It is overridden to return
135 The only exception is the `changelog` property. It is overridden to return
136 a (surface) copy of `repo.changelog` with some revisions filtered. The
136 a (surface) copy of `repo.changelog` with some revisions filtered. The
137 `filtername` attribute of the view control the revisions that need to be
137 `filtername` attribute of the view control the revisions that need to be
138 filtered. (the fact the changelog is copied is an implementation detail).
138 filtered. (the fact the changelog is copied is an implementation detail).
139
139
140 Unlike attributes, this object intercepts all method calls. This means that
140 Unlike attributes, this object intercepts all method calls. This means that
141 all methods are run on the `repoview` object with the filtered `changelog`
141 all methods are run on the `repoview` object with the filtered `changelog`
142 property. For this purpose the simple `repoview` class must be mixed with
142 property. For this purpose the simple `repoview` class must be mixed with
143 the actual class of the repository. This ensures that the resulting
143 the actual class of the repository. This ensures that the resulting
144 `repoview` object have the very same methods than the repo object. This
144 `repoview` object have the very same methods than the repo object. This
145 leads to the property below.
145 leads to the property below.
146
146
147 repoview.method() --> repo.__class__.method(repoview)
147 repoview.method() --> repo.__class__.method(repoview)
148
148
149 The inheritance has to be done dynamically because `repo` can be of any
149 The inheritance has to be done dynamically because `repo` can be of any
150 subclasses of `localrepo`. Eg: `bundlerepo` or `httprepo`.
150 subclasses of `localrepo`. Eg: `bundlerepo` or `httprepo`.
151 """
151 """
152
152
153 def __init__(self, repo, filtername):
153 def __init__(self, repo, filtername):
154 object.__setattr__(self, '_unfilteredrepo', repo)
154 object.__setattr__(self, '_unfilteredrepo', repo)
155 object.__setattr__(self, 'filtername', filtername)
155 object.__setattr__(self, 'filtername', filtername)
156
156
157 # not a cacheproperty on purpose we shall implement a proper cache later
157 # not a cacheproperty on purpose we shall implement a proper cache later
158 @property
158 @property
159 def changelog(self):
159 def changelog(self):
160 """return a filtered version of the changeset
160 """return a filtered version of the changeset
161
161
162 this changelog must not be used for writing"""
162 this changelog must not be used for writing"""
163 # some cache may be implemented later
163 # some cache may be implemented later
164 cl = copy.copy(self._unfilteredrepo.changelog)
164 cl = copy.copy(self._unfilteredrepo.changelog)
165 cl.filteredrevs = filteredrevs(self._unfilteredrepo, self.filtername)
165 cl.filteredrevs = filterrevs(self._unfilteredrepo, self.filtername)
166 return cl
166 return cl
167
167
168 def unfiltered(self):
168 def unfiltered(self):
169 """Return an unfiltered version of a repo"""
169 """Return an unfiltered version of a repo"""
170 return self._unfilteredrepo
170 return self._unfilteredrepo
171
171
172 def filtered(self, name):
172 def filtered(self, name):
173 """Return a filtered version of a repository"""
173 """Return a filtered version of a repository"""
174 if name == self.filtername:
174 if name == self.filtername:
175 return self
175 return self
176 return self.unfiltered().filtered(name)
176 return self.unfiltered().filtered(name)
177
177
178 # everything access are forwarded to the proxied repo
178 # everything access are forwarded to the proxied repo
179 def __getattr__(self, attr):
179 def __getattr__(self, attr):
180 return getattr(self._unfilteredrepo, attr)
180 return getattr(self._unfilteredrepo, attr)
181
181
182 def __setattr__(self, attr, value):
182 def __setattr__(self, attr, value):
183 return setattr(self._unfilteredrepo, attr, value)
183 return setattr(self._unfilteredrepo, attr, value)
184
184
185 def __delattr__(self, attr):
185 def __delattr__(self, attr):
186 return delattr(self._unfilteredrepo, attr)
186 return delattr(self._unfilteredrepo, attr)
187
187
188 # The `requirement` attribut is initialiazed during __init__. But
188 # The `requirement` attribut is initialiazed during __init__. But
189 # __getattr__ won't be called as it also exists on the class. We need
189 # __getattr__ won't be called as it also exists on the class. We need
190 # explicit forwarding to main repo here
190 # explicit forwarding to main repo here
191 @property
191 @property
192 def requirements(self):
192 def requirements(self):
193 return self._unfilteredrepo.requirements
193 return self._unfilteredrepo.requirements
194
194
@@ -1,1937 +1,1937
1 # revset.py - revision set queries for mercurial
1 # revset.py - revision set queries for mercurial
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import re
8 import re
9 import parser, util, error, discovery, hbisect, phases
9 import parser, util, error, discovery, hbisect, phases
10 import node
10 import node
11 import bookmarks as bookmarksmod
11 import bookmarks as bookmarksmod
12 import match as matchmod
12 import match as matchmod
13 from i18n import _
13 from i18n import _
14 import encoding
14 import encoding
15 import obsolete as obsmod
15 import obsolete as obsmod
16 import repoview
16 import repoview
17
17
18 def _revancestors(repo, revs, followfirst):
18 def _revancestors(repo, revs, followfirst):
19 """Like revlog.ancestors(), but supports followfirst."""
19 """Like revlog.ancestors(), but supports followfirst."""
20 cut = followfirst and 1 or None
20 cut = followfirst and 1 or None
21 cl = repo.changelog
21 cl = repo.changelog
22 visit = util.deque(revs)
22 visit = util.deque(revs)
23 seen = set([node.nullrev])
23 seen = set([node.nullrev])
24 while visit:
24 while visit:
25 for parent in cl.parentrevs(visit.popleft())[:cut]:
25 for parent in cl.parentrevs(visit.popleft())[:cut]:
26 if parent not in seen:
26 if parent not in seen:
27 visit.append(parent)
27 visit.append(parent)
28 seen.add(parent)
28 seen.add(parent)
29 yield parent
29 yield parent
30
30
31 def _revdescendants(repo, revs, followfirst):
31 def _revdescendants(repo, revs, followfirst):
32 """Like revlog.descendants() but supports followfirst."""
32 """Like revlog.descendants() but supports followfirst."""
33 cut = followfirst and 1 or None
33 cut = followfirst and 1 or None
34 cl = repo.changelog
34 cl = repo.changelog
35 first = min(revs)
35 first = min(revs)
36 nullrev = node.nullrev
36 nullrev = node.nullrev
37 if first == nullrev:
37 if first == nullrev:
38 # Are there nodes with a null first parent and a non-null
38 # Are there nodes with a null first parent and a non-null
39 # second one? Maybe. Do we care? Probably not.
39 # second one? Maybe. Do we care? Probably not.
40 for i in cl:
40 for i in cl:
41 yield i
41 yield i
42 return
42 return
43
43
44 seen = set(revs)
44 seen = set(revs)
45 for i in cl.revs(first + 1):
45 for i in cl.revs(first + 1):
46 for x in cl.parentrevs(i)[:cut]:
46 for x in cl.parentrevs(i)[:cut]:
47 if x != nullrev and x in seen:
47 if x != nullrev and x in seen:
48 seen.add(i)
48 seen.add(i)
49 yield i
49 yield i
50 break
50 break
51
51
52 def _revsbetween(repo, roots, heads):
52 def _revsbetween(repo, roots, heads):
53 """Return all paths between roots and heads, inclusive of both endpoint
53 """Return all paths between roots and heads, inclusive of both endpoint
54 sets."""
54 sets."""
55 if not roots:
55 if not roots:
56 return []
56 return []
57 parentrevs = repo.changelog.parentrevs
57 parentrevs = repo.changelog.parentrevs
58 visit = heads[:]
58 visit = heads[:]
59 reachable = set()
59 reachable = set()
60 seen = {}
60 seen = {}
61 minroot = min(roots)
61 minroot = min(roots)
62 roots = set(roots)
62 roots = set(roots)
63 # open-code the post-order traversal due to the tiny size of
63 # open-code the post-order traversal due to the tiny size of
64 # sys.getrecursionlimit()
64 # sys.getrecursionlimit()
65 while visit:
65 while visit:
66 rev = visit.pop()
66 rev = visit.pop()
67 if rev in roots:
67 if rev in roots:
68 reachable.add(rev)
68 reachable.add(rev)
69 parents = parentrevs(rev)
69 parents = parentrevs(rev)
70 seen[rev] = parents
70 seen[rev] = parents
71 for parent in parents:
71 for parent in parents:
72 if parent >= minroot and parent not in seen:
72 if parent >= minroot and parent not in seen:
73 visit.append(parent)
73 visit.append(parent)
74 if not reachable:
74 if not reachable:
75 return []
75 return []
76 for rev in sorted(seen):
76 for rev in sorted(seen):
77 for parent in seen[rev]:
77 for parent in seen[rev]:
78 if parent in reachable:
78 if parent in reachable:
79 reachable.add(rev)
79 reachable.add(rev)
80 return sorted(reachable)
80 return sorted(reachable)
81
81
82 elements = {
82 elements = {
83 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
83 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
84 "~": (18, None, ("ancestor", 18)),
84 "~": (18, None, ("ancestor", 18)),
85 "^": (18, None, ("parent", 18), ("parentpost", 18)),
85 "^": (18, None, ("parent", 18), ("parentpost", 18)),
86 "-": (5, ("negate", 19), ("minus", 5)),
86 "-": (5, ("negate", 19), ("minus", 5)),
87 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
87 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
88 ("dagrangepost", 17)),
88 ("dagrangepost", 17)),
89 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
89 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
90 ("dagrangepost", 17)),
90 ("dagrangepost", 17)),
91 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
91 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
92 "not": (10, ("not", 10)),
92 "not": (10, ("not", 10)),
93 "!": (10, ("not", 10)),
93 "!": (10, ("not", 10)),
94 "and": (5, None, ("and", 5)),
94 "and": (5, None, ("and", 5)),
95 "&": (5, None, ("and", 5)),
95 "&": (5, None, ("and", 5)),
96 "or": (4, None, ("or", 4)),
96 "or": (4, None, ("or", 4)),
97 "|": (4, None, ("or", 4)),
97 "|": (4, None, ("or", 4)),
98 "+": (4, None, ("or", 4)),
98 "+": (4, None, ("or", 4)),
99 ",": (2, None, ("list", 2)),
99 ",": (2, None, ("list", 2)),
100 ")": (0, None, None),
100 ")": (0, None, None),
101 "symbol": (0, ("symbol",), None),
101 "symbol": (0, ("symbol",), None),
102 "string": (0, ("string",), None),
102 "string": (0, ("string",), None),
103 "end": (0, None, None),
103 "end": (0, None, None),
104 }
104 }
105
105
106 keywords = set(['and', 'or', 'not'])
106 keywords = set(['and', 'or', 'not'])
107
107
108 def tokenize(program):
108 def tokenize(program):
109 '''
109 '''
110 Parse a revset statement into a stream of tokens
110 Parse a revset statement into a stream of tokens
111
111
112 Check that @ is a valid unquoted token character (issue3686):
112 Check that @ is a valid unquoted token character (issue3686):
113 >>> list(tokenize("@::"))
113 >>> list(tokenize("@::"))
114 [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
114 [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
115
115
116 '''
116 '''
117
117
118 pos, l = 0, len(program)
118 pos, l = 0, len(program)
119 while pos < l:
119 while pos < l:
120 c = program[pos]
120 c = program[pos]
121 if c.isspace(): # skip inter-token whitespace
121 if c.isspace(): # skip inter-token whitespace
122 pass
122 pass
123 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
123 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
124 yield ('::', None, pos)
124 yield ('::', None, pos)
125 pos += 1 # skip ahead
125 pos += 1 # skip ahead
126 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
126 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
127 yield ('..', None, pos)
127 yield ('..', None, pos)
128 pos += 1 # skip ahead
128 pos += 1 # skip ahead
129 elif c in "():,-|&+!~^": # handle simple operators
129 elif c in "():,-|&+!~^": # handle simple operators
130 yield (c, None, pos)
130 yield (c, None, pos)
131 elif (c in '"\'' or c == 'r' and
131 elif (c in '"\'' or c == 'r' and
132 program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
132 program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
133 if c == 'r':
133 if c == 'r':
134 pos += 1
134 pos += 1
135 c = program[pos]
135 c = program[pos]
136 decode = lambda x: x
136 decode = lambda x: x
137 else:
137 else:
138 decode = lambda x: x.decode('string-escape')
138 decode = lambda x: x.decode('string-escape')
139 pos += 1
139 pos += 1
140 s = pos
140 s = pos
141 while pos < l: # find closing quote
141 while pos < l: # find closing quote
142 d = program[pos]
142 d = program[pos]
143 if d == '\\': # skip over escaped characters
143 if d == '\\': # skip over escaped characters
144 pos += 2
144 pos += 2
145 continue
145 continue
146 if d == c:
146 if d == c:
147 yield ('string', decode(program[s:pos]), s)
147 yield ('string', decode(program[s:pos]), s)
148 break
148 break
149 pos += 1
149 pos += 1
150 else:
150 else:
151 raise error.ParseError(_("unterminated string"), s)
151 raise error.ParseError(_("unterminated string"), s)
152 # gather up a symbol/keyword
152 # gather up a symbol/keyword
153 elif c.isalnum() or c in '._@' or ord(c) > 127:
153 elif c.isalnum() or c in '._@' or ord(c) > 127:
154 s = pos
154 s = pos
155 pos += 1
155 pos += 1
156 while pos < l: # find end of symbol
156 while pos < l: # find end of symbol
157 d = program[pos]
157 d = program[pos]
158 if not (d.isalnum() or d in "._/@" or ord(d) > 127):
158 if not (d.isalnum() or d in "._/@" or ord(d) > 127):
159 break
159 break
160 if d == '.' and program[pos - 1] == '.': # special case for ..
160 if d == '.' and program[pos - 1] == '.': # special case for ..
161 pos -= 1
161 pos -= 1
162 break
162 break
163 pos += 1
163 pos += 1
164 sym = program[s:pos]
164 sym = program[s:pos]
165 if sym in keywords: # operator keywords
165 if sym in keywords: # operator keywords
166 yield (sym, None, s)
166 yield (sym, None, s)
167 else:
167 else:
168 yield ('symbol', sym, s)
168 yield ('symbol', sym, s)
169 pos -= 1
169 pos -= 1
170 else:
170 else:
171 raise error.ParseError(_("syntax error"), pos)
171 raise error.ParseError(_("syntax error"), pos)
172 pos += 1
172 pos += 1
173 yield ('end', None, pos)
173 yield ('end', None, pos)
174
174
175 # helpers
175 # helpers
176
176
177 def getstring(x, err):
177 def getstring(x, err):
178 if x and (x[0] == 'string' or x[0] == 'symbol'):
178 if x and (x[0] == 'string' or x[0] == 'symbol'):
179 return x[1]
179 return x[1]
180 raise error.ParseError(err)
180 raise error.ParseError(err)
181
181
182 def getlist(x):
182 def getlist(x):
183 if not x:
183 if not x:
184 return []
184 return []
185 if x[0] == 'list':
185 if x[0] == 'list':
186 return getlist(x[1]) + [x[2]]
186 return getlist(x[1]) + [x[2]]
187 return [x]
187 return [x]
188
188
189 def getargs(x, min, max, err):
189 def getargs(x, min, max, err):
190 l = getlist(x)
190 l = getlist(x)
191 if len(l) < min or (max >= 0 and len(l) > max):
191 if len(l) < min or (max >= 0 and len(l) > max):
192 raise error.ParseError(err)
192 raise error.ParseError(err)
193 return l
193 return l
194
194
195 def getset(repo, subset, x):
195 def getset(repo, subset, x):
196 if not x:
196 if not x:
197 raise error.ParseError(_("missing argument"))
197 raise error.ParseError(_("missing argument"))
198 return methods[x[0]](repo, subset, *x[1:])
198 return methods[x[0]](repo, subset, *x[1:])
199
199
200 def _getrevsource(repo, r):
200 def _getrevsource(repo, r):
201 extra = repo[r].extra()
201 extra = repo[r].extra()
202 for label in ('source', 'transplant_source', 'rebase_source'):
202 for label in ('source', 'transplant_source', 'rebase_source'):
203 if label in extra:
203 if label in extra:
204 try:
204 try:
205 return repo[extra[label]].rev()
205 return repo[extra[label]].rev()
206 except error.RepoLookupError:
206 except error.RepoLookupError:
207 pass
207 pass
208 return None
208 return None
209
209
210 # operator methods
210 # operator methods
211
211
212 def stringset(repo, subset, x):
212 def stringset(repo, subset, x):
213 x = repo[x].rev()
213 x = repo[x].rev()
214 if x == -1 and len(subset) == len(repo):
214 if x == -1 and len(subset) == len(repo):
215 return [-1]
215 return [-1]
216 if len(subset) == len(repo) or x in subset:
216 if len(subset) == len(repo) or x in subset:
217 return [x]
217 return [x]
218 return []
218 return []
219
219
220 def symbolset(repo, subset, x):
220 def symbolset(repo, subset, x):
221 if x in symbols:
221 if x in symbols:
222 raise error.ParseError(_("can't use %s here") % x)
222 raise error.ParseError(_("can't use %s here") % x)
223 return stringset(repo, subset, x)
223 return stringset(repo, subset, x)
224
224
225 def rangeset(repo, subset, x, y):
225 def rangeset(repo, subset, x, y):
226 m = getset(repo, subset, x)
226 m = getset(repo, subset, x)
227 if not m:
227 if not m:
228 m = getset(repo, list(repo), x)
228 m = getset(repo, list(repo), x)
229
229
230 n = getset(repo, subset, y)
230 n = getset(repo, subset, y)
231 if not n:
231 if not n:
232 n = getset(repo, list(repo), y)
232 n = getset(repo, list(repo), y)
233
233
234 if not m or not n:
234 if not m or not n:
235 return []
235 return []
236 m, n = m[0], n[-1]
236 m, n = m[0], n[-1]
237
237
238 if m < n:
238 if m < n:
239 r = range(m, n + 1)
239 r = range(m, n + 1)
240 else:
240 else:
241 r = range(m, n - 1, -1)
241 r = range(m, n - 1, -1)
242 s = set(subset)
242 s = set(subset)
243 return [x for x in r if x in s]
243 return [x for x in r if x in s]
244
244
245 def dagrange(repo, subset, x, y):
245 def dagrange(repo, subset, x, y):
246 if subset:
246 if subset:
247 r = list(repo)
247 r = list(repo)
248 xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
248 xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
249 s = set(subset)
249 s = set(subset)
250 return [r for r in xs if r in s]
250 return [r for r in xs if r in s]
251 return []
251 return []
252
252
253 def andset(repo, subset, x, y):
253 def andset(repo, subset, x, y):
254 return getset(repo, getset(repo, subset, x), y)
254 return getset(repo, getset(repo, subset, x), y)
255
255
256 def orset(repo, subset, x, y):
256 def orset(repo, subset, x, y):
257 xl = getset(repo, subset, x)
257 xl = getset(repo, subset, x)
258 s = set(xl)
258 s = set(xl)
259 yl = getset(repo, [r for r in subset if r not in s], y)
259 yl = getset(repo, [r for r in subset if r not in s], y)
260 return xl + yl
260 return xl + yl
261
261
262 def notset(repo, subset, x):
262 def notset(repo, subset, x):
263 s = set(getset(repo, subset, x))
263 s = set(getset(repo, subset, x))
264 return [r for r in subset if r not in s]
264 return [r for r in subset if r not in s]
265
265
266 def listset(repo, subset, a, b):
266 def listset(repo, subset, a, b):
267 raise error.ParseError(_("can't use a list in this context"))
267 raise error.ParseError(_("can't use a list in this context"))
268
268
269 def func(repo, subset, a, b):
269 def func(repo, subset, a, b):
270 if a[0] == 'symbol' and a[1] in symbols:
270 if a[0] == 'symbol' and a[1] in symbols:
271 return symbols[a[1]](repo, subset, b)
271 return symbols[a[1]](repo, subset, b)
272 raise error.ParseError(_("not a function: %s") % a[1])
272 raise error.ParseError(_("not a function: %s") % a[1])
273
273
274 # functions
274 # functions
275
275
276 def adds(repo, subset, x):
276 def adds(repo, subset, x):
277 """``adds(pattern)``
277 """``adds(pattern)``
278 Changesets that add a file matching pattern.
278 Changesets that add a file matching pattern.
279 """
279 """
280 # i18n: "adds" is a keyword
280 # i18n: "adds" is a keyword
281 pat = getstring(x, _("adds requires a pattern"))
281 pat = getstring(x, _("adds requires a pattern"))
282 return checkstatus(repo, subset, pat, 1)
282 return checkstatus(repo, subset, pat, 1)
283
283
284 def ancestor(repo, subset, x):
284 def ancestor(repo, subset, x):
285 """``ancestor(single, single)``
285 """``ancestor(single, single)``
286 Greatest common ancestor of the two changesets.
286 Greatest common ancestor of the two changesets.
287 """
287 """
288 # i18n: "ancestor" is a keyword
288 # i18n: "ancestor" is a keyword
289 l = getargs(x, 2, 2, _("ancestor requires two arguments"))
289 l = getargs(x, 2, 2, _("ancestor requires two arguments"))
290 r = list(repo)
290 r = list(repo)
291 a = getset(repo, r, l[0])
291 a = getset(repo, r, l[0])
292 b = getset(repo, r, l[1])
292 b = getset(repo, r, l[1])
293 if len(a) != 1 or len(b) != 1:
293 if len(a) != 1 or len(b) != 1:
294 # i18n: "ancestor" is a keyword
294 # i18n: "ancestor" is a keyword
295 raise error.ParseError(_("ancestor arguments must be single revisions"))
295 raise error.ParseError(_("ancestor arguments must be single revisions"))
296 an = [repo[a[0]].ancestor(repo[b[0]]).rev()]
296 an = [repo[a[0]].ancestor(repo[b[0]]).rev()]
297
297
298 return [r for r in an if r in subset]
298 return [r for r in an if r in subset]
299
299
300 def _ancestors(repo, subset, x, followfirst=False):
300 def _ancestors(repo, subset, x, followfirst=False):
301 args = getset(repo, list(repo), x)
301 args = getset(repo, list(repo), x)
302 if not args:
302 if not args:
303 return []
303 return []
304 s = set(_revancestors(repo, args, followfirst)) | set(args)
304 s = set(_revancestors(repo, args, followfirst)) | set(args)
305 return [r for r in subset if r in s]
305 return [r for r in subset if r in s]
306
306
307 def ancestors(repo, subset, x):
307 def ancestors(repo, subset, x):
308 """``ancestors(set)``
308 """``ancestors(set)``
309 Changesets that are ancestors of a changeset in set.
309 Changesets that are ancestors of a changeset in set.
310 """
310 """
311 return _ancestors(repo, subset, x)
311 return _ancestors(repo, subset, x)
312
312
313 def _firstancestors(repo, subset, x):
313 def _firstancestors(repo, subset, x):
314 # ``_firstancestors(set)``
314 # ``_firstancestors(set)``
315 # Like ``ancestors(set)`` but follows only the first parents.
315 # Like ``ancestors(set)`` but follows only the first parents.
316 return _ancestors(repo, subset, x, followfirst=True)
316 return _ancestors(repo, subset, x, followfirst=True)
317
317
318 def ancestorspec(repo, subset, x, n):
318 def ancestorspec(repo, subset, x, n):
319 """``set~n``
319 """``set~n``
320 Changesets that are the Nth ancestor (first parents only) of a changeset
320 Changesets that are the Nth ancestor (first parents only) of a changeset
321 in set.
321 in set.
322 """
322 """
323 try:
323 try:
324 n = int(n[1])
324 n = int(n[1])
325 except (TypeError, ValueError):
325 except (TypeError, ValueError):
326 raise error.ParseError(_("~ expects a number"))
326 raise error.ParseError(_("~ expects a number"))
327 ps = set()
327 ps = set()
328 cl = repo.changelog
328 cl = repo.changelog
329 for r in getset(repo, subset, x):
329 for r in getset(repo, subset, x):
330 for i in range(n):
330 for i in range(n):
331 r = cl.parentrevs(r)[0]
331 r = cl.parentrevs(r)[0]
332 ps.add(r)
332 ps.add(r)
333 return [r for r in subset if r in ps]
333 return [r for r in subset if r in ps]
334
334
335 def author(repo, subset, x):
335 def author(repo, subset, x):
336 """``author(string)``
336 """``author(string)``
337 Alias for ``user(string)``.
337 Alias for ``user(string)``.
338 """
338 """
339 # i18n: "author" is a keyword
339 # i18n: "author" is a keyword
340 n = encoding.lower(getstring(x, _("author requires a string")))
340 n = encoding.lower(getstring(x, _("author requires a string")))
341 kind, pattern, matcher = _substringmatcher(n)
341 kind, pattern, matcher = _substringmatcher(n)
342 return [r for r in subset if matcher(encoding.lower(repo[r].user()))]
342 return [r for r in subset if matcher(encoding.lower(repo[r].user()))]
343
343
344 def bisect(repo, subset, x):
344 def bisect(repo, subset, x):
345 """``bisect(string)``
345 """``bisect(string)``
346 Changesets marked in the specified bisect status:
346 Changesets marked in the specified bisect status:
347
347
348 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
348 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
349 - ``goods``, ``bads`` : csets topologically good/bad
349 - ``goods``, ``bads`` : csets topologically good/bad
350 - ``range`` : csets taking part in the bisection
350 - ``range`` : csets taking part in the bisection
351 - ``pruned`` : csets that are goods, bads or skipped
351 - ``pruned`` : csets that are goods, bads or skipped
352 - ``untested`` : csets whose fate is yet unknown
352 - ``untested`` : csets whose fate is yet unknown
353 - ``ignored`` : csets ignored due to DAG topology
353 - ``ignored`` : csets ignored due to DAG topology
354 - ``current`` : the cset currently being bisected
354 - ``current`` : the cset currently being bisected
355 """
355 """
356 # i18n: "bisect" is a keyword
356 # i18n: "bisect" is a keyword
357 status = getstring(x, _("bisect requires a string")).lower()
357 status = getstring(x, _("bisect requires a string")).lower()
358 state = set(hbisect.get(repo, status))
358 state = set(hbisect.get(repo, status))
359 return [r for r in subset if r in state]
359 return [r for r in subset if r in state]
360
360
361 # Backward-compatibility
361 # Backward-compatibility
362 # - no help entry so that we do not advertise it any more
362 # - no help entry so that we do not advertise it any more
363 def bisected(repo, subset, x):
363 def bisected(repo, subset, x):
364 return bisect(repo, subset, x)
364 return bisect(repo, subset, x)
365
365
366 def bookmark(repo, subset, x):
366 def bookmark(repo, subset, x):
367 """``bookmark([name])``
367 """``bookmark([name])``
368 The named bookmark or all bookmarks.
368 The named bookmark or all bookmarks.
369
369
370 If `name` starts with `re:`, the remainder of the name is treated as
370 If `name` starts with `re:`, the remainder of the name is treated as
371 a regular expression. To match a bookmark that actually starts with `re:`,
371 a regular expression. To match a bookmark that actually starts with `re:`,
372 use the prefix `literal:`.
372 use the prefix `literal:`.
373 """
373 """
374 # i18n: "bookmark" is a keyword
374 # i18n: "bookmark" is a keyword
375 args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
375 args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
376 if args:
376 if args:
377 bm = getstring(args[0],
377 bm = getstring(args[0],
378 # i18n: "bookmark" is a keyword
378 # i18n: "bookmark" is a keyword
379 _('the argument to bookmark must be a string'))
379 _('the argument to bookmark must be a string'))
380 kind, pattern, matcher = _stringmatcher(bm)
380 kind, pattern, matcher = _stringmatcher(bm)
381 if kind == 'literal':
381 if kind == 'literal':
382 bmrev = bookmarksmod.listbookmarks(repo).get(bm, None)
382 bmrev = bookmarksmod.listbookmarks(repo).get(bm, None)
383 if not bmrev:
383 if not bmrev:
384 raise util.Abort(_("bookmark '%s' does not exist") % bm)
384 raise util.Abort(_("bookmark '%s' does not exist") % bm)
385 bmrev = repo[bmrev].rev()
385 bmrev = repo[bmrev].rev()
386 return [r for r in subset if r == bmrev]
386 return [r for r in subset if r == bmrev]
387 else:
387 else:
388 matchrevs = set()
388 matchrevs = set()
389 for name, bmrev in bookmarksmod.listbookmarks(repo).iteritems():
389 for name, bmrev in bookmarksmod.listbookmarks(repo).iteritems():
390 if matcher(name):
390 if matcher(name):
391 matchrevs.add(bmrev)
391 matchrevs.add(bmrev)
392 if not matchrevs:
392 if not matchrevs:
393 raise util.Abort(_("no bookmarks exist that match '%s'")
393 raise util.Abort(_("no bookmarks exist that match '%s'")
394 % pattern)
394 % pattern)
395 bmrevs = set()
395 bmrevs = set()
396 for bmrev in matchrevs:
396 for bmrev in matchrevs:
397 bmrevs.add(repo[bmrev].rev())
397 bmrevs.add(repo[bmrev].rev())
398 return [r for r in subset if r in bmrevs]
398 return [r for r in subset if r in bmrevs]
399
399
400 bms = set([repo[r].rev()
400 bms = set([repo[r].rev()
401 for r in bookmarksmod.listbookmarks(repo).values()])
401 for r in bookmarksmod.listbookmarks(repo).values()])
402 return [r for r in subset if r in bms]
402 return [r for r in subset if r in bms]
403
403
404 def branch(repo, subset, x):
404 def branch(repo, subset, x):
405 """``branch(string or set)``
405 """``branch(string or set)``
406 All changesets belonging to the given branch or the branches of the given
406 All changesets belonging to the given branch or the branches of the given
407 changesets.
407 changesets.
408
408
409 If `string` starts with `re:`, the remainder of the name is treated as
409 If `string` starts with `re:`, the remainder of the name is treated as
410 a regular expression. To match a branch that actually starts with `re:`,
410 a regular expression. To match a branch that actually starts with `re:`,
411 use the prefix `literal:`.
411 use the prefix `literal:`.
412 """
412 """
413 try:
413 try:
414 b = getstring(x, '')
414 b = getstring(x, '')
415 except error.ParseError:
415 except error.ParseError:
416 # not a string, but another revspec, e.g. tip()
416 # not a string, but another revspec, e.g. tip()
417 pass
417 pass
418 else:
418 else:
419 kind, pattern, matcher = _stringmatcher(b)
419 kind, pattern, matcher = _stringmatcher(b)
420 if kind == 'literal':
420 if kind == 'literal':
421 # note: falls through to the revspec case if no branch with
421 # note: falls through to the revspec case if no branch with
422 # this name exists
422 # this name exists
423 if pattern in repo.branchmap():
423 if pattern in repo.branchmap():
424 return [r for r in subset if matcher(repo[r].branch())]
424 return [r for r in subset if matcher(repo[r].branch())]
425 else:
425 else:
426 return [r for r in subset if matcher(repo[r].branch())]
426 return [r for r in subset if matcher(repo[r].branch())]
427
427
428 s = getset(repo, list(repo), x)
428 s = getset(repo, list(repo), x)
429 b = set()
429 b = set()
430 for r in s:
430 for r in s:
431 b.add(repo[r].branch())
431 b.add(repo[r].branch())
432 s = set(s)
432 s = set(s)
433 return [r for r in subset if r in s or repo[r].branch() in b]
433 return [r for r in subset if r in s or repo[r].branch() in b]
434
434
435 def bumped(repo, subset, x):
435 def bumped(repo, subset, x):
436 """``bumped()``
436 """``bumped()``
437 Mutable changesets marked as successors of public changesets.
437 Mutable changesets marked as successors of public changesets.
438
438
439 Only non-public and non-obsolete changesets can be `bumped`.
439 Only non-public and non-obsolete changesets can be `bumped`.
440 """
440 """
441 # i18n: "bumped" is a keyword
441 # i18n: "bumped" is a keyword
442 getargs(x, 0, 0, _("bumped takes no arguments"))
442 getargs(x, 0, 0, _("bumped takes no arguments"))
443 bumped = obsmod.getrevs(repo, 'bumped')
443 bumped = obsmod.getrevs(repo, 'bumped')
444 return [r for r in subset if r in bumped]
444 return [r for r in subset if r in bumped]
445
445
446 def bundle(repo, subset, x):
446 def bundle(repo, subset, x):
447 """``bundle()``
447 """``bundle()``
448 Changesets in the bundle.
448 Changesets in the bundle.
449
449
450 Bundle must be specified by the -R option."""
450 Bundle must be specified by the -R option."""
451
451
452 try:
452 try:
453 bundlenodes = repo.changelog.bundlenodes
453 bundlenodes = repo.changelog.bundlenodes
454 except AttributeError:
454 except AttributeError:
455 raise util.Abort(_("no bundle provided - specify with -R"))
455 raise util.Abort(_("no bundle provided - specify with -R"))
456 revs = set(repo[n].rev() for n in bundlenodes)
456 revs = set(repo[n].rev() for n in bundlenodes)
457 return [r for r in subset if r in revs]
457 return [r for r in subset if r in revs]
458
458
def checkstatus(repo, subset, pat, field):
    # Shared helper: keep revisions from subset whose status tuple entry
    # ``field`` (modifies() passes 0) contains a file matching ``pat``.
    # For a plain single-file pattern we can use cheap membership tests;
    # otherwise a match object is applied file by file.
    m = None
    s = []
    hasset = matchmod.patkind(pat) == 'set'
    fname = None
    for r in subset:
        c = repo[r]
        if not m or hasset:
            # fileset patterns depend on the context, so rebuild per rev
            m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
            if not m.anypats() and len(m.files()) == 1:
                fname = m.files()[0]
        if fname is not None:
            # fast path: skip revs that do not touch the file at all
            if fname not in c.files():
                continue
        else:
            for f in c.files():
                if m(f):
                    break
            else:
                continue
        files = repo.status(c.p1().node(), c.node())[field]
        if fname is not None:
            if fname in files:
                s.append(r)
        else:
            for f in files:
                if m(f):
                    s.append(r)
                    break
    return s
489
489
def _children(repo, narrow, parentset):
    # Return the set of revs in ``narrow`` having at least one parent
    # in ``parentset``.
    cs = set()
    if not parentset:
        return cs
    pr = repo.changelog.parentrevs
    # No rev at or below the smallest parent can be a child of it.
    minrev = min(parentset)
    for r in narrow:
        if r <= minrev:
            continue
        for p in pr(r):
            if p in parentset:
                cs.add(r)
    return cs
503
503
def children(repo, subset, x):
    """``children(set)``
    Child changesets of changesets in set.
    """
    parentrevs = set(getset(repo, list(repo), x))
    cs = _children(repo, subset, parentrevs)
    return [r for r in subset if r in cs]
511
511
def closed(repo, subset, x):
    """``closed()``
    Changeset is closed.
    """
    # i18n: "closed" is a keyword
    getargs(x, 0, 0, _("closed takes no arguments"))
    return [r for r in subset if repo[r].closesbranch()]
519
519
def contains(repo, subset, x):
    """``contains(pattern)``
    Revision contains a file matching pattern. See :hg:`help patterns`
    for information about file patterns.
    """
    # i18n: "contains" is a keyword
    pat = getstring(x, _("contains requires a pattern"))
    m = None
    s = []
    if not matchmod.patkind(pat):
        # Literal path: plain manifest membership test per revision.
        for r in subset:
            if pat in repo[r]:
                s.append(r)
    else:
        for r in subset:
            c = repo[r]
            # fileset patterns are context-dependent, so rebuild each time
            if not m or matchmod.patkind(pat) == 'set':
                m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
            for f in c.manifest():
                if m(f):
                    s.append(r)
                    break
    return s
543
543
def converted(repo, subset, x):
    """``converted([id])``
    Changesets converted from the given identifier in the old repository if
    present, or all converted changesets if no identifier is specified.
    """

    # There is exactly no chance of resolving the revision, so do a simple
    # string compare and hope for the best

    rev = None
    # i18n: "converted" is a keyword
    l = getargs(x, 0, 1, _('converted takes one or no arguments'))
    if l:
        # i18n: "converted" is a keyword
        rev = getstring(l[0], _('converted requires a revision'))

    def _matchvalue(r):
        # Match against the convert extension's recorded source revision.
        source = repo[r].extra().get('convert_revision', None)
        return source is not None and (rev is None or source.startswith(rev))

    return [r for r in subset if _matchvalue(r)]
565
565
def date(repo, subset, x):
    """``date(interval)``
    Changesets within the interval, see :hg:`help dates`.
    """
    # i18n: "date" is a keyword
    ds = getstring(x, _("date requires a string"))
    dm = util.matchdate(ds)
    # date()[0] is the timestamp component of the (timestamp, tzoffset) pair
    return [r for r in subset if dm(repo[r].date()[0])]
574
574
def desc(repo, subset, x):
    """``desc(string)``
    Search commit message for string. The match is case-insensitive.
    """
    # i18n: "desc" is a keyword
    ds = encoding.lower(getstring(x, _("desc requires a string")))
    # Case-insensitive substring search over each description.
    matches = []
    for r in subset:
        if ds in encoding.lower(repo[r].description()):
            matches.append(r)
    return matches
587
587
def _descendants(repo, subset, x, followfirst=False):
    # Shared implementation for descendants()/_firstdescendants().
    args = getset(repo, list(repo), x)
    if not args:
        return []
    # Roots are descendants of themselves, hence the union with args.
    s = set(_revdescendants(repo, args, followfirst)) | set(args)
    return [r for r in subset if r in s]
594
594
def descendants(repo, subset, x):
    """``descendants(set)``
    Changesets which are descendants of changesets in set.
    """
    return _descendants(repo, subset, x)
600
600
def _firstdescendants(repo, subset, x):
    # ``_firstdescendants(set)``
    # Like ``descendants(set)`` but follows only the first parents.
    return _descendants(repo, subset, x, followfirst=True)
605
605
def destination(repo, subset, x):
    """``destination([set])``
    Changesets that were created by a graft, transplant or rebase operation,
    with the given revisions specified as the source. Omitting the optional set
    is the same as passing all().
    """
    if x is not None:
        args = set(getset(repo, list(repo), x))
    else:
        args = set(getall(repo, list(repo), x))

    dests = set()

    # subset contains all of the possible destinations that can be returned, so
    # iterate over them and see if their source(s) were provided in the args.
    # Even if the immediate src of r is not in the args, src's source (or
    # further back) may be. Scanning back further than the immediate src allows
    # transitive transplants and rebases to yield the same results as transitive
    # grafts.
    for r in subset:
        src = _getrevsource(repo, r)
        lineage = None

        while src is not None:
            if lineage is None:
                lineage = list()

            lineage.append(r)

            # The visited lineage is a match if the current source is in the arg
            # set. Since every candidate dest is visited by way of iterating
            # subset, any dests further back in the lineage will be tested by a
            # different iteration over subset. Likewise, if the src was already
            # selected, the current lineage can be selected without going back
            # further.
            if src in args or src in dests:
                dests.update(lineage)
                break

            r = src
            src = _getrevsource(repo, r)

    return [r for r in subset if r in dests]
649
649
def divergent(repo, subset, x):
    """``divergent()``
    Final successors of changesets with an alternative set of final successors.
    """
    # i18n: "divergent" is a keyword
    getargs(x, 0, 0, _("divergent takes no arguments"))
    divergentrevs = obsmod.getrevs(repo, 'divergent')
    return [r for r in subset if r in divergentrevs]
658
658
def draft(repo, subset, x):
    """``draft()``
    Changeset in draft phase."""
    # i18n: "draft" is a keyword
    getargs(x, 0, 0, _("draft takes no arguments"))
    pc = repo._phasecache
    return [r for r in subset if pc.phase(repo, r) == phases.draft]
666
666
def extinct(repo, subset, x):
    """``extinct()``
    Obsolete changesets with obsolete descendants only.
    """
    # i18n: "extinct" is a keyword
    getargs(x, 0, 0, _("extinct takes no arguments"))
    extinctrevs = obsmod.getrevs(repo, 'extinct')
    return [r for r in subset if r in extinctrevs]
675
675
def extra(repo, subset, x):
    """``extra(label, [value])``
    Changesets with the given label in the extra metadata, with the given
    optional value.

    If `value` starts with `re:`, the remainder of the value is treated as
    a regular expression. To match a value that actually starts with `re:`,
    use the prefix `literal:`.
    """

    # i18n: "extra" is a keyword
    l = getargs(x, 1, 2, _('extra takes at least 1 and at most 2 arguments'))
    # i18n: "extra" is a keyword
    label = getstring(l[0], _('first argument to extra must be a string'))
    value = None

    if len(l) > 1:
        # i18n: "extra" is a keyword
        value = getstring(l[1], _('second argument to extra must be a string'))
        # _stringmatcher handles the re:/literal: prefixes
        kind, value, matcher = _stringmatcher(value)

    def _matchvalue(r):
        extra = repo[r].extra()
        return label in extra and (value is None or matcher(extra[label]))

    return [r for r in subset if _matchvalue(r)]
702
702
def filelog(repo, subset, x):
    """``filelog(pattern)``
    Changesets connected to the specified filelog.

    For performance reasons, ``filelog()`` does not show every changeset
    that affects the requested file(s). See :hg:`help log` for details. For
    a slower, more accurate result, use ``file()``.
    """

    # i18n: "filelog" is a keyword
    pat = getstring(x, _("filelog requires a pattern"))
    m = matchmod.match(repo.root, repo.getcwd(), [pat], default='relpath',
                       ctx=repo[None])
    s = set()

    if not matchmod.patkind(pat):
        # Literal paths: walk only the named filelogs.
        for f in m.files():
            fl = repo.file(f)
            for fr in fl:
                s.add(fl.linkrev(fr))
    else:
        # Pattern: scan the working context for matching files first.
        for f in repo[None]:
            if m(f):
                fl = repo.file(f)
                for fr in fl:
                    s.add(fl.linkrev(fr))

    return [r for r in subset if r in s]
731
731
def first(repo, subset, x):
    """``first(set, [n])``
    An alias for limit().
    """
    return limit(repo, subset, x)
737
737
def _follow(repo, subset, x, name, followfirst=False):
    # Shared implementation for follow()/_followfirst(); ``name`` is only
    # used in error messages.
    l = getargs(x, 0, 1, _("%s takes no arguments or a filename") % name)
    c = repo['.']
    if l:
        x = getstring(l[0], _("%s expected a filename") % name)
        if x not in c:
            # file absent from the working parent: nothing to follow
            return []
        cx = c[x]
        s = set(ctx.rev() for ctx in cx.ancestors(followfirst=followfirst))
        # include the revision responsible for the most recent version
        s.add(cx.linkrev())
    else:
        s = set(_revancestors(repo, [c.rev()], followfirst)) | set([c.rev()])

    return [r for r in subset if r in s]
754
754
def follow(repo, subset, x):
    """``follow([file])``
    An alias for ``::.`` (ancestors of the working copy's first parent).
    If a filename is specified, the history of the given file is followed,
    including copies.
    """
    return _follow(repo, subset, x, 'follow')
762
762
def _followfirst(repo, subset, x):
    # ``followfirst([file])``
    # Like ``follow([file])`` but follows only the first parent of
    # every revision or file revision.
    return _follow(repo, subset, x, '_followfirst', followfirst=True)
768
768
def getall(repo, subset, x):
    """``all()``
    All changesets, the same as ``0:tip``.
    """
    # i18n: "all" is a keyword
    getargs(x, 0, 0, _("all takes no arguments"))
    return subset
776
776
777 def grep(repo, subset, x):
777 def grep(repo, subset, x):
778 """``grep(regex)``
778 """``grep(regex)``
779 Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
779 Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
780 to ensure special escape characters are handled correctly. Unlike
780 to ensure special escape characters are handled correctly. Unlike
781 ``keyword(string)``, the match is case-sensitive.
781 ``keyword(string)``, the match is case-sensitive.
782 """
782 """
783 try:
783 try:
784 # i18n: "grep" is a keyword
784 # i18n: "grep" is a keyword
785 gr = re.compile(getstring(x, _("grep requires a string")))
785 gr = re.compile(getstring(x, _("grep requires a string")))
786 except re.error, e:
786 except re.error, e:
787 raise error.ParseError(_('invalid match pattern: %s') % e)
787 raise error.ParseError(_('invalid match pattern: %s') % e)
788 l = []
788 l = []
789 for r in subset:
789 for r in subset:
790 c = repo[r]
790 c = repo[r]
791 for e in c.files() + [c.user(), c.description()]:
791 for e in c.files() + [c.user(), c.description()]:
792 if gr.search(e):
792 if gr.search(e):
793 l.append(r)
793 l.append(r)
794 break
794 break
795 return l
795 return l
796
796
def _matchfiles(repo, subset, x):
    # _matchfiles takes a revset list of prefixed arguments:
    #
    #   [p:foo, i:bar, x:baz]
    #
    # builds a match object from them and filters subset. Allowed
    # prefixes are 'p:' for regular patterns, 'i:' for include
    # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
    # a revision identifier, or the empty string to reference the
    # working directory, from which the match object is
    # initialized. Use 'd:' to set the default matching mode, default
    # to 'glob'. At most one 'r:' and 'd:' argument can be passed.

    # i18n: "_matchfiles" is a keyword
    l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
    pats, inc, exc = [], [], []
    hasset = False
    rev, default = None, None
    for arg in l:
        # i18n: "_matchfiles" is a keyword
        s = getstring(arg, _("_matchfiles requires string arguments"))
        prefix, value = s[:2], s[2:]
        if prefix == 'p:':
            pats.append(value)
        elif prefix == 'i:':
            inc.append(value)
        elif prefix == 'x:':
            exc.append(value)
        elif prefix == 'r:':
            if rev is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'revision'))
            rev = value
        elif prefix == 'd:':
            if default is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'default mode'))
            default = value
        else:
            # i18n: "_matchfiles" is a keyword
            raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
        if not hasset and matchmod.patkind(value) == 'set':
            hasset = True
    if not default:
        default = 'glob'
    m = None
    s = []
    for r in subset:
        c = repo[r]
        # rebuild the matcher per revision only when a fileset pattern is
        # present and no explicit revision pins the context
        if not m or (hasset and rev is None):
            ctx = c
            if rev is not None:
                ctx = repo[rev or None]
            m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
                               exclude=exc, ctx=ctx, default=default)
        for f in c.files():
            if m(f):
                s.append(r)
                break
    return s
859
859
def hasfile(repo, subset, x):
    """``file(pattern)``
    Changesets affecting files matched by pattern.

    For a faster but less accurate result, consider using ``filelog()``
    instead.
    """
    # i18n: "file" is a keyword
    pat = getstring(x, _("file requires a pattern"))
    # Delegate to _matchfiles with a single plain-pattern argument.
    return _matchfiles(repo, subset, ('string', 'p:' + pat))
870
870
def head(repo, subset, x):
    """``head()``
    Changeset is a named branch head.
    """
    # i18n: "head" is a keyword
    getargs(x, 0, 0, _("head takes no arguments"))
    hs = set()
    # Collect the head revs of every named branch.
    for b, ls in repo.branchmap().iteritems():
        hs.update(repo[h].rev() for h in ls)
    return [r for r in subset if r in hs]
881
881
def heads(repo, subset, x):
    """``heads(set)``
    Members of set with no children in set.
    """
    s = getset(repo, subset, x)
    # A head of the set is any member that is not a parent of a member.
    ps = set(parents(repo, subset, x))
    return [r for r in s if r not in ps]
889
889
def hidden(repo, subset, x):
    """``hidden()``
    Hidden changesets.
    """
    # i18n: "hidden" is a keyword
    getargs(x, 0, 0, _("hidden takes no arguments"))
    # The 'visible' repo filter hides exactly these revisions, so the revs
    # it filters out are the hidden ones (filters were renamed to their
    # antonyms: filteredrevs(repo, 'hidden') -> filterrevs(repo, 'visible')).
    hiddenrevs = repoview.filterrevs(repo, 'visible')
    return [r for r in subset if r in hiddenrevs]
898
898
def keyword(repo, subset, x):
    """``keyword(string)``
    Search commit message, user name, and names of changed files for
    string. The match is case-insensitive.
    """
    # i18n: "keyword" is a keyword
    kw = encoding.lower(getstring(x, _("keyword requires a string")))
    matches = []
    for r in subset:
        c = repo[r]
        # one case-folded haystack per revision
        t = " ".join(c.files() + [c.user(), c.description()])
        if kw in encoding.lower(t):
            matches.append(r)
    return matches
913
913
def limit(repo, subset, x):
    """``limit(set, [n])``
    First n members of set, defaulting to 1.
    """
    # i18n: "limit" is a keyword
    l = getargs(x, 1, 2, _("limit requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "limit" is a keyword
            lim = int(getstring(l[1], _("limit requires a number")))
    except (TypeError, ValueError):
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit expects a number"))
    ss = set(subset)
    # evaluate the inner set over the whole repo, then truncate
    os = getset(repo, list(repo), l[0])[:lim]
    return [r for r in os if r in ss]
931
931
def last(repo, subset, x):
    """``last(set, [n])``
    Last n members of set, defaulting to 1.
    """
    # i18n: "last" is a keyword
    l = getargs(x, 1, 2, _("last requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "last" is a keyword
            lim = int(getstring(l[1], _("last requires a number")))
    except (TypeError, ValueError):
        # i18n: "last" is a keyword
        raise error.ParseError(_("last expects a number"))
    ss = set(subset)
    # evaluate the inner set over the whole repo, then take the tail
    os = getset(repo, list(repo), l[0])[-lim:]
    return [r for r in os if r in ss]
949
949
def maxrev(repo, subset, x):
    """``max(set)``
    Changeset with highest revision number in set.
    """
    os = getset(repo, list(repo), x)
    if os:
        m = max(os)
        if m in subset:
            return [m]
    return []
960
960
def merge(repo, subset, x):
    """``merge()``
    Changeset is a merge changeset.
    """
    # i18n: "merge" is a keyword
    getargs(x, 0, 0, _("merge takes no arguments"))
    cl = repo.changelog
    # a merge has a real (non -1) second parent
    return [r for r in subset if cl.parentrevs(r)[1] != -1]
969
969
def branchpoint(repo, subset, x):
    """``branchpoint()``
    Changesets with more than one child.
    """
    # i18n: "branchpoint" is a keyword
    getargs(x, 0, 0, _("branchpoint takes no arguments"))
    cl = repo.changelog
    if not subset:
        return []
    baserev = min(subset)
    # Count, for every rev >= baserev, how many revisions name it as a
    # parent; an index offset keeps the table small.
    parentscount = [0] * (len(repo) - baserev)
    for r in cl.revs(start=baserev + 1):
        for p in cl.parentrevs(r):
            if p >= baserev:
                parentscount[p - baserev] += 1
    return [r for r in subset if (parentscount[r - baserev] > 1)]
986
986
def minrev(repo, subset, x):
    """``min(set)``
    Changeset with lowest revision number in set.
    """
    # mirror of maxrev: evaluate against the whole repo, then keep the
    # lowest revision only if the subset also contains it
    candidates = getset(repo, list(repo), x)
    if not candidates:
        return []
    lowest = min(candidates)
    return [lowest] if lowest in subset else []
997
997
def modifies(repo, subset, x):
    """``modifies(pattern)``
    Changesets modifying files matched by pattern.
    """
    # i18n: "modifies" is a keyword
    pattern = getstring(x, _("modifies requires a pattern"))
    # field index 0 of the status tuple holds the modified files
    return checkstatus(repo, subset, pattern, 0)
1005
1005
def node_(repo, subset, x):
    """``id(string)``
    Revision non-ambiguously specified by the given hex string prefix.
    """
    # i18n: "id" is a keyword
    args = getargs(x, 1, 1, _("id requires one argument"))
    # i18n: "id" is a keyword
    prefix = getstring(args[0], _("id requires a string"))
    if len(prefix) == 40:
        # a full 40-digit hex node id: resolve it directly
        target = repo[prefix].rev()
    else:
        # try to expand the prefix to a unique node
        node = repo.changelog._partialmatch(prefix)
        target = repo.changelog.rev(node) if node is not None else None
    return [r for r in subset if r == target]
1023
1023
def obsolete(repo, subset, x):
    """``obsolete()``
    Mutable changeset with a newer version."""
    # i18n: "obsolete" is a keyword
    getargs(x, 0, 0, _("obsolete takes no arguments"))
    obsoleteset = obsmod.getrevs(repo, 'obsolete')
    return [r for r in subset if r in obsoleteset]
1031
1031
def origin(repo, subset, x):
    """``origin([set])``
    Changesets that were specified as a source for the grafts, transplants or
    rebases that created the given revisions. Omitting the optional set is the
    same as passing all(). If a changeset created by these operations is itself
    specified as a source for one of these operations, only the source changeset
    for the first operation is selected.
    """
    if x is not None:
        args = set(getset(repo, list(repo), x))
    else:
        args = set(getall(repo, list(repo), x))

    def _firstsrc(rev):
        # walk the source chain back to the original (first) source
        cur = _getrevsource(repo, rev)
        if cur is None:
            return None
        prev = _getrevsource(repo, cur)
        while prev is not None:
            cur = prev
            prev = _getrevsource(repo, cur)
        return cur

    sources = set(_firstsrc(r) for r in args)
    return [r for r in subset if r in sources]
1059
1059
def outgoing(repo, subset, x):
    """``outgoing([path])``
    Changesets not found in the specified destination repository, or the
    default push location.
    """
    import hg # avoid start-up nasties
    # i18n: "outgoing" is a keyword
    l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
    # i18n: "outgoing" is a keyword
    dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
    # fall back to the configured push path, then the default pull path
    dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.peer(repo, {}, dest)
    # silence remote handshake/progress output while computing outgoing
    repo.ui.pushbuffer()
    # NOTE(review): 'discovery' is presumably a module-level import of this
    # file (not visible in this chunk) -- confirm
    outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
    repo.ui.popbuffer()
    cl = repo.changelog
    # translate the missing nodes into local revision numbers
    o = set([cl.rev(r) for r in outgoing.missing])
    return [r for r in subset if r in o]
1082
1082
def p1(repo, subset, x):
    """``p1([set])``
    First parent of changesets in set, or the working directory.
    """
    if x is None:
        # no argument: first parent of the working directory
        p = repo[x].p1().rev()
        return [r for r in subset if r == p]

    cl = repo.changelog
    parentset = set(cl.parentrevs(r)[0] for r in getset(repo, list(repo), x))
    return [r for r in subset if r in parentset]
1096
1096
def p2(repo, subset, x):
    """``p2([set])``
    Second parent of changesets in set, or the working directory.
    """
    if x is None:
        # no argument: second parent of the working directory, if any
        ps = repo[x].parents()
        if len(ps) > 1:
            p = ps[1].rev()
            return [r for r in subset if r == p]
        return []

    cl = repo.changelog
    parentset = set(cl.parentrevs(r)[1] for r in getset(repo, list(repo), x))
    return [r for r in subset if r in parentset]
1114
1114
def parents(repo, subset, x):
    """``parents([set])``
    The set of all parents for all changesets in set, or the working directory.
    """
    if x is None:
        # no argument: parents of the working directory
        ps = tuple(p.rev() for p in repo[x].parents())
        return [r for r in subset if r in ps]

    cl = repo.changelog
    parentset = set()
    for r in getset(repo, list(repo), x):
        # both parents at once (second may be the null rev, -1)
        parentset.update(cl.parentrevs(r))
    return [r for r in subset if r in parentset]
1128
1128
def parentspec(repo, subset, x, n):
    """``set^0``
    The set.
    ``set^1`` (or ``set^``), ``set^2``
    First or second parent, respectively, of all changesets in set.
    """
    try:
        n = int(n[1])
        if n not in (0, 1, 2):
            raise ValueError
    except (TypeError, ValueError):
        raise error.ParseError(_("^ expects a number 0, 1, or 2"))
    cl = repo.changelog
    ps = set()
    for r in getset(repo, subset, x):
        if n == 0:
            # ^0 is the revision itself
            ps.add(r)
        elif n == 1:
            ps.add(cl.parentrevs(r)[0])
        else:
            # n == 2 (validated above): only merges have a second parent
            parents = cl.parentrevs(r)
            if len(parents) > 1:
                ps.add(parents[1])
    return [r for r in subset if r in ps]
1153
1153
def present(repo, subset, x):
    """``present(set)``
    An empty set, if any revision in set isn't found; otherwise,
    all revisions in set.

    If any of specified revisions is not present in the local repository,
    the query is normally aborted. But this predicate allows the query
    to continue even in such cases.
    """
    # swallow only lookup failures; other errors still propagate
    try:
        result = getset(repo, subset, x)
    except error.RepoLookupError:
        return []
    return result
1167
1167
def public(repo, subset, x):
    """``public()``
    Changeset in public phase."""
    # i18n: "public" is a keyword
    getargs(x, 0, 0, _("public takes no arguments"))
    phaseof = repo._phasecache.phase
    return [r for r in subset if phaseof(repo, r) == phases.public]
1175
1175
def remote(repo, subset, x):
    """``remote([id [,path]])``
    Local revision that corresponds to the given identifier in a
    remote repository, if present. Here, the '.' identifier is a
    synonym for the current local branch.
    """

    import hg # avoid start-up nasties
    # i18n: "remote" is a keyword
    l = getargs(x, 0, 2, _("remote takes one, two or no arguments"))

    q = '.'
    if len(l) > 0:
        # i18n: "remote" is a keyword
        q = getstring(l[0], _("remote requires a string id"))
        if q == '.':
            # '.' resolves to the branch of the working directory parent
            q = repo['.'].branch()

    dest = ''
    if len(l) > 1:
        # i18n: "remote" is a keyword
        dest = getstring(l[1], _("remote requires a repository path"))
    dest = repo.ui.expandpath(dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    # ask the remote what node the identifier resolves to, then map it
    # back to a local revision if we have it
    other = hg.peer(repo, {}, dest)
    n = other.lookup(q)
    if n in repo:
        r = repo[n].rev()
        if r in subset:
            return [r]
    return []
1210
1210
def removes(repo, subset, x):
    """``removes(pattern)``
    Changesets which remove files matching pattern.
    """
    # i18n: "removes" is a keyword
    pattern = getstring(x, _("removes requires a pattern"))
    # field index 2 of the status tuple holds the removed files
    return checkstatus(repo, subset, pattern, 2)
1218
1218
def rev(repo, subset, x):
    """``rev(number)``
    Revision with the given numeric identifier.
    """
    # i18n: "rev" is a keyword
    args = getargs(x, 1, 1, _("rev requires one argument"))
    try:
        # i18n: "rev" is a keyword
        target = int(getstring(args[0], _("rev requires a number")))
    except (TypeError, ValueError):
        # i18n: "rev" is a keyword
        raise error.ParseError(_("rev expects a number"))
    return [r for r in subset if r == target]
1232
1232
def matching(repo, subset, x):
    """``matching(revision [, field])``
    Changesets in which a given set of fields match the set of fields in the
    selected revision or set.

    To match more than one field pass the list of fields to match separated
    by spaces (e.g. ``author description``).

    Valid fields are most regular revision fields and some special fields.

    Regular revision fields are ``description``, ``author``, ``branch``,
    ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
    and ``diff``.
    Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
    contents of the revision. Two revisions matching their ``diff`` will
    also match their ``files``.

    Special fields are ``summary`` and ``metadata``:
    ``summary`` matches the first line of the description.
    ``metadata`` is equivalent to matching ``description user date``
    (i.e. it matches the main metadata fields).

    ``metadata`` is the default field which is used when no fields are
    specified. You can match more than one field at a time.
    """
    # i18n: "matching" is a keyword
    l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))

    # the revision(s) whose field values we will compare against
    revs = getset(repo, repo.changelog, l[0])

    fieldlist = ['metadata']
    if len(l) > 1:
        fieldlist = getstring(l[1],
            # i18n: "matching" is a keyword
            _("matching requires a string "
              "as its second argument")).split()

    # Make sure that there are no repeated fields,
    # expand the 'special' 'metadata' field type
    # and check the 'files' whenever we check the 'diff'
    fields = []
    for field in fieldlist:
        if field == 'metadata':
            fields += ['user', 'description', 'date']
        elif field == 'diff':
            # a revision matching the diff must also match the files
            # since matching the diff is very costly, make sure to
            # also match the files first
            fields += ['files', 'diff']
        else:
            if field == 'author':
                # 'author' is an alias for 'user'
                field = 'user'
            fields.append(field)
    fields = set(fields)
    if 'summary' in fields and 'description' in fields:
        # If a revision matches its description it also matches its summary
        fields.discard('summary')

    # We may want to match more than one field
    # Not all fields take the same amount of time to be matched
    # Sort the selected fields in order of increasing matching cost
    fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
        'files', 'description', 'substate', 'diff']
    def fieldkeyfunc(f):
        try:
            return fieldorder.index(f)
        except ValueError:
            # assume an unknown field is very costly
            return len(fieldorder)
    fields = list(fields)
    fields.sort(key=fieldkeyfunc)

    # Each field will be matched with its own "getfield" function
    # which will be added to the getfieldfuncs array of functions
    getfieldfuncs = []
    _funcs = {
        'user': lambda r: repo[r].user(),
        'branch': lambda r: repo[r].branch(),
        'date': lambda r: repo[r].date(),
        'description': lambda r: repo[r].description(),
        'files': lambda r: repo[r].files(),
        'parents': lambda r: repo[r].parents(),
        'phase': lambda r: repo[r].phase(),
        'substate': lambda r: repo[r].substate,
        'summary': lambda r: repo[r].description().splitlines()[0],
        'diff': lambda r: list(repo[r].diff(git=True),)
    }
    for info in fields:
        getfield = _funcs.get(info, None)
        if getfield is None:
            raise error.ParseError(
                # i18n: "matching" is a keyword
                _("unexpected field name passed to matching: %s") % info)
        getfieldfuncs.append(getfield)
    # convert the getfield array of functions into a "getinfo" function
    # which returns an array of field values (or a single value if there
    # is only one field to match)
    getinfo = lambda r: [f(r) for f in getfieldfuncs]

    # NOTE(review): O(len(revs) * len(subset)) comparisons; the per-field
    # short-circuit below relies on the cheapest fields being ordered first
    matches = set()
    for rev in revs:
        target = getinfo(rev)
        for r in subset:
            match = True
            for n, f in enumerate(getfieldfuncs):
                if target[n] != f(r):
                    match = False
                    break
            if match:
                matches.add(r)
    return [r for r in subset if r in matches]
1344
1344
def reverse(repo, subset, x):
    """``reverse(set)``
    Reverse order of set.
    """
    # materialize the set (it may be any iterable) in reversed order
    return list(reversed(getset(repo, subset, x)))
1354
1354
def roots(repo, subset, x):
    """``roots(set)``
    Changesets in set with no parent changeset in set.
    """
    s = set(getset(repo, repo.changelog, x))
    members = [r for r in subset if r in s]
    # a root is a member that is not a child of any revision in the set
    children = _children(repo, members, s)
    return [r for r in members if r not in children]
1363
1363
def secret(repo, subset, x):
    """``secret()``
    Changeset in secret phase."""
    # i18n: "secret" is a keyword
    getargs(x, 0, 0, _("secret takes no arguments"))
    phaseof = repo._phasecache.phase
    return [r for r in subset if phaseof(repo, r) == phases.secret]
1371
1371
def sort(repo, subset, x):
    """``sort(set[, [-]key...])``
    Sort set by keys. The default sort order is ascending, specify a key
    as ``-key`` to sort in descending order.

    The keys can be:

    - ``rev`` for the revision number,
    - ``branch`` for the branch name,
    - ``desc`` for the commit message (description),
    - ``user`` for user name (``author`` can be used as an alias),
    - ``date`` for the commit date
    """
    # i18n: "sort" is a keyword
    l = getargs(x, 1, 2, _("sort requires one or two arguments"))
    keys = "rev"
    if len(l) == 2:
        # i18n: "sort" is a keyword
        keys = getstring(l[1], _("sort spec must be a string"))

    s = l[0]
    keys = keys.split()
    l = []
    def invert(s):
        # complement each character code so that ascending lexicographic
        # order of the result is descending order of the original string
        return "".join(chr(255 - ord(c)) for c in s)
    for r in getset(repo, subset, s):
        c = repo[r]
        # build one sort-key tuple (as a list) per revision, one entry
        # per requested key; '-' prefixed keys get negated/inverted values
        e = []
        for k in keys:
            if k == 'rev':
                e.append(r)
            elif k == '-rev':
                e.append(-r)
            elif k == 'branch':
                e.append(c.branch())
            elif k == '-branch':
                e.append(invert(c.branch()))
            elif k == 'desc':
                e.append(c.description())
            elif k == '-desc':
                e.append(invert(c.description()))
            elif k in 'user author':
                e.append(c.user())
            elif k in '-user -author':
                e.append(invert(c.user()))
            elif k == 'date':
                e.append(c.date()[0])
            elif k == '-date':
                e.append(-c.date()[0])
            else:
                raise error.ParseError(_("unknown sort key %r") % k)
        # the revision itself acts as both tiebreaker and payload: after
        # sorting, the last element of each entry is the revision number
        e.append(r)
        l.append(e)
    l.sort()
    return [e[-1] for e in l]
1427
1427
def _stringmatcher(pattern):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = _stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        try:
            regex = re.compile(pattern)
        except re.error, e:
            # surface a bad user-supplied regex as a parse error
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        # regex kind matches anywhere in the string (search, not match)
        return 're', pattern, regex.search
    elif pattern.startswith('literal:'):
        pattern = pattern[8:]
        return 'literal', pattern, pattern.__eq__
    # no (or unknown) prefix: fall through to exact literal equality
    return 'literal', pattern, pattern.__eq__
1466
1466
def _substringmatcher(pattern):
    # like _stringmatcher, but a literal pattern matches any string that
    # contains it as a substring rather than only an exact equal string
    kind, pattern, matcher = _stringmatcher(pattern)
    if kind == 'literal':
        def matcher(s, _pat=pattern):
            return _pat in s
    return kind, pattern, matcher
1472
1472
def tag(repo, subset, x):
    """``tag([name])``
    The specified tag by name, or all tagged revisions if no name is given.
    """
    # i18n: "tag" is a keyword
    args = getargs(x, 0, 1, _("tag takes one or no arguments"))
    cl = repo.changelog
    if not args:
        # no name: every tagged revision except the implicit 'tip' tag
        revs = set(cl.rev(n) for t, n in repo.tagslist() if t != 'tip')
    else:
        pattern = getstring(args[0],
                            # i18n: "tag" is a keyword
                            _('the argument to tag must be a string'))
        kind, pattern, matcher = _stringmatcher(pattern)
        if kind == 'literal':
            # avoid resolving all tags
            tn = repo._tagscache.tags.get(pattern, None)
            if tn is None:
                raise util.Abort(_("tag '%s' does not exist") % pattern)
            revs = set([repo[tn].rev()])
        else:
            revs = set(cl.rev(n) for t, n in repo.tagslist() if matcher(t))
            if not revs:
                raise util.Abort(_("no tags exist that match '%s'") % pattern)
    return [r for r in subset if r in revs]
1498
1498
def tagged(repo, subset, x):
    # backwards-compatible alias for tag(); deliberately undocumented
    return tag(repo, subset, x)
1501
1501
def unstable(repo, subset, x):
    """``unstable()``
    Non-obsolete changesets with obsolete ancestors.
    """
    # i18n: "unstable" is a keyword
    getargs(x, 0, 0, _("unstable takes no arguments"))
    troubled = obsmod.getrevs(repo, 'unstable')
    return [r for r in subset if r in troubled]
1510
1510
1511
1511
def user(repo, subset, x):
    """``user(string)``
    User name contains string. The match is case-insensitive.

    If `string` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a user that actually contains `re:`, use
    the prefix `literal:`.
    """
    # alias of author(); the docstring above is the user-visible help text
    return author(repo, subset, x)
1521
1521
# for internal use
def _list(repo, subset, x):
    """Expand a \\0-separated list of revision identifiers, keeping only
    those present in subset (list order is preserved).
    """
    spec = getstring(x, "internal error")
    if not spec:
        return []
    # membership tests below need O(1) lookup
    members = subset if isinstance(subset, set) else set(subset)
    revs = [repo[item].rev() for item in spec.split('\0')]
    return [r for r in revs if r in members]
1531
1531
# Map of revset predicate names, as written in user queries, to the
# functions implementing them. Entries whose name starts with '_' are
# internal helpers and not part of the documented query language.
symbols = {
    "adds": adds,
    "all": getall,
    "ancestor": ancestor,
    "ancestors": ancestors,
    "_firstancestors": _firstancestors,
    "author": author,
    "bisect": bisect,
    "bisected": bisected,
    "bookmark": bookmark,
    "branch": branch,
    "branchpoint": branchpoint,
    "bumped": bumped,
    "bundle": bundle,
    "children": children,
    "closed": closed,
    "contains": contains,
    "converted": converted,
    "date": date,
    "desc": desc,
    "descendants": descendants,
    "_firstdescendants": _firstdescendants,
    "destination": destination,
    "divergent": divergent,
    "draft": draft,
    "extinct": extinct,
    "extra": extra,
    "file": hasfile,
    "filelog": filelog,
    "first": first,
    "follow": follow,
    "_followfirst": _followfirst,
    "grep": grep,
    "head": head,
    "heads": heads,
    "hidden": hidden,
    "id": node_,
    "keyword": keyword,
    "last": last,
    "limit": limit,
    "_matchfiles": _matchfiles,
    "max": maxrev,
    "merge": merge,
    "min": minrev,
    "modifies": modifies,
    "obsolete": obsolete,
    "origin": origin,
    "outgoing": outgoing,
    "p1": p1,
    "p2": p2,
    "parents": parents,
    "present": present,
    "public": public,
    "remote": remote,
    "removes": removes,
    "rev": rev,
    "reverse": reverse,
    "roots": roots,
    "sort": sort,
    "secret": secret,
    "matching": matching,
    "tag": tag,
    "tagged": tagged,
    "user": user,
    "unstable": unstable,
    "_list": _list,
}
1599
1599
# Map parse-tree node types (the first element of each tree tuple) to the
# functions that evaluate them against a subset of revisions.
methods = {
    "range": rangeset,
    "dagrange": dagrange,
    "string": stringset,
    "symbol": symbolset,
    "and": andset,
    "or": orset,
    "not": notset,
    "list": listset,
    "func": func,
    "ancestor": ancestorspec,
    "parent": parentspec,
    "parentpost": p1,
}
1614
1614
def optimize(x, small):
    """Rewrite parse tree x into an equivalent, cheaper-to-evaluate form.

    Returns a (weight, tree) pair: weight is a heuristic cost estimate
    for evaluating tree. 'small' hints that the caller expects a small
    result set, which biases operand ordering for 'and'/'not'.
    """
    if x is None:
        return 0, x

    smallbonus = 1
    if small:
        smallbonus = .5

    op = x[0]
    if op == 'minus':
        # 'a - b' is evaluated as 'a and not b'
        return optimize(('and', x[1], ('not', x[2])), small)
    elif op == 'dagrangepre':
        # '::x' is ancestors(x)
        return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
    elif op == 'dagrangepost':
        # 'x::' is descendants(x)
        return optimize(('func', ('symbol', 'descendants'), x[1]), small)
    elif op == 'rangepre':
        # ':x' is '0:x'
        return optimize(('range', ('string', '0'), x[1]), small)
    elif op == 'rangepost':
        # 'x:' is 'x:tip'
        return optimize(('range', x[1], ('string', 'tip')), small)
    elif op == 'negate':
        return optimize(('string',
                         '-' + getstring(x[1], _("can't negate that"))), small)
    elif op in 'string symbol negate':
        return smallbonus, x # single revisions are small
    elif op == 'and':
        wa, ta = optimize(x[1], True)
        wb, tb = optimize(x[2], True)
        w = min(wa, wb)
        if wa > wb:
            # put the cheaper operand first so it narrows the subset early
            return w, (op, tb, ta)
        return w, (op, ta, tb)
    elif op == 'or':
        wa, ta = optimize(x[1], False)
        wb, tb = optimize(x[2], False)
        if wb < wa:
            wb, wa = wa, wb
        return max(wa, wb), (op, ta, tb)
    elif op == 'not':
        o = optimize(x[1], not small)
        return o[0], (op, o[1])
    elif op == 'parentpost':
        o = optimize(x[1], small)
        return o[0], (op, o[1])
    elif op == 'group':
        # parentheses are purely syntactic; drop them
        return optimize(x[1], small)
    elif op in 'dagrange range list parent ancestorspec':
        if op == 'parent':
            # x^:y means (x^) : y, not x ^ (:y)
            post = ('parentpost', x[1])
            if x[2][0] == 'dagrangepre':
                return optimize(('dagrange', post, x[2][1]), small)
            elif x[2][0] == 'rangepre':
                return optimize(('range', post, x[2][1]), small)

        wa, ta = optimize(x[1], small)
        wb, tb = optimize(x[2], small)
        return wa + wb, (op, ta, tb)
    elif op == 'func':
        f = getstring(x[1], _("not a symbol"))
        wa, ta = optimize(x[2], small)
        # weight heuristics for known predicates
        if f in ("author branch closed date desc file grep keyword "
                 "outgoing user"):
            w = 10 # slow
        elif f in "modifies adds removes":
            w = 30 # slower
        elif f == "contains":
            w = 100 # very slow
        elif f == "ancestor":
            w = 1 * smallbonus
        elif f in "reverse limit first":
            w = 0
        elif f in "sort":
            w = 10 # assume most sorts look at changelog
        else:
            w = 1
        return w + wa, (op, x[1], ta)
    return 1, x
1692
1692
1693 _aliasarg = ('func', ('symbol', '_aliasarg'))
1693 _aliasarg = ('func', ('symbol', '_aliasarg'))
1694 def _getaliasarg(tree):
1694 def _getaliasarg(tree):
1695 """If tree matches ('func', ('symbol', '_aliasarg'), ('string', X))
1695 """If tree matches ('func', ('symbol', '_aliasarg'), ('string', X))
1696 return X, None otherwise.
1696 return X, None otherwise.
1697 """
1697 """
1698 if (len(tree) == 3 and tree[:2] == _aliasarg
1698 if (len(tree) == 3 and tree[:2] == _aliasarg
1699 and tree[2][0] == 'string'):
1699 and tree[2][0] == 'string'):
1700 return tree[2][1]
1700 return tree[2][1]
1701 return None
1701 return None
1702
1702
def _checkaliasarg(tree, known=None):
    """Recursively verify tree contains no _aliasarg construct, or only
    ones whose value appears in known. Guards against alias placeholder
    injection from user-supplied queries.
    """
    if not isinstance(tree, tuple):
        return
    arg = _getaliasarg(tree)
    if arg is not None and (not known or arg not in known):
        raise error.ParseError(_("not a function: %s") % '_aliasarg')
    for subtree in tree:
        _checkaliasarg(subtree, known)
1713
1713
class revsetalias(object):
    # matches function-style alias names such as 'b($1, $2)'
    funcre = re.compile('^([^(]+)\(([^)]+)\)$')
    # formal argument names for function-style aliases, None for symbols
    args = None

    def __init__(self, name, value):
        '''Aliases like:

        h = heads(default)
        b($1) = ancestors($1) - ancestors(default)
        '''
        m = self.funcre.search(name)
        if m:
            self.name = m.group(1)
            self.tree = ('func', ('symbol', m.group(1)))
            self.args = [x.strip() for x in m.group(2).split(',')]
            for arg in self.args:
                # _aliasarg() is an unknown symbol only used separate
                # alias argument placeholders from regular strings.
                value = value.replace(arg, '_aliasarg(%r)' % (arg,))
        else:
            self.name = name
            self.tree = ('symbol', name)

        # parse the (placeholder-substituted) replacement expression
        self.replacement, pos = parse(value)
        if pos != len(value):
            raise error.ParseError(_('invalid token'), pos)
        # Check for placeholder injection
        _checkaliasarg(self.replacement, self.args)
1742
1742
1743 def _getalias(aliases, tree):
1743 def _getalias(aliases, tree):
1744 """If tree looks like an unexpanded alias, return it. Return None
1744 """If tree looks like an unexpanded alias, return it. Return None
1745 otherwise.
1745 otherwise.
1746 """
1746 """
1747 if isinstance(tree, tuple) and tree:
1747 if isinstance(tree, tuple) and tree:
1748 if tree[0] == 'symbol' and len(tree) == 2:
1748 if tree[0] == 'symbol' and len(tree) == 2:
1749 name = tree[1]
1749 name = tree[1]
1750 alias = aliases.get(name)
1750 alias = aliases.get(name)
1751 if alias and alias.args is None and alias.tree == tree:
1751 if alias and alias.args is None and alias.tree == tree:
1752 return alias
1752 return alias
1753 if tree[0] == 'func' and len(tree) > 1:
1753 if tree[0] == 'func' and len(tree) > 1:
1754 if tree[1][0] == 'symbol' and len(tree[1]) == 2:
1754 if tree[1][0] == 'symbol' and len(tree[1]) == 2:
1755 name = tree[1][1]
1755 name = tree[1][1]
1756 alias = aliases.get(name)
1756 alias = aliases.get(name)
1757 if alias and alias.args is not None and alias.tree == tree[:2]:
1757 if alias and alias.args is not None and alias.tree == tree[:2]:
1758 return alias
1758 return alias
1759 return None
1759 return None
1760
1760
def _expandargs(tree, args):
    """Recursively substitute _aliasarg placeholders in tree with the
    corresponding values from the args mapping.
    """
    if not isinstance(tree, tuple) or not tree:
        return tree
    name = _getaliasarg(tree)
    if name is None:
        return tuple(_expandargs(child, args) for child in tree)
    return args[name]
1771
1771
def _expandaliases(aliases, tree, expanding, cache):
    """Expand aliases in tree, recursively.

    'aliases' is a dictionary mapping user defined aliases to
    revsetalias objects.

    'expanding' is the stack of aliases currently being expanded (used
    to detect infinite recursion); 'cache' memoizes fully-expanded
    alias bodies by alias name.
    """
    if not isinstance(tree, tuple):
        # Do not expand raw strings
        return tree
    alias = _getalias(aliases, tree)
    if alias is not None:
        if alias in expanding:
            raise error.ParseError(_('infinite expansion of revset alias "%s" '
                                     'detected') % alias.name)
        expanding.append(alias)
        if alias.name not in cache:
            cache[alias.name] = _expandaliases(aliases, alias.replacement,
                                               expanding, cache)
        result = cache[alias.name]
        expanding.pop()
        if alias.args is not None:
            # function-style alias: expand actual arguments, then splice
            # them into the placeholder positions of the cached body
            l = getlist(tree[2])
            if len(l) != len(alias.args):
                raise error.ParseError(
                    _('invalid number of arguments: %s') % len(l))
            l = [_expandaliases(aliases, a, [], cache) for a in l]
            result = _expandargs(result, dict(zip(alias.args, l)))
    else:
        result = tuple(_expandaliases(aliases, t, expanding, cache)
                       for t in tree)
    return result
1803
1803
def findaliases(ui, tree):
    """Expand every user-configured [revsetalias] in tree."""
    # reject alias placeholders smuggled into the original query
    _checkaliasarg(tree)
    aliases = {}
    for name, value in ui.configitems('revsetalias'):
        entry = revsetalias(name, value)
        aliases[entry.name] = entry
    return _expandaliases(aliases, tree, [], {})
1811
1811
# revset grammar entry point: parse(spec) -> (tree, pos)
parse = parser.parser(tokenize, elements).parse
1813
1813
def match(ui, spec):
    """Compile a revset spec string into a matcher callable.

    The returned function takes (repo, subset) and returns the revisions
    from subset selected by the expression. Raises ParseError on an
    empty or malformed spec.
    """
    if not spec:
        raise error.ParseError(_("empty query"))
    tree, pos = parse(spec)
    if pos != len(spec):
        raise error.ParseError(_("invalid token"), pos)
    if ui:
        tree = findaliases(ui, tree)
    weight, tree = optimize(tree, True)
    def mfunc(repo, subset):
        return getset(repo, subset, tree)
    return mfunc
1826
1826
def formatspec(expr, *args):
    '''
    This is a convenience function for using revsets internally, and
    escapes arguments appropriately. Aliases are intentionally ignored
    so that intended expression behavior isn't accidentally subverted.

    Supported arguments:

    %r = revset expression, parenthesized
    %d = int(arg), no quoting
    %s = string(arg), escaped and single-quoted
    %b = arg.branch(), escaped and single-quoted
    %n = hex(arg), single-quoted
    %% = a literal '%'

    Prefixing the type with 'l' specifies a parenthesized list of that type.

    >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
    '(10 or 11):: and ((this()) or (that()))'
    >>> formatspec('%d:: and not %d::', 10, 20)
    '10:: and not 20::'
    >>> formatspec('%ld or %ld', [], [1])
    "_list('') or 1"
    >>> formatspec('keyword(%s)', 'foo\\xe9')
    "keyword('foo\\\\xe9')"
    >>> b = lambda: 'default'
    >>> b.branch = b
    >>> formatspec('branch(%b)', b)
    "branch('default')"
    >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
    "root(_list('a\\x00b\\x00c\\x00d'))"
    '''

    def quote(s):
        # repr() of a str yields an escaped, quoted literal
        return repr(str(s))

    def argtype(c, arg):
        # render one argument according to its format character
        if c == 'd':
            return str(int(arg))
        elif c == 's':
            return quote(arg)
        elif c == 'r':
            parse(arg) # make sure syntax errors are confined
            return '(%s)' % arg
        elif c == 'n':
            return quote(node.hex(arg))
        elif c == 'b':
            return quote(arg.branch())

    def listexp(s, t):
        # render a list of format-type t; long lists of other types are
        # split in half and joined with 'or'
        l = len(s)
        if l == 0:
            return "_list('')"
        elif l == 1:
            return argtype(t, s[0])
        elif t == 'd':
            return "_list('%s')" % "\0".join(str(int(a)) for a in s)
        elif t == 's':
            return "_list('%s')" % "\0".join(s)
        elif t == 'n':
            return "_list('%s')" % "\0".join(node.hex(a) for a in s)
        elif t == 'b':
            return "_list('%s')" % "\0".join(a.branch() for a in s)

        m = l // 2
        return '(%s or %s)' % (listexp(s[:m], t), listexp(s[m:], t))

    # scan expr, copying literal text and substituting % escapes
    ret = ''
    pos = 0
    arg = 0
    while pos < len(expr):
        c = expr[pos]
        if c == '%':
            pos += 1
            d = expr[pos]
            if d == '%':
                ret += d
            elif d in 'dsnbr':
                ret += argtype(d, args[arg])
                arg += 1
            elif d == 'l':
                # a list of some type
                pos += 1
                d = expr[pos]
                ret += listexp(list(args[arg]), d)
                arg += 1
            else:
                raise util.Abort('unexpected revspec format character %s' % d)
        else:
            ret += c
        pos += 1

    return ret
1920
1920
def prettyformat(tree):
    """Render a parse tree as an indented, multi-line string."""
    def flatten(node, depth, acc):
        # leaves (non-tuples and string/symbol terminals) print on one line
        if not isinstance(node, tuple) or node[0] in ('string', 'symbol'):
            acc.append((depth, str(node)))
            return
        acc.append((depth, '(%s' % node[0]))
        for child in node[1:]:
            flatten(child, depth + 1, acc)
        # close the group on the last emitted line
        acc[-1] = (acc[-1][0], acc[-1][1] + ')')

    acc = []
    flatten(tree, 0, acc)
    return '\n'.join(' ' * depth + text for depth, text in acc)
1935
1935
# tell hggettext to extract docstrings from these functions:
# (predicate docstrings double as user-visible, translatable help text)
i18nfunctions = symbols.values()
@@ -1,654 +1,654
1 # wireproto.py - generic wire protocol support functions
1 # wireproto.py - generic wire protocol support functions
2 #
2 #
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import urllib, tempfile, os, sys
8 import urllib, tempfile, os, sys
9 from i18n import _
9 from i18n import _
10 from node import bin, hex
10 from node import bin, hex
11 import changegroup as changegroupmod
11 import changegroup as changegroupmod
12 import peer, error, encoding, util, store
12 import peer, error, encoding, util, store
13
13
14 # abstract batching support
14 # abstract batching support
15
15
16 class future(object):
16 class future(object):
17 '''placeholder for a value to be set later'''
17 '''placeholder for a value to be set later'''
18 def set(self, value):
18 def set(self, value):
19 if util.safehasattr(self, 'value'):
19 if util.safehasattr(self, 'value'):
20 raise error.RepoError("future is already set")
20 raise error.RepoError("future is already set")
21 self.value = value
21 self.value = value
22
22
23 class batcher(object):
23 class batcher(object):
24 '''base class for batches of commands submittable in a single request
24 '''base class for batches of commands submittable in a single request
25
25
26 All methods invoked on instances of this class are simply queued and
26 All methods invoked on instances of this class are simply queued and
27 return a a future for the result. Once you call submit(), all the queued
27 return a a future for the result. Once you call submit(), all the queued
28 calls are performed and the results set in their respective futures.
28 calls are performed and the results set in their respective futures.
29 '''
29 '''
30 def __init__(self):
30 def __init__(self):
31 self.calls = []
31 self.calls = []
32 def __getattr__(self, name):
32 def __getattr__(self, name):
33 def call(*args, **opts):
33 def call(*args, **opts):
34 resref = future()
34 resref = future()
35 self.calls.append((name, args, opts, resref,))
35 self.calls.append((name, args, opts, resref,))
36 return resref
36 return resref
37 return call
37 return call
38 def submit(self):
38 def submit(self):
39 pass
39 pass
40
40
41 class localbatch(batcher):
41 class localbatch(batcher):
42 '''performs the queued calls directly'''
42 '''performs the queued calls directly'''
43 def __init__(self, local):
43 def __init__(self, local):
44 batcher.__init__(self)
44 batcher.__init__(self)
45 self.local = local
45 self.local = local
46 def submit(self):
46 def submit(self):
47 for name, args, opts, resref in self.calls:
47 for name, args, opts, resref in self.calls:
48 resref.set(getattr(self.local, name)(*args, **opts))
48 resref.set(getattr(self.local, name)(*args, **opts))
49
49
50 class remotebatch(batcher):
50 class remotebatch(batcher):
51 '''batches the queued calls; uses as few roundtrips as possible'''
51 '''batches the queued calls; uses as few roundtrips as possible'''
52 def __init__(self, remote):
52 def __init__(self, remote):
53 '''remote must support _submitbatch(encbatch) and
53 '''remote must support _submitbatch(encbatch) and
54 _submitone(op, encargs)'''
54 _submitone(op, encargs)'''
55 batcher.__init__(self)
55 batcher.__init__(self)
56 self.remote = remote
56 self.remote = remote
57 def submit(self):
57 def submit(self):
58 req, rsp = [], []
58 req, rsp = [], []
59 for name, args, opts, resref in self.calls:
59 for name, args, opts, resref in self.calls:
60 mtd = getattr(self.remote, name)
60 mtd = getattr(self.remote, name)
61 batchablefn = getattr(mtd, 'batchable', None)
61 batchablefn = getattr(mtd, 'batchable', None)
62 if batchablefn is not None:
62 if batchablefn is not None:
63 batchable = batchablefn(mtd.im_self, *args, **opts)
63 batchable = batchablefn(mtd.im_self, *args, **opts)
64 encargsorres, encresref = batchable.next()
64 encargsorres, encresref = batchable.next()
65 if encresref:
65 if encresref:
66 req.append((name, encargsorres,))
66 req.append((name, encargsorres,))
67 rsp.append((batchable, encresref, resref,))
67 rsp.append((batchable, encresref, resref,))
68 else:
68 else:
69 resref.set(encargsorres)
69 resref.set(encargsorres)
70 else:
70 else:
71 if req:
71 if req:
72 self._submitreq(req, rsp)
72 self._submitreq(req, rsp)
73 req, rsp = [], []
73 req, rsp = [], []
74 resref.set(mtd(*args, **opts))
74 resref.set(mtd(*args, **opts))
75 if req:
75 if req:
76 self._submitreq(req, rsp)
76 self._submitreq(req, rsp)
77 def _submitreq(self, req, rsp):
77 def _submitreq(self, req, rsp):
78 encresults = self.remote._submitbatch(req)
78 encresults = self.remote._submitbatch(req)
79 for encres, r in zip(encresults, rsp):
79 for encres, r in zip(encresults, rsp):
80 batchable, encresref, resref = r
80 batchable, encresref, resref = r
81 encresref.set(encres)
81 encresref.set(encres)
82 resref.set(batchable.next())
82 resref.set(batchable.next())
83
83
84 def batchable(f):
84 def batchable(f):
85 '''annotation for batchable methods
85 '''annotation for batchable methods
86
86
87 Such methods must implement a coroutine as follows:
87 Such methods must implement a coroutine as follows:
88
88
89 @batchable
89 @batchable
90 def sample(self, one, two=None):
90 def sample(self, one, two=None):
91 # Handle locally computable results first:
91 # Handle locally computable results first:
92 if not one:
92 if not one:
93 yield "a local result", None
93 yield "a local result", None
94 # Build list of encoded arguments suitable for your wire protocol:
94 # Build list of encoded arguments suitable for your wire protocol:
95 encargs = [('one', encode(one),), ('two', encode(two),)]
95 encargs = [('one', encode(one),), ('two', encode(two),)]
96 # Create future for injection of encoded result:
96 # Create future for injection of encoded result:
97 encresref = future()
97 encresref = future()
98 # Return encoded arguments and future:
98 # Return encoded arguments and future:
99 yield encargs, encresref
99 yield encargs, encresref
100 # Assuming the future to be filled with the result from the batched
100 # Assuming the future to be filled with the result from the batched
101 # request now. Decode it:
101 # request now. Decode it:
102 yield decode(encresref.value)
102 yield decode(encresref.value)
103
103
104 The decorator returns a function which wraps this coroutine as a plain
104 The decorator returns a function which wraps this coroutine as a plain
105 method, but adds the original method as an attribute called "batchable",
105 method, but adds the original method as an attribute called "batchable",
106 which is used by remotebatch to split the call into separate encoding and
106 which is used by remotebatch to split the call into separate encoding and
107 decoding phases.
107 decoding phases.
108 '''
108 '''
109 def plain(*args, **opts):
109 def plain(*args, **opts):
110 batchable = f(*args, **opts)
110 batchable = f(*args, **opts)
111 encargsorres, encresref = batchable.next()
111 encargsorres, encresref = batchable.next()
112 if not encresref:
112 if not encresref:
113 return encargsorres # a local result in this case
113 return encargsorres # a local result in this case
114 self = args[0]
114 self = args[0]
115 encresref.set(self._submitone(f.func_name, encargsorres))
115 encresref.set(self._submitone(f.func_name, encargsorres))
116 return batchable.next()
116 return batchable.next()
117 setattr(plain, 'batchable', f)
117 setattr(plain, 'batchable', f)
118 return plain
118 return plain
119
119
120 # list of nodes encoding / decoding
120 # list of nodes encoding / decoding
121
121
122 def decodelist(l, sep=' '):
122 def decodelist(l, sep=' '):
123 if l:
123 if l:
124 return map(bin, l.split(sep))
124 return map(bin, l.split(sep))
125 return []
125 return []
126
126
127 def encodelist(l, sep=' '):
127 def encodelist(l, sep=' '):
128 return sep.join(map(hex, l))
128 return sep.join(map(hex, l))
129
129
130 # batched call argument encoding
130 # batched call argument encoding
131
131
132 def escapearg(plain):
132 def escapearg(plain):
133 return (plain
133 return (plain
134 .replace(':', '::')
134 .replace(':', '::')
135 .replace(',', ':,')
135 .replace(',', ':,')
136 .replace(';', ':;')
136 .replace(';', ':;')
137 .replace('=', ':='))
137 .replace('=', ':='))
138
138
139 def unescapearg(escaped):
139 def unescapearg(escaped):
140 return (escaped
140 return (escaped
141 .replace(':=', '=')
141 .replace(':=', '=')
142 .replace(':;', ';')
142 .replace(':;', ';')
143 .replace(':,', ',')
143 .replace(':,', ',')
144 .replace('::', ':'))
144 .replace('::', ':'))
145
145
146 # client side
146 # client side
147
147
148 def todict(**args):
148 def todict(**args):
149 return args
149 return args
150
150
151 class wirepeer(peer.peerrepository):
151 class wirepeer(peer.peerrepository):
152
152
153 def batch(self):
153 def batch(self):
154 return remotebatch(self)
154 return remotebatch(self)
155 def _submitbatch(self, req):
155 def _submitbatch(self, req):
156 cmds = []
156 cmds = []
157 for op, argsdict in req:
157 for op, argsdict in req:
158 args = ','.join('%s=%s' % p for p in argsdict.iteritems())
158 args = ','.join('%s=%s' % p for p in argsdict.iteritems())
159 cmds.append('%s %s' % (op, args))
159 cmds.append('%s %s' % (op, args))
160 rsp = self._call("batch", cmds=';'.join(cmds))
160 rsp = self._call("batch", cmds=';'.join(cmds))
161 return rsp.split(';')
161 return rsp.split(';')
162 def _submitone(self, op, args):
162 def _submitone(self, op, args):
163 return self._call(op, **args)
163 return self._call(op, **args)
164
164
165 @batchable
165 @batchable
166 def lookup(self, key):
166 def lookup(self, key):
167 self.requirecap('lookup', _('look up remote revision'))
167 self.requirecap('lookup', _('look up remote revision'))
168 f = future()
168 f = future()
169 yield todict(key=encoding.fromlocal(key)), f
169 yield todict(key=encoding.fromlocal(key)), f
170 d = f.value
170 d = f.value
171 success, data = d[:-1].split(" ", 1)
171 success, data = d[:-1].split(" ", 1)
172 if int(success):
172 if int(success):
173 yield bin(data)
173 yield bin(data)
174 self._abort(error.RepoError(data))
174 self._abort(error.RepoError(data))
175
175
176 @batchable
176 @batchable
177 def heads(self):
177 def heads(self):
178 f = future()
178 f = future()
179 yield {}, f
179 yield {}, f
180 d = f.value
180 d = f.value
181 try:
181 try:
182 yield decodelist(d[:-1])
182 yield decodelist(d[:-1])
183 except ValueError:
183 except ValueError:
184 self._abort(error.ResponseError(_("unexpected response:"), d))
184 self._abort(error.ResponseError(_("unexpected response:"), d))
185
185
186 @batchable
186 @batchable
187 def known(self, nodes):
187 def known(self, nodes):
188 f = future()
188 f = future()
189 yield todict(nodes=encodelist(nodes)), f
189 yield todict(nodes=encodelist(nodes)), f
190 d = f.value
190 d = f.value
191 try:
191 try:
192 yield [bool(int(f)) for f in d]
192 yield [bool(int(f)) for f in d]
193 except ValueError:
193 except ValueError:
194 self._abort(error.ResponseError(_("unexpected response:"), d))
194 self._abort(error.ResponseError(_("unexpected response:"), d))
195
195
196 @batchable
196 @batchable
197 def branchmap(self):
197 def branchmap(self):
198 f = future()
198 f = future()
199 yield {}, f
199 yield {}, f
200 d = f.value
200 d = f.value
201 try:
201 try:
202 branchmap = {}
202 branchmap = {}
203 for branchpart in d.splitlines():
203 for branchpart in d.splitlines():
204 branchname, branchheads = branchpart.split(' ', 1)
204 branchname, branchheads = branchpart.split(' ', 1)
205 branchname = encoding.tolocal(urllib.unquote(branchname))
205 branchname = encoding.tolocal(urllib.unquote(branchname))
206 branchheads = decodelist(branchheads)
206 branchheads = decodelist(branchheads)
207 branchmap[branchname] = branchheads
207 branchmap[branchname] = branchheads
208 yield branchmap
208 yield branchmap
209 except TypeError:
209 except TypeError:
210 self._abort(error.ResponseError(_("unexpected response:"), d))
210 self._abort(error.ResponseError(_("unexpected response:"), d))
211
211
212 def branches(self, nodes):
212 def branches(self, nodes):
213 n = encodelist(nodes)
213 n = encodelist(nodes)
214 d = self._call("branches", nodes=n)
214 d = self._call("branches", nodes=n)
215 try:
215 try:
216 br = [tuple(decodelist(b)) for b in d.splitlines()]
216 br = [tuple(decodelist(b)) for b in d.splitlines()]
217 return br
217 return br
218 except ValueError:
218 except ValueError:
219 self._abort(error.ResponseError(_("unexpected response:"), d))
219 self._abort(error.ResponseError(_("unexpected response:"), d))
220
220
221 def between(self, pairs):
221 def between(self, pairs):
222 batch = 8 # avoid giant requests
222 batch = 8 # avoid giant requests
223 r = []
223 r = []
224 for i in xrange(0, len(pairs), batch):
224 for i in xrange(0, len(pairs), batch):
225 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
225 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
226 d = self._call("between", pairs=n)
226 d = self._call("between", pairs=n)
227 try:
227 try:
228 r.extend(l and decodelist(l) or [] for l in d.splitlines())
228 r.extend(l and decodelist(l) or [] for l in d.splitlines())
229 except ValueError:
229 except ValueError:
230 self._abort(error.ResponseError(_("unexpected response:"), d))
230 self._abort(error.ResponseError(_("unexpected response:"), d))
231 return r
231 return r
232
232
233 @batchable
233 @batchable
234 def pushkey(self, namespace, key, old, new):
234 def pushkey(self, namespace, key, old, new):
235 if not self.capable('pushkey'):
235 if not self.capable('pushkey'):
236 yield False, None
236 yield False, None
237 f = future()
237 f = future()
238 self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
238 self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
239 yield todict(namespace=encoding.fromlocal(namespace),
239 yield todict(namespace=encoding.fromlocal(namespace),
240 key=encoding.fromlocal(key),
240 key=encoding.fromlocal(key),
241 old=encoding.fromlocal(old),
241 old=encoding.fromlocal(old),
242 new=encoding.fromlocal(new)), f
242 new=encoding.fromlocal(new)), f
243 d = f.value
243 d = f.value
244 d, output = d.split('\n', 1)
244 d, output = d.split('\n', 1)
245 try:
245 try:
246 d = bool(int(d))
246 d = bool(int(d))
247 except ValueError:
247 except ValueError:
248 raise error.ResponseError(
248 raise error.ResponseError(
249 _('push failed (unexpected response):'), d)
249 _('push failed (unexpected response):'), d)
250 for l in output.splitlines(True):
250 for l in output.splitlines(True):
251 self.ui.status(_('remote: '), l)
251 self.ui.status(_('remote: '), l)
252 yield d
252 yield d
253
253
254 @batchable
254 @batchable
255 def listkeys(self, namespace):
255 def listkeys(self, namespace):
256 if not self.capable('pushkey'):
256 if not self.capable('pushkey'):
257 yield {}, None
257 yield {}, None
258 f = future()
258 f = future()
259 self.ui.debug('preparing listkeys for "%s"\n' % namespace)
259 self.ui.debug('preparing listkeys for "%s"\n' % namespace)
260 yield todict(namespace=encoding.fromlocal(namespace)), f
260 yield todict(namespace=encoding.fromlocal(namespace)), f
261 d = f.value
261 d = f.value
262 r = {}
262 r = {}
263 for l in d.splitlines():
263 for l in d.splitlines():
264 k, v = l.split('\t')
264 k, v = l.split('\t')
265 r[encoding.tolocal(k)] = encoding.tolocal(v)
265 r[encoding.tolocal(k)] = encoding.tolocal(v)
266 yield r
266 yield r
267
267
268 def stream_out(self):
268 def stream_out(self):
269 return self._callstream('stream_out')
269 return self._callstream('stream_out')
270
270
271 def changegroup(self, nodes, kind):
271 def changegroup(self, nodes, kind):
272 n = encodelist(nodes)
272 n = encodelist(nodes)
273 f = self._callstream("changegroup", roots=n)
273 f = self._callstream("changegroup", roots=n)
274 return changegroupmod.unbundle10(self._decompress(f), 'UN')
274 return changegroupmod.unbundle10(self._decompress(f), 'UN')
275
275
276 def changegroupsubset(self, bases, heads, kind):
276 def changegroupsubset(self, bases, heads, kind):
277 self.requirecap('changegroupsubset', _('look up remote changes'))
277 self.requirecap('changegroupsubset', _('look up remote changes'))
278 bases = encodelist(bases)
278 bases = encodelist(bases)
279 heads = encodelist(heads)
279 heads = encodelist(heads)
280 f = self._callstream("changegroupsubset",
280 f = self._callstream("changegroupsubset",
281 bases=bases, heads=heads)
281 bases=bases, heads=heads)
282 return changegroupmod.unbundle10(self._decompress(f), 'UN')
282 return changegroupmod.unbundle10(self._decompress(f), 'UN')
283
283
284 def getbundle(self, source, heads=None, common=None):
284 def getbundle(self, source, heads=None, common=None):
285 self.requirecap('getbundle', _('look up remote changes'))
285 self.requirecap('getbundle', _('look up remote changes'))
286 opts = {}
286 opts = {}
287 if heads is not None:
287 if heads is not None:
288 opts['heads'] = encodelist(heads)
288 opts['heads'] = encodelist(heads)
289 if common is not None:
289 if common is not None:
290 opts['common'] = encodelist(common)
290 opts['common'] = encodelist(common)
291 f = self._callstream("getbundle", **opts)
291 f = self._callstream("getbundle", **opts)
292 return changegroupmod.unbundle10(self._decompress(f), 'UN')
292 return changegroupmod.unbundle10(self._decompress(f), 'UN')
293
293
294 def unbundle(self, cg, heads, source):
294 def unbundle(self, cg, heads, source):
295 '''Send cg (a readable file-like object representing the
295 '''Send cg (a readable file-like object representing the
296 changegroup to push, typically a chunkbuffer object) to the
296 changegroup to push, typically a chunkbuffer object) to the
297 remote server as a bundle. Return an integer indicating the
297 remote server as a bundle. Return an integer indicating the
298 result of the push (see localrepository.addchangegroup()).'''
298 result of the push (see localrepository.addchangegroup()).'''
299
299
300 if heads != ['force'] and self.capable('unbundlehash'):
300 if heads != ['force'] and self.capable('unbundlehash'):
301 heads = encodelist(['hashed',
301 heads = encodelist(['hashed',
302 util.sha1(''.join(sorted(heads))).digest()])
302 util.sha1(''.join(sorted(heads))).digest()])
303 else:
303 else:
304 heads = encodelist(heads)
304 heads = encodelist(heads)
305
305
306 ret, output = self._callpush("unbundle", cg, heads=heads)
306 ret, output = self._callpush("unbundle", cg, heads=heads)
307 if ret == "":
307 if ret == "":
308 raise error.ResponseError(
308 raise error.ResponseError(
309 _('push failed:'), output)
309 _('push failed:'), output)
310 try:
310 try:
311 ret = int(ret)
311 ret = int(ret)
312 except ValueError:
312 except ValueError:
313 raise error.ResponseError(
313 raise error.ResponseError(
314 _('push failed (unexpected response):'), ret)
314 _('push failed (unexpected response):'), ret)
315
315
316 for l in output.splitlines(True):
316 for l in output.splitlines(True):
317 self.ui.status(_('remote: '), l)
317 self.ui.status(_('remote: '), l)
318 return ret
318 return ret
319
319
320 def debugwireargs(self, one, two, three=None, four=None, five=None):
320 def debugwireargs(self, one, two, three=None, four=None, five=None):
321 # don't pass optional arguments left at their default value
321 # don't pass optional arguments left at their default value
322 opts = {}
322 opts = {}
323 if three is not None:
323 if three is not None:
324 opts['three'] = three
324 opts['three'] = three
325 if four is not None:
325 if four is not None:
326 opts['four'] = four
326 opts['four'] = four
327 return self._call('debugwireargs', one=one, two=two, **opts)
327 return self._call('debugwireargs', one=one, two=two, **opts)
328
328
329 # server side
329 # server side
330
330
331 class streamres(object):
331 class streamres(object):
332 def __init__(self, gen):
332 def __init__(self, gen):
333 self.gen = gen
333 self.gen = gen
334
334
335 class pushres(object):
335 class pushres(object):
336 def __init__(self, res):
336 def __init__(self, res):
337 self.res = res
337 self.res = res
338
338
339 class pusherr(object):
339 class pusherr(object):
340 def __init__(self, res):
340 def __init__(self, res):
341 self.res = res
341 self.res = res
342
342
343 class ooberror(object):
343 class ooberror(object):
344 def __init__(self, message):
344 def __init__(self, message):
345 self.message = message
345 self.message = message
346
346
347 def dispatch(repo, proto, command):
347 def dispatch(repo, proto, command):
348 repo = repo.filtered("unserved")
348 repo = repo.filtered("served")
349 func, spec = commands[command]
349 func, spec = commands[command]
350 args = proto.getargs(spec)
350 args = proto.getargs(spec)
351 return func(repo, proto, *args)
351 return func(repo, proto, *args)
352
352
353 def options(cmd, keys, others):
353 def options(cmd, keys, others):
354 opts = {}
354 opts = {}
355 for k in keys:
355 for k in keys:
356 if k in others:
356 if k in others:
357 opts[k] = others[k]
357 opts[k] = others[k]
358 del others[k]
358 del others[k]
359 if others:
359 if others:
360 sys.stderr.write("abort: %s got unexpected arguments %s\n"
360 sys.stderr.write("abort: %s got unexpected arguments %s\n"
361 % (cmd, ",".join(others)))
361 % (cmd, ",".join(others)))
362 return opts
362 return opts
363
363
364 def batch(repo, proto, cmds, others):
364 def batch(repo, proto, cmds, others):
365 repo = repo.filtered("unserved")
365 repo = repo.filtered("served")
366 res = []
366 res = []
367 for pair in cmds.split(';'):
367 for pair in cmds.split(';'):
368 op, args = pair.split(' ', 1)
368 op, args = pair.split(' ', 1)
369 vals = {}
369 vals = {}
370 for a in args.split(','):
370 for a in args.split(','):
371 if a:
371 if a:
372 n, v = a.split('=')
372 n, v = a.split('=')
373 vals[n] = unescapearg(v)
373 vals[n] = unescapearg(v)
374 func, spec = commands[op]
374 func, spec = commands[op]
375 if spec:
375 if spec:
376 keys = spec.split()
376 keys = spec.split()
377 data = {}
377 data = {}
378 for k in keys:
378 for k in keys:
379 if k == '*':
379 if k == '*':
380 star = {}
380 star = {}
381 for key in vals.keys():
381 for key in vals.keys():
382 if key not in keys:
382 if key not in keys:
383 star[key] = vals[key]
383 star[key] = vals[key]
384 data['*'] = star
384 data['*'] = star
385 else:
385 else:
386 data[k] = vals[k]
386 data[k] = vals[k]
387 result = func(repo, proto, *[data[k] for k in keys])
387 result = func(repo, proto, *[data[k] for k in keys])
388 else:
388 else:
389 result = func(repo, proto)
389 result = func(repo, proto)
390 if isinstance(result, ooberror):
390 if isinstance(result, ooberror):
391 return result
391 return result
392 res.append(escapearg(result))
392 res.append(escapearg(result))
393 return ';'.join(res)
393 return ';'.join(res)
394
394
395 def between(repo, proto, pairs):
395 def between(repo, proto, pairs):
396 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
396 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
397 r = []
397 r = []
398 for b in repo.between(pairs):
398 for b in repo.between(pairs):
399 r.append(encodelist(b) + "\n")
399 r.append(encodelist(b) + "\n")
400 return "".join(r)
400 return "".join(r)
401
401
402 def branchmap(repo, proto):
402 def branchmap(repo, proto):
403 branchmap = repo.branchmap()
403 branchmap = repo.branchmap()
404 heads = []
404 heads = []
405 for branch, nodes in branchmap.iteritems():
405 for branch, nodes in branchmap.iteritems():
406 branchname = urllib.quote(encoding.fromlocal(branch))
406 branchname = urllib.quote(encoding.fromlocal(branch))
407 branchnodes = encodelist(nodes)
407 branchnodes = encodelist(nodes)
408 heads.append('%s %s' % (branchname, branchnodes))
408 heads.append('%s %s' % (branchname, branchnodes))
409 return '\n'.join(heads)
409 return '\n'.join(heads)
410
410
411 def branches(repo, proto, nodes):
411 def branches(repo, proto, nodes):
412 nodes = decodelist(nodes)
412 nodes = decodelist(nodes)
413 r = []
413 r = []
414 for b in repo.branches(nodes):
414 for b in repo.branches(nodes):
415 r.append(encodelist(b) + "\n")
415 r.append(encodelist(b) + "\n")
416 return "".join(r)
416 return "".join(r)
417
417
418 def capabilities(repo, proto):
418 def capabilities(repo, proto):
419 caps = ('lookup changegroupsubset branchmap pushkey known getbundle '
419 caps = ('lookup changegroupsubset branchmap pushkey known getbundle '
420 'unbundlehash batch').split()
420 'unbundlehash batch').split()
421 if _allowstream(repo.ui):
421 if _allowstream(repo.ui):
422 if repo.ui.configbool('server', 'preferuncompressed', False):
422 if repo.ui.configbool('server', 'preferuncompressed', False):
423 caps.append('stream-preferred')
423 caps.append('stream-preferred')
424 requiredformats = repo.requirements & repo.supportedformats
424 requiredformats = repo.requirements & repo.supportedformats
425 # if our local revlogs are just revlogv1, add 'stream' cap
425 # if our local revlogs are just revlogv1, add 'stream' cap
426 if not requiredformats - set(('revlogv1',)):
426 if not requiredformats - set(('revlogv1',)):
427 caps.append('stream')
427 caps.append('stream')
428 # otherwise, add 'streamreqs' detailing our local revlog format
428 # otherwise, add 'streamreqs' detailing our local revlog format
429 else:
429 else:
430 caps.append('streamreqs=%s' % ','.join(requiredformats))
430 caps.append('streamreqs=%s' % ','.join(requiredformats))
431 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
431 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
432 caps.append('httpheader=1024')
432 caps.append('httpheader=1024')
433 return ' '.join(caps)
433 return ' '.join(caps)
434
434
435 def changegroup(repo, proto, roots):
435 def changegroup(repo, proto, roots):
436 nodes = decodelist(roots)
436 nodes = decodelist(roots)
437 cg = repo.changegroup(nodes, 'serve')
437 cg = repo.changegroup(nodes, 'serve')
438 return streamres(proto.groupchunks(cg))
438 return streamres(proto.groupchunks(cg))
439
439
440 def changegroupsubset(repo, proto, bases, heads):
440 def changegroupsubset(repo, proto, bases, heads):
441 bases = decodelist(bases)
441 bases = decodelist(bases)
442 heads = decodelist(heads)
442 heads = decodelist(heads)
443 cg = repo.changegroupsubset(bases, heads, 'serve')
443 cg = repo.changegroupsubset(bases, heads, 'serve')
444 return streamres(proto.groupchunks(cg))
444 return streamres(proto.groupchunks(cg))
445
445
446 def debugwireargs(repo, proto, one, two, others):
446 def debugwireargs(repo, proto, one, two, others):
447 # only accept optional args from the known set
447 # only accept optional args from the known set
448 opts = options('debugwireargs', ['three', 'four'], others)
448 opts = options('debugwireargs', ['three', 'four'], others)
449 return repo.debugwireargs(one, two, **opts)
449 return repo.debugwireargs(one, two, **opts)
450
450
451 def getbundle(repo, proto, others):
451 def getbundle(repo, proto, others):
452 opts = options('getbundle', ['heads', 'common'], others)
452 opts = options('getbundle', ['heads', 'common'], others)
453 for k, v in opts.iteritems():
453 for k, v in opts.iteritems():
454 opts[k] = decodelist(v)
454 opts[k] = decodelist(v)
455 cg = repo.getbundle('serve', **opts)
455 cg = repo.getbundle('serve', **opts)
456 return streamres(proto.groupchunks(cg))
456 return streamres(proto.groupchunks(cg))
457
457
458 def heads(repo, proto):
458 def heads(repo, proto):
459 h = repo.heads()
459 h = repo.heads()
460 return encodelist(h) + "\n"
460 return encodelist(h) + "\n"
461
461
462 def hello(repo, proto):
462 def hello(repo, proto):
463 '''the hello command returns a set of lines describing various
463 '''the hello command returns a set of lines describing various
464 interesting things about the server, in an RFC822-like format.
464 interesting things about the server, in an RFC822-like format.
465 Currently the only one defined is "capabilities", which
465 Currently the only one defined is "capabilities", which
466 consists of a line in the form:
466 consists of a line in the form:
467
467
468 capabilities: space separated list of tokens
468 capabilities: space separated list of tokens
469 '''
469 '''
470 return "capabilities: %s\n" % (capabilities(repo, proto))
470 return "capabilities: %s\n" % (capabilities(repo, proto))
471
471
472 def listkeys(repo, proto, namespace):
472 def listkeys(repo, proto, namespace):
473 d = repo.listkeys(encoding.tolocal(namespace)).items()
473 d = repo.listkeys(encoding.tolocal(namespace)).items()
474 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
474 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
475 for k, v in d])
475 for k, v in d])
476 return t
476 return t
477
477
478 def lookup(repo, proto, key):
478 def lookup(repo, proto, key):
479 try:
479 try:
480 k = encoding.tolocal(key)
480 k = encoding.tolocal(key)
481 c = repo[k]
481 c = repo[k]
482 r = c.hex()
482 r = c.hex()
483 success = 1
483 success = 1
484 except Exception, inst:
484 except Exception, inst:
485 r = str(inst)
485 r = str(inst)
486 success = 0
486 success = 0
487 return "%s %s\n" % (success, r)
487 return "%s %s\n" % (success, r)
488
488
489 def known(repo, proto, nodes, others):
489 def known(repo, proto, nodes, others):
490 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
490 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
491
491
492 def pushkey(repo, proto, namespace, key, old, new):
492 def pushkey(repo, proto, namespace, key, old, new):
493 # compatibility with pre-1.8 clients which were accidentally
493 # compatibility with pre-1.8 clients which were accidentally
494 # sending raw binary nodes rather than utf-8-encoded hex
494 # sending raw binary nodes rather than utf-8-encoded hex
495 if len(new) == 20 and new.encode('string-escape') != new:
495 if len(new) == 20 and new.encode('string-escape') != new:
496 # looks like it could be a binary node
496 # looks like it could be a binary node
497 try:
497 try:
498 new.decode('utf-8')
498 new.decode('utf-8')
499 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
499 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
500 except UnicodeDecodeError:
500 except UnicodeDecodeError:
501 pass # binary, leave unmodified
501 pass # binary, leave unmodified
502 else:
502 else:
503 new = encoding.tolocal(new) # normal path
503 new = encoding.tolocal(new) # normal path
504
504
505 if util.safehasattr(proto, 'restore'):
505 if util.safehasattr(proto, 'restore'):
506
506
507 proto.redirect()
507 proto.redirect()
508
508
509 try:
509 try:
510 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
510 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
511 encoding.tolocal(old), new) or False
511 encoding.tolocal(old), new) or False
512 except util.Abort:
512 except util.Abort:
513 r = False
513 r = False
514
514
515 output = proto.restore()
515 output = proto.restore()
516
516
517 return '%s\n%s' % (int(r), output)
517 return '%s\n%s' % (int(r), output)
518
518
519 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
519 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
520 encoding.tolocal(old), new)
520 encoding.tolocal(old), new)
521 return '%s\n' % int(r)
521 return '%s\n' % int(r)
522
522
523 def _allowstream(ui):
523 def _allowstream(ui):
524 return ui.configbool('server', 'uncompressed', True, untrusted=True)
524 return ui.configbool('server', 'uncompressed', True, untrusted=True)
525
525
526 def stream(repo, proto):
526 def stream(repo, proto):
527 '''If the server supports streaming clone, it advertises the "stream"
527 '''If the server supports streaming clone, it advertises the "stream"
528 capability with a value representing the version and flags of the repo
528 capability with a value representing the version and flags of the repo
529 it is serving. Client checks to see if it understands the format.
529 it is serving. Client checks to see if it understands the format.
530
530
531 The format is simple: the server writes out a line with the amount
531 The format is simple: the server writes out a line with the amount
532 of files, then the total amount of bytes to be transferred (separated
532 of files, then the total amount of bytes to be transferred (separated
533 by a space). Then, for each file, the server first writes the filename
533 by a space). Then, for each file, the server first writes the filename
534 and filesize (separated by the null character), then the file contents.
534 and filesize (separated by the null character), then the file contents.
535 '''
535 '''
536
536
537 if not _allowstream(repo.ui):
537 if not _allowstream(repo.ui):
538 return '1\n'
538 return '1\n'
539
539
540 entries = []
540 entries = []
541 total_bytes = 0
541 total_bytes = 0
542 try:
542 try:
543 # get consistent snapshot of repo, lock during scan
543 # get consistent snapshot of repo, lock during scan
544 lock = repo.lock()
544 lock = repo.lock()
545 try:
545 try:
546 repo.ui.debug('scanning\n')
546 repo.ui.debug('scanning\n')
547 for name, ename, size in repo.store.walk():
547 for name, ename, size in repo.store.walk():
548 if size:
548 if size:
549 entries.append((name, size))
549 entries.append((name, size))
550 total_bytes += size
550 total_bytes += size
551 finally:
551 finally:
552 lock.release()
552 lock.release()
553 except error.LockError:
553 except error.LockError:
554 return '2\n' # error: 2
554 return '2\n' # error: 2
555
555
556 def streamer(repo, entries, total):
556 def streamer(repo, entries, total):
557 '''stream out all metadata files in repository.'''
557 '''stream out all metadata files in repository.'''
558 yield '0\n' # success
558 yield '0\n' # success
559 repo.ui.debug('%d files, %d bytes to transfer\n' %
559 repo.ui.debug('%d files, %d bytes to transfer\n' %
560 (len(entries), total_bytes))
560 (len(entries), total_bytes))
561 yield '%d %d\n' % (len(entries), total_bytes)
561 yield '%d %d\n' % (len(entries), total_bytes)
562
562
563 sopener = repo.sopener
563 sopener = repo.sopener
564 oldaudit = sopener.mustaudit
564 oldaudit = sopener.mustaudit
565 debugflag = repo.ui.debugflag
565 debugflag = repo.ui.debugflag
566 sopener.mustaudit = False
566 sopener.mustaudit = False
567
567
568 try:
568 try:
569 for name, size in entries:
569 for name, size in entries:
570 if debugflag:
570 if debugflag:
571 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
571 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
572 # partially encode name over the wire for backwards compat
572 # partially encode name over the wire for backwards compat
573 yield '%s\0%d\n' % (store.encodedir(name), size)
573 yield '%s\0%d\n' % (store.encodedir(name), size)
574 if size <= 65536:
574 if size <= 65536:
575 fp = sopener(name)
575 fp = sopener(name)
576 try:
576 try:
577 data = fp.read(size)
577 data = fp.read(size)
578 finally:
578 finally:
579 fp.close()
579 fp.close()
580 yield data
580 yield data
581 else:
581 else:
582 for chunk in util.filechunkiter(sopener(name), limit=size):
582 for chunk in util.filechunkiter(sopener(name), limit=size):
583 yield chunk
583 yield chunk
584 # replace with "finally:" when support for python 2.4 has been dropped
584 # replace with "finally:" when support for python 2.4 has been dropped
585 except Exception:
585 except Exception:
586 sopener.mustaudit = oldaudit
586 sopener.mustaudit = oldaudit
587 raise
587 raise
588 sopener.mustaudit = oldaudit
588 sopener.mustaudit = oldaudit
589
589
590 return streamres(streamer(repo, entries, total_bytes))
590 return streamres(streamer(repo, entries, total_bytes))
591
591
592 def unbundle(repo, proto, heads):
592 def unbundle(repo, proto, heads):
593 their_heads = decodelist(heads)
593 their_heads = decodelist(heads)
594
594
595 def check_heads():
595 def check_heads():
596 heads = repo.heads()
596 heads = repo.heads()
597 heads_hash = util.sha1(''.join(sorted(heads))).digest()
597 heads_hash = util.sha1(''.join(sorted(heads))).digest()
598 return (their_heads == ['force'] or their_heads == heads or
598 return (their_heads == ['force'] or their_heads == heads or
599 their_heads == ['hashed', heads_hash])
599 their_heads == ['hashed', heads_hash])
600
600
601 proto.redirect()
601 proto.redirect()
602
602
603 # fail early if possible
603 # fail early if possible
604 if not check_heads():
604 if not check_heads():
605 return pusherr('unsynced changes')
605 return pusherr('unsynced changes')
606
606
607 # write bundle data to temporary file because it can be big
607 # write bundle data to temporary file because it can be big
608 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
608 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
609 fp = os.fdopen(fd, 'wb+')
609 fp = os.fdopen(fd, 'wb+')
610 r = 0
610 r = 0
611 try:
611 try:
612 proto.getfile(fp)
612 proto.getfile(fp)
613 lock = repo.lock()
613 lock = repo.lock()
614 try:
614 try:
615 if not check_heads():
615 if not check_heads():
616 # someone else committed/pushed/unbundled while we
616 # someone else committed/pushed/unbundled while we
617 # were transferring data
617 # were transferring data
618 return pusherr('unsynced changes')
618 return pusherr('unsynced changes')
619
619
620 # push can proceed
620 # push can proceed
621 fp.seek(0)
621 fp.seek(0)
622 gen = changegroupmod.readbundle(fp, None)
622 gen = changegroupmod.readbundle(fp, None)
623
623
624 try:
624 try:
625 r = repo.addchangegroup(gen, 'serve', proto._client())
625 r = repo.addchangegroup(gen, 'serve', proto._client())
626 except util.Abort, inst:
626 except util.Abort, inst:
627 sys.stderr.write("abort: %s\n" % inst)
627 sys.stderr.write("abort: %s\n" % inst)
628 finally:
628 finally:
629 lock.release()
629 lock.release()
630 return pushres(r)
630 return pushres(r)
631
631
632 finally:
632 finally:
633 fp.close()
633 fp.close()
634 os.unlink(tempname)
634 os.unlink(tempname)
635
635
636 commands = {
636 commands = {
637 'batch': (batch, 'cmds *'),
637 'batch': (batch, 'cmds *'),
638 'between': (between, 'pairs'),
638 'between': (between, 'pairs'),
639 'branchmap': (branchmap, ''),
639 'branchmap': (branchmap, ''),
640 'branches': (branches, 'nodes'),
640 'branches': (branches, 'nodes'),
641 'capabilities': (capabilities, ''),
641 'capabilities': (capabilities, ''),
642 'changegroup': (changegroup, 'roots'),
642 'changegroup': (changegroup, 'roots'),
643 'changegroupsubset': (changegroupsubset, 'bases heads'),
643 'changegroupsubset': (changegroupsubset, 'bases heads'),
644 'debugwireargs': (debugwireargs, 'one two *'),
644 'debugwireargs': (debugwireargs, 'one two *'),
645 'getbundle': (getbundle, '*'),
645 'getbundle': (getbundle, '*'),
646 'heads': (heads, ''),
646 'heads': (heads, ''),
647 'hello': (hello, ''),
647 'hello': (hello, ''),
648 'known': (known, 'nodes *'),
648 'known': (known, 'nodes *'),
649 'listkeys': (listkeys, 'namespace'),
649 'listkeys': (listkeys, 'namespace'),
650 'lookup': (lookup, 'key'),
650 'lookup': (lookup, 'key'),
651 'pushkey': (pushkey, 'namespace key old new'),
651 'pushkey': (pushkey, 'namespace key old new'),
652 'stream_out': (stream, ''),
652 'stream_out': (stream, ''),
653 'unbundle': (unbundle, 'heads'),
653 'unbundle': (unbundle, 'heads'),
654 }
654 }
@@ -1,2143 +1,2143
1 > do_push()
1 > do_push()
2 > {
2 > {
3 > user=$1
3 > user=$1
4 > shift
4 > shift
5 > echo "Pushing as user $user"
5 > echo "Pushing as user $user"
6 > echo 'hgrc = """'
6 > echo 'hgrc = """'
7 > sed -e 1,2d b/.hg/hgrc | grep -v fakegroups.py
7 > sed -e 1,2d b/.hg/hgrc | grep -v fakegroups.py
8 > echo '"""'
8 > echo '"""'
9 > if test -f acl.config; then
9 > if test -f acl.config; then
10 > echo 'acl.config = """'
10 > echo 'acl.config = """'
11 > cat acl.config
11 > cat acl.config
12 > echo '"""'
12 > echo '"""'
13 > fi
13 > fi
14 > # On AIX /etc/profile sets LOGNAME read-only. So
14 > # On AIX /etc/profile sets LOGNAME read-only. So
15 > # LOGNAME=$user hg --cws a --debug push ../b
15 > # LOGNAME=$user hg --cws a --debug push ../b
16 > # fails with "This variable is read only."
16 > # fails with "This variable is read only."
17 > # Use env to work around this.
17 > # Use env to work around this.
18 > env LOGNAME=$user hg --cwd a --debug push ../b
18 > env LOGNAME=$user hg --cwd a --debug push ../b
19 > hg --cwd b rollback
19 > hg --cwd b rollback
20 > hg --cwd b --quiet tip
20 > hg --cwd b --quiet tip
21 > echo
21 > echo
22 > }
22 > }
23
23
24 > init_config()
24 > init_config()
25 > {
25 > {
26 > cat > fakegroups.py <<EOF
26 > cat > fakegroups.py <<EOF
27 > from hgext import acl
27 > from hgext import acl
28 > def fakegetusers(ui, group):
28 > def fakegetusers(ui, group):
29 > try:
29 > try:
30 > return acl._getusersorig(ui, group)
30 > return acl._getusersorig(ui, group)
31 > except:
31 > except:
32 > return ["fred", "betty"]
32 > return ["fred", "betty"]
33 > acl._getusersorig = acl._getusers
33 > acl._getusersorig = acl._getusers
34 > acl._getusers = fakegetusers
34 > acl._getusers = fakegetusers
35 > EOF
35 > EOF
36 > rm -f acl.config
36 > rm -f acl.config
37 > cat > $config <<EOF
37 > cat > $config <<EOF
38 > [hooks]
38 > [hooks]
39 > pretxnchangegroup.acl = python:hgext.acl.hook
39 > pretxnchangegroup.acl = python:hgext.acl.hook
40 > [acl]
40 > [acl]
41 > sources = push
41 > sources = push
42 > [extensions]
42 > [extensions]
43 > f=`pwd`/fakegroups.py
43 > f=`pwd`/fakegroups.py
44 > EOF
44 > EOF
45 > }
45 > }
46
46
47 $ hg init a
47 $ hg init a
48 $ cd a
48 $ cd a
49 $ mkdir foo foo/Bar quux
49 $ mkdir foo foo/Bar quux
50 $ echo 'in foo' > foo/file.txt
50 $ echo 'in foo' > foo/file.txt
51 $ echo 'in foo/Bar' > foo/Bar/file.txt
51 $ echo 'in foo/Bar' > foo/Bar/file.txt
52 $ echo 'in quux' > quux/file.py
52 $ echo 'in quux' > quux/file.py
53 $ hg add -q
53 $ hg add -q
54 $ hg ci -m 'add files' -d '1000000 0'
54 $ hg ci -m 'add files' -d '1000000 0'
55 $ echo >> foo/file.txt
55 $ echo >> foo/file.txt
56 $ hg ci -m 'change foo/file' -d '1000001 0'
56 $ hg ci -m 'change foo/file' -d '1000001 0'
57 $ echo >> foo/Bar/file.txt
57 $ echo >> foo/Bar/file.txt
58 $ hg ci -m 'change foo/Bar/file' -d '1000002 0'
58 $ hg ci -m 'change foo/Bar/file' -d '1000002 0'
59 $ echo >> quux/file.py
59 $ echo >> quux/file.py
60 $ hg ci -m 'change quux/file' -d '1000003 0'
60 $ hg ci -m 'change quux/file' -d '1000003 0'
61 $ hg tip --quiet
61 $ hg tip --quiet
62 3:911600dab2ae
62 3:911600dab2ae
63
63
64 $ cd ..
64 $ cd ..
65 $ hg clone -r 0 a b
65 $ hg clone -r 0 a b
66 adding changesets
66 adding changesets
67 adding manifests
67 adding manifests
68 adding file changes
68 adding file changes
69 added 1 changesets with 3 changes to 3 files
69 added 1 changesets with 3 changes to 3 files
70 updating to branch default
70 updating to branch default
71 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
71 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
72
72
73 $ config=b/.hg/hgrc
73 $ config=b/.hg/hgrc
74
74
75 Extension disabled for lack of a hook
75 Extension disabled for lack of a hook
76
76
77 $ do_push fred
77 $ do_push fred
78 Pushing as user fred
78 Pushing as user fred
79 hgrc = """
79 hgrc = """
80 """
80 """
81 pushing to ../b
81 pushing to ../b
82 query 1; heads
82 query 1; heads
83 searching for changes
83 searching for changes
84 all remote heads known locally
84 all remote heads known locally
85 listing keys for "bookmarks"
85 listing keys for "bookmarks"
86 3 changesets found
86 3 changesets found
87 list of changesets:
87 list of changesets:
88 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
88 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
89 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
89 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
90 911600dab2ae7a9baff75958b84fe606851ce955
90 911600dab2ae7a9baff75958b84fe606851ce955
91 adding changesets
91 adding changesets
92 bundling: 1/3 changesets (33.33%)
92 bundling: 1/3 changesets (33.33%)
93 bundling: 2/3 changesets (66.67%)
93 bundling: 2/3 changesets (66.67%)
94 bundling: 3/3 changesets (100.00%)
94 bundling: 3/3 changesets (100.00%)
95 bundling: 1/3 manifests (33.33%)
95 bundling: 1/3 manifests (33.33%)
96 bundling: 2/3 manifests (66.67%)
96 bundling: 2/3 manifests (66.67%)
97 bundling: 3/3 manifests (100.00%)
97 bundling: 3/3 manifests (100.00%)
98 bundling: foo/Bar/file.txt 1/3 files (33.33%)
98 bundling: foo/Bar/file.txt 1/3 files (33.33%)
99 bundling: foo/file.txt 2/3 files (66.67%)
99 bundling: foo/file.txt 2/3 files (66.67%)
100 bundling: quux/file.py 3/3 files (100.00%)
100 bundling: quux/file.py 3/3 files (100.00%)
101 changesets: 1 chunks
101 changesets: 1 chunks
102 add changeset ef1ea85a6374
102 add changeset ef1ea85a6374
103 changesets: 2 chunks
103 changesets: 2 chunks
104 add changeset f9cafe1212c8
104 add changeset f9cafe1212c8
105 changesets: 3 chunks
105 changesets: 3 chunks
106 add changeset 911600dab2ae
106 add changeset 911600dab2ae
107 adding manifests
107 adding manifests
108 manifests: 1/3 chunks (33.33%)
108 manifests: 1/3 chunks (33.33%)
109 manifests: 2/3 chunks (66.67%)
109 manifests: 2/3 chunks (66.67%)
110 manifests: 3/3 chunks (100.00%)
110 manifests: 3/3 chunks (100.00%)
111 adding file changes
111 adding file changes
112 adding foo/Bar/file.txt revisions
112 adding foo/Bar/file.txt revisions
113 files: 1/3 chunks (33.33%)
113 files: 1/3 chunks (33.33%)
114 adding foo/file.txt revisions
114 adding foo/file.txt revisions
115 files: 2/3 chunks (66.67%)
115 files: 2/3 chunks (66.67%)
116 adding quux/file.py revisions
116 adding quux/file.py revisions
117 files: 3/3 chunks (100.00%)
117 files: 3/3 chunks (100.00%)
118 added 3 changesets with 3 changes to 3 files
118 added 3 changesets with 3 changes to 3 files
119 listing keys for "phases"
119 listing keys for "phases"
120 try to push obsolete markers to remote
120 try to push obsolete markers to remote
121 updating the branch cache
121 updating the branch cache
122 checking for updated bookmarks
122 checking for updated bookmarks
123 listing keys for "bookmarks"
123 listing keys for "bookmarks"
124 repository tip rolled back to revision 0 (undo push)
124 repository tip rolled back to revision 0 (undo push)
125 0:6675d58eff77
125 0:6675d58eff77
126
126
127
127
128 $ echo '[hooks]' >> $config
128 $ echo '[hooks]' >> $config
129 $ echo 'pretxnchangegroup.acl = python:hgext.acl.hook' >> $config
129 $ echo 'pretxnchangegroup.acl = python:hgext.acl.hook' >> $config
130
130
131 Extension disabled for lack of acl.sources
131 Extension disabled for lack of acl.sources
132
132
133 $ do_push fred
133 $ do_push fred
134 Pushing as user fred
134 Pushing as user fred
135 hgrc = """
135 hgrc = """
136 [hooks]
136 [hooks]
137 pretxnchangegroup.acl = python:hgext.acl.hook
137 pretxnchangegroup.acl = python:hgext.acl.hook
138 """
138 """
139 pushing to ../b
139 pushing to ../b
140 query 1; heads
140 query 1; heads
141 searching for changes
141 searching for changes
142 all remote heads known locally
142 all remote heads known locally
143 invalid branchheads cache (unserved): tip differs
143 invalid branchheads cache (served): tip differs
144 listing keys for "bookmarks"
144 listing keys for "bookmarks"
145 3 changesets found
145 3 changesets found
146 list of changesets:
146 list of changesets:
147 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
147 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
148 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
148 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
149 911600dab2ae7a9baff75958b84fe606851ce955
149 911600dab2ae7a9baff75958b84fe606851ce955
150 adding changesets
150 adding changesets
151 bundling: 1/3 changesets (33.33%)
151 bundling: 1/3 changesets (33.33%)
152 bundling: 2/3 changesets (66.67%)
152 bundling: 2/3 changesets (66.67%)
153 bundling: 3/3 changesets (100.00%)
153 bundling: 3/3 changesets (100.00%)
154 bundling: 1/3 manifests (33.33%)
154 bundling: 1/3 manifests (33.33%)
155 bundling: 2/3 manifests (66.67%)
155 bundling: 2/3 manifests (66.67%)
156 bundling: 3/3 manifests (100.00%)
156 bundling: 3/3 manifests (100.00%)
157 bundling: foo/Bar/file.txt 1/3 files (33.33%)
157 bundling: foo/Bar/file.txt 1/3 files (33.33%)
158 bundling: foo/file.txt 2/3 files (66.67%)
158 bundling: foo/file.txt 2/3 files (66.67%)
159 bundling: quux/file.py 3/3 files (100.00%)
159 bundling: quux/file.py 3/3 files (100.00%)
160 changesets: 1 chunks
160 changesets: 1 chunks
161 add changeset ef1ea85a6374
161 add changeset ef1ea85a6374
162 changesets: 2 chunks
162 changesets: 2 chunks
163 add changeset f9cafe1212c8
163 add changeset f9cafe1212c8
164 changesets: 3 chunks
164 changesets: 3 chunks
165 add changeset 911600dab2ae
165 add changeset 911600dab2ae
166 adding manifests
166 adding manifests
167 manifests: 1/3 chunks (33.33%)
167 manifests: 1/3 chunks (33.33%)
168 manifests: 2/3 chunks (66.67%)
168 manifests: 2/3 chunks (66.67%)
169 manifests: 3/3 chunks (100.00%)
169 manifests: 3/3 chunks (100.00%)
170 adding file changes
170 adding file changes
171 adding foo/Bar/file.txt revisions
171 adding foo/Bar/file.txt revisions
172 files: 1/3 chunks (33.33%)
172 files: 1/3 chunks (33.33%)
173 adding foo/file.txt revisions
173 adding foo/file.txt revisions
174 files: 2/3 chunks (66.67%)
174 files: 2/3 chunks (66.67%)
175 adding quux/file.py revisions
175 adding quux/file.py revisions
176 files: 3/3 chunks (100.00%)
176 files: 3/3 chunks (100.00%)
177 added 3 changesets with 3 changes to 3 files
177 added 3 changesets with 3 changes to 3 files
178 calling hook pretxnchangegroup.acl: hgext.acl.hook
178 calling hook pretxnchangegroup.acl: hgext.acl.hook
179 acl: changes have source "push" - skipping
179 acl: changes have source "push" - skipping
180 listing keys for "phases"
180 listing keys for "phases"
181 try to push obsolete markers to remote
181 try to push obsolete markers to remote
182 updating the branch cache
182 updating the branch cache
183 checking for updated bookmarks
183 checking for updated bookmarks
184 listing keys for "bookmarks"
184 listing keys for "bookmarks"
185 repository tip rolled back to revision 0 (undo push)
185 repository tip rolled back to revision 0 (undo push)
186 0:6675d58eff77
186 0:6675d58eff77
187
187
188
188
189 No [acl.allow]/[acl.deny]
189 No [acl.allow]/[acl.deny]
190
190
191 $ echo '[acl]' >> $config
191 $ echo '[acl]' >> $config
192 $ echo 'sources = push' >> $config
192 $ echo 'sources = push' >> $config
193 $ do_push fred
193 $ do_push fred
194 Pushing as user fred
194 Pushing as user fred
195 hgrc = """
195 hgrc = """
196 [hooks]
196 [hooks]
197 pretxnchangegroup.acl = python:hgext.acl.hook
197 pretxnchangegroup.acl = python:hgext.acl.hook
198 [acl]
198 [acl]
199 sources = push
199 sources = push
200 """
200 """
201 pushing to ../b
201 pushing to ../b
202 query 1; heads
202 query 1; heads
203 searching for changes
203 searching for changes
204 all remote heads known locally
204 all remote heads known locally
205 invalid branchheads cache (unserved): tip differs
205 invalid branchheads cache (served): tip differs
206 listing keys for "bookmarks"
206 listing keys for "bookmarks"
207 3 changesets found
207 3 changesets found
208 list of changesets:
208 list of changesets:
209 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
209 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
210 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
210 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
211 911600dab2ae7a9baff75958b84fe606851ce955
211 911600dab2ae7a9baff75958b84fe606851ce955
212 adding changesets
212 adding changesets
213 bundling: 1/3 changesets (33.33%)
213 bundling: 1/3 changesets (33.33%)
214 bundling: 2/3 changesets (66.67%)
214 bundling: 2/3 changesets (66.67%)
215 bundling: 3/3 changesets (100.00%)
215 bundling: 3/3 changesets (100.00%)
216 bundling: 1/3 manifests (33.33%)
216 bundling: 1/3 manifests (33.33%)
217 bundling: 2/3 manifests (66.67%)
217 bundling: 2/3 manifests (66.67%)
218 bundling: 3/3 manifests (100.00%)
218 bundling: 3/3 manifests (100.00%)
219 bundling: foo/Bar/file.txt 1/3 files (33.33%)
219 bundling: foo/Bar/file.txt 1/3 files (33.33%)
220 bundling: foo/file.txt 2/3 files (66.67%)
220 bundling: foo/file.txt 2/3 files (66.67%)
221 bundling: quux/file.py 3/3 files (100.00%)
221 bundling: quux/file.py 3/3 files (100.00%)
222 changesets: 1 chunks
222 changesets: 1 chunks
223 add changeset ef1ea85a6374
223 add changeset ef1ea85a6374
224 changesets: 2 chunks
224 changesets: 2 chunks
225 add changeset f9cafe1212c8
225 add changeset f9cafe1212c8
226 changesets: 3 chunks
226 changesets: 3 chunks
227 add changeset 911600dab2ae
227 add changeset 911600dab2ae
228 adding manifests
228 adding manifests
229 manifests: 1/3 chunks (33.33%)
229 manifests: 1/3 chunks (33.33%)
230 manifests: 2/3 chunks (66.67%)
230 manifests: 2/3 chunks (66.67%)
231 manifests: 3/3 chunks (100.00%)
231 manifests: 3/3 chunks (100.00%)
232 adding file changes
232 adding file changes
233 adding foo/Bar/file.txt revisions
233 adding foo/Bar/file.txt revisions
234 files: 1/3 chunks (33.33%)
234 files: 1/3 chunks (33.33%)
235 adding foo/file.txt revisions
235 adding foo/file.txt revisions
236 files: 2/3 chunks (66.67%)
236 files: 2/3 chunks (66.67%)
237 adding quux/file.py revisions
237 adding quux/file.py revisions
238 files: 3/3 chunks (100.00%)
238 files: 3/3 chunks (100.00%)
239 added 3 changesets with 3 changes to 3 files
239 added 3 changesets with 3 changes to 3 files
240 calling hook pretxnchangegroup.acl: hgext.acl.hook
240 calling hook pretxnchangegroup.acl: hgext.acl.hook
241 acl: checking access for user "fred"
241 acl: checking access for user "fred"
242 acl: acl.allow.branches not enabled
242 acl: acl.allow.branches not enabled
243 acl: acl.deny.branches not enabled
243 acl: acl.deny.branches not enabled
244 acl: acl.allow not enabled
244 acl: acl.allow not enabled
245 acl: acl.deny not enabled
245 acl: acl.deny not enabled
246 acl: branch access granted: "ef1ea85a6374" on branch "default"
246 acl: branch access granted: "ef1ea85a6374" on branch "default"
247 acl: path access granted: "ef1ea85a6374"
247 acl: path access granted: "ef1ea85a6374"
248 acl: branch access granted: "f9cafe1212c8" on branch "default"
248 acl: branch access granted: "f9cafe1212c8" on branch "default"
249 acl: path access granted: "f9cafe1212c8"
249 acl: path access granted: "f9cafe1212c8"
250 acl: branch access granted: "911600dab2ae" on branch "default"
250 acl: branch access granted: "911600dab2ae" on branch "default"
251 acl: path access granted: "911600dab2ae"
251 acl: path access granted: "911600dab2ae"
252 listing keys for "phases"
252 listing keys for "phases"
253 try to push obsolete markers to remote
253 try to push obsolete markers to remote
254 updating the branch cache
254 updating the branch cache
255 checking for updated bookmarks
255 checking for updated bookmarks
256 listing keys for "bookmarks"
256 listing keys for "bookmarks"
257 repository tip rolled back to revision 0 (undo push)
257 repository tip rolled back to revision 0 (undo push)
258 0:6675d58eff77
258 0:6675d58eff77
259
259
260
260
261 Empty [acl.allow]
261 Empty [acl.allow]
262
262
263 $ echo '[acl.allow]' >> $config
263 $ echo '[acl.allow]' >> $config
264 $ do_push fred
264 $ do_push fred
265 Pushing as user fred
265 Pushing as user fred
266 hgrc = """
266 hgrc = """
267 [hooks]
267 [hooks]
268 pretxnchangegroup.acl = python:hgext.acl.hook
268 pretxnchangegroup.acl = python:hgext.acl.hook
269 [acl]
269 [acl]
270 sources = push
270 sources = push
271 [acl.allow]
271 [acl.allow]
272 """
272 """
273 pushing to ../b
273 pushing to ../b
274 query 1; heads
274 query 1; heads
275 searching for changes
275 searching for changes
276 all remote heads known locally
276 all remote heads known locally
277 invalid branchheads cache (unserved): tip differs
277 invalid branchheads cache (served): tip differs
278 listing keys for "bookmarks"
278 listing keys for "bookmarks"
279 3 changesets found
279 3 changesets found
280 list of changesets:
280 list of changesets:
281 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
281 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
282 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
282 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
283 911600dab2ae7a9baff75958b84fe606851ce955
283 911600dab2ae7a9baff75958b84fe606851ce955
284 adding changesets
284 adding changesets
285 bundling: 1/3 changesets (33.33%)
285 bundling: 1/3 changesets (33.33%)
286 bundling: 2/3 changesets (66.67%)
286 bundling: 2/3 changesets (66.67%)
287 bundling: 3/3 changesets (100.00%)
287 bundling: 3/3 changesets (100.00%)
288 bundling: 1/3 manifests (33.33%)
288 bundling: 1/3 manifests (33.33%)
289 bundling: 2/3 manifests (66.67%)
289 bundling: 2/3 manifests (66.67%)
290 bundling: 3/3 manifests (100.00%)
290 bundling: 3/3 manifests (100.00%)
291 bundling: foo/Bar/file.txt 1/3 files (33.33%)
291 bundling: foo/Bar/file.txt 1/3 files (33.33%)
292 bundling: foo/file.txt 2/3 files (66.67%)
292 bundling: foo/file.txt 2/3 files (66.67%)
293 bundling: quux/file.py 3/3 files (100.00%)
293 bundling: quux/file.py 3/3 files (100.00%)
294 changesets: 1 chunks
294 changesets: 1 chunks
295 add changeset ef1ea85a6374
295 add changeset ef1ea85a6374
296 changesets: 2 chunks
296 changesets: 2 chunks
297 add changeset f9cafe1212c8
297 add changeset f9cafe1212c8
298 changesets: 3 chunks
298 changesets: 3 chunks
299 add changeset 911600dab2ae
299 add changeset 911600dab2ae
300 adding manifests
300 adding manifests
301 manifests: 1/3 chunks (33.33%)
301 manifests: 1/3 chunks (33.33%)
302 manifests: 2/3 chunks (66.67%)
302 manifests: 2/3 chunks (66.67%)
303 manifests: 3/3 chunks (100.00%)
303 manifests: 3/3 chunks (100.00%)
304 adding file changes
304 adding file changes
305 adding foo/Bar/file.txt revisions
305 adding foo/Bar/file.txt revisions
306 files: 1/3 chunks (33.33%)
306 files: 1/3 chunks (33.33%)
307 adding foo/file.txt revisions
307 adding foo/file.txt revisions
308 files: 2/3 chunks (66.67%)
308 files: 2/3 chunks (66.67%)
309 adding quux/file.py revisions
309 adding quux/file.py revisions
310 files: 3/3 chunks (100.00%)
310 files: 3/3 chunks (100.00%)
311 added 3 changesets with 3 changes to 3 files
311 added 3 changesets with 3 changes to 3 files
312 calling hook pretxnchangegroup.acl: hgext.acl.hook
312 calling hook pretxnchangegroup.acl: hgext.acl.hook
313 acl: checking access for user "fred"
313 acl: checking access for user "fred"
314 acl: acl.allow.branches not enabled
314 acl: acl.allow.branches not enabled
315 acl: acl.deny.branches not enabled
315 acl: acl.deny.branches not enabled
316 acl: acl.allow enabled, 0 entries for user fred
316 acl: acl.allow enabled, 0 entries for user fred
317 acl: acl.deny not enabled
317 acl: acl.deny not enabled
318 acl: branch access granted: "ef1ea85a6374" on branch "default"
318 acl: branch access granted: "ef1ea85a6374" on branch "default"
319 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
319 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
320 transaction abort!
320 transaction abort!
321 rollback completed
321 rollback completed
322 abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
322 abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
323 no rollback information available
323 no rollback information available
324 0:6675d58eff77
324 0:6675d58eff77
325
325
326
326
327 fred is allowed inside foo/
327 fred is allowed inside foo/
328
328
329 $ echo 'foo/** = fred' >> $config
329 $ echo 'foo/** = fred' >> $config
330 $ do_push fred
330 $ do_push fred
331 Pushing as user fred
331 Pushing as user fred
332 hgrc = """
332 hgrc = """
333 [hooks]
333 [hooks]
334 pretxnchangegroup.acl = python:hgext.acl.hook
334 pretxnchangegroup.acl = python:hgext.acl.hook
335 [acl]
335 [acl]
336 sources = push
336 sources = push
337 [acl.allow]
337 [acl.allow]
338 foo/** = fred
338 foo/** = fred
339 """
339 """
340 pushing to ../b
340 pushing to ../b
341 query 1; heads
341 query 1; heads
342 searching for changes
342 searching for changes
343 all remote heads known locally
343 all remote heads known locally
344 invalid branchheads cache (unserved): tip differs
344 invalid branchheads cache (served): tip differs
345 listing keys for "bookmarks"
345 listing keys for "bookmarks"
346 3 changesets found
346 3 changesets found
347 list of changesets:
347 list of changesets:
348 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
348 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
349 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
349 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
350 911600dab2ae7a9baff75958b84fe606851ce955
350 911600dab2ae7a9baff75958b84fe606851ce955
351 adding changesets
351 adding changesets
352 bundling: 1/3 changesets (33.33%)
352 bundling: 1/3 changesets (33.33%)
353 bundling: 2/3 changesets (66.67%)
353 bundling: 2/3 changesets (66.67%)
354 bundling: 3/3 changesets (100.00%)
354 bundling: 3/3 changesets (100.00%)
355 bundling: 1/3 manifests (33.33%)
355 bundling: 1/3 manifests (33.33%)
356 bundling: 2/3 manifests (66.67%)
356 bundling: 2/3 manifests (66.67%)
357 bundling: 3/3 manifests (100.00%)
357 bundling: 3/3 manifests (100.00%)
358 bundling: foo/Bar/file.txt 1/3 files (33.33%)
358 bundling: foo/Bar/file.txt 1/3 files (33.33%)
359 bundling: foo/file.txt 2/3 files (66.67%)
359 bundling: foo/file.txt 2/3 files (66.67%)
360 bundling: quux/file.py 3/3 files (100.00%)
360 bundling: quux/file.py 3/3 files (100.00%)
361 changesets: 1 chunks
361 changesets: 1 chunks
362 add changeset ef1ea85a6374
362 add changeset ef1ea85a6374
363 changesets: 2 chunks
363 changesets: 2 chunks
364 add changeset f9cafe1212c8
364 add changeset f9cafe1212c8
365 changesets: 3 chunks
365 changesets: 3 chunks
366 add changeset 911600dab2ae
366 add changeset 911600dab2ae
367 adding manifests
367 adding manifests
368 manifests: 1/3 chunks (33.33%)
368 manifests: 1/3 chunks (33.33%)
369 manifests: 2/3 chunks (66.67%)
369 manifests: 2/3 chunks (66.67%)
370 manifests: 3/3 chunks (100.00%)
370 manifests: 3/3 chunks (100.00%)
371 adding file changes
371 adding file changes
372 adding foo/Bar/file.txt revisions
372 adding foo/Bar/file.txt revisions
373 files: 1/3 chunks (33.33%)
373 files: 1/3 chunks (33.33%)
374 adding foo/file.txt revisions
374 adding foo/file.txt revisions
375 files: 2/3 chunks (66.67%)
375 files: 2/3 chunks (66.67%)
376 adding quux/file.py revisions
376 adding quux/file.py revisions
377 files: 3/3 chunks (100.00%)
377 files: 3/3 chunks (100.00%)
378 added 3 changesets with 3 changes to 3 files
378 added 3 changesets with 3 changes to 3 files
379 calling hook pretxnchangegroup.acl: hgext.acl.hook
379 calling hook pretxnchangegroup.acl: hgext.acl.hook
380 acl: checking access for user "fred"
380 acl: checking access for user "fred"
381 acl: acl.allow.branches not enabled
381 acl: acl.allow.branches not enabled
382 acl: acl.deny.branches not enabled
382 acl: acl.deny.branches not enabled
383 acl: acl.allow enabled, 1 entries for user fred
383 acl: acl.allow enabled, 1 entries for user fred
384 acl: acl.deny not enabled
384 acl: acl.deny not enabled
385 acl: branch access granted: "ef1ea85a6374" on branch "default"
385 acl: branch access granted: "ef1ea85a6374" on branch "default"
386 acl: path access granted: "ef1ea85a6374"
386 acl: path access granted: "ef1ea85a6374"
387 acl: branch access granted: "f9cafe1212c8" on branch "default"
387 acl: branch access granted: "f9cafe1212c8" on branch "default"
388 acl: path access granted: "f9cafe1212c8"
388 acl: path access granted: "f9cafe1212c8"
389 acl: branch access granted: "911600dab2ae" on branch "default"
389 acl: branch access granted: "911600dab2ae" on branch "default"
390 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
390 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
391 transaction abort!
391 transaction abort!
392 rollback completed
392 rollback completed
393 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
393 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
394 no rollback information available
394 no rollback information available
395 0:6675d58eff77
395 0:6675d58eff77
396
396
397
397
398 Empty [acl.deny]
398 Empty [acl.deny]
399
399
400 $ echo '[acl.deny]' >> $config
400 $ echo '[acl.deny]' >> $config
401 $ do_push barney
401 $ do_push barney
402 Pushing as user barney
402 Pushing as user barney
403 hgrc = """
403 hgrc = """
404 [hooks]
404 [hooks]
405 pretxnchangegroup.acl = python:hgext.acl.hook
405 pretxnchangegroup.acl = python:hgext.acl.hook
406 [acl]
406 [acl]
407 sources = push
407 sources = push
408 [acl.allow]
408 [acl.allow]
409 foo/** = fred
409 foo/** = fred
410 [acl.deny]
410 [acl.deny]
411 """
411 """
412 pushing to ../b
412 pushing to ../b
413 query 1; heads
413 query 1; heads
414 searching for changes
414 searching for changes
415 all remote heads known locally
415 all remote heads known locally
416 invalid branchheads cache (unserved): tip differs
416 invalid branchheads cache (served): tip differs
417 listing keys for "bookmarks"
417 listing keys for "bookmarks"
418 3 changesets found
418 3 changesets found
419 list of changesets:
419 list of changesets:
420 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
420 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
421 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
421 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
422 911600dab2ae7a9baff75958b84fe606851ce955
422 911600dab2ae7a9baff75958b84fe606851ce955
423 adding changesets
423 adding changesets
424 bundling: 1/3 changesets (33.33%)
424 bundling: 1/3 changesets (33.33%)
425 bundling: 2/3 changesets (66.67%)
425 bundling: 2/3 changesets (66.67%)
426 bundling: 3/3 changesets (100.00%)
426 bundling: 3/3 changesets (100.00%)
427 bundling: 1/3 manifests (33.33%)
427 bundling: 1/3 manifests (33.33%)
428 bundling: 2/3 manifests (66.67%)
428 bundling: 2/3 manifests (66.67%)
429 bundling: 3/3 manifests (100.00%)
429 bundling: 3/3 manifests (100.00%)
430 bundling: foo/Bar/file.txt 1/3 files (33.33%)
430 bundling: foo/Bar/file.txt 1/3 files (33.33%)
431 bundling: foo/file.txt 2/3 files (66.67%)
431 bundling: foo/file.txt 2/3 files (66.67%)
432 bundling: quux/file.py 3/3 files (100.00%)
432 bundling: quux/file.py 3/3 files (100.00%)
433 changesets: 1 chunks
433 changesets: 1 chunks
434 add changeset ef1ea85a6374
434 add changeset ef1ea85a6374
435 changesets: 2 chunks
435 changesets: 2 chunks
436 add changeset f9cafe1212c8
436 add changeset f9cafe1212c8
437 changesets: 3 chunks
437 changesets: 3 chunks
438 add changeset 911600dab2ae
438 add changeset 911600dab2ae
439 adding manifests
439 adding manifests
440 manifests: 1/3 chunks (33.33%)
440 manifests: 1/3 chunks (33.33%)
441 manifests: 2/3 chunks (66.67%)
441 manifests: 2/3 chunks (66.67%)
442 manifests: 3/3 chunks (100.00%)
442 manifests: 3/3 chunks (100.00%)
443 adding file changes
443 adding file changes
444 adding foo/Bar/file.txt revisions
444 adding foo/Bar/file.txt revisions
445 files: 1/3 chunks (33.33%)
445 files: 1/3 chunks (33.33%)
446 adding foo/file.txt revisions
446 adding foo/file.txt revisions
447 files: 2/3 chunks (66.67%)
447 files: 2/3 chunks (66.67%)
448 adding quux/file.py revisions
448 adding quux/file.py revisions
449 files: 3/3 chunks (100.00%)
449 files: 3/3 chunks (100.00%)
450 added 3 changesets with 3 changes to 3 files
450 added 3 changesets with 3 changes to 3 files
451 calling hook pretxnchangegroup.acl: hgext.acl.hook
451 calling hook pretxnchangegroup.acl: hgext.acl.hook
452 acl: checking access for user "barney"
452 acl: checking access for user "barney"
453 acl: acl.allow.branches not enabled
453 acl: acl.allow.branches not enabled
454 acl: acl.deny.branches not enabled
454 acl: acl.deny.branches not enabled
455 acl: acl.allow enabled, 0 entries for user barney
455 acl: acl.allow enabled, 0 entries for user barney
456 acl: acl.deny enabled, 0 entries for user barney
456 acl: acl.deny enabled, 0 entries for user barney
457 acl: branch access granted: "ef1ea85a6374" on branch "default"
457 acl: branch access granted: "ef1ea85a6374" on branch "default"
458 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
458 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
459 transaction abort!
459 transaction abort!
460 rollback completed
460 rollback completed
461 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
461 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
462 no rollback information available
462 no rollback information available
463 0:6675d58eff77
463 0:6675d58eff77
464
464
465
465
466 fred is allowed inside foo/, but not foo/bar/ (case matters)
466 fred is allowed inside foo/, but not foo/bar/ (case matters)
467
467
468 $ echo 'foo/bar/** = fred' >> $config
468 $ echo 'foo/bar/** = fred' >> $config
469 $ do_push fred
469 $ do_push fred
470 Pushing as user fred
470 Pushing as user fred
471 hgrc = """
471 hgrc = """
472 [hooks]
472 [hooks]
473 pretxnchangegroup.acl = python:hgext.acl.hook
473 pretxnchangegroup.acl = python:hgext.acl.hook
474 [acl]
474 [acl]
475 sources = push
475 sources = push
476 [acl.allow]
476 [acl.allow]
477 foo/** = fred
477 foo/** = fred
478 [acl.deny]
478 [acl.deny]
479 foo/bar/** = fred
479 foo/bar/** = fred
480 """
480 """
481 pushing to ../b
481 pushing to ../b
482 query 1; heads
482 query 1; heads
483 searching for changes
483 searching for changes
484 all remote heads known locally
484 all remote heads known locally
485 invalid branchheads cache (unserved): tip differs
485 invalid branchheads cache (served): tip differs
486 listing keys for "bookmarks"
486 listing keys for "bookmarks"
487 3 changesets found
487 3 changesets found
488 list of changesets:
488 list of changesets:
489 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
489 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
490 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
490 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
491 911600dab2ae7a9baff75958b84fe606851ce955
491 911600dab2ae7a9baff75958b84fe606851ce955
492 adding changesets
492 adding changesets
493 bundling: 1/3 changesets (33.33%)
493 bundling: 1/3 changesets (33.33%)
494 bundling: 2/3 changesets (66.67%)
494 bundling: 2/3 changesets (66.67%)
495 bundling: 3/3 changesets (100.00%)
495 bundling: 3/3 changesets (100.00%)
496 bundling: 1/3 manifests (33.33%)
496 bundling: 1/3 manifests (33.33%)
497 bundling: 2/3 manifests (66.67%)
497 bundling: 2/3 manifests (66.67%)
498 bundling: 3/3 manifests (100.00%)
498 bundling: 3/3 manifests (100.00%)
499 bundling: foo/Bar/file.txt 1/3 files (33.33%)
499 bundling: foo/Bar/file.txt 1/3 files (33.33%)
500 bundling: foo/file.txt 2/3 files (66.67%)
500 bundling: foo/file.txt 2/3 files (66.67%)
501 bundling: quux/file.py 3/3 files (100.00%)
501 bundling: quux/file.py 3/3 files (100.00%)
502 changesets: 1 chunks
502 changesets: 1 chunks
503 add changeset ef1ea85a6374
503 add changeset ef1ea85a6374
504 changesets: 2 chunks
504 changesets: 2 chunks
505 add changeset f9cafe1212c8
505 add changeset f9cafe1212c8
506 changesets: 3 chunks
506 changesets: 3 chunks
507 add changeset 911600dab2ae
507 add changeset 911600dab2ae
508 adding manifests
508 adding manifests
509 manifests: 1/3 chunks (33.33%)
509 manifests: 1/3 chunks (33.33%)
510 manifests: 2/3 chunks (66.67%)
510 manifests: 2/3 chunks (66.67%)
511 manifests: 3/3 chunks (100.00%)
511 manifests: 3/3 chunks (100.00%)
512 adding file changes
512 adding file changes
513 adding foo/Bar/file.txt revisions
513 adding foo/Bar/file.txt revisions
514 files: 1/3 chunks (33.33%)
514 files: 1/3 chunks (33.33%)
515 adding foo/file.txt revisions
515 adding foo/file.txt revisions
516 files: 2/3 chunks (66.67%)
516 files: 2/3 chunks (66.67%)
517 adding quux/file.py revisions
517 adding quux/file.py revisions
518 files: 3/3 chunks (100.00%)
518 files: 3/3 chunks (100.00%)
519 added 3 changesets with 3 changes to 3 files
519 added 3 changesets with 3 changes to 3 files
520 calling hook pretxnchangegroup.acl: hgext.acl.hook
520 calling hook pretxnchangegroup.acl: hgext.acl.hook
521 acl: checking access for user "fred"
521 acl: checking access for user "fred"
522 acl: acl.allow.branches not enabled
522 acl: acl.allow.branches not enabled
523 acl: acl.deny.branches not enabled
523 acl: acl.deny.branches not enabled
524 acl: acl.allow enabled, 1 entries for user fred
524 acl: acl.allow enabled, 1 entries for user fred
525 acl: acl.deny enabled, 1 entries for user fred
525 acl: acl.deny enabled, 1 entries for user fred
526 acl: branch access granted: "ef1ea85a6374" on branch "default"
526 acl: branch access granted: "ef1ea85a6374" on branch "default"
527 acl: path access granted: "ef1ea85a6374"
527 acl: path access granted: "ef1ea85a6374"
528 acl: branch access granted: "f9cafe1212c8" on branch "default"
528 acl: branch access granted: "f9cafe1212c8" on branch "default"
529 acl: path access granted: "f9cafe1212c8"
529 acl: path access granted: "f9cafe1212c8"
530 acl: branch access granted: "911600dab2ae" on branch "default"
530 acl: branch access granted: "911600dab2ae" on branch "default"
531 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
531 error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
532 transaction abort!
532 transaction abort!
533 rollback completed
533 rollback completed
534 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
534 abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae")
535 no rollback information available
535 no rollback information available
536 0:6675d58eff77
536 0:6675d58eff77
537
537
538
538
539 fred is allowed inside foo/, but not foo/Bar/
539 fred is allowed inside foo/, but not foo/Bar/
540
540
541 $ echo 'foo/Bar/** = fred' >> $config
541 $ echo 'foo/Bar/** = fred' >> $config
542 $ do_push fred
542 $ do_push fred
543 Pushing as user fred
543 Pushing as user fred
544 hgrc = """
544 hgrc = """
545 [hooks]
545 [hooks]
546 pretxnchangegroup.acl = python:hgext.acl.hook
546 pretxnchangegroup.acl = python:hgext.acl.hook
547 [acl]
547 [acl]
548 sources = push
548 sources = push
549 [acl.allow]
549 [acl.allow]
550 foo/** = fred
550 foo/** = fred
551 [acl.deny]
551 [acl.deny]
552 foo/bar/** = fred
552 foo/bar/** = fred
553 foo/Bar/** = fred
553 foo/Bar/** = fred
554 """
554 """
555 pushing to ../b
555 pushing to ../b
556 query 1; heads
556 query 1; heads
557 searching for changes
557 searching for changes
558 all remote heads known locally
558 all remote heads known locally
559 invalid branchheads cache (unserved): tip differs
559 invalid branchheads cache (served): tip differs
560 listing keys for "bookmarks"
560 listing keys for "bookmarks"
561 3 changesets found
561 3 changesets found
562 list of changesets:
562 list of changesets:
563 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
563 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
564 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
564 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
565 911600dab2ae7a9baff75958b84fe606851ce955
565 911600dab2ae7a9baff75958b84fe606851ce955
566 adding changesets
566 adding changesets
567 bundling: 1/3 changesets (33.33%)
567 bundling: 1/3 changesets (33.33%)
568 bundling: 2/3 changesets (66.67%)
568 bundling: 2/3 changesets (66.67%)
569 bundling: 3/3 changesets (100.00%)
569 bundling: 3/3 changesets (100.00%)
570 bundling: 1/3 manifests (33.33%)
570 bundling: 1/3 manifests (33.33%)
571 bundling: 2/3 manifests (66.67%)
571 bundling: 2/3 manifests (66.67%)
572 bundling: 3/3 manifests (100.00%)
572 bundling: 3/3 manifests (100.00%)
573 bundling: foo/Bar/file.txt 1/3 files (33.33%)
573 bundling: foo/Bar/file.txt 1/3 files (33.33%)
574 bundling: foo/file.txt 2/3 files (66.67%)
574 bundling: foo/file.txt 2/3 files (66.67%)
575 bundling: quux/file.py 3/3 files (100.00%)
575 bundling: quux/file.py 3/3 files (100.00%)
576 changesets: 1 chunks
576 changesets: 1 chunks
577 add changeset ef1ea85a6374
577 add changeset ef1ea85a6374
578 changesets: 2 chunks
578 changesets: 2 chunks
579 add changeset f9cafe1212c8
579 add changeset f9cafe1212c8
580 changesets: 3 chunks
580 changesets: 3 chunks
581 add changeset 911600dab2ae
581 add changeset 911600dab2ae
582 adding manifests
582 adding manifests
583 manifests: 1/3 chunks (33.33%)
583 manifests: 1/3 chunks (33.33%)
584 manifests: 2/3 chunks (66.67%)
584 manifests: 2/3 chunks (66.67%)
585 manifests: 3/3 chunks (100.00%)
585 manifests: 3/3 chunks (100.00%)
586 adding file changes
586 adding file changes
587 adding foo/Bar/file.txt revisions
587 adding foo/Bar/file.txt revisions
588 files: 1/3 chunks (33.33%)
588 files: 1/3 chunks (33.33%)
589 adding foo/file.txt revisions
589 adding foo/file.txt revisions
590 files: 2/3 chunks (66.67%)
590 files: 2/3 chunks (66.67%)
591 adding quux/file.py revisions
591 adding quux/file.py revisions
592 files: 3/3 chunks (100.00%)
592 files: 3/3 chunks (100.00%)
593 added 3 changesets with 3 changes to 3 files
593 added 3 changesets with 3 changes to 3 files
594 calling hook pretxnchangegroup.acl: hgext.acl.hook
594 calling hook pretxnchangegroup.acl: hgext.acl.hook
595 acl: checking access for user "fred"
595 acl: checking access for user "fred"
596 acl: acl.allow.branches not enabled
596 acl: acl.allow.branches not enabled
597 acl: acl.deny.branches not enabled
597 acl: acl.deny.branches not enabled
598 acl: acl.allow enabled, 1 entries for user fred
598 acl: acl.allow enabled, 1 entries for user fred
599 acl: acl.deny enabled, 2 entries for user fred
599 acl: acl.deny enabled, 2 entries for user fred
600 acl: branch access granted: "ef1ea85a6374" on branch "default"
600 acl: branch access granted: "ef1ea85a6374" on branch "default"
601 acl: path access granted: "ef1ea85a6374"
601 acl: path access granted: "ef1ea85a6374"
602 acl: branch access granted: "f9cafe1212c8" on branch "default"
602 acl: branch access granted: "f9cafe1212c8" on branch "default"
603 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
603 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
604 transaction abort!
604 transaction abort!
605 rollback completed
605 rollback completed
606 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
606 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
607 no rollback information available
607 no rollback information available
608 0:6675d58eff77
608 0:6675d58eff77
609
609
610
610
611 $ echo 'barney is not mentioned => not allowed anywhere'
611 $ echo 'barney is not mentioned => not allowed anywhere'
612 barney is not mentioned => not allowed anywhere
612 barney is not mentioned => not allowed anywhere
613 $ do_push barney
613 $ do_push barney
614 Pushing as user barney
614 Pushing as user barney
615 hgrc = """
615 hgrc = """
616 [hooks]
616 [hooks]
617 pretxnchangegroup.acl = python:hgext.acl.hook
617 pretxnchangegroup.acl = python:hgext.acl.hook
618 [acl]
618 [acl]
619 sources = push
619 sources = push
620 [acl.allow]
620 [acl.allow]
621 foo/** = fred
621 foo/** = fred
622 [acl.deny]
622 [acl.deny]
623 foo/bar/** = fred
623 foo/bar/** = fred
624 foo/Bar/** = fred
624 foo/Bar/** = fred
625 """
625 """
626 pushing to ../b
626 pushing to ../b
627 query 1; heads
627 query 1; heads
628 searching for changes
628 searching for changes
629 all remote heads known locally
629 all remote heads known locally
630 invalid branchheads cache (unserved): tip differs
630 invalid branchheads cache (served): tip differs
631 listing keys for "bookmarks"
631 listing keys for "bookmarks"
632 3 changesets found
632 3 changesets found
633 list of changesets:
633 list of changesets:
634 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
634 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
635 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
635 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
636 911600dab2ae7a9baff75958b84fe606851ce955
636 911600dab2ae7a9baff75958b84fe606851ce955
637 adding changesets
637 adding changesets
638 bundling: 1/3 changesets (33.33%)
638 bundling: 1/3 changesets (33.33%)
639 bundling: 2/3 changesets (66.67%)
639 bundling: 2/3 changesets (66.67%)
640 bundling: 3/3 changesets (100.00%)
640 bundling: 3/3 changesets (100.00%)
641 bundling: 1/3 manifests (33.33%)
641 bundling: 1/3 manifests (33.33%)
642 bundling: 2/3 manifests (66.67%)
642 bundling: 2/3 manifests (66.67%)
643 bundling: 3/3 manifests (100.00%)
643 bundling: 3/3 manifests (100.00%)
644 bundling: foo/Bar/file.txt 1/3 files (33.33%)
644 bundling: foo/Bar/file.txt 1/3 files (33.33%)
645 bundling: foo/file.txt 2/3 files (66.67%)
645 bundling: foo/file.txt 2/3 files (66.67%)
646 bundling: quux/file.py 3/3 files (100.00%)
646 bundling: quux/file.py 3/3 files (100.00%)
647 changesets: 1 chunks
647 changesets: 1 chunks
648 add changeset ef1ea85a6374
648 add changeset ef1ea85a6374
649 changesets: 2 chunks
649 changesets: 2 chunks
650 add changeset f9cafe1212c8
650 add changeset f9cafe1212c8
651 changesets: 3 chunks
651 changesets: 3 chunks
652 add changeset 911600dab2ae
652 add changeset 911600dab2ae
653 adding manifests
653 adding manifests
654 manifests: 1/3 chunks (33.33%)
654 manifests: 1/3 chunks (33.33%)
655 manifests: 2/3 chunks (66.67%)
655 manifests: 2/3 chunks (66.67%)
656 manifests: 3/3 chunks (100.00%)
656 manifests: 3/3 chunks (100.00%)
657 adding file changes
657 adding file changes
658 adding foo/Bar/file.txt revisions
658 adding foo/Bar/file.txt revisions
659 files: 1/3 chunks (33.33%)
659 files: 1/3 chunks (33.33%)
660 adding foo/file.txt revisions
660 adding foo/file.txt revisions
661 files: 2/3 chunks (66.67%)
661 files: 2/3 chunks (66.67%)
662 adding quux/file.py revisions
662 adding quux/file.py revisions
663 files: 3/3 chunks (100.00%)
663 files: 3/3 chunks (100.00%)
664 added 3 changesets with 3 changes to 3 files
664 added 3 changesets with 3 changes to 3 files
665 calling hook pretxnchangegroup.acl: hgext.acl.hook
665 calling hook pretxnchangegroup.acl: hgext.acl.hook
666 acl: checking access for user "barney"
666 acl: checking access for user "barney"
667 acl: acl.allow.branches not enabled
667 acl: acl.allow.branches not enabled
668 acl: acl.deny.branches not enabled
668 acl: acl.deny.branches not enabled
669 acl: acl.allow enabled, 0 entries for user barney
669 acl: acl.allow enabled, 0 entries for user barney
670 acl: acl.deny enabled, 0 entries for user barney
670 acl: acl.deny enabled, 0 entries for user barney
671 acl: branch access granted: "ef1ea85a6374" on branch "default"
671 acl: branch access granted: "ef1ea85a6374" on branch "default"
672 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
672 error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
673 transaction abort!
673 transaction abort!
674 rollback completed
674 rollback completed
675 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
675 abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374")
676 no rollback information available
676 no rollback information available
677 0:6675d58eff77
677 0:6675d58eff77
678
678
679
679
680 barney is allowed everywhere
680 barney is allowed everywhere
681
681
682 $ echo '[acl.allow]' >> $config
682 $ echo '[acl.allow]' >> $config
683 $ echo '** = barney' >> $config
683 $ echo '** = barney' >> $config
684 $ do_push barney
684 $ do_push barney
685 Pushing as user barney
685 Pushing as user barney
686 hgrc = """
686 hgrc = """
687 [hooks]
687 [hooks]
688 pretxnchangegroup.acl = python:hgext.acl.hook
688 pretxnchangegroup.acl = python:hgext.acl.hook
689 [acl]
689 [acl]
690 sources = push
690 sources = push
691 [acl.allow]
691 [acl.allow]
692 foo/** = fred
692 foo/** = fred
693 [acl.deny]
693 [acl.deny]
694 foo/bar/** = fred
694 foo/bar/** = fred
695 foo/Bar/** = fred
695 foo/Bar/** = fred
696 [acl.allow]
696 [acl.allow]
697 ** = barney
697 ** = barney
698 """
698 """
699 pushing to ../b
699 pushing to ../b
700 query 1; heads
700 query 1; heads
701 searching for changes
701 searching for changes
702 all remote heads known locally
702 all remote heads known locally
703 invalid branchheads cache (unserved): tip differs
703 invalid branchheads cache (served): tip differs
704 listing keys for "bookmarks"
704 listing keys for "bookmarks"
705 3 changesets found
705 3 changesets found
706 list of changesets:
706 list of changesets:
707 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
707 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
708 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
708 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
709 911600dab2ae7a9baff75958b84fe606851ce955
709 911600dab2ae7a9baff75958b84fe606851ce955
710 adding changesets
710 adding changesets
711 bundling: 1/3 changesets (33.33%)
711 bundling: 1/3 changesets (33.33%)
712 bundling: 2/3 changesets (66.67%)
712 bundling: 2/3 changesets (66.67%)
713 bundling: 3/3 changesets (100.00%)
713 bundling: 3/3 changesets (100.00%)
714 bundling: 1/3 manifests (33.33%)
714 bundling: 1/3 manifests (33.33%)
715 bundling: 2/3 manifests (66.67%)
715 bundling: 2/3 manifests (66.67%)
716 bundling: 3/3 manifests (100.00%)
716 bundling: 3/3 manifests (100.00%)
717 bundling: foo/Bar/file.txt 1/3 files (33.33%)
717 bundling: foo/Bar/file.txt 1/3 files (33.33%)
718 bundling: foo/file.txt 2/3 files (66.67%)
718 bundling: foo/file.txt 2/3 files (66.67%)
719 bundling: quux/file.py 3/3 files (100.00%)
719 bundling: quux/file.py 3/3 files (100.00%)
720 changesets: 1 chunks
720 changesets: 1 chunks
721 add changeset ef1ea85a6374
721 add changeset ef1ea85a6374
722 changesets: 2 chunks
722 changesets: 2 chunks
723 add changeset f9cafe1212c8
723 add changeset f9cafe1212c8
724 changesets: 3 chunks
724 changesets: 3 chunks
725 add changeset 911600dab2ae
725 add changeset 911600dab2ae
726 adding manifests
726 adding manifests
727 manifests: 1/3 chunks (33.33%)
727 manifests: 1/3 chunks (33.33%)
728 manifests: 2/3 chunks (66.67%)
728 manifests: 2/3 chunks (66.67%)
729 manifests: 3/3 chunks (100.00%)
729 manifests: 3/3 chunks (100.00%)
730 adding file changes
730 adding file changes
731 adding foo/Bar/file.txt revisions
731 adding foo/Bar/file.txt revisions
732 files: 1/3 chunks (33.33%)
732 files: 1/3 chunks (33.33%)
733 adding foo/file.txt revisions
733 adding foo/file.txt revisions
734 files: 2/3 chunks (66.67%)
734 files: 2/3 chunks (66.67%)
735 adding quux/file.py revisions
735 adding quux/file.py revisions
736 files: 3/3 chunks (100.00%)
736 files: 3/3 chunks (100.00%)
737 added 3 changesets with 3 changes to 3 files
737 added 3 changesets with 3 changes to 3 files
738 calling hook pretxnchangegroup.acl: hgext.acl.hook
738 calling hook pretxnchangegroup.acl: hgext.acl.hook
739 acl: checking access for user "barney"
739 acl: checking access for user "barney"
740 acl: acl.allow.branches not enabled
740 acl: acl.allow.branches not enabled
741 acl: acl.deny.branches not enabled
741 acl: acl.deny.branches not enabled
742 acl: acl.allow enabled, 1 entries for user barney
742 acl: acl.allow enabled, 1 entries for user barney
743 acl: acl.deny enabled, 0 entries for user barney
743 acl: acl.deny enabled, 0 entries for user barney
744 acl: branch access granted: "ef1ea85a6374" on branch "default"
744 acl: branch access granted: "ef1ea85a6374" on branch "default"
745 acl: path access granted: "ef1ea85a6374"
745 acl: path access granted: "ef1ea85a6374"
746 acl: branch access granted: "f9cafe1212c8" on branch "default"
746 acl: branch access granted: "f9cafe1212c8" on branch "default"
747 acl: path access granted: "f9cafe1212c8"
747 acl: path access granted: "f9cafe1212c8"
748 acl: branch access granted: "911600dab2ae" on branch "default"
748 acl: branch access granted: "911600dab2ae" on branch "default"
749 acl: path access granted: "911600dab2ae"
749 acl: path access granted: "911600dab2ae"
750 listing keys for "phases"
750 listing keys for "phases"
751 try to push obsolete markers to remote
751 try to push obsolete markers to remote
752 updating the branch cache
752 updating the branch cache
753 checking for updated bookmarks
753 checking for updated bookmarks
754 listing keys for "bookmarks"
754 listing keys for "bookmarks"
755 repository tip rolled back to revision 0 (undo push)
755 repository tip rolled back to revision 0 (undo push)
756 0:6675d58eff77
756 0:6675d58eff77
757
757
758
758
759 wilma can change files with a .txt extension
759 wilma can change files with a .txt extension
760
760
761 $ echo '**/*.txt = wilma' >> $config
761 $ echo '**/*.txt = wilma' >> $config
762 $ do_push wilma
762 $ do_push wilma
763 Pushing as user wilma
763 Pushing as user wilma
764 hgrc = """
764 hgrc = """
765 [hooks]
765 [hooks]
766 pretxnchangegroup.acl = python:hgext.acl.hook
766 pretxnchangegroup.acl = python:hgext.acl.hook
767 [acl]
767 [acl]
768 sources = push
768 sources = push
769 [acl.allow]
769 [acl.allow]
770 foo/** = fred
770 foo/** = fred
771 [acl.deny]
771 [acl.deny]
772 foo/bar/** = fred
772 foo/bar/** = fred
773 foo/Bar/** = fred
773 foo/Bar/** = fred
774 [acl.allow]
774 [acl.allow]
775 ** = barney
775 ** = barney
776 **/*.txt = wilma
776 **/*.txt = wilma
777 """
777 """
778 pushing to ../b
778 pushing to ../b
779 query 1; heads
779 query 1; heads
780 searching for changes
780 searching for changes
781 all remote heads known locally
781 all remote heads known locally
782 invalid branchheads cache (unserved): tip differs
782 invalid branchheads cache (served): tip differs
783 listing keys for "bookmarks"
783 listing keys for "bookmarks"
784 3 changesets found
784 3 changesets found
785 list of changesets:
785 list of changesets:
786 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
786 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
787 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
787 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
788 911600dab2ae7a9baff75958b84fe606851ce955
788 911600dab2ae7a9baff75958b84fe606851ce955
789 adding changesets
789 adding changesets
790 bundling: 1/3 changesets (33.33%)
790 bundling: 1/3 changesets (33.33%)
791 bundling: 2/3 changesets (66.67%)
791 bundling: 2/3 changesets (66.67%)
792 bundling: 3/3 changesets (100.00%)
792 bundling: 3/3 changesets (100.00%)
793 bundling: 1/3 manifests (33.33%)
793 bundling: 1/3 manifests (33.33%)
794 bundling: 2/3 manifests (66.67%)
794 bundling: 2/3 manifests (66.67%)
795 bundling: 3/3 manifests (100.00%)
795 bundling: 3/3 manifests (100.00%)
796 bundling: foo/Bar/file.txt 1/3 files (33.33%)
796 bundling: foo/Bar/file.txt 1/3 files (33.33%)
797 bundling: foo/file.txt 2/3 files (66.67%)
797 bundling: foo/file.txt 2/3 files (66.67%)
798 bundling: quux/file.py 3/3 files (100.00%)
798 bundling: quux/file.py 3/3 files (100.00%)
799 changesets: 1 chunks
799 changesets: 1 chunks
800 add changeset ef1ea85a6374
800 add changeset ef1ea85a6374
801 changesets: 2 chunks
801 changesets: 2 chunks
802 add changeset f9cafe1212c8
802 add changeset f9cafe1212c8
803 changesets: 3 chunks
803 changesets: 3 chunks
804 add changeset 911600dab2ae
804 add changeset 911600dab2ae
805 adding manifests
805 adding manifests
806 manifests: 1/3 chunks (33.33%)
806 manifests: 1/3 chunks (33.33%)
807 manifests: 2/3 chunks (66.67%)
807 manifests: 2/3 chunks (66.67%)
808 manifests: 3/3 chunks (100.00%)
808 manifests: 3/3 chunks (100.00%)
809 adding file changes
809 adding file changes
810 adding foo/Bar/file.txt revisions
810 adding foo/Bar/file.txt revisions
811 files: 1/3 chunks (33.33%)
811 files: 1/3 chunks (33.33%)
812 adding foo/file.txt revisions
812 adding foo/file.txt revisions
813 files: 2/3 chunks (66.67%)
813 files: 2/3 chunks (66.67%)
814 adding quux/file.py revisions
814 adding quux/file.py revisions
815 files: 3/3 chunks (100.00%)
815 files: 3/3 chunks (100.00%)
816 added 3 changesets with 3 changes to 3 files
816 added 3 changesets with 3 changes to 3 files
817 calling hook pretxnchangegroup.acl: hgext.acl.hook
817 calling hook pretxnchangegroup.acl: hgext.acl.hook
818 acl: checking access for user "wilma"
818 acl: checking access for user "wilma"
819 acl: acl.allow.branches not enabled
819 acl: acl.allow.branches not enabled
820 acl: acl.deny.branches not enabled
820 acl: acl.deny.branches not enabled
821 acl: acl.allow enabled, 1 entries for user wilma
821 acl: acl.allow enabled, 1 entries for user wilma
822 acl: acl.deny enabled, 0 entries for user wilma
822 acl: acl.deny enabled, 0 entries for user wilma
823 acl: branch access granted: "ef1ea85a6374" on branch "default"
823 acl: branch access granted: "ef1ea85a6374" on branch "default"
824 acl: path access granted: "ef1ea85a6374"
824 acl: path access granted: "ef1ea85a6374"
825 acl: branch access granted: "f9cafe1212c8" on branch "default"
825 acl: branch access granted: "f9cafe1212c8" on branch "default"
826 acl: path access granted: "f9cafe1212c8"
826 acl: path access granted: "f9cafe1212c8"
827 acl: branch access granted: "911600dab2ae" on branch "default"
827 acl: branch access granted: "911600dab2ae" on branch "default"
828 error: pretxnchangegroup.acl hook failed: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
828 error: pretxnchangegroup.acl hook failed: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
829 transaction abort!
829 transaction abort!
830 rollback completed
830 rollback completed
831 abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
831 abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae")
832 no rollback information available
832 no rollback information available
833 0:6675d58eff77
833 0:6675d58eff77
834
834
835
835
836 file specified by acl.config does not exist
836 file specified by acl.config does not exist
837
837
838 $ echo '[acl]' >> $config
838 $ echo '[acl]' >> $config
839 $ echo 'config = ../acl.config' >> $config
839 $ echo 'config = ../acl.config' >> $config
840 $ do_push barney
840 $ do_push barney
841 Pushing as user barney
841 Pushing as user barney
842 hgrc = """
842 hgrc = """
843 [hooks]
843 [hooks]
844 pretxnchangegroup.acl = python:hgext.acl.hook
844 pretxnchangegroup.acl = python:hgext.acl.hook
845 [acl]
845 [acl]
846 sources = push
846 sources = push
847 [acl.allow]
847 [acl.allow]
848 foo/** = fred
848 foo/** = fred
849 [acl.deny]
849 [acl.deny]
850 foo/bar/** = fred
850 foo/bar/** = fred
851 foo/Bar/** = fred
851 foo/Bar/** = fred
852 [acl.allow]
852 [acl.allow]
853 ** = barney
853 ** = barney
854 **/*.txt = wilma
854 **/*.txt = wilma
855 [acl]
855 [acl]
856 config = ../acl.config
856 config = ../acl.config
857 """
857 """
858 pushing to ../b
858 pushing to ../b
859 query 1; heads
859 query 1; heads
860 searching for changes
860 searching for changes
861 all remote heads known locally
861 all remote heads known locally
862 invalid branchheads cache (unserved): tip differs
862 invalid branchheads cache (served): tip differs
863 listing keys for "bookmarks"
863 listing keys for "bookmarks"
864 3 changesets found
864 3 changesets found
865 list of changesets:
865 list of changesets:
866 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
866 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
867 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
867 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
868 911600dab2ae7a9baff75958b84fe606851ce955
868 911600dab2ae7a9baff75958b84fe606851ce955
869 adding changesets
869 adding changesets
870 bundling: 1/3 changesets (33.33%)
870 bundling: 1/3 changesets (33.33%)
871 bundling: 2/3 changesets (66.67%)
871 bundling: 2/3 changesets (66.67%)
872 bundling: 3/3 changesets (100.00%)
872 bundling: 3/3 changesets (100.00%)
873 bundling: 1/3 manifests (33.33%)
873 bundling: 1/3 manifests (33.33%)
874 bundling: 2/3 manifests (66.67%)
874 bundling: 2/3 manifests (66.67%)
875 bundling: 3/3 manifests (100.00%)
875 bundling: 3/3 manifests (100.00%)
876 bundling: foo/Bar/file.txt 1/3 files (33.33%)
876 bundling: foo/Bar/file.txt 1/3 files (33.33%)
877 bundling: foo/file.txt 2/3 files (66.67%)
877 bundling: foo/file.txt 2/3 files (66.67%)
878 bundling: quux/file.py 3/3 files (100.00%)
878 bundling: quux/file.py 3/3 files (100.00%)
879 changesets: 1 chunks
879 changesets: 1 chunks
880 add changeset ef1ea85a6374
880 add changeset ef1ea85a6374
881 changesets: 2 chunks
881 changesets: 2 chunks
882 add changeset f9cafe1212c8
882 add changeset f9cafe1212c8
883 changesets: 3 chunks
883 changesets: 3 chunks
884 add changeset 911600dab2ae
884 add changeset 911600dab2ae
885 adding manifests
885 adding manifests
886 manifests: 1/3 chunks (33.33%)
886 manifests: 1/3 chunks (33.33%)
887 manifests: 2/3 chunks (66.67%)
887 manifests: 2/3 chunks (66.67%)
888 manifests: 3/3 chunks (100.00%)
888 manifests: 3/3 chunks (100.00%)
889 adding file changes
889 adding file changes
890 adding foo/Bar/file.txt revisions
890 adding foo/Bar/file.txt revisions
891 files: 1/3 chunks (33.33%)
891 files: 1/3 chunks (33.33%)
892 adding foo/file.txt revisions
892 adding foo/file.txt revisions
893 files: 2/3 chunks (66.67%)
893 files: 2/3 chunks (66.67%)
894 adding quux/file.py revisions
894 adding quux/file.py revisions
895 files: 3/3 chunks (100.00%)
895 files: 3/3 chunks (100.00%)
896 added 3 changesets with 3 changes to 3 files
896 added 3 changesets with 3 changes to 3 files
897 calling hook pretxnchangegroup.acl: hgext.acl.hook
897 calling hook pretxnchangegroup.acl: hgext.acl.hook
898 acl: checking access for user "barney"
898 acl: checking access for user "barney"
899 error: pretxnchangegroup.acl hook raised an exception: [Errno *] *: '../acl.config' (glob)
899 error: pretxnchangegroup.acl hook raised an exception: [Errno *] *: '../acl.config' (glob)
900 transaction abort!
900 transaction abort!
901 rollback completed
901 rollback completed
902 abort: *: ../acl.config (glob)
902 abort: *: ../acl.config (glob)
903 no rollback information available
903 no rollback information available
904 0:6675d58eff77
904 0:6675d58eff77
905
905
906
906
907 betty is allowed inside foo/ by a acl.config file
907 betty is allowed inside foo/ by a acl.config file
908
908
909 $ echo '[acl.allow]' >> acl.config
909 $ echo '[acl.allow]' >> acl.config
910 $ echo 'foo/** = betty' >> acl.config
910 $ echo 'foo/** = betty' >> acl.config
911 $ do_push betty
911 $ do_push betty
912 Pushing as user betty
912 Pushing as user betty
913 hgrc = """
913 hgrc = """
914 [hooks]
914 [hooks]
915 pretxnchangegroup.acl = python:hgext.acl.hook
915 pretxnchangegroup.acl = python:hgext.acl.hook
916 [acl]
916 [acl]
917 sources = push
917 sources = push
918 [acl.allow]
918 [acl.allow]
919 foo/** = fred
919 foo/** = fred
920 [acl.deny]
920 [acl.deny]
921 foo/bar/** = fred
921 foo/bar/** = fred
922 foo/Bar/** = fred
922 foo/Bar/** = fred
923 [acl.allow]
923 [acl.allow]
924 ** = barney
924 ** = barney
925 **/*.txt = wilma
925 **/*.txt = wilma
926 [acl]
926 [acl]
927 config = ../acl.config
927 config = ../acl.config
928 """
928 """
929 acl.config = """
929 acl.config = """
930 [acl.allow]
930 [acl.allow]
931 foo/** = betty
931 foo/** = betty
932 """
932 """
933 pushing to ../b
933 pushing to ../b
934 query 1; heads
934 query 1; heads
935 searching for changes
935 searching for changes
936 all remote heads known locally
936 all remote heads known locally
937 invalid branchheads cache (unserved): tip differs
937 invalid branchheads cache (served): tip differs
938 listing keys for "bookmarks"
938 listing keys for "bookmarks"
939 3 changesets found
939 3 changesets found
940 list of changesets:
940 list of changesets:
941 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
941 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
942 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
942 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
943 911600dab2ae7a9baff75958b84fe606851ce955
943 911600dab2ae7a9baff75958b84fe606851ce955
944 adding changesets
944 adding changesets
945 bundling: 1/3 changesets (33.33%)
945 bundling: 1/3 changesets (33.33%)
946 bundling: 2/3 changesets (66.67%)
946 bundling: 2/3 changesets (66.67%)
947 bundling: 3/3 changesets (100.00%)
947 bundling: 3/3 changesets (100.00%)
948 bundling: 1/3 manifests (33.33%)
948 bundling: 1/3 manifests (33.33%)
949 bundling: 2/3 manifests (66.67%)
949 bundling: 2/3 manifests (66.67%)
950 bundling: 3/3 manifests (100.00%)
950 bundling: 3/3 manifests (100.00%)
951 bundling: foo/Bar/file.txt 1/3 files (33.33%)
951 bundling: foo/Bar/file.txt 1/3 files (33.33%)
952 bundling: foo/file.txt 2/3 files (66.67%)
952 bundling: foo/file.txt 2/3 files (66.67%)
953 bundling: quux/file.py 3/3 files (100.00%)
953 bundling: quux/file.py 3/3 files (100.00%)
954 changesets: 1 chunks
954 changesets: 1 chunks
955 add changeset ef1ea85a6374
955 add changeset ef1ea85a6374
956 changesets: 2 chunks
956 changesets: 2 chunks
957 add changeset f9cafe1212c8
957 add changeset f9cafe1212c8
958 changesets: 3 chunks
958 changesets: 3 chunks
959 add changeset 911600dab2ae
959 add changeset 911600dab2ae
960 adding manifests
960 adding manifests
961 manifests: 1/3 chunks (33.33%)
961 manifests: 1/3 chunks (33.33%)
962 manifests: 2/3 chunks (66.67%)
962 manifests: 2/3 chunks (66.67%)
963 manifests: 3/3 chunks (100.00%)
963 manifests: 3/3 chunks (100.00%)
964 adding file changes
964 adding file changes
965 adding foo/Bar/file.txt revisions
965 adding foo/Bar/file.txt revisions
966 files: 1/3 chunks (33.33%)
966 files: 1/3 chunks (33.33%)
967 adding foo/file.txt revisions
967 adding foo/file.txt revisions
968 files: 2/3 chunks (66.67%)
968 files: 2/3 chunks (66.67%)
969 adding quux/file.py revisions
969 adding quux/file.py revisions
970 files: 3/3 chunks (100.00%)
970 files: 3/3 chunks (100.00%)
971 added 3 changesets with 3 changes to 3 files
971 added 3 changesets with 3 changes to 3 files
972 calling hook pretxnchangegroup.acl: hgext.acl.hook
972 calling hook pretxnchangegroup.acl: hgext.acl.hook
973 acl: checking access for user "betty"
973 acl: checking access for user "betty"
974 acl: acl.allow.branches not enabled
974 acl: acl.allow.branches not enabled
975 acl: acl.deny.branches not enabled
975 acl: acl.deny.branches not enabled
976 acl: acl.allow enabled, 1 entries for user betty
976 acl: acl.allow enabled, 1 entries for user betty
977 acl: acl.deny enabled, 0 entries for user betty
977 acl: acl.deny enabled, 0 entries for user betty
978 acl: branch access granted: "ef1ea85a6374" on branch "default"
978 acl: branch access granted: "ef1ea85a6374" on branch "default"
979 acl: path access granted: "ef1ea85a6374"
979 acl: path access granted: "ef1ea85a6374"
980 acl: branch access granted: "f9cafe1212c8" on branch "default"
980 acl: branch access granted: "f9cafe1212c8" on branch "default"
981 acl: path access granted: "f9cafe1212c8"
981 acl: path access granted: "f9cafe1212c8"
982 acl: branch access granted: "911600dab2ae" on branch "default"
982 acl: branch access granted: "911600dab2ae" on branch "default"
983 error: pretxnchangegroup.acl hook failed: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
983 error: pretxnchangegroup.acl hook failed: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
984 transaction abort!
984 transaction abort!
985 rollback completed
985 rollback completed
986 abort: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
986 abort: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae")
987 no rollback information available
987 no rollback information available
988 0:6675d58eff77
988 0:6675d58eff77
989
989
990
990
991 acl.config can set only [acl.allow]/[acl.deny]
991 acl.config can set only [acl.allow]/[acl.deny]
992
992
993 $ echo '[hooks]' >> acl.config
993 $ echo '[hooks]' >> acl.config
994 $ echo 'changegroup.acl = false' >> acl.config
994 $ echo 'changegroup.acl = false' >> acl.config
995 $ do_push barney
995 $ do_push barney
996 Pushing as user barney
996 Pushing as user barney
997 hgrc = """
997 hgrc = """
998 [hooks]
998 [hooks]
999 pretxnchangegroup.acl = python:hgext.acl.hook
999 pretxnchangegroup.acl = python:hgext.acl.hook
1000 [acl]
1000 [acl]
1001 sources = push
1001 sources = push
1002 [acl.allow]
1002 [acl.allow]
1003 foo/** = fred
1003 foo/** = fred
1004 [acl.deny]
1004 [acl.deny]
1005 foo/bar/** = fred
1005 foo/bar/** = fred
1006 foo/Bar/** = fred
1006 foo/Bar/** = fred
1007 [acl.allow]
1007 [acl.allow]
1008 ** = barney
1008 ** = barney
1009 **/*.txt = wilma
1009 **/*.txt = wilma
1010 [acl]
1010 [acl]
1011 config = ../acl.config
1011 config = ../acl.config
1012 """
1012 """
1013 acl.config = """
1013 acl.config = """
1014 [acl.allow]
1014 [acl.allow]
1015 foo/** = betty
1015 foo/** = betty
1016 [hooks]
1016 [hooks]
1017 changegroup.acl = false
1017 changegroup.acl = false
1018 """
1018 """
1019 pushing to ../b
1019 pushing to ../b
1020 query 1; heads
1020 query 1; heads
1021 searching for changes
1021 searching for changes
1022 all remote heads known locally
1022 all remote heads known locally
1023 invalid branchheads cache (unserved): tip differs
1023 invalid branchheads cache (served): tip differs
1024 listing keys for "bookmarks"
1024 listing keys for "bookmarks"
1025 3 changesets found
1025 3 changesets found
1026 list of changesets:
1026 list of changesets:
1027 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1027 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1028 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1028 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1029 911600dab2ae7a9baff75958b84fe606851ce955
1029 911600dab2ae7a9baff75958b84fe606851ce955
1030 adding changesets
1030 adding changesets
1031 bundling: 1/3 changesets (33.33%)
1031 bundling: 1/3 changesets (33.33%)
1032 bundling: 2/3 changesets (66.67%)
1032 bundling: 2/3 changesets (66.67%)
1033 bundling: 3/3 changesets (100.00%)
1033 bundling: 3/3 changesets (100.00%)
1034 bundling: 1/3 manifests (33.33%)
1034 bundling: 1/3 manifests (33.33%)
1035 bundling: 2/3 manifests (66.67%)
1035 bundling: 2/3 manifests (66.67%)
1036 bundling: 3/3 manifests (100.00%)
1036 bundling: 3/3 manifests (100.00%)
1037 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1037 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1038 bundling: foo/file.txt 2/3 files (66.67%)
1038 bundling: foo/file.txt 2/3 files (66.67%)
1039 bundling: quux/file.py 3/3 files (100.00%)
1039 bundling: quux/file.py 3/3 files (100.00%)
1040 changesets: 1 chunks
1040 changesets: 1 chunks
1041 add changeset ef1ea85a6374
1041 add changeset ef1ea85a6374
1042 changesets: 2 chunks
1042 changesets: 2 chunks
1043 add changeset f9cafe1212c8
1043 add changeset f9cafe1212c8
1044 changesets: 3 chunks
1044 changesets: 3 chunks
1045 add changeset 911600dab2ae
1045 add changeset 911600dab2ae
1046 adding manifests
1046 adding manifests
1047 manifests: 1/3 chunks (33.33%)
1047 manifests: 1/3 chunks (33.33%)
1048 manifests: 2/3 chunks (66.67%)
1048 manifests: 2/3 chunks (66.67%)
1049 manifests: 3/3 chunks (100.00%)
1049 manifests: 3/3 chunks (100.00%)
1050 adding file changes
1050 adding file changes
1051 adding foo/Bar/file.txt revisions
1051 adding foo/Bar/file.txt revisions
1052 files: 1/3 chunks (33.33%)
1052 files: 1/3 chunks (33.33%)
1053 adding foo/file.txt revisions
1053 adding foo/file.txt revisions
1054 files: 2/3 chunks (66.67%)
1054 files: 2/3 chunks (66.67%)
1055 adding quux/file.py revisions
1055 adding quux/file.py revisions
1056 files: 3/3 chunks (100.00%)
1056 files: 3/3 chunks (100.00%)
1057 added 3 changesets with 3 changes to 3 files
1057 added 3 changesets with 3 changes to 3 files
1058 calling hook pretxnchangegroup.acl: hgext.acl.hook
1058 calling hook pretxnchangegroup.acl: hgext.acl.hook
1059 acl: checking access for user "barney"
1059 acl: checking access for user "barney"
1060 acl: acl.allow.branches not enabled
1060 acl: acl.allow.branches not enabled
1061 acl: acl.deny.branches not enabled
1061 acl: acl.deny.branches not enabled
1062 acl: acl.allow enabled, 1 entries for user barney
1062 acl: acl.allow enabled, 1 entries for user barney
1063 acl: acl.deny enabled, 0 entries for user barney
1063 acl: acl.deny enabled, 0 entries for user barney
1064 acl: branch access granted: "ef1ea85a6374" on branch "default"
1064 acl: branch access granted: "ef1ea85a6374" on branch "default"
1065 acl: path access granted: "ef1ea85a6374"
1065 acl: path access granted: "ef1ea85a6374"
1066 acl: branch access granted: "f9cafe1212c8" on branch "default"
1066 acl: branch access granted: "f9cafe1212c8" on branch "default"
1067 acl: path access granted: "f9cafe1212c8"
1067 acl: path access granted: "f9cafe1212c8"
1068 acl: branch access granted: "911600dab2ae" on branch "default"
1068 acl: branch access granted: "911600dab2ae" on branch "default"
1069 acl: path access granted: "911600dab2ae"
1069 acl: path access granted: "911600dab2ae"
1070 listing keys for "phases"
1070 listing keys for "phases"
1071 try to push obsolete markers to remote
1071 try to push obsolete markers to remote
1072 updating the branch cache
1072 updating the branch cache
1073 checking for updated bookmarks
1073 checking for updated bookmarks
1074 listing keys for "bookmarks"
1074 listing keys for "bookmarks"
1075 repository tip rolled back to revision 0 (undo push)
1075 repository tip rolled back to revision 0 (undo push)
1076 0:6675d58eff77
1076 0:6675d58eff77
1077
1077
1078
1078
1079 asterisk
1079 asterisk
1080
1080
1081 $ init_config
1081 $ init_config
1082
1082
1083 asterisk test
1083 asterisk test
1084
1084
1085 $ echo '[acl.allow]' >> $config
1085 $ echo '[acl.allow]' >> $config
1086 $ echo "** = fred" >> $config
1086 $ echo "** = fred" >> $config
1087
1087
1088 fred is always allowed
1088 fred is always allowed
1089
1089
1090 $ do_push fred
1090 $ do_push fred
1091 Pushing as user fred
1091 Pushing as user fred
1092 hgrc = """
1092 hgrc = """
1093 [acl]
1093 [acl]
1094 sources = push
1094 sources = push
1095 [extensions]
1095 [extensions]
1096 [acl.allow]
1096 [acl.allow]
1097 ** = fred
1097 ** = fred
1098 """
1098 """
1099 pushing to ../b
1099 pushing to ../b
1100 query 1; heads
1100 query 1; heads
1101 searching for changes
1101 searching for changes
1102 all remote heads known locally
1102 all remote heads known locally
1103 invalid branchheads cache (unserved): tip differs
1103 invalid branchheads cache (served): tip differs
1104 listing keys for "bookmarks"
1104 listing keys for "bookmarks"
1105 3 changesets found
1105 3 changesets found
1106 list of changesets:
1106 list of changesets:
1107 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1107 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1108 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1108 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1109 911600dab2ae7a9baff75958b84fe606851ce955
1109 911600dab2ae7a9baff75958b84fe606851ce955
1110 adding changesets
1110 adding changesets
1111 bundling: 1/3 changesets (33.33%)
1111 bundling: 1/3 changesets (33.33%)
1112 bundling: 2/3 changesets (66.67%)
1112 bundling: 2/3 changesets (66.67%)
1113 bundling: 3/3 changesets (100.00%)
1113 bundling: 3/3 changesets (100.00%)
1114 bundling: 1/3 manifests (33.33%)
1114 bundling: 1/3 manifests (33.33%)
1115 bundling: 2/3 manifests (66.67%)
1115 bundling: 2/3 manifests (66.67%)
1116 bundling: 3/3 manifests (100.00%)
1116 bundling: 3/3 manifests (100.00%)
1117 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1117 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1118 bundling: foo/file.txt 2/3 files (66.67%)
1118 bundling: foo/file.txt 2/3 files (66.67%)
1119 bundling: quux/file.py 3/3 files (100.00%)
1119 bundling: quux/file.py 3/3 files (100.00%)
1120 changesets: 1 chunks
1120 changesets: 1 chunks
1121 add changeset ef1ea85a6374
1121 add changeset ef1ea85a6374
1122 changesets: 2 chunks
1122 changesets: 2 chunks
1123 add changeset f9cafe1212c8
1123 add changeset f9cafe1212c8
1124 changesets: 3 chunks
1124 changesets: 3 chunks
1125 add changeset 911600dab2ae
1125 add changeset 911600dab2ae
1126 adding manifests
1126 adding manifests
1127 manifests: 1/3 chunks (33.33%)
1127 manifests: 1/3 chunks (33.33%)
1128 manifests: 2/3 chunks (66.67%)
1128 manifests: 2/3 chunks (66.67%)
1129 manifests: 3/3 chunks (100.00%)
1129 manifests: 3/3 chunks (100.00%)
1130 adding file changes
1130 adding file changes
1131 adding foo/Bar/file.txt revisions
1131 adding foo/Bar/file.txt revisions
1132 files: 1/3 chunks (33.33%)
1132 files: 1/3 chunks (33.33%)
1133 adding foo/file.txt revisions
1133 adding foo/file.txt revisions
1134 files: 2/3 chunks (66.67%)
1134 files: 2/3 chunks (66.67%)
1135 adding quux/file.py revisions
1135 adding quux/file.py revisions
1136 files: 3/3 chunks (100.00%)
1136 files: 3/3 chunks (100.00%)
1137 added 3 changesets with 3 changes to 3 files
1137 added 3 changesets with 3 changes to 3 files
1138 calling hook pretxnchangegroup.acl: hgext.acl.hook
1138 calling hook pretxnchangegroup.acl: hgext.acl.hook
1139 acl: checking access for user "fred"
1139 acl: checking access for user "fred"
1140 acl: acl.allow.branches not enabled
1140 acl: acl.allow.branches not enabled
1141 acl: acl.deny.branches not enabled
1141 acl: acl.deny.branches not enabled
1142 acl: acl.allow enabled, 1 entries for user fred
1142 acl: acl.allow enabled, 1 entries for user fred
1143 acl: acl.deny not enabled
1143 acl: acl.deny not enabled
1144 acl: branch access granted: "ef1ea85a6374" on branch "default"
1144 acl: branch access granted: "ef1ea85a6374" on branch "default"
1145 acl: path access granted: "ef1ea85a6374"
1145 acl: path access granted: "ef1ea85a6374"
1146 acl: branch access granted: "f9cafe1212c8" on branch "default"
1146 acl: branch access granted: "f9cafe1212c8" on branch "default"
1147 acl: path access granted: "f9cafe1212c8"
1147 acl: path access granted: "f9cafe1212c8"
1148 acl: branch access granted: "911600dab2ae" on branch "default"
1148 acl: branch access granted: "911600dab2ae" on branch "default"
1149 acl: path access granted: "911600dab2ae"
1149 acl: path access granted: "911600dab2ae"
1150 listing keys for "phases"
1150 listing keys for "phases"
1151 try to push obsolete markers to remote
1151 try to push obsolete markers to remote
1152 updating the branch cache
1152 updating the branch cache
1153 checking for updated bookmarks
1153 checking for updated bookmarks
1154 listing keys for "bookmarks"
1154 listing keys for "bookmarks"
1155 repository tip rolled back to revision 0 (undo push)
1155 repository tip rolled back to revision 0 (undo push)
1156 0:6675d58eff77
1156 0:6675d58eff77
1157
1157
1158
1158
1159 $ echo '[acl.deny]' >> $config
1159 $ echo '[acl.deny]' >> $config
1160 $ echo "foo/Bar/** = *" >> $config
1160 $ echo "foo/Bar/** = *" >> $config
1161
1161
1162 no one is allowed inside foo/Bar/
1162 no one is allowed inside foo/Bar/
1163
1163
1164 $ do_push fred
1164 $ do_push fred
1165 Pushing as user fred
1165 Pushing as user fred
1166 hgrc = """
1166 hgrc = """
1167 [acl]
1167 [acl]
1168 sources = push
1168 sources = push
1169 [extensions]
1169 [extensions]
1170 [acl.allow]
1170 [acl.allow]
1171 ** = fred
1171 ** = fred
1172 [acl.deny]
1172 [acl.deny]
1173 foo/Bar/** = *
1173 foo/Bar/** = *
1174 """
1174 """
1175 pushing to ../b
1175 pushing to ../b
1176 query 1; heads
1176 query 1; heads
1177 searching for changes
1177 searching for changes
1178 all remote heads known locally
1178 all remote heads known locally
1179 invalid branchheads cache (unserved): tip differs
1179 invalid branchheads cache (served): tip differs
1180 listing keys for "bookmarks"
1180 listing keys for "bookmarks"
1181 3 changesets found
1181 3 changesets found
1182 list of changesets:
1182 list of changesets:
1183 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1183 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1184 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1184 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1185 911600dab2ae7a9baff75958b84fe606851ce955
1185 911600dab2ae7a9baff75958b84fe606851ce955
1186 adding changesets
1186 adding changesets
1187 bundling: 1/3 changesets (33.33%)
1187 bundling: 1/3 changesets (33.33%)
1188 bundling: 2/3 changesets (66.67%)
1188 bundling: 2/3 changesets (66.67%)
1189 bundling: 3/3 changesets (100.00%)
1189 bundling: 3/3 changesets (100.00%)
1190 bundling: 1/3 manifests (33.33%)
1190 bundling: 1/3 manifests (33.33%)
1191 bundling: 2/3 manifests (66.67%)
1191 bundling: 2/3 manifests (66.67%)
1192 bundling: 3/3 manifests (100.00%)
1192 bundling: 3/3 manifests (100.00%)
1193 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1193 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1194 bundling: foo/file.txt 2/3 files (66.67%)
1194 bundling: foo/file.txt 2/3 files (66.67%)
1195 bundling: quux/file.py 3/3 files (100.00%)
1195 bundling: quux/file.py 3/3 files (100.00%)
1196 changesets: 1 chunks
1196 changesets: 1 chunks
1197 add changeset ef1ea85a6374
1197 add changeset ef1ea85a6374
1198 changesets: 2 chunks
1198 changesets: 2 chunks
1199 add changeset f9cafe1212c8
1199 add changeset f9cafe1212c8
1200 changesets: 3 chunks
1200 changesets: 3 chunks
1201 add changeset 911600dab2ae
1201 add changeset 911600dab2ae
1202 adding manifests
1202 adding manifests
1203 manifests: 1/3 chunks (33.33%)
1203 manifests: 1/3 chunks (33.33%)
1204 manifests: 2/3 chunks (66.67%)
1204 manifests: 2/3 chunks (66.67%)
1205 manifests: 3/3 chunks (100.00%)
1205 manifests: 3/3 chunks (100.00%)
1206 adding file changes
1206 adding file changes
1207 adding foo/Bar/file.txt revisions
1207 adding foo/Bar/file.txt revisions
1208 files: 1/3 chunks (33.33%)
1208 files: 1/3 chunks (33.33%)
1209 adding foo/file.txt revisions
1209 adding foo/file.txt revisions
1210 files: 2/3 chunks (66.67%)
1210 files: 2/3 chunks (66.67%)
1211 adding quux/file.py revisions
1211 adding quux/file.py revisions
1212 files: 3/3 chunks (100.00%)
1212 files: 3/3 chunks (100.00%)
1213 added 3 changesets with 3 changes to 3 files
1213 added 3 changesets with 3 changes to 3 files
1214 calling hook pretxnchangegroup.acl: hgext.acl.hook
1214 calling hook pretxnchangegroup.acl: hgext.acl.hook
1215 acl: checking access for user "fred"
1215 acl: checking access for user "fred"
1216 acl: acl.allow.branches not enabled
1216 acl: acl.allow.branches not enabled
1217 acl: acl.deny.branches not enabled
1217 acl: acl.deny.branches not enabled
1218 acl: acl.allow enabled, 1 entries for user fred
1218 acl: acl.allow enabled, 1 entries for user fred
1219 acl: acl.deny enabled, 1 entries for user fred
1219 acl: acl.deny enabled, 1 entries for user fred
1220 acl: branch access granted: "ef1ea85a6374" on branch "default"
1220 acl: branch access granted: "ef1ea85a6374" on branch "default"
1221 acl: path access granted: "ef1ea85a6374"
1221 acl: path access granted: "ef1ea85a6374"
1222 acl: branch access granted: "f9cafe1212c8" on branch "default"
1222 acl: branch access granted: "f9cafe1212c8" on branch "default"
1223 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1223 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1224 transaction abort!
1224 transaction abort!
1225 rollback completed
1225 rollback completed
1226 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1226 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1227 no rollback information available
1227 no rollback information available
1228 0:6675d58eff77
1228 0:6675d58eff77
1229
1229
1230
1230
1231 Groups
1231 Groups
1232
1232
1233 $ init_config
1233 $ init_config
1234
1234
1235 OS-level groups
1235 OS-level groups
1236
1236
1237 $ echo '[acl.allow]' >> $config
1237 $ echo '[acl.allow]' >> $config
1238 $ echo "** = @group1" >> $config
1238 $ echo "** = @group1" >> $config
1239
1239
1240 @group1 is always allowed
1240 @group1 is always allowed
1241
1241
1242 $ do_push fred
1242 $ do_push fred
1243 Pushing as user fred
1243 Pushing as user fred
1244 hgrc = """
1244 hgrc = """
1245 [acl]
1245 [acl]
1246 sources = push
1246 sources = push
1247 [extensions]
1247 [extensions]
1248 [acl.allow]
1248 [acl.allow]
1249 ** = @group1
1249 ** = @group1
1250 """
1250 """
1251 pushing to ../b
1251 pushing to ../b
1252 query 1; heads
1252 query 1; heads
1253 searching for changes
1253 searching for changes
1254 all remote heads known locally
1254 all remote heads known locally
1255 invalid branchheads cache (unserved): tip differs
1255 invalid branchheads cache (served): tip differs
1256 listing keys for "bookmarks"
1256 listing keys for "bookmarks"
1257 3 changesets found
1257 3 changesets found
1258 list of changesets:
1258 list of changesets:
1259 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1259 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1260 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1260 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1261 911600dab2ae7a9baff75958b84fe606851ce955
1261 911600dab2ae7a9baff75958b84fe606851ce955
1262 adding changesets
1262 adding changesets
1263 bundling: 1/3 changesets (33.33%)
1263 bundling: 1/3 changesets (33.33%)
1264 bundling: 2/3 changesets (66.67%)
1264 bundling: 2/3 changesets (66.67%)
1265 bundling: 3/3 changesets (100.00%)
1265 bundling: 3/3 changesets (100.00%)
1266 bundling: 1/3 manifests (33.33%)
1266 bundling: 1/3 manifests (33.33%)
1267 bundling: 2/3 manifests (66.67%)
1267 bundling: 2/3 manifests (66.67%)
1268 bundling: 3/3 manifests (100.00%)
1268 bundling: 3/3 manifests (100.00%)
1269 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1269 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1270 bundling: foo/file.txt 2/3 files (66.67%)
1270 bundling: foo/file.txt 2/3 files (66.67%)
1271 bundling: quux/file.py 3/3 files (100.00%)
1271 bundling: quux/file.py 3/3 files (100.00%)
1272 changesets: 1 chunks
1272 changesets: 1 chunks
1273 add changeset ef1ea85a6374
1273 add changeset ef1ea85a6374
1274 changesets: 2 chunks
1274 changesets: 2 chunks
1275 add changeset f9cafe1212c8
1275 add changeset f9cafe1212c8
1276 changesets: 3 chunks
1276 changesets: 3 chunks
1277 add changeset 911600dab2ae
1277 add changeset 911600dab2ae
1278 adding manifests
1278 adding manifests
1279 manifests: 1/3 chunks (33.33%)
1279 manifests: 1/3 chunks (33.33%)
1280 manifests: 2/3 chunks (66.67%)
1280 manifests: 2/3 chunks (66.67%)
1281 manifests: 3/3 chunks (100.00%)
1281 manifests: 3/3 chunks (100.00%)
1282 adding file changes
1282 adding file changes
1283 adding foo/Bar/file.txt revisions
1283 adding foo/Bar/file.txt revisions
1284 files: 1/3 chunks (33.33%)
1284 files: 1/3 chunks (33.33%)
1285 adding foo/file.txt revisions
1285 adding foo/file.txt revisions
1286 files: 2/3 chunks (66.67%)
1286 files: 2/3 chunks (66.67%)
1287 adding quux/file.py revisions
1287 adding quux/file.py revisions
1288 files: 3/3 chunks (100.00%)
1288 files: 3/3 chunks (100.00%)
1289 added 3 changesets with 3 changes to 3 files
1289 added 3 changesets with 3 changes to 3 files
1290 calling hook pretxnchangegroup.acl: hgext.acl.hook
1290 calling hook pretxnchangegroup.acl: hgext.acl.hook
1291 acl: checking access for user "fred"
1291 acl: checking access for user "fred"
1292 acl: acl.allow.branches not enabled
1292 acl: acl.allow.branches not enabled
1293 acl: acl.deny.branches not enabled
1293 acl: acl.deny.branches not enabled
1294 acl: "group1" not defined in [acl.groups]
1294 acl: "group1" not defined in [acl.groups]
1295 acl: acl.allow enabled, 1 entries for user fred
1295 acl: acl.allow enabled, 1 entries for user fred
1296 acl: acl.deny not enabled
1296 acl: acl.deny not enabled
1297 acl: branch access granted: "ef1ea85a6374" on branch "default"
1297 acl: branch access granted: "ef1ea85a6374" on branch "default"
1298 acl: path access granted: "ef1ea85a6374"
1298 acl: path access granted: "ef1ea85a6374"
1299 acl: branch access granted: "f9cafe1212c8" on branch "default"
1299 acl: branch access granted: "f9cafe1212c8" on branch "default"
1300 acl: path access granted: "f9cafe1212c8"
1300 acl: path access granted: "f9cafe1212c8"
1301 acl: branch access granted: "911600dab2ae" on branch "default"
1301 acl: branch access granted: "911600dab2ae" on branch "default"
1302 acl: path access granted: "911600dab2ae"
1302 acl: path access granted: "911600dab2ae"
1303 listing keys for "phases"
1303 listing keys for "phases"
1304 try to push obsolete markers to remote
1304 try to push obsolete markers to remote
1305 updating the branch cache
1305 updating the branch cache
1306 checking for updated bookmarks
1306 checking for updated bookmarks
1307 listing keys for "bookmarks"
1307 listing keys for "bookmarks"
1308 repository tip rolled back to revision 0 (undo push)
1308 repository tip rolled back to revision 0 (undo push)
1309 0:6675d58eff77
1309 0:6675d58eff77
1310
1310
1311
1311
1312 $ echo '[acl.deny]' >> $config
1312 $ echo '[acl.deny]' >> $config
1313 $ echo "foo/Bar/** = @group1" >> $config
1313 $ echo "foo/Bar/** = @group1" >> $config
1314
1314
1315 @group is allowed inside anything but foo/Bar/
1315 @group is allowed inside anything but foo/Bar/
1316
1316
1317 $ do_push fred
1317 $ do_push fred
1318 Pushing as user fred
1318 Pushing as user fred
1319 hgrc = """
1319 hgrc = """
1320 [acl]
1320 [acl]
1321 sources = push
1321 sources = push
1322 [extensions]
1322 [extensions]
1323 [acl.allow]
1323 [acl.allow]
1324 ** = @group1
1324 ** = @group1
1325 [acl.deny]
1325 [acl.deny]
1326 foo/Bar/** = @group1
1326 foo/Bar/** = @group1
1327 """
1327 """
1328 pushing to ../b
1328 pushing to ../b
1329 query 1; heads
1329 query 1; heads
1330 searching for changes
1330 searching for changes
1331 all remote heads known locally
1331 all remote heads known locally
1332 invalid branchheads cache (unserved): tip differs
1332 invalid branchheads cache (served): tip differs
1333 listing keys for "bookmarks"
1333 listing keys for "bookmarks"
1334 3 changesets found
1334 3 changesets found
1335 list of changesets:
1335 list of changesets:
1336 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1336 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1337 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1337 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1338 911600dab2ae7a9baff75958b84fe606851ce955
1338 911600dab2ae7a9baff75958b84fe606851ce955
1339 adding changesets
1339 adding changesets
1340 bundling: 1/3 changesets (33.33%)
1340 bundling: 1/3 changesets (33.33%)
1341 bundling: 2/3 changesets (66.67%)
1341 bundling: 2/3 changesets (66.67%)
1342 bundling: 3/3 changesets (100.00%)
1342 bundling: 3/3 changesets (100.00%)
1343 bundling: 1/3 manifests (33.33%)
1343 bundling: 1/3 manifests (33.33%)
1344 bundling: 2/3 manifests (66.67%)
1344 bundling: 2/3 manifests (66.67%)
1345 bundling: 3/3 manifests (100.00%)
1345 bundling: 3/3 manifests (100.00%)
1346 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1346 bundling: foo/Bar/file.txt 1/3 files (33.33%)
1347 bundling: foo/file.txt 2/3 files (66.67%)
1347 bundling: foo/file.txt 2/3 files (66.67%)
1348 bundling: quux/file.py 3/3 files (100.00%)
1348 bundling: quux/file.py 3/3 files (100.00%)
1349 changesets: 1 chunks
1349 changesets: 1 chunks
1350 add changeset ef1ea85a6374
1350 add changeset ef1ea85a6374
1351 changesets: 2 chunks
1351 changesets: 2 chunks
1352 add changeset f9cafe1212c8
1352 add changeset f9cafe1212c8
1353 changesets: 3 chunks
1353 changesets: 3 chunks
1354 add changeset 911600dab2ae
1354 add changeset 911600dab2ae
1355 adding manifests
1355 adding manifests
1356 manifests: 1/3 chunks (33.33%)
1356 manifests: 1/3 chunks (33.33%)
1357 manifests: 2/3 chunks (66.67%)
1357 manifests: 2/3 chunks (66.67%)
1358 manifests: 3/3 chunks (100.00%)
1358 manifests: 3/3 chunks (100.00%)
1359 adding file changes
1359 adding file changes
1360 adding foo/Bar/file.txt revisions
1360 adding foo/Bar/file.txt revisions
1361 files: 1/3 chunks (33.33%)
1361 files: 1/3 chunks (33.33%)
1362 adding foo/file.txt revisions
1362 adding foo/file.txt revisions
1363 files: 2/3 chunks (66.67%)
1363 files: 2/3 chunks (66.67%)
1364 adding quux/file.py revisions
1364 adding quux/file.py revisions
1365 files: 3/3 chunks (100.00%)
1365 files: 3/3 chunks (100.00%)
1366 added 3 changesets with 3 changes to 3 files
1366 added 3 changesets with 3 changes to 3 files
1367 calling hook pretxnchangegroup.acl: hgext.acl.hook
1367 calling hook pretxnchangegroup.acl: hgext.acl.hook
1368 acl: checking access for user "fred"
1368 acl: checking access for user "fred"
1369 acl: acl.allow.branches not enabled
1369 acl: acl.allow.branches not enabled
1370 acl: acl.deny.branches not enabled
1370 acl: acl.deny.branches not enabled
1371 acl: "group1" not defined in [acl.groups]
1371 acl: "group1" not defined in [acl.groups]
1372 acl: acl.allow enabled, 1 entries for user fred
1372 acl: acl.allow enabled, 1 entries for user fred
1373 acl: "group1" not defined in [acl.groups]
1373 acl: "group1" not defined in [acl.groups]
1374 acl: acl.deny enabled, 1 entries for user fred
1374 acl: acl.deny enabled, 1 entries for user fred
1375 acl: branch access granted: "ef1ea85a6374" on branch "default"
1375 acl: branch access granted: "ef1ea85a6374" on branch "default"
1376 acl: path access granted: "ef1ea85a6374"
1376 acl: path access granted: "ef1ea85a6374"
1377 acl: branch access granted: "f9cafe1212c8" on branch "default"
1377 acl: branch access granted: "f9cafe1212c8" on branch "default"
1378 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1378 error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1379 transaction abort!
1379 transaction abort!
1380 rollback completed
1380 rollback completed
1381 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1381 abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8")
1382 no rollback information available
1382 no rollback information available
1383 0:6675d58eff77
1383 0:6675d58eff77
1384
1384
1385
1385
1386 Invalid group
1386 Invalid group
1387
1387
1388 Disable the fakegroups trick to get real failures
1388 Disable the fakegroups trick to get real failures
1389
1389
1390 $ grep -v fakegroups $config > config.tmp
1390 $ grep -v fakegroups $config > config.tmp
1391 $ mv config.tmp $config
1391 $ mv config.tmp $config
1392 $ echo '[acl.allow]' >> $config
1392 $ echo '[acl.allow]' >> $config
1393 $ echo "** = @unlikelytoexist" >> $config
1393 $ echo "** = @unlikelytoexist" >> $config
1394 $ do_push fred 2>&1 | grep unlikelytoexist
1394 $ do_push fred 2>&1 | grep unlikelytoexist
1395 ** = @unlikelytoexist
1395 ** = @unlikelytoexist
1396 acl: "unlikelytoexist" not defined in [acl.groups]
1396 acl: "unlikelytoexist" not defined in [acl.groups]
1397 error: pretxnchangegroup.acl hook failed: group 'unlikelytoexist' is undefined
1397 error: pretxnchangegroup.acl hook failed: group 'unlikelytoexist' is undefined
1398 abort: group 'unlikelytoexist' is undefined
1398 abort: group 'unlikelytoexist' is undefined
1399
1399
1400
1400
1401 Branch acl tests setup
1401 Branch acl tests setup
1402
1402
1403 $ init_config
1403 $ init_config
1404 $ cd b
1404 $ cd b
1405 $ hg up
1405 $ hg up
1406 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1406 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1407 $ hg branch foobar
1407 $ hg branch foobar
1408 marked working directory as branch foobar
1408 marked working directory as branch foobar
1409 (branches are permanent and global, did you want a bookmark?)
1409 (branches are permanent and global, did you want a bookmark?)
1410 $ hg commit -m 'create foobar'
1410 $ hg commit -m 'create foobar'
1411 $ echo 'foo contents' > abc.txt
1411 $ echo 'foo contents' > abc.txt
1412 $ hg add abc.txt
1412 $ hg add abc.txt
1413 $ hg commit -m 'foobar contents'
1413 $ hg commit -m 'foobar contents'
1414 $ cd ..
1414 $ cd ..
1415 $ hg --cwd a pull ../b
1415 $ hg --cwd a pull ../b
1416 pulling from ../b
1416 pulling from ../b
1417 searching for changes
1417 searching for changes
1418 adding changesets
1418 adding changesets
1419 adding manifests
1419 adding manifests
1420 adding file changes
1420 adding file changes
1421 added 2 changesets with 1 changes to 1 files (+1 heads)
1421 added 2 changesets with 1 changes to 1 files (+1 heads)
1422 (run 'hg heads' to see heads)
1422 (run 'hg heads' to see heads)
1423
1423
1424 Create additional changeset on foobar branch
1424 Create additional changeset on foobar branch
1425
1425
1426 $ cd a
1426 $ cd a
1427 $ hg up -C foobar
1427 $ hg up -C foobar
1428 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
1428 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
1429 $ echo 'foo contents2' > abc.txt
1429 $ echo 'foo contents2' > abc.txt
1430 $ hg commit -m 'foobar contents2'
1430 $ hg commit -m 'foobar contents2'
1431 $ cd ..
1431 $ cd ..
1432
1432
1433
1433
1434 No branch acls specified
1434 No branch acls specified
1435
1435
1436 $ do_push astro
1436 $ do_push astro
1437 Pushing as user astro
1437 Pushing as user astro
1438 hgrc = """
1438 hgrc = """
1439 [acl]
1439 [acl]
1440 sources = push
1440 sources = push
1441 [extensions]
1441 [extensions]
1442 """
1442 """
1443 pushing to ../b
1443 pushing to ../b
1444 query 1; heads
1444 query 1; heads
1445 searching for changes
1445 searching for changes
1446 all remote heads known locally
1446 all remote heads known locally
1447 listing keys for "bookmarks"
1447 listing keys for "bookmarks"
1448 4 changesets found
1448 4 changesets found
1449 list of changesets:
1449 list of changesets:
1450 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1450 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1451 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1451 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1452 911600dab2ae7a9baff75958b84fe606851ce955
1452 911600dab2ae7a9baff75958b84fe606851ce955
1453 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1453 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1454 adding changesets
1454 adding changesets
1455 bundling: 1/4 changesets (25.00%)
1455 bundling: 1/4 changesets (25.00%)
1456 bundling: 2/4 changesets (50.00%)
1456 bundling: 2/4 changesets (50.00%)
1457 bundling: 3/4 changesets (75.00%)
1457 bundling: 3/4 changesets (75.00%)
1458 bundling: 4/4 changesets (100.00%)
1458 bundling: 4/4 changesets (100.00%)
1459 bundling: 1/4 manifests (25.00%)
1459 bundling: 1/4 manifests (25.00%)
1460 bundling: 2/4 manifests (50.00%)
1460 bundling: 2/4 manifests (50.00%)
1461 bundling: 3/4 manifests (75.00%)
1461 bundling: 3/4 manifests (75.00%)
1462 bundling: 4/4 manifests (100.00%)
1462 bundling: 4/4 manifests (100.00%)
1463 bundling: abc.txt 1/4 files (25.00%)
1463 bundling: abc.txt 1/4 files (25.00%)
1464 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1464 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1465 bundling: foo/file.txt 3/4 files (75.00%)
1465 bundling: foo/file.txt 3/4 files (75.00%)
1466 bundling: quux/file.py 4/4 files (100.00%)
1466 bundling: quux/file.py 4/4 files (100.00%)
1467 changesets: 1 chunks
1467 changesets: 1 chunks
1468 add changeset ef1ea85a6374
1468 add changeset ef1ea85a6374
1469 changesets: 2 chunks
1469 changesets: 2 chunks
1470 add changeset f9cafe1212c8
1470 add changeset f9cafe1212c8
1471 changesets: 3 chunks
1471 changesets: 3 chunks
1472 add changeset 911600dab2ae
1472 add changeset 911600dab2ae
1473 changesets: 4 chunks
1473 changesets: 4 chunks
1474 add changeset e8fc755d4d82
1474 add changeset e8fc755d4d82
1475 adding manifests
1475 adding manifests
1476 manifests: 1/4 chunks (25.00%)
1476 manifests: 1/4 chunks (25.00%)
1477 manifests: 2/4 chunks (50.00%)
1477 manifests: 2/4 chunks (50.00%)
1478 manifests: 3/4 chunks (75.00%)
1478 manifests: 3/4 chunks (75.00%)
1479 manifests: 4/4 chunks (100.00%)
1479 manifests: 4/4 chunks (100.00%)
1480 adding file changes
1480 adding file changes
1481 adding abc.txt revisions
1481 adding abc.txt revisions
1482 files: 1/4 chunks (25.00%)
1482 files: 1/4 chunks (25.00%)
1483 adding foo/Bar/file.txt revisions
1483 adding foo/Bar/file.txt revisions
1484 files: 2/4 chunks (50.00%)
1484 files: 2/4 chunks (50.00%)
1485 adding foo/file.txt revisions
1485 adding foo/file.txt revisions
1486 files: 3/4 chunks (75.00%)
1486 files: 3/4 chunks (75.00%)
1487 adding quux/file.py revisions
1487 adding quux/file.py revisions
1488 files: 4/4 chunks (100.00%)
1488 files: 4/4 chunks (100.00%)
1489 added 4 changesets with 4 changes to 4 files (+1 heads)
1489 added 4 changesets with 4 changes to 4 files (+1 heads)
1490 calling hook pretxnchangegroup.acl: hgext.acl.hook
1490 calling hook pretxnchangegroup.acl: hgext.acl.hook
1491 acl: checking access for user "astro"
1491 acl: checking access for user "astro"
1492 acl: acl.allow.branches not enabled
1492 acl: acl.allow.branches not enabled
1493 acl: acl.deny.branches not enabled
1493 acl: acl.deny.branches not enabled
1494 acl: acl.allow not enabled
1494 acl: acl.allow not enabled
1495 acl: acl.deny not enabled
1495 acl: acl.deny not enabled
1496 acl: branch access granted: "ef1ea85a6374" on branch "default"
1496 acl: branch access granted: "ef1ea85a6374" on branch "default"
1497 acl: path access granted: "ef1ea85a6374"
1497 acl: path access granted: "ef1ea85a6374"
1498 acl: branch access granted: "f9cafe1212c8" on branch "default"
1498 acl: branch access granted: "f9cafe1212c8" on branch "default"
1499 acl: path access granted: "f9cafe1212c8"
1499 acl: path access granted: "f9cafe1212c8"
1500 acl: branch access granted: "911600dab2ae" on branch "default"
1500 acl: branch access granted: "911600dab2ae" on branch "default"
1501 acl: path access granted: "911600dab2ae"
1501 acl: path access granted: "911600dab2ae"
1502 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1502 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1503 acl: path access granted: "e8fc755d4d82"
1503 acl: path access granted: "e8fc755d4d82"
1504 listing keys for "phases"
1504 listing keys for "phases"
1505 try to push obsolete markers to remote
1505 try to push obsolete markers to remote
1506 updating the branch cache
1506 updating the branch cache
1507 checking for updated bookmarks
1507 checking for updated bookmarks
1508 listing keys for "bookmarks"
1508 listing keys for "bookmarks"
1509 repository tip rolled back to revision 2 (undo push)
1509 repository tip rolled back to revision 2 (undo push)
1510 2:fb35475503ef
1510 2:fb35475503ef
1511
1511
1512
1512
1513 Branch acl deny test
1513 Branch acl deny test
1514
1514
1515 $ echo "[acl.deny.branches]" >> $config
1515 $ echo "[acl.deny.branches]" >> $config
1516 $ echo "foobar = *" >> $config
1516 $ echo "foobar = *" >> $config
1517 $ do_push astro
1517 $ do_push astro
1518 Pushing as user astro
1518 Pushing as user astro
1519 hgrc = """
1519 hgrc = """
1520 [acl]
1520 [acl]
1521 sources = push
1521 sources = push
1522 [extensions]
1522 [extensions]
1523 [acl.deny.branches]
1523 [acl.deny.branches]
1524 foobar = *
1524 foobar = *
1525 """
1525 """
1526 pushing to ../b
1526 pushing to ../b
1527 query 1; heads
1527 query 1; heads
1528 searching for changes
1528 searching for changes
1529 all remote heads known locally
1529 all remote heads known locally
1530 invalid branchheads cache (unserved): tip differs
1530 invalid branchheads cache (served): tip differs
1531 listing keys for "bookmarks"
1531 listing keys for "bookmarks"
1532 4 changesets found
1532 4 changesets found
1533 list of changesets:
1533 list of changesets:
1534 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1534 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1535 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1535 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1536 911600dab2ae7a9baff75958b84fe606851ce955
1536 911600dab2ae7a9baff75958b84fe606851ce955
1537 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1537 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1538 adding changesets
1538 adding changesets
1539 bundling: 1/4 changesets (25.00%)
1539 bundling: 1/4 changesets (25.00%)
1540 bundling: 2/4 changesets (50.00%)
1540 bundling: 2/4 changesets (50.00%)
1541 bundling: 3/4 changesets (75.00%)
1541 bundling: 3/4 changesets (75.00%)
1542 bundling: 4/4 changesets (100.00%)
1542 bundling: 4/4 changesets (100.00%)
1543 bundling: 1/4 manifests (25.00%)
1543 bundling: 1/4 manifests (25.00%)
1544 bundling: 2/4 manifests (50.00%)
1544 bundling: 2/4 manifests (50.00%)
1545 bundling: 3/4 manifests (75.00%)
1545 bundling: 3/4 manifests (75.00%)
1546 bundling: 4/4 manifests (100.00%)
1546 bundling: 4/4 manifests (100.00%)
1547 bundling: abc.txt 1/4 files (25.00%)
1547 bundling: abc.txt 1/4 files (25.00%)
1548 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1548 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1549 bundling: foo/file.txt 3/4 files (75.00%)
1549 bundling: foo/file.txt 3/4 files (75.00%)
1550 bundling: quux/file.py 4/4 files (100.00%)
1550 bundling: quux/file.py 4/4 files (100.00%)
1551 changesets: 1 chunks
1551 changesets: 1 chunks
1552 add changeset ef1ea85a6374
1552 add changeset ef1ea85a6374
1553 changesets: 2 chunks
1553 changesets: 2 chunks
1554 add changeset f9cafe1212c8
1554 add changeset f9cafe1212c8
1555 changesets: 3 chunks
1555 changesets: 3 chunks
1556 add changeset 911600dab2ae
1556 add changeset 911600dab2ae
1557 changesets: 4 chunks
1557 changesets: 4 chunks
1558 add changeset e8fc755d4d82
1558 add changeset e8fc755d4d82
1559 adding manifests
1559 adding manifests
1560 manifests: 1/4 chunks (25.00%)
1560 manifests: 1/4 chunks (25.00%)
1561 manifests: 2/4 chunks (50.00%)
1561 manifests: 2/4 chunks (50.00%)
1562 manifests: 3/4 chunks (75.00%)
1562 manifests: 3/4 chunks (75.00%)
1563 manifests: 4/4 chunks (100.00%)
1563 manifests: 4/4 chunks (100.00%)
1564 adding file changes
1564 adding file changes
1565 adding abc.txt revisions
1565 adding abc.txt revisions
1566 files: 1/4 chunks (25.00%)
1566 files: 1/4 chunks (25.00%)
1567 adding foo/Bar/file.txt revisions
1567 adding foo/Bar/file.txt revisions
1568 files: 2/4 chunks (50.00%)
1568 files: 2/4 chunks (50.00%)
1569 adding foo/file.txt revisions
1569 adding foo/file.txt revisions
1570 files: 3/4 chunks (75.00%)
1570 files: 3/4 chunks (75.00%)
1571 adding quux/file.py revisions
1571 adding quux/file.py revisions
1572 files: 4/4 chunks (100.00%)
1572 files: 4/4 chunks (100.00%)
1573 added 4 changesets with 4 changes to 4 files (+1 heads)
1573 added 4 changesets with 4 changes to 4 files (+1 heads)
1574 calling hook pretxnchangegroup.acl: hgext.acl.hook
1574 calling hook pretxnchangegroup.acl: hgext.acl.hook
1575 acl: checking access for user "astro"
1575 acl: checking access for user "astro"
1576 acl: acl.allow.branches not enabled
1576 acl: acl.allow.branches not enabled
1577 acl: acl.deny.branches enabled, 1 entries for user astro
1577 acl: acl.deny.branches enabled, 1 entries for user astro
1578 acl: acl.allow not enabled
1578 acl: acl.allow not enabled
1579 acl: acl.deny not enabled
1579 acl: acl.deny not enabled
1580 acl: branch access granted: "ef1ea85a6374" on branch "default"
1580 acl: branch access granted: "ef1ea85a6374" on branch "default"
1581 acl: path access granted: "ef1ea85a6374"
1581 acl: path access granted: "ef1ea85a6374"
1582 acl: branch access granted: "f9cafe1212c8" on branch "default"
1582 acl: branch access granted: "f9cafe1212c8" on branch "default"
1583 acl: path access granted: "f9cafe1212c8"
1583 acl: path access granted: "f9cafe1212c8"
1584 acl: branch access granted: "911600dab2ae" on branch "default"
1584 acl: branch access granted: "911600dab2ae" on branch "default"
1585 acl: path access granted: "911600dab2ae"
1585 acl: path access granted: "911600dab2ae"
1586 error: pretxnchangegroup.acl hook failed: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1586 error: pretxnchangegroup.acl hook failed: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1587 transaction abort!
1587 transaction abort!
1588 rollback completed
1588 rollback completed
1589 abort: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1589 abort: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82")
1590 no rollback information available
1590 no rollback information available
1591 2:fb35475503ef
1591 2:fb35475503ef
1592
1592
1593
1593
1594 Branch acl empty allow test
1594 Branch acl empty allow test
1595
1595
1596 $ init_config
1596 $ init_config
1597 $ echo "[acl.allow.branches]" >> $config
1597 $ echo "[acl.allow.branches]" >> $config
1598 $ do_push astro
1598 $ do_push astro
1599 Pushing as user astro
1599 Pushing as user astro
1600 hgrc = """
1600 hgrc = """
1601 [acl]
1601 [acl]
1602 sources = push
1602 sources = push
1603 [extensions]
1603 [extensions]
1604 [acl.allow.branches]
1604 [acl.allow.branches]
1605 """
1605 """
1606 pushing to ../b
1606 pushing to ../b
1607 query 1; heads
1607 query 1; heads
1608 searching for changes
1608 searching for changes
1609 all remote heads known locally
1609 all remote heads known locally
1610 listing keys for "bookmarks"
1610 listing keys for "bookmarks"
1611 4 changesets found
1611 4 changesets found
1612 list of changesets:
1612 list of changesets:
1613 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1613 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1614 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1614 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1615 911600dab2ae7a9baff75958b84fe606851ce955
1615 911600dab2ae7a9baff75958b84fe606851ce955
1616 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1616 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1617 adding changesets
1617 adding changesets
1618 bundling: 1/4 changesets (25.00%)
1618 bundling: 1/4 changesets (25.00%)
1619 bundling: 2/4 changesets (50.00%)
1619 bundling: 2/4 changesets (50.00%)
1620 bundling: 3/4 changesets (75.00%)
1620 bundling: 3/4 changesets (75.00%)
1621 bundling: 4/4 changesets (100.00%)
1621 bundling: 4/4 changesets (100.00%)
1622 bundling: 1/4 manifests (25.00%)
1622 bundling: 1/4 manifests (25.00%)
1623 bundling: 2/4 manifests (50.00%)
1623 bundling: 2/4 manifests (50.00%)
1624 bundling: 3/4 manifests (75.00%)
1624 bundling: 3/4 manifests (75.00%)
1625 bundling: 4/4 manifests (100.00%)
1625 bundling: 4/4 manifests (100.00%)
1626 bundling: abc.txt 1/4 files (25.00%)
1626 bundling: abc.txt 1/4 files (25.00%)
1627 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1627 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1628 bundling: foo/file.txt 3/4 files (75.00%)
1628 bundling: foo/file.txt 3/4 files (75.00%)
1629 bundling: quux/file.py 4/4 files (100.00%)
1629 bundling: quux/file.py 4/4 files (100.00%)
1630 changesets: 1 chunks
1630 changesets: 1 chunks
1631 add changeset ef1ea85a6374
1631 add changeset ef1ea85a6374
1632 changesets: 2 chunks
1632 changesets: 2 chunks
1633 add changeset f9cafe1212c8
1633 add changeset f9cafe1212c8
1634 changesets: 3 chunks
1634 changesets: 3 chunks
1635 add changeset 911600dab2ae
1635 add changeset 911600dab2ae
1636 changesets: 4 chunks
1636 changesets: 4 chunks
1637 add changeset e8fc755d4d82
1637 add changeset e8fc755d4d82
1638 adding manifests
1638 adding manifests
1639 manifests: 1/4 chunks (25.00%)
1639 manifests: 1/4 chunks (25.00%)
1640 manifests: 2/4 chunks (50.00%)
1640 manifests: 2/4 chunks (50.00%)
1641 manifests: 3/4 chunks (75.00%)
1641 manifests: 3/4 chunks (75.00%)
1642 manifests: 4/4 chunks (100.00%)
1642 manifests: 4/4 chunks (100.00%)
1643 adding file changes
1643 adding file changes
1644 adding abc.txt revisions
1644 adding abc.txt revisions
1645 files: 1/4 chunks (25.00%)
1645 files: 1/4 chunks (25.00%)
1646 adding foo/Bar/file.txt revisions
1646 adding foo/Bar/file.txt revisions
1647 files: 2/4 chunks (50.00%)
1647 files: 2/4 chunks (50.00%)
1648 adding foo/file.txt revisions
1648 adding foo/file.txt revisions
1649 files: 3/4 chunks (75.00%)
1649 files: 3/4 chunks (75.00%)
1650 adding quux/file.py revisions
1650 adding quux/file.py revisions
1651 files: 4/4 chunks (100.00%)
1651 files: 4/4 chunks (100.00%)
1652 added 4 changesets with 4 changes to 4 files (+1 heads)
1652 added 4 changesets with 4 changes to 4 files (+1 heads)
1653 calling hook pretxnchangegroup.acl: hgext.acl.hook
1653 calling hook pretxnchangegroup.acl: hgext.acl.hook
1654 acl: checking access for user "astro"
1654 acl: checking access for user "astro"
1655 acl: acl.allow.branches enabled, 0 entries for user astro
1655 acl: acl.allow.branches enabled, 0 entries for user astro
1656 acl: acl.deny.branches not enabled
1656 acl: acl.deny.branches not enabled
1657 acl: acl.allow not enabled
1657 acl: acl.allow not enabled
1658 acl: acl.deny not enabled
1658 acl: acl.deny not enabled
1659 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1659 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1660 transaction abort!
1660 transaction abort!
1661 rollback completed
1661 rollback completed
1662 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1662 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1663 no rollback information available
1663 no rollback information available
1664 2:fb35475503ef
1664 2:fb35475503ef
1665
1665
1666
1666
1667 Branch acl allow other
1667 Branch acl allow other
1668
1668
1669 $ init_config
1669 $ init_config
1670 $ echo "[acl.allow.branches]" >> $config
1670 $ echo "[acl.allow.branches]" >> $config
1671 $ echo "* = george" >> $config
1671 $ echo "* = george" >> $config
1672 $ do_push astro
1672 $ do_push astro
1673 Pushing as user astro
1673 Pushing as user astro
1674 hgrc = """
1674 hgrc = """
1675 [acl]
1675 [acl]
1676 sources = push
1676 sources = push
1677 [extensions]
1677 [extensions]
1678 [acl.allow.branches]
1678 [acl.allow.branches]
1679 * = george
1679 * = george
1680 """
1680 """
1681 pushing to ../b
1681 pushing to ../b
1682 query 1; heads
1682 query 1; heads
1683 searching for changes
1683 searching for changes
1684 all remote heads known locally
1684 all remote heads known locally
1685 listing keys for "bookmarks"
1685 listing keys for "bookmarks"
1686 4 changesets found
1686 4 changesets found
1687 list of changesets:
1687 list of changesets:
1688 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1688 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1689 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1689 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1690 911600dab2ae7a9baff75958b84fe606851ce955
1690 911600dab2ae7a9baff75958b84fe606851ce955
1691 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1691 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1692 adding changesets
1692 adding changesets
1693 bundling: 1/4 changesets (25.00%)
1693 bundling: 1/4 changesets (25.00%)
1694 bundling: 2/4 changesets (50.00%)
1694 bundling: 2/4 changesets (50.00%)
1695 bundling: 3/4 changesets (75.00%)
1695 bundling: 3/4 changesets (75.00%)
1696 bundling: 4/4 changesets (100.00%)
1696 bundling: 4/4 changesets (100.00%)
1697 bundling: 1/4 manifests (25.00%)
1697 bundling: 1/4 manifests (25.00%)
1698 bundling: 2/4 manifests (50.00%)
1698 bundling: 2/4 manifests (50.00%)
1699 bundling: 3/4 manifests (75.00%)
1699 bundling: 3/4 manifests (75.00%)
1700 bundling: 4/4 manifests (100.00%)
1700 bundling: 4/4 manifests (100.00%)
1701 bundling: abc.txt 1/4 files (25.00%)
1701 bundling: abc.txt 1/4 files (25.00%)
1702 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1702 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1703 bundling: foo/file.txt 3/4 files (75.00%)
1703 bundling: foo/file.txt 3/4 files (75.00%)
1704 bundling: quux/file.py 4/4 files (100.00%)
1704 bundling: quux/file.py 4/4 files (100.00%)
1705 changesets: 1 chunks
1705 changesets: 1 chunks
1706 add changeset ef1ea85a6374
1706 add changeset ef1ea85a6374
1707 changesets: 2 chunks
1707 changesets: 2 chunks
1708 add changeset f9cafe1212c8
1708 add changeset f9cafe1212c8
1709 changesets: 3 chunks
1709 changesets: 3 chunks
1710 add changeset 911600dab2ae
1710 add changeset 911600dab2ae
1711 changesets: 4 chunks
1711 changesets: 4 chunks
1712 add changeset e8fc755d4d82
1712 add changeset e8fc755d4d82
1713 adding manifests
1713 adding manifests
1714 manifests: 1/4 chunks (25.00%)
1714 manifests: 1/4 chunks (25.00%)
1715 manifests: 2/4 chunks (50.00%)
1715 manifests: 2/4 chunks (50.00%)
1716 manifests: 3/4 chunks (75.00%)
1716 manifests: 3/4 chunks (75.00%)
1717 manifests: 4/4 chunks (100.00%)
1717 manifests: 4/4 chunks (100.00%)
1718 adding file changes
1718 adding file changes
1719 adding abc.txt revisions
1719 adding abc.txt revisions
1720 files: 1/4 chunks (25.00%)
1720 files: 1/4 chunks (25.00%)
1721 adding foo/Bar/file.txt revisions
1721 adding foo/Bar/file.txt revisions
1722 files: 2/4 chunks (50.00%)
1722 files: 2/4 chunks (50.00%)
1723 adding foo/file.txt revisions
1723 adding foo/file.txt revisions
1724 files: 3/4 chunks (75.00%)
1724 files: 3/4 chunks (75.00%)
1725 adding quux/file.py revisions
1725 adding quux/file.py revisions
1726 files: 4/4 chunks (100.00%)
1726 files: 4/4 chunks (100.00%)
1727 added 4 changesets with 4 changes to 4 files (+1 heads)
1727 added 4 changesets with 4 changes to 4 files (+1 heads)
1728 calling hook pretxnchangegroup.acl: hgext.acl.hook
1728 calling hook pretxnchangegroup.acl: hgext.acl.hook
1729 acl: checking access for user "astro"
1729 acl: checking access for user "astro"
1730 acl: acl.allow.branches enabled, 0 entries for user astro
1730 acl: acl.allow.branches enabled, 0 entries for user astro
1731 acl: acl.deny.branches not enabled
1731 acl: acl.deny.branches not enabled
1732 acl: acl.allow not enabled
1732 acl: acl.allow not enabled
1733 acl: acl.deny not enabled
1733 acl: acl.deny not enabled
1734 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1734 error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1735 transaction abort!
1735 transaction abort!
1736 rollback completed
1736 rollback completed
1737 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1737 abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374")
1738 no rollback information available
1738 no rollback information available
1739 2:fb35475503ef
1739 2:fb35475503ef
1740
1740
1741 $ do_push george
1741 $ do_push george
1742 Pushing as user george
1742 Pushing as user george
1743 hgrc = """
1743 hgrc = """
1744 [acl]
1744 [acl]
1745 sources = push
1745 sources = push
1746 [extensions]
1746 [extensions]
1747 [acl.allow.branches]
1747 [acl.allow.branches]
1748 * = george
1748 * = george
1749 """
1749 """
1750 pushing to ../b
1750 pushing to ../b
1751 query 1; heads
1751 query 1; heads
1752 searching for changes
1752 searching for changes
1753 all remote heads known locally
1753 all remote heads known locally
1754 listing keys for "bookmarks"
1754 listing keys for "bookmarks"
1755 4 changesets found
1755 4 changesets found
1756 list of changesets:
1756 list of changesets:
1757 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1757 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1758 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1758 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1759 911600dab2ae7a9baff75958b84fe606851ce955
1759 911600dab2ae7a9baff75958b84fe606851ce955
1760 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1760 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1761 adding changesets
1761 adding changesets
1762 bundling: 1/4 changesets (25.00%)
1762 bundling: 1/4 changesets (25.00%)
1763 bundling: 2/4 changesets (50.00%)
1763 bundling: 2/4 changesets (50.00%)
1764 bundling: 3/4 changesets (75.00%)
1764 bundling: 3/4 changesets (75.00%)
1765 bundling: 4/4 changesets (100.00%)
1765 bundling: 4/4 changesets (100.00%)
1766 bundling: 1/4 manifests (25.00%)
1766 bundling: 1/4 manifests (25.00%)
1767 bundling: 2/4 manifests (50.00%)
1767 bundling: 2/4 manifests (50.00%)
1768 bundling: 3/4 manifests (75.00%)
1768 bundling: 3/4 manifests (75.00%)
1769 bundling: 4/4 manifests (100.00%)
1769 bundling: 4/4 manifests (100.00%)
1770 bundling: abc.txt 1/4 files (25.00%)
1770 bundling: abc.txt 1/4 files (25.00%)
1771 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1771 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1772 bundling: foo/file.txt 3/4 files (75.00%)
1772 bundling: foo/file.txt 3/4 files (75.00%)
1773 bundling: quux/file.py 4/4 files (100.00%)
1773 bundling: quux/file.py 4/4 files (100.00%)
1774 changesets: 1 chunks
1774 changesets: 1 chunks
1775 add changeset ef1ea85a6374
1775 add changeset ef1ea85a6374
1776 changesets: 2 chunks
1776 changesets: 2 chunks
1777 add changeset f9cafe1212c8
1777 add changeset f9cafe1212c8
1778 changesets: 3 chunks
1778 changesets: 3 chunks
1779 add changeset 911600dab2ae
1779 add changeset 911600dab2ae
1780 changesets: 4 chunks
1780 changesets: 4 chunks
1781 add changeset e8fc755d4d82
1781 add changeset e8fc755d4d82
1782 adding manifests
1782 adding manifests
1783 manifests: 1/4 chunks (25.00%)
1783 manifests: 1/4 chunks (25.00%)
1784 manifests: 2/4 chunks (50.00%)
1784 manifests: 2/4 chunks (50.00%)
1785 manifests: 3/4 chunks (75.00%)
1785 manifests: 3/4 chunks (75.00%)
1786 manifests: 4/4 chunks (100.00%)
1786 manifests: 4/4 chunks (100.00%)
1787 adding file changes
1787 adding file changes
1788 adding abc.txt revisions
1788 adding abc.txt revisions
1789 files: 1/4 chunks (25.00%)
1789 files: 1/4 chunks (25.00%)
1790 adding foo/Bar/file.txt revisions
1790 adding foo/Bar/file.txt revisions
1791 files: 2/4 chunks (50.00%)
1791 files: 2/4 chunks (50.00%)
1792 adding foo/file.txt revisions
1792 adding foo/file.txt revisions
1793 files: 3/4 chunks (75.00%)
1793 files: 3/4 chunks (75.00%)
1794 adding quux/file.py revisions
1794 adding quux/file.py revisions
1795 files: 4/4 chunks (100.00%)
1795 files: 4/4 chunks (100.00%)
1796 added 4 changesets with 4 changes to 4 files (+1 heads)
1796 added 4 changesets with 4 changes to 4 files (+1 heads)
1797 calling hook pretxnchangegroup.acl: hgext.acl.hook
1797 calling hook pretxnchangegroup.acl: hgext.acl.hook
1798 acl: checking access for user "george"
1798 acl: checking access for user "george"
1799 acl: acl.allow.branches enabled, 1 entries for user george
1799 acl: acl.allow.branches enabled, 1 entries for user george
1800 acl: acl.deny.branches not enabled
1800 acl: acl.deny.branches not enabled
1801 acl: acl.allow not enabled
1801 acl: acl.allow not enabled
1802 acl: acl.deny not enabled
1802 acl: acl.deny not enabled
1803 acl: branch access granted: "ef1ea85a6374" on branch "default"
1803 acl: branch access granted: "ef1ea85a6374" on branch "default"
1804 acl: path access granted: "ef1ea85a6374"
1804 acl: path access granted: "ef1ea85a6374"
1805 acl: branch access granted: "f9cafe1212c8" on branch "default"
1805 acl: branch access granted: "f9cafe1212c8" on branch "default"
1806 acl: path access granted: "f9cafe1212c8"
1806 acl: path access granted: "f9cafe1212c8"
1807 acl: branch access granted: "911600dab2ae" on branch "default"
1807 acl: branch access granted: "911600dab2ae" on branch "default"
1808 acl: path access granted: "911600dab2ae"
1808 acl: path access granted: "911600dab2ae"
1809 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1809 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1810 acl: path access granted: "e8fc755d4d82"
1810 acl: path access granted: "e8fc755d4d82"
1811 listing keys for "phases"
1811 listing keys for "phases"
1812 try to push obsolete markers to remote
1812 try to push obsolete markers to remote
1813 updating the branch cache
1813 updating the branch cache
1814 checking for updated bookmarks
1814 checking for updated bookmarks
1815 listing keys for "bookmarks"
1815 listing keys for "bookmarks"
1816 repository tip rolled back to revision 2 (undo push)
1816 repository tip rolled back to revision 2 (undo push)
1817 2:fb35475503ef
1817 2:fb35475503ef
1818
1818
1819
1819
1820 Branch acl conflicting allow
1820 Branch acl conflicting allow
1821 asterisk ends up applying to all branches and allowing george to
1821 asterisk ends up applying to all branches and allowing george to
1822 push foobar into the remote
1822 push foobar into the remote
1823
1823
1824 $ init_config
1824 $ init_config
1825 $ echo "[acl.allow.branches]" >> $config
1825 $ echo "[acl.allow.branches]" >> $config
1826 $ echo "foobar = astro" >> $config
1826 $ echo "foobar = astro" >> $config
1827 $ echo "* = george" >> $config
1827 $ echo "* = george" >> $config
1828 $ do_push george
1828 $ do_push george
1829 Pushing as user george
1829 Pushing as user george
1830 hgrc = """
1830 hgrc = """
1831 [acl]
1831 [acl]
1832 sources = push
1832 sources = push
1833 [extensions]
1833 [extensions]
1834 [acl.allow.branches]
1834 [acl.allow.branches]
1835 foobar = astro
1835 foobar = astro
1836 * = george
1836 * = george
1837 """
1837 """
1838 pushing to ../b
1838 pushing to ../b
1839 query 1; heads
1839 query 1; heads
1840 searching for changes
1840 searching for changes
1841 all remote heads known locally
1841 all remote heads known locally
1842 invalid branchheads cache (unserved): tip differs
1842 invalid branchheads cache (served): tip differs
1843 listing keys for "bookmarks"
1843 listing keys for "bookmarks"
1844 4 changesets found
1844 4 changesets found
1845 list of changesets:
1845 list of changesets:
1846 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1846 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1847 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1847 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1848 911600dab2ae7a9baff75958b84fe606851ce955
1848 911600dab2ae7a9baff75958b84fe606851ce955
1849 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1849 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1850 adding changesets
1850 adding changesets
1851 bundling: 1/4 changesets (25.00%)
1851 bundling: 1/4 changesets (25.00%)
1852 bundling: 2/4 changesets (50.00%)
1852 bundling: 2/4 changesets (50.00%)
1853 bundling: 3/4 changesets (75.00%)
1853 bundling: 3/4 changesets (75.00%)
1854 bundling: 4/4 changesets (100.00%)
1854 bundling: 4/4 changesets (100.00%)
1855 bundling: 1/4 manifests (25.00%)
1855 bundling: 1/4 manifests (25.00%)
1856 bundling: 2/4 manifests (50.00%)
1856 bundling: 2/4 manifests (50.00%)
1857 bundling: 3/4 manifests (75.00%)
1857 bundling: 3/4 manifests (75.00%)
1858 bundling: 4/4 manifests (100.00%)
1858 bundling: 4/4 manifests (100.00%)
1859 bundling: abc.txt 1/4 files (25.00%)
1859 bundling: abc.txt 1/4 files (25.00%)
1860 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1860 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1861 bundling: foo/file.txt 3/4 files (75.00%)
1861 bundling: foo/file.txt 3/4 files (75.00%)
1862 bundling: quux/file.py 4/4 files (100.00%)
1862 bundling: quux/file.py 4/4 files (100.00%)
1863 changesets: 1 chunks
1863 changesets: 1 chunks
1864 add changeset ef1ea85a6374
1864 add changeset ef1ea85a6374
1865 changesets: 2 chunks
1865 changesets: 2 chunks
1866 add changeset f9cafe1212c8
1866 add changeset f9cafe1212c8
1867 changesets: 3 chunks
1867 changesets: 3 chunks
1868 add changeset 911600dab2ae
1868 add changeset 911600dab2ae
1869 changesets: 4 chunks
1869 changesets: 4 chunks
1870 add changeset e8fc755d4d82
1870 add changeset e8fc755d4d82
1871 adding manifests
1871 adding manifests
1872 manifests: 1/4 chunks (25.00%)
1872 manifests: 1/4 chunks (25.00%)
1873 manifests: 2/4 chunks (50.00%)
1873 manifests: 2/4 chunks (50.00%)
1874 manifests: 3/4 chunks (75.00%)
1874 manifests: 3/4 chunks (75.00%)
1875 manifests: 4/4 chunks (100.00%)
1875 manifests: 4/4 chunks (100.00%)
1876 adding file changes
1876 adding file changes
1877 adding abc.txt revisions
1877 adding abc.txt revisions
1878 files: 1/4 chunks (25.00%)
1878 files: 1/4 chunks (25.00%)
1879 adding foo/Bar/file.txt revisions
1879 adding foo/Bar/file.txt revisions
1880 files: 2/4 chunks (50.00%)
1880 files: 2/4 chunks (50.00%)
1881 adding foo/file.txt revisions
1881 adding foo/file.txt revisions
1882 files: 3/4 chunks (75.00%)
1882 files: 3/4 chunks (75.00%)
1883 adding quux/file.py revisions
1883 adding quux/file.py revisions
1884 files: 4/4 chunks (100.00%)
1884 files: 4/4 chunks (100.00%)
1885 added 4 changesets with 4 changes to 4 files (+1 heads)
1885 added 4 changesets with 4 changes to 4 files (+1 heads)
1886 calling hook pretxnchangegroup.acl: hgext.acl.hook
1886 calling hook pretxnchangegroup.acl: hgext.acl.hook
1887 acl: checking access for user "george"
1887 acl: checking access for user "george"
1888 acl: acl.allow.branches enabled, 1 entries for user george
1888 acl: acl.allow.branches enabled, 1 entries for user george
1889 acl: acl.deny.branches not enabled
1889 acl: acl.deny.branches not enabled
1890 acl: acl.allow not enabled
1890 acl: acl.allow not enabled
1891 acl: acl.deny not enabled
1891 acl: acl.deny not enabled
1892 acl: branch access granted: "ef1ea85a6374" on branch "default"
1892 acl: branch access granted: "ef1ea85a6374" on branch "default"
1893 acl: path access granted: "ef1ea85a6374"
1893 acl: path access granted: "ef1ea85a6374"
1894 acl: branch access granted: "f9cafe1212c8" on branch "default"
1894 acl: branch access granted: "f9cafe1212c8" on branch "default"
1895 acl: path access granted: "f9cafe1212c8"
1895 acl: path access granted: "f9cafe1212c8"
1896 acl: branch access granted: "911600dab2ae" on branch "default"
1896 acl: branch access granted: "911600dab2ae" on branch "default"
1897 acl: path access granted: "911600dab2ae"
1897 acl: path access granted: "911600dab2ae"
1898 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1898 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
1899 acl: path access granted: "e8fc755d4d82"
1899 acl: path access granted: "e8fc755d4d82"
1900 listing keys for "phases"
1900 listing keys for "phases"
1901 try to push obsolete markers to remote
1901 try to push obsolete markers to remote
1902 updating the branch cache
1902 updating the branch cache
1903 checking for updated bookmarks
1903 checking for updated bookmarks
1904 listing keys for "bookmarks"
1904 listing keys for "bookmarks"
1905 repository tip rolled back to revision 2 (undo push)
1905 repository tip rolled back to revision 2 (undo push)
1906 2:fb35475503ef
1906 2:fb35475503ef
1907
1907
1908 Branch acl conflicting deny
1908 Branch acl conflicting deny
1909
1909
1910 $ init_config
1910 $ init_config
1911 $ echo "[acl.deny.branches]" >> $config
1911 $ echo "[acl.deny.branches]" >> $config
1912 $ echo "foobar = astro" >> $config
1912 $ echo "foobar = astro" >> $config
1913 $ echo "default = astro" >> $config
1913 $ echo "default = astro" >> $config
1914 $ echo "* = george" >> $config
1914 $ echo "* = george" >> $config
1915 $ do_push george
1915 $ do_push george
1916 Pushing as user george
1916 Pushing as user george
1917 hgrc = """
1917 hgrc = """
1918 [acl]
1918 [acl]
1919 sources = push
1919 sources = push
1920 [extensions]
1920 [extensions]
1921 [acl.deny.branches]
1921 [acl.deny.branches]
1922 foobar = astro
1922 foobar = astro
1923 default = astro
1923 default = astro
1924 * = george
1924 * = george
1925 """
1925 """
1926 pushing to ../b
1926 pushing to ../b
1927 query 1; heads
1927 query 1; heads
1928 searching for changes
1928 searching for changes
1929 all remote heads known locally
1929 all remote heads known locally
1930 invalid branchheads cache (unserved): tip differs
1930 invalid branchheads cache (served): tip differs
1931 listing keys for "bookmarks"
1931 listing keys for "bookmarks"
1932 4 changesets found
1932 4 changesets found
1933 list of changesets:
1933 list of changesets:
1934 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1934 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1935 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1935 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1936 911600dab2ae7a9baff75958b84fe606851ce955
1936 911600dab2ae7a9baff75958b84fe606851ce955
1937 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1937 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
1938 adding changesets
1938 adding changesets
1939 bundling: 1/4 changesets (25.00%)
1939 bundling: 1/4 changesets (25.00%)
1940 bundling: 2/4 changesets (50.00%)
1940 bundling: 2/4 changesets (50.00%)
1941 bundling: 3/4 changesets (75.00%)
1941 bundling: 3/4 changesets (75.00%)
1942 bundling: 4/4 changesets (100.00%)
1942 bundling: 4/4 changesets (100.00%)
1943 bundling: 1/4 manifests (25.00%)
1943 bundling: 1/4 manifests (25.00%)
1944 bundling: 2/4 manifests (50.00%)
1944 bundling: 2/4 manifests (50.00%)
1945 bundling: 3/4 manifests (75.00%)
1945 bundling: 3/4 manifests (75.00%)
1946 bundling: 4/4 manifests (100.00%)
1946 bundling: 4/4 manifests (100.00%)
1947 bundling: abc.txt 1/4 files (25.00%)
1947 bundling: abc.txt 1/4 files (25.00%)
1948 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1948 bundling: foo/Bar/file.txt 2/4 files (50.00%)
1949 bundling: foo/file.txt 3/4 files (75.00%)
1949 bundling: foo/file.txt 3/4 files (75.00%)
1950 bundling: quux/file.py 4/4 files (100.00%)
1950 bundling: quux/file.py 4/4 files (100.00%)
1951 changesets: 1 chunks
1951 changesets: 1 chunks
1952 add changeset ef1ea85a6374
1952 add changeset ef1ea85a6374
1953 changesets: 2 chunks
1953 changesets: 2 chunks
1954 add changeset f9cafe1212c8
1954 add changeset f9cafe1212c8
1955 changesets: 3 chunks
1955 changesets: 3 chunks
1956 add changeset 911600dab2ae
1956 add changeset 911600dab2ae
1957 changesets: 4 chunks
1957 changesets: 4 chunks
1958 add changeset e8fc755d4d82
1958 add changeset e8fc755d4d82
1959 adding manifests
1959 adding manifests
1960 manifests: 1/4 chunks (25.00%)
1960 manifests: 1/4 chunks (25.00%)
1961 manifests: 2/4 chunks (50.00%)
1961 manifests: 2/4 chunks (50.00%)
1962 manifests: 3/4 chunks (75.00%)
1962 manifests: 3/4 chunks (75.00%)
1963 manifests: 4/4 chunks (100.00%)
1963 manifests: 4/4 chunks (100.00%)
1964 adding file changes
1964 adding file changes
1965 adding abc.txt revisions
1965 adding abc.txt revisions
1966 files: 1/4 chunks (25.00%)
1966 files: 1/4 chunks (25.00%)
1967 adding foo/Bar/file.txt revisions
1967 adding foo/Bar/file.txt revisions
1968 files: 2/4 chunks (50.00%)
1968 files: 2/4 chunks (50.00%)
1969 adding foo/file.txt revisions
1969 adding foo/file.txt revisions
1970 files: 3/4 chunks (75.00%)
1970 files: 3/4 chunks (75.00%)
1971 adding quux/file.py revisions
1971 adding quux/file.py revisions
1972 files: 4/4 chunks (100.00%)
1972 files: 4/4 chunks (100.00%)
1973 added 4 changesets with 4 changes to 4 files (+1 heads)
1973 added 4 changesets with 4 changes to 4 files (+1 heads)
1974 calling hook pretxnchangegroup.acl: hgext.acl.hook
1974 calling hook pretxnchangegroup.acl: hgext.acl.hook
1975 acl: checking access for user "george"
1975 acl: checking access for user "george"
1976 acl: acl.allow.branches not enabled
1976 acl: acl.allow.branches not enabled
1977 acl: acl.deny.branches enabled, 1 entries for user george
1977 acl: acl.deny.branches enabled, 1 entries for user george
1978 acl: acl.allow not enabled
1978 acl: acl.allow not enabled
1979 acl: acl.deny not enabled
1979 acl: acl.deny not enabled
1980 error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
1980 error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
1981 transaction abort!
1981 transaction abort!
1982 rollback completed
1982 rollback completed
1983 abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
1983 abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
1984 no rollback information available
1984 no rollback information available
1985 2:fb35475503ef
1985 2:fb35475503ef
1986
1986
1987 User 'astro' must not be denied
1987 User 'astro' must not be denied
1988
1988
1989 $ init_config
1989 $ init_config
1990 $ echo "[acl.deny.branches]" >> $config
1990 $ echo "[acl.deny.branches]" >> $config
1991 $ echo "default = !astro" >> $config
1991 $ echo "default = !astro" >> $config
1992 $ do_push astro
1992 $ do_push astro
1993 Pushing as user astro
1993 Pushing as user astro
1994 hgrc = """
1994 hgrc = """
1995 [acl]
1995 [acl]
1996 sources = push
1996 sources = push
1997 [extensions]
1997 [extensions]
1998 [acl.deny.branches]
1998 [acl.deny.branches]
1999 default = !astro
1999 default = !astro
2000 """
2000 """
2001 pushing to ../b
2001 pushing to ../b
2002 query 1; heads
2002 query 1; heads
2003 searching for changes
2003 searching for changes
2004 all remote heads known locally
2004 all remote heads known locally
2005 listing keys for "bookmarks"
2005 listing keys for "bookmarks"
2006 4 changesets found
2006 4 changesets found
2007 list of changesets:
2007 list of changesets:
2008 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
2008 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
2009 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
2009 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
2010 911600dab2ae7a9baff75958b84fe606851ce955
2010 911600dab2ae7a9baff75958b84fe606851ce955
2011 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
2011 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
2012 adding changesets
2012 adding changesets
2013 bundling: 1/4 changesets (25.00%)
2013 bundling: 1/4 changesets (25.00%)
2014 bundling: 2/4 changesets (50.00%)
2014 bundling: 2/4 changesets (50.00%)
2015 bundling: 3/4 changesets (75.00%)
2015 bundling: 3/4 changesets (75.00%)
2016 bundling: 4/4 changesets (100.00%)
2016 bundling: 4/4 changesets (100.00%)
2017 bundling: 1/4 manifests (25.00%)
2017 bundling: 1/4 manifests (25.00%)
2018 bundling: 2/4 manifests (50.00%)
2018 bundling: 2/4 manifests (50.00%)
2019 bundling: 3/4 manifests (75.00%)
2019 bundling: 3/4 manifests (75.00%)
2020 bundling: 4/4 manifests (100.00%)
2020 bundling: 4/4 manifests (100.00%)
2021 bundling: abc.txt 1/4 files (25.00%)
2021 bundling: abc.txt 1/4 files (25.00%)
2022 bundling: foo/Bar/file.txt 2/4 files (50.00%)
2022 bundling: foo/Bar/file.txt 2/4 files (50.00%)
2023 bundling: foo/file.txt 3/4 files (75.00%)
2023 bundling: foo/file.txt 3/4 files (75.00%)
2024 bundling: quux/file.py 4/4 files (100.00%)
2024 bundling: quux/file.py 4/4 files (100.00%)
2025 changesets: 1 chunks
2025 changesets: 1 chunks
2026 add changeset ef1ea85a6374
2026 add changeset ef1ea85a6374
2027 changesets: 2 chunks
2027 changesets: 2 chunks
2028 add changeset f9cafe1212c8
2028 add changeset f9cafe1212c8
2029 changesets: 3 chunks
2029 changesets: 3 chunks
2030 add changeset 911600dab2ae
2030 add changeset 911600dab2ae
2031 changesets: 4 chunks
2031 changesets: 4 chunks
2032 add changeset e8fc755d4d82
2032 add changeset e8fc755d4d82
2033 adding manifests
2033 adding manifests
2034 manifests: 1/4 chunks (25.00%)
2034 manifests: 1/4 chunks (25.00%)
2035 manifests: 2/4 chunks (50.00%)
2035 manifests: 2/4 chunks (50.00%)
2036 manifests: 3/4 chunks (75.00%)
2036 manifests: 3/4 chunks (75.00%)
2037 manifests: 4/4 chunks (100.00%)
2037 manifests: 4/4 chunks (100.00%)
2038 adding file changes
2038 adding file changes
2039 adding abc.txt revisions
2039 adding abc.txt revisions
2040 files: 1/4 chunks (25.00%)
2040 files: 1/4 chunks (25.00%)
2041 adding foo/Bar/file.txt revisions
2041 adding foo/Bar/file.txt revisions
2042 files: 2/4 chunks (50.00%)
2042 files: 2/4 chunks (50.00%)
2043 adding foo/file.txt revisions
2043 adding foo/file.txt revisions
2044 files: 3/4 chunks (75.00%)
2044 files: 3/4 chunks (75.00%)
2045 adding quux/file.py revisions
2045 adding quux/file.py revisions
2046 files: 4/4 chunks (100.00%)
2046 files: 4/4 chunks (100.00%)
2047 added 4 changesets with 4 changes to 4 files (+1 heads)
2047 added 4 changesets with 4 changes to 4 files (+1 heads)
2048 calling hook pretxnchangegroup.acl: hgext.acl.hook
2048 calling hook pretxnchangegroup.acl: hgext.acl.hook
2049 acl: checking access for user "astro"
2049 acl: checking access for user "astro"
2050 acl: acl.allow.branches not enabled
2050 acl: acl.allow.branches not enabled
2051 acl: acl.deny.branches enabled, 0 entries for user astro
2051 acl: acl.deny.branches enabled, 0 entries for user astro
2052 acl: acl.allow not enabled
2052 acl: acl.allow not enabled
2053 acl: acl.deny not enabled
2053 acl: acl.deny not enabled
2054 acl: branch access granted: "ef1ea85a6374" on branch "default"
2054 acl: branch access granted: "ef1ea85a6374" on branch "default"
2055 acl: path access granted: "ef1ea85a6374"
2055 acl: path access granted: "ef1ea85a6374"
2056 acl: branch access granted: "f9cafe1212c8" on branch "default"
2056 acl: branch access granted: "f9cafe1212c8" on branch "default"
2057 acl: path access granted: "f9cafe1212c8"
2057 acl: path access granted: "f9cafe1212c8"
2058 acl: branch access granted: "911600dab2ae" on branch "default"
2058 acl: branch access granted: "911600dab2ae" on branch "default"
2059 acl: path access granted: "911600dab2ae"
2059 acl: path access granted: "911600dab2ae"
2060 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
2060 acl: branch access granted: "e8fc755d4d82" on branch "foobar"
2061 acl: path access granted: "e8fc755d4d82"
2061 acl: path access granted: "e8fc755d4d82"
2062 listing keys for "phases"
2062 listing keys for "phases"
2063 try to push obsolete markers to remote
2063 try to push obsolete markers to remote
2064 updating the branch cache
2064 updating the branch cache
2065 checking for updated bookmarks
2065 checking for updated bookmarks
2066 listing keys for "bookmarks"
2066 listing keys for "bookmarks"
2067 repository tip rolled back to revision 2 (undo push)
2067 repository tip rolled back to revision 2 (undo push)
2068 2:fb35475503ef
2068 2:fb35475503ef
2069
2069
2070
2070
2071 Non-astro users must be denied
2071 Non-astro users must be denied
2072
2072
2073 $ do_push george
2073 $ do_push george
2074 Pushing as user george
2074 Pushing as user george
2075 hgrc = """
2075 hgrc = """
2076 [acl]
2076 [acl]
2077 sources = push
2077 sources = push
2078 [extensions]
2078 [extensions]
2079 [acl.deny.branches]
2079 [acl.deny.branches]
2080 default = !astro
2080 default = !astro
2081 """
2081 """
2082 pushing to ../b
2082 pushing to ../b
2083 query 1; heads
2083 query 1; heads
2084 searching for changes
2084 searching for changes
2085 all remote heads known locally
2085 all remote heads known locally
2086 invalid branchheads cache (unserved): tip differs
2086 invalid branchheads cache (served): tip differs
2087 listing keys for "bookmarks"
2087 listing keys for "bookmarks"
2088 4 changesets found
2088 4 changesets found
2089 list of changesets:
2089 list of changesets:
2090 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
2090 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
2091 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
2091 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
2092 911600dab2ae7a9baff75958b84fe606851ce955
2092 911600dab2ae7a9baff75958b84fe606851ce955
2093 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
2093 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
2094 adding changesets
2094 adding changesets
2095 bundling: 1/4 changesets (25.00%)
2095 bundling: 1/4 changesets (25.00%)
2096 bundling: 2/4 changesets (50.00%)
2096 bundling: 2/4 changesets (50.00%)
2097 bundling: 3/4 changesets (75.00%)
2097 bundling: 3/4 changesets (75.00%)
2098 bundling: 4/4 changesets (100.00%)
2098 bundling: 4/4 changesets (100.00%)
2099 bundling: 1/4 manifests (25.00%)
2099 bundling: 1/4 manifests (25.00%)
2100 bundling: 2/4 manifests (50.00%)
2100 bundling: 2/4 manifests (50.00%)
2101 bundling: 3/4 manifests (75.00%)
2101 bundling: 3/4 manifests (75.00%)
2102 bundling: 4/4 manifests (100.00%)
2102 bundling: 4/4 manifests (100.00%)
2103 bundling: abc.txt 1/4 files (25.00%)
2103 bundling: abc.txt 1/4 files (25.00%)
2104 bundling: foo/Bar/file.txt 2/4 files (50.00%)
2104 bundling: foo/Bar/file.txt 2/4 files (50.00%)
2105 bundling: foo/file.txt 3/4 files (75.00%)
2105 bundling: foo/file.txt 3/4 files (75.00%)
2106 bundling: quux/file.py 4/4 files (100.00%)
2106 bundling: quux/file.py 4/4 files (100.00%)
2107 changesets: 1 chunks
2107 changesets: 1 chunks
2108 add changeset ef1ea85a6374
2108 add changeset ef1ea85a6374
2109 changesets: 2 chunks
2109 changesets: 2 chunks
2110 add changeset f9cafe1212c8
2110 add changeset f9cafe1212c8
2111 changesets: 3 chunks
2111 changesets: 3 chunks
2112 add changeset 911600dab2ae
2112 add changeset 911600dab2ae
2113 changesets: 4 chunks
2113 changesets: 4 chunks
2114 add changeset e8fc755d4d82
2114 add changeset e8fc755d4d82
2115 adding manifests
2115 adding manifests
2116 manifests: 1/4 chunks (25.00%)
2116 manifests: 1/4 chunks (25.00%)
2117 manifests: 2/4 chunks (50.00%)
2117 manifests: 2/4 chunks (50.00%)
2118 manifests: 3/4 chunks (75.00%)
2118 manifests: 3/4 chunks (75.00%)
2119 manifests: 4/4 chunks (100.00%)
2119 manifests: 4/4 chunks (100.00%)
2120 adding file changes
2120 adding file changes
2121 adding abc.txt revisions
2121 adding abc.txt revisions
2122 files: 1/4 chunks (25.00%)
2122 files: 1/4 chunks (25.00%)
2123 adding foo/Bar/file.txt revisions
2123 adding foo/Bar/file.txt revisions
2124 files: 2/4 chunks (50.00%)
2124 files: 2/4 chunks (50.00%)
2125 adding foo/file.txt revisions
2125 adding foo/file.txt revisions
2126 files: 3/4 chunks (75.00%)
2126 files: 3/4 chunks (75.00%)
2127 adding quux/file.py revisions
2127 adding quux/file.py revisions
2128 files: 4/4 chunks (100.00%)
2128 files: 4/4 chunks (100.00%)
2129 added 4 changesets with 4 changes to 4 files (+1 heads)
2129 added 4 changesets with 4 changes to 4 files (+1 heads)
2130 calling hook pretxnchangegroup.acl: hgext.acl.hook
2130 calling hook pretxnchangegroup.acl: hgext.acl.hook
2131 acl: checking access for user "george"
2131 acl: checking access for user "george"
2132 acl: acl.allow.branches not enabled
2132 acl: acl.allow.branches not enabled
2133 acl: acl.deny.branches enabled, 1 entries for user george
2133 acl: acl.deny.branches enabled, 1 entries for user george
2134 acl: acl.allow not enabled
2134 acl: acl.allow not enabled
2135 acl: acl.deny not enabled
2135 acl: acl.deny not enabled
2136 error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
2136 error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
2137 transaction abort!
2137 transaction abort!
2138 rollback completed
2138 rollback completed
2139 abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
2139 abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374")
2140 no rollback information available
2140 no rollback information available
2141 2:fb35475503ef
2141 2:fb35475503ef
2142
2142
2143
2143
@@ -1,180 +1,180
1 Init repo1:
1 Init repo1:
2
2
3 $ hg init repo1
3 $ hg init repo1
4 $ cd repo1
4 $ cd repo1
5 $ echo "some text" > a
5 $ echo "some text" > a
6 $ hg add
6 $ hg add
7 adding a
7 adding a
8 $ hg ci -m first
8 $ hg ci -m first
9 $ cat .hg/store/fncache | sort
9 $ cat .hg/store/fncache | sort
10 data/a.i
10 data/a.i
11
11
12 Testing a.i/b:
12 Testing a.i/b:
13
13
14 $ mkdir a.i
14 $ mkdir a.i
15 $ echo "some other text" > a.i/b
15 $ echo "some other text" > a.i/b
16 $ hg add
16 $ hg add
17 adding a.i/b (glob)
17 adding a.i/b (glob)
18 $ hg ci -m second
18 $ hg ci -m second
19 $ cat .hg/store/fncache | sort
19 $ cat .hg/store/fncache | sort
20 data/a.i
20 data/a.i
21 data/a.i.hg/b.i
21 data/a.i.hg/b.i
22
22
23 Testing a.i.hg/c:
23 Testing a.i.hg/c:
24
24
25 $ mkdir a.i.hg
25 $ mkdir a.i.hg
26 $ echo "yet another text" > a.i.hg/c
26 $ echo "yet another text" > a.i.hg/c
27 $ hg add
27 $ hg add
28 adding a.i.hg/c (glob)
28 adding a.i.hg/c (glob)
29 $ hg ci -m third
29 $ hg ci -m third
30 $ cat .hg/store/fncache | sort
30 $ cat .hg/store/fncache | sort
31 data/a.i
31 data/a.i
32 data/a.i.hg.hg/c.i
32 data/a.i.hg.hg/c.i
33 data/a.i.hg/b.i
33 data/a.i.hg/b.i
34
34
35 Testing verify:
35 Testing verify:
36
36
37 $ hg verify
37 $ hg verify
38 checking changesets
38 checking changesets
39 checking manifests
39 checking manifests
40 crosschecking files in changesets and manifests
40 crosschecking files in changesets and manifests
41 checking files
41 checking files
42 3 files, 3 changesets, 3 total revisions
42 3 files, 3 changesets, 3 total revisions
43
43
44 $ rm .hg/store/fncache
44 $ rm .hg/store/fncache
45
45
46 $ hg verify
46 $ hg verify
47 checking changesets
47 checking changesets
48 checking manifests
48 checking manifests
49 crosschecking files in changesets and manifests
49 crosschecking files in changesets and manifests
50 checking files
50 checking files
51 data/a.i@0: missing revlog!
51 data/a.i@0: missing revlog!
52 data/a.i.hg/c.i@2: missing revlog!
52 data/a.i.hg/c.i@2: missing revlog!
53 data/a.i/b.i@1: missing revlog!
53 data/a.i/b.i@1: missing revlog!
54 3 files, 3 changesets, 3 total revisions
54 3 files, 3 changesets, 3 total revisions
55 3 integrity errors encountered!
55 3 integrity errors encountered!
56 (first damaged changeset appears to be 0)
56 (first damaged changeset appears to be 0)
57 [1]
57 [1]
58 $ cd ..
58 $ cd ..
59
59
60 Non store repo:
60 Non store repo:
61
61
62 $ hg --config format.usestore=False init foo
62 $ hg --config format.usestore=False init foo
63 $ cd foo
63 $ cd foo
64 $ mkdir tst.d
64 $ mkdir tst.d
65 $ echo foo > tst.d/foo
65 $ echo foo > tst.d/foo
66 $ hg ci -Amfoo
66 $ hg ci -Amfoo
67 adding tst.d/foo
67 adding tst.d/foo
68 $ find .hg | sort
68 $ find .hg | sort
69 .hg
69 .hg
70 .hg/00changelog.i
70 .hg/00changelog.i
71 .hg/00manifest.i
71 .hg/00manifest.i
72 .hg/cache
72 .hg/cache
73 .hg/cache/branchheads-unserved
73 .hg/cache/branchheads-served
74 .hg/data
74 .hg/data
75 .hg/data/tst.d.hg
75 .hg/data/tst.d.hg
76 .hg/data/tst.d.hg/foo.i
76 .hg/data/tst.d.hg/foo.i
77 .hg/dirstate
77 .hg/dirstate
78 .hg/last-message.txt
78 .hg/last-message.txt
79 .hg/phaseroots
79 .hg/phaseroots
80 .hg/requires
80 .hg/requires
81 .hg/undo
81 .hg/undo
82 .hg/undo.bookmarks
82 .hg/undo.bookmarks
83 .hg/undo.branch
83 .hg/undo.branch
84 .hg/undo.desc
84 .hg/undo.desc
85 .hg/undo.dirstate
85 .hg/undo.dirstate
86 .hg/undo.phaseroots
86 .hg/undo.phaseroots
87 $ cd ..
87 $ cd ..
88
88
89 Non fncache repo:
89 Non fncache repo:
90
90
91 $ hg --config format.usefncache=False init bar
91 $ hg --config format.usefncache=False init bar
92 $ cd bar
92 $ cd bar
93 $ mkdir tst.d
93 $ mkdir tst.d
94 $ echo foo > tst.d/Foo
94 $ echo foo > tst.d/Foo
95 $ hg ci -Amfoo
95 $ hg ci -Amfoo
96 adding tst.d/Foo
96 adding tst.d/Foo
97 $ find .hg | sort
97 $ find .hg | sort
98 .hg
98 .hg
99 .hg/00changelog.i
99 .hg/00changelog.i
100 .hg/cache
100 .hg/cache
101 .hg/cache/branchheads-unserved
101 .hg/cache/branchheads-served
102 .hg/dirstate
102 .hg/dirstate
103 .hg/last-message.txt
103 .hg/last-message.txt
104 .hg/requires
104 .hg/requires
105 .hg/store
105 .hg/store
106 .hg/store/00changelog.i
106 .hg/store/00changelog.i
107 .hg/store/00manifest.i
107 .hg/store/00manifest.i
108 .hg/store/data
108 .hg/store/data
109 .hg/store/data/tst.d.hg
109 .hg/store/data/tst.d.hg
110 .hg/store/data/tst.d.hg/_foo.i
110 .hg/store/data/tst.d.hg/_foo.i
111 .hg/store/phaseroots
111 .hg/store/phaseroots
112 .hg/store/undo
112 .hg/store/undo
113 .hg/store/undo.phaseroots
113 .hg/store/undo.phaseroots
114 .hg/undo.bookmarks
114 .hg/undo.bookmarks
115 .hg/undo.branch
115 .hg/undo.branch
116 .hg/undo.desc
116 .hg/undo.desc
117 .hg/undo.dirstate
117 .hg/undo.dirstate
118 $ cd ..
118 $ cd ..
119
119
120 Encoding of reserved / long paths in the store
120 Encoding of reserved / long paths in the store
121
121
122 $ hg init r2
122 $ hg init r2
123 $ cd r2
123 $ cd r2
124 $ cat <<EOF > .hg/hgrc
124 $ cat <<EOF > .hg/hgrc
125 > [ui]
125 > [ui]
126 > portablefilenames = ignore
126 > portablefilenames = ignore
127 > EOF
127 > EOF
128
128
129 $ hg import -q --bypass - <<EOF
129 $ hg import -q --bypass - <<EOF
130 > # HG changeset patch
130 > # HG changeset patch
131 > # User test
131 > # User test
132 > # Date 0 0
132 > # Date 0 0
133 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
133 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
134 > # Parent 0000000000000000000000000000000000000000
134 > # Parent 0000000000000000000000000000000000000000
135 > 1
135 > 1
136 >
136 >
137 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
137 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
138 > new file mode 100644
138 > new file mode 100644
139 > --- /dev/null
139 > --- /dev/null
140 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
140 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
141 > @@ -0,0 +1,1 @@
141 > @@ -0,0 +1,1 @@
142 > +foo
142 > +foo
143 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
143 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
144 > new file mode 100644
144 > new file mode 100644
145 > --- /dev/null
145 > --- /dev/null
146 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
146 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
147 > @@ -0,0 +1,1 @@
147 > @@ -0,0 +1,1 @@
148 > +foo
148 > +foo
149 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
149 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
150 > new file mode 100644
150 > new file mode 100644
151 > --- /dev/null
151 > --- /dev/null
152 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
152 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
153 > @@ -0,0 +1,1 @@
153 > @@ -0,0 +1,1 @@
154 > +foo
154 > +foo
155 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
155 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
156 > new file mode 100644
156 > new file mode 100644
157 > --- /dev/null
157 > --- /dev/null
158 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
158 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
159 > @@ -0,0 +1,1 @@
159 > @@ -0,0 +1,1 @@
160 > +foo
160 > +foo
161 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
161 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
162 > new file mode 100644
162 > new file mode 100644
163 > --- /dev/null
163 > --- /dev/null
164 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
164 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
165 > @@ -0,0 +1,1 @@
165 > @@ -0,0 +1,1 @@
166 > +foo
166 > +foo
167 > EOF
167 > EOF
168
168
169 $ find .hg/store -name *.i | sort
169 $ find .hg/store -name *.i | sort
170 .hg/store/00changelog.i
170 .hg/store/00changelog.i
171 .hg/store/00manifest.i
171 .hg/store/00manifest.i
172 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
172 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
173 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
173 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
174 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
174 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
175 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
175 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
176 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
176 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
177
177
178 $ cd ..
178 $ cd ..
179
179
180
180
@@ -1,350 +1,350
1 $ "$TESTDIR/hghave" hardlink || exit 80
1 $ "$TESTDIR/hghave" hardlink || exit 80
2
2
3 $ cat > nlinks.py <<EOF
3 $ cat > nlinks.py <<EOF
4 > import sys
4 > import sys
5 > from mercurial import util
5 > from mercurial import util
6 > for f in sorted(sys.stdin.readlines()):
6 > for f in sorted(sys.stdin.readlines()):
7 > f = f[:-1]
7 > f = f[:-1]
8 > print util.nlinks(f), f
8 > print util.nlinks(f), f
9 > EOF
9 > EOF
10
10
11 $ nlinksdir()
11 $ nlinksdir()
12 > {
12 > {
13 > find $1 -type f | python $TESTTMP/nlinks.py
13 > find $1 -type f | python $TESTTMP/nlinks.py
14 > }
14 > }
15
15
16 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
16 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
17
17
18 $ cat > linkcp.py <<EOF
18 $ cat > linkcp.py <<EOF
19 > from mercurial import util
19 > from mercurial import util
20 > import sys
20 > import sys
21 > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
21 > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
22 > EOF
22 > EOF
23
23
24 $ linkcp()
24 $ linkcp()
25 > {
25 > {
26 > python $TESTTMP/linkcp.py $1 $2
26 > python $TESTTMP/linkcp.py $1 $2
27 > }
27 > }
28
28
29 Prepare repo r1:
29 Prepare repo r1:
30
30
31 $ hg init r1
31 $ hg init r1
32 $ cd r1
32 $ cd r1
33
33
34 $ echo c1 > f1
34 $ echo c1 > f1
35 $ hg add f1
35 $ hg add f1
36 $ hg ci -m0
36 $ hg ci -m0
37
37
38 $ mkdir d1
38 $ mkdir d1
39 $ cd d1
39 $ cd d1
40 $ echo c2 > f2
40 $ echo c2 > f2
41 $ hg add f2
41 $ hg add f2
42 $ hg ci -m1
42 $ hg ci -m1
43 $ cd ../..
43 $ cd ../..
44
44
45 $ nlinksdir r1/.hg/store
45 $ nlinksdir r1/.hg/store
46 1 r1/.hg/store/00changelog.i
46 1 r1/.hg/store/00changelog.i
47 1 r1/.hg/store/00manifest.i
47 1 r1/.hg/store/00manifest.i
48 1 r1/.hg/store/data/d1/f2.i
48 1 r1/.hg/store/data/d1/f2.i
49 1 r1/.hg/store/data/f1.i
49 1 r1/.hg/store/data/f1.i
50 1 r1/.hg/store/fncache
50 1 r1/.hg/store/fncache
51 1 r1/.hg/store/phaseroots
51 1 r1/.hg/store/phaseroots
52 1 r1/.hg/store/undo
52 1 r1/.hg/store/undo
53 1 r1/.hg/store/undo.phaseroots
53 1 r1/.hg/store/undo.phaseroots
54
54
55
55
56 Create hardlinked clone r2:
56 Create hardlinked clone r2:
57
57
58 $ hg clone -U --debug r1 r2
58 $ hg clone -U --debug r1 r2
59 linked 7 files
59 linked 7 files
60 listing keys for "bookmarks"
60 listing keys for "bookmarks"
61
61
62 Create non-hardlinked clone r3:
62 Create non-hardlinked clone r3:
63
63
64 $ hg clone --pull r1 r3
64 $ hg clone --pull r1 r3
65 requesting all changes
65 requesting all changes
66 adding changesets
66 adding changesets
67 adding manifests
67 adding manifests
68 adding file changes
68 adding file changes
69 added 2 changesets with 2 changes to 2 files
69 added 2 changesets with 2 changes to 2 files
70 updating to branch default
70 updating to branch default
71 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
71 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
72
72
73
73
74 Repos r1 and r2 should now contain hardlinked files:
74 Repos r1 and r2 should now contain hardlinked files:
75
75
76 $ nlinksdir r1/.hg/store
76 $ nlinksdir r1/.hg/store
77 2 r1/.hg/store/00changelog.i
77 2 r1/.hg/store/00changelog.i
78 2 r1/.hg/store/00manifest.i
78 2 r1/.hg/store/00manifest.i
79 2 r1/.hg/store/data/d1/f2.i
79 2 r1/.hg/store/data/d1/f2.i
80 2 r1/.hg/store/data/f1.i
80 2 r1/.hg/store/data/f1.i
81 2 r1/.hg/store/fncache
81 2 r1/.hg/store/fncache
82 1 r1/.hg/store/phaseroots
82 1 r1/.hg/store/phaseroots
83 1 r1/.hg/store/undo
83 1 r1/.hg/store/undo
84 1 r1/.hg/store/undo.phaseroots
84 1 r1/.hg/store/undo.phaseroots
85
85
86 $ nlinksdir r2/.hg/store
86 $ nlinksdir r2/.hg/store
87 2 r2/.hg/store/00changelog.i
87 2 r2/.hg/store/00changelog.i
88 2 r2/.hg/store/00manifest.i
88 2 r2/.hg/store/00manifest.i
89 2 r2/.hg/store/data/d1/f2.i
89 2 r2/.hg/store/data/d1/f2.i
90 2 r2/.hg/store/data/f1.i
90 2 r2/.hg/store/data/f1.i
91 2 r2/.hg/store/fncache
91 2 r2/.hg/store/fncache
92
92
93 Repo r3 should not be hardlinked:
93 Repo r3 should not be hardlinked:
94
94
95 $ nlinksdir r3/.hg/store
95 $ nlinksdir r3/.hg/store
96 1 r3/.hg/store/00changelog.i
96 1 r3/.hg/store/00changelog.i
97 1 r3/.hg/store/00manifest.i
97 1 r3/.hg/store/00manifest.i
98 1 r3/.hg/store/data/d1/f2.i
98 1 r3/.hg/store/data/d1/f2.i
99 1 r3/.hg/store/data/f1.i
99 1 r3/.hg/store/data/f1.i
100 1 r3/.hg/store/fncache
100 1 r3/.hg/store/fncache
101 1 r3/.hg/store/phaseroots
101 1 r3/.hg/store/phaseroots
102 1 r3/.hg/store/undo
102 1 r3/.hg/store/undo
103 1 r3/.hg/store/undo.phaseroots
103 1 r3/.hg/store/undo.phaseroots
104
104
105
105
106 Create a non-inlined filelog in r3:
106 Create a non-inlined filelog in r3:
107
107
108 $ cd r3/d1
108 $ cd r3/d1
109 >>> f = open('data1', 'wb')
109 >>> f = open('data1', 'wb')
110 >>> for x in range(10000):
110 >>> for x in range(10000):
111 ... f.write("%s\n" % str(x))
111 ... f.write("%s\n" % str(x))
112 >>> f.close()
112 >>> f.close()
113 $ for j in 0 1 2 3 4 5 6 7 8 9; do
113 $ for j in 0 1 2 3 4 5 6 7 8 9; do
114 > cat data1 >> f2
114 > cat data1 >> f2
115 > hg commit -m$j
115 > hg commit -m$j
116 > done
116 > done
117 $ cd ../..
117 $ cd ../..
118
118
119 $ nlinksdir r3/.hg/store
119 $ nlinksdir r3/.hg/store
120 1 r3/.hg/store/00changelog.i
120 1 r3/.hg/store/00changelog.i
121 1 r3/.hg/store/00manifest.i
121 1 r3/.hg/store/00manifest.i
122 1 r3/.hg/store/data/d1/f2.d
122 1 r3/.hg/store/data/d1/f2.d
123 1 r3/.hg/store/data/d1/f2.i
123 1 r3/.hg/store/data/d1/f2.i
124 1 r3/.hg/store/data/f1.i
124 1 r3/.hg/store/data/f1.i
125 1 r3/.hg/store/fncache
125 1 r3/.hg/store/fncache
126 1 r3/.hg/store/phaseroots
126 1 r3/.hg/store/phaseroots
127 1 r3/.hg/store/undo
127 1 r3/.hg/store/undo
128 1 r3/.hg/store/undo.phaseroots
128 1 r3/.hg/store/undo.phaseroots
129
129
130 Push to repo r1 should break up most hardlinks in r2:
130 Push to repo r1 should break up most hardlinks in r2:
131
131
132 $ hg -R r2 verify
132 $ hg -R r2 verify
133 checking changesets
133 checking changesets
134 checking manifests
134 checking manifests
135 crosschecking files in changesets and manifests
135 crosschecking files in changesets and manifests
136 checking files
136 checking files
137 2 files, 2 changesets, 2 total revisions
137 2 files, 2 changesets, 2 total revisions
138
138
139 $ cd r3
139 $ cd r3
140 $ hg push
140 $ hg push
141 pushing to $TESTTMP/r1 (glob)
141 pushing to $TESTTMP/r1 (glob)
142 searching for changes
142 searching for changes
143 adding changesets
143 adding changesets
144 adding manifests
144 adding manifests
145 adding file changes
145 adding file changes
146 added 10 changesets with 10 changes to 1 files
146 added 10 changesets with 10 changes to 1 files
147
147
148 $ cd ..
148 $ cd ..
149
149
150 $ nlinksdir r2/.hg/store
150 $ nlinksdir r2/.hg/store
151 1 r2/.hg/store/00changelog.i
151 1 r2/.hg/store/00changelog.i
152 1 r2/.hg/store/00manifest.i
152 1 r2/.hg/store/00manifest.i
153 1 r2/.hg/store/data/d1/f2.i
153 1 r2/.hg/store/data/d1/f2.i
154 2 r2/.hg/store/data/f1.i
154 2 r2/.hg/store/data/f1.i
155 1 r2/.hg/store/fncache
155 1 r2/.hg/store/fncache
156
156
157 $ hg -R r2 verify
157 $ hg -R r2 verify
158 checking changesets
158 checking changesets
159 checking manifests
159 checking manifests
160 crosschecking files in changesets and manifests
160 crosschecking files in changesets and manifests
161 checking files
161 checking files
162 2 files, 2 changesets, 2 total revisions
162 2 files, 2 changesets, 2 total revisions
163
163
164
164
165 $ cd r1
165 $ cd r1
166 $ hg up
166 $ hg up
167 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
167 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
168
168
169 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
169 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
170
170
171 $ echo c1c1 >> f1
171 $ echo c1c1 >> f1
172 $ hg ci -m00
172 $ hg ci -m00
173 $ cd ..
173 $ cd ..
174
174
175 $ nlinksdir r2/.hg/store
175 $ nlinksdir r2/.hg/store
176 1 r2/.hg/store/00changelog.i
176 1 r2/.hg/store/00changelog.i
177 1 r2/.hg/store/00manifest.i
177 1 r2/.hg/store/00manifest.i
178 1 r2/.hg/store/data/d1/f2.i
178 1 r2/.hg/store/data/d1/f2.i
179 1 r2/.hg/store/data/f1.i
179 1 r2/.hg/store/data/f1.i
180 1 r2/.hg/store/fncache
180 1 r2/.hg/store/fncache
181
181
182
182
183 $ cd r3
183 $ cd r3
184 $ hg tip --template '{rev}:{node|short}\n'
184 $ hg tip --template '{rev}:{node|short}\n'
185 11:a6451b6bc41f
185 11:a6451b6bc41f
186 $ echo bla > f1
186 $ echo bla > f1
187 $ hg ci -m1
187 $ hg ci -m1
188 $ cd ..
188 $ cd ..
189
189
190 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
190 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
191
191
192 $ linkcp r3 r4
192 $ linkcp r3 r4
193
193
194 r4 has hardlinks in the working dir (not just inside .hg):
194 r4 has hardlinks in the working dir (not just inside .hg):
195
195
196 $ nlinksdir r4
196 $ nlinksdir r4
197 2 r4/.hg/00changelog.i
197 2 r4/.hg/00changelog.i
198 2 r4/.hg/branch
198 2 r4/.hg/branch
199 2 r4/.hg/cache/branchheads-unserved
199 2 r4/.hg/cache/branchheads-served
200 2 r4/.hg/dirstate
200 2 r4/.hg/dirstate
201 2 r4/.hg/hgrc
201 2 r4/.hg/hgrc
202 2 r4/.hg/last-message.txt
202 2 r4/.hg/last-message.txt
203 2 r4/.hg/requires
203 2 r4/.hg/requires
204 2 r4/.hg/store/00changelog.i
204 2 r4/.hg/store/00changelog.i
205 2 r4/.hg/store/00manifest.i
205 2 r4/.hg/store/00manifest.i
206 2 r4/.hg/store/data/d1/f2.d
206 2 r4/.hg/store/data/d1/f2.d
207 2 r4/.hg/store/data/d1/f2.i
207 2 r4/.hg/store/data/d1/f2.i
208 2 r4/.hg/store/data/f1.i
208 2 r4/.hg/store/data/f1.i
209 2 r4/.hg/store/fncache
209 2 r4/.hg/store/fncache
210 2 r4/.hg/store/phaseroots
210 2 r4/.hg/store/phaseroots
211 2 r4/.hg/store/undo
211 2 r4/.hg/store/undo
212 2 r4/.hg/store/undo.phaseroots
212 2 r4/.hg/store/undo.phaseroots
213 2 r4/.hg/undo.bookmarks
213 2 r4/.hg/undo.bookmarks
214 2 r4/.hg/undo.branch
214 2 r4/.hg/undo.branch
215 2 r4/.hg/undo.desc
215 2 r4/.hg/undo.desc
216 2 r4/.hg/undo.dirstate
216 2 r4/.hg/undo.dirstate
217 2 r4/d1/data1
217 2 r4/d1/data1
218 2 r4/d1/f2
218 2 r4/d1/f2
219 2 r4/f1
219 2 r4/f1
220
220
221 Update back to revision 11 in r4 should break hardlink of file f1:
221 Update back to revision 11 in r4 should break hardlink of file f1:
222
222
223 $ hg -R r4 up 11
223 $ hg -R r4 up 11
224 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
224 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
225
225
226 $ nlinksdir r4
226 $ nlinksdir r4
227 2 r4/.hg/00changelog.i
227 2 r4/.hg/00changelog.i
228 1 r4/.hg/branch
228 1 r4/.hg/branch
229 2 r4/.hg/cache/branchheads-unserved
229 2 r4/.hg/cache/branchheads-served
230 1 r4/.hg/dirstate
230 1 r4/.hg/dirstate
231 2 r4/.hg/hgrc
231 2 r4/.hg/hgrc
232 2 r4/.hg/last-message.txt
232 2 r4/.hg/last-message.txt
233 2 r4/.hg/requires
233 2 r4/.hg/requires
234 2 r4/.hg/store/00changelog.i
234 2 r4/.hg/store/00changelog.i
235 2 r4/.hg/store/00manifest.i
235 2 r4/.hg/store/00manifest.i
236 2 r4/.hg/store/data/d1/f2.d
236 2 r4/.hg/store/data/d1/f2.d
237 2 r4/.hg/store/data/d1/f2.i
237 2 r4/.hg/store/data/d1/f2.i
238 2 r4/.hg/store/data/f1.i
238 2 r4/.hg/store/data/f1.i
239 2 r4/.hg/store/fncache
239 2 r4/.hg/store/fncache
240 2 r4/.hg/store/phaseroots
240 2 r4/.hg/store/phaseroots
241 2 r4/.hg/store/undo
241 2 r4/.hg/store/undo
242 2 r4/.hg/store/undo.phaseroots
242 2 r4/.hg/store/undo.phaseroots
243 2 r4/.hg/undo.bookmarks
243 2 r4/.hg/undo.bookmarks
244 2 r4/.hg/undo.branch
244 2 r4/.hg/undo.branch
245 2 r4/.hg/undo.desc
245 2 r4/.hg/undo.desc
246 2 r4/.hg/undo.dirstate
246 2 r4/.hg/undo.dirstate
247 2 r4/d1/data1
247 2 r4/d1/data1
248 2 r4/d1/f2
248 2 r4/d1/f2
249 1 r4/f1
249 1 r4/f1
250
250
251
251
252 Test hardlinking outside hg:
252 Test hardlinking outside hg:
253
253
254 $ mkdir x
254 $ mkdir x
255 $ echo foo > x/a
255 $ echo foo > x/a
256
256
257 $ linkcp x y
257 $ linkcp x y
258 $ echo bar >> y/a
258 $ echo bar >> y/a
259
259
260 No diff if hardlink:
260 No diff if hardlink:
261
261
262 $ diff x/a y/a
262 $ diff x/a y/a
263
263
264 Test mq hardlinking:
264 Test mq hardlinking:
265
265
266 $ echo "[extensions]" >> $HGRCPATH
266 $ echo "[extensions]" >> $HGRCPATH
267 $ echo "mq=" >> $HGRCPATH
267 $ echo "mq=" >> $HGRCPATH
268
268
269 $ hg init a
269 $ hg init a
270 $ cd a
270 $ cd a
271
271
272 $ hg qimport -n foo - << EOF
272 $ hg qimport -n foo - << EOF
273 > # HG changeset patch
273 > # HG changeset patch
274 > # Date 1 0
274 > # Date 1 0
275 > diff -r 2588a8b53d66 a
275 > diff -r 2588a8b53d66 a
276 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
276 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
277 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
277 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
278 > @@ -0,0 +1,1 @@
278 > @@ -0,0 +1,1 @@
279 > +a
279 > +a
280 > EOF
280 > EOF
281 adding foo to series file
281 adding foo to series file
282
282
283 $ hg qpush
283 $ hg qpush
284 applying foo
284 applying foo
285 now at: foo
285 now at: foo
286
286
287 $ cd ..
287 $ cd ..
288 $ linkcp a b
288 $ linkcp a b
289 $ cd b
289 $ cd b
290
290
291 $ hg qimport -n bar - << EOF
291 $ hg qimport -n bar - << EOF
292 > # HG changeset patch
292 > # HG changeset patch
293 > # Date 2 0
293 > # Date 2 0
294 > diff -r 2588a8b53d66 a
294 > diff -r 2588a8b53d66 a
295 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
295 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
296 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
296 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
297 > @@ -0,0 +1,1 @@
297 > @@ -0,0 +1,1 @@
298 > +b
298 > +b
299 > EOF
299 > EOF
300 adding bar to series file
300 adding bar to series file
301
301
302 $ hg qpush
302 $ hg qpush
303 applying bar
303 applying bar
304 now at: bar
304 now at: bar
305
305
306 $ cat .hg/patches/status
306 $ cat .hg/patches/status
307 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
307 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
308 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
308 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
309
309
310 $ cat .hg/patches/series
310 $ cat .hg/patches/series
311 foo
311 foo
312 bar
312 bar
313
313
314 $ cat ../a/.hg/patches/status
314 $ cat ../a/.hg/patches/status
315 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
315 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
316
316
317 $ cat ../a/.hg/patches/series
317 $ cat ../a/.hg/patches/series
318 foo
318 foo
319
319
320 Test tags hardlinking:
320 Test tags hardlinking:
321
321
322 $ hg qdel -r qbase:qtip
322 $ hg qdel -r qbase:qtip
323 patch foo finalized without changeset message
323 patch foo finalized without changeset message
324 patch bar finalized without changeset message
324 patch bar finalized without changeset message
325
325
326 $ hg tag -l lfoo
326 $ hg tag -l lfoo
327 $ hg tag foo
327 $ hg tag foo
328
328
329 $ cd ..
329 $ cd ..
330 $ linkcp b c
330 $ linkcp b c
331 $ cd c
331 $ cd c
332
332
333 $ hg tag -l -r 0 lbar
333 $ hg tag -l -r 0 lbar
334 $ hg tag -r 0 bar
334 $ hg tag -r 0 bar
335
335
336 $ cat .hgtags
336 $ cat .hgtags
337 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
337 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
338 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
338 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
339
339
340 $ cat .hg/localtags
340 $ cat .hg/localtags
341 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
341 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
342 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
342 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
343
343
344 $ cat ../b/.hgtags
344 $ cat ../b/.hgtags
345 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
345 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
346
346
347 $ cat ../b/.hg/localtags
347 $ cat ../b/.hg/localtags
348 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
348 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
349
349
350 $ cd ..
350 $ cd ..
@@ -1,152 +1,152
1 test that new files created in .hg inherit the permissions from .hg/store
1 test that new files created in .hg inherit the permissions from .hg/store
2
2
3
3
4 $ "$TESTDIR/hghave" unix-permissions || exit 80
4 $ "$TESTDIR/hghave" unix-permissions || exit 80
5
5
6 $ mkdir dir
6 $ mkdir dir
7
7
8 just in case somebody has a strange $TMPDIR
8 just in case somebody has a strange $TMPDIR
9
9
10 $ chmod g-s dir
10 $ chmod g-s dir
11 $ cd dir
11 $ cd dir
12
12
13 $ cat >printmodes.py <<EOF
13 $ cat >printmodes.py <<EOF
14 > import os, sys
14 > import os, sys
15 >
15 >
16 > allnames = []
16 > allnames = []
17 > isdir = {}
17 > isdir = {}
18 > for root, dirs, files in os.walk(sys.argv[1]):
18 > for root, dirs, files in os.walk(sys.argv[1]):
19 > for d in dirs:
19 > for d in dirs:
20 > name = os.path.join(root, d)
20 > name = os.path.join(root, d)
21 > isdir[name] = 1
21 > isdir[name] = 1
22 > allnames.append(name)
22 > allnames.append(name)
23 > for f in files:
23 > for f in files:
24 > name = os.path.join(root, f)
24 > name = os.path.join(root, f)
25 > allnames.append(name)
25 > allnames.append(name)
26 > allnames.sort()
26 > allnames.sort()
27 > for name in allnames:
27 > for name in allnames:
28 > suffix = name in isdir and '/' or ''
28 > suffix = name in isdir and '/' or ''
29 > print '%05o %s%s' % (os.lstat(name).st_mode & 07777, name, suffix)
29 > print '%05o %s%s' % (os.lstat(name).st_mode & 07777, name, suffix)
30 > EOF
30 > EOF
31
31
32 $ cat >mode.py <<EOF
32 $ cat >mode.py <<EOF
33 > import sys
33 > import sys
34 > import os
34 > import os
35 > print '%05o' % os.lstat(sys.argv[1]).st_mode
35 > print '%05o' % os.lstat(sys.argv[1]).st_mode
36 > EOF
36 > EOF
37
37
38 $ umask 077
38 $ umask 077
39
39
40 $ hg init repo
40 $ hg init repo
41 $ cd repo
41 $ cd repo
42
42
43 $ chmod 0770 .hg/store
43 $ chmod 0770 .hg/store
44
44
45 before commit
45 before commit
46 store can be written by the group, other files cannot
46 store can be written by the group, other files cannot
47 store is setgid
47 store is setgid
48
48
49 $ python ../printmodes.py .
49 $ python ../printmodes.py .
50 00700 ./.hg/
50 00700 ./.hg/
51 00600 ./.hg/00changelog.i
51 00600 ./.hg/00changelog.i
52 00600 ./.hg/requires
52 00600 ./.hg/requires
53 00770 ./.hg/store/
53 00770 ./.hg/store/
54
54
55 $ mkdir dir
55 $ mkdir dir
56 $ touch foo dir/bar
56 $ touch foo dir/bar
57 $ hg ci -qAm 'add files'
57 $ hg ci -qAm 'add files'
58
58
59 after commit
59 after commit
60 working dir files can only be written by the owner
60 working dir files can only be written by the owner
61 files created in .hg can be written by the group
61 files created in .hg can be written by the group
62 (in particular, store/**, dirstate, branch cache file, undo files)
62 (in particular, store/**, dirstate, branch cache file, undo files)
63 new directories are setgid
63 new directories are setgid
64
64
65 $ python ../printmodes.py .
65 $ python ../printmodes.py .
66 00700 ./.hg/
66 00700 ./.hg/
67 00600 ./.hg/00changelog.i
67 00600 ./.hg/00changelog.i
68 00770 ./.hg/cache/
68 00770 ./.hg/cache/
69 00660 ./.hg/cache/branchheads-unserved
69 00660 ./.hg/cache/branchheads-served
70 00660 ./.hg/dirstate
70 00660 ./.hg/dirstate
71 00660 ./.hg/last-message.txt
71 00660 ./.hg/last-message.txt
72 00600 ./.hg/requires
72 00600 ./.hg/requires
73 00770 ./.hg/store/
73 00770 ./.hg/store/
74 00660 ./.hg/store/00changelog.i
74 00660 ./.hg/store/00changelog.i
75 00660 ./.hg/store/00manifest.i
75 00660 ./.hg/store/00manifest.i
76 00770 ./.hg/store/data/
76 00770 ./.hg/store/data/
77 00770 ./.hg/store/data/dir/
77 00770 ./.hg/store/data/dir/
78 00660 ./.hg/store/data/dir/bar.i
78 00660 ./.hg/store/data/dir/bar.i
79 00660 ./.hg/store/data/foo.i
79 00660 ./.hg/store/data/foo.i
80 00660 ./.hg/store/fncache
80 00660 ./.hg/store/fncache
81 00660 ./.hg/store/phaseroots
81 00660 ./.hg/store/phaseroots
82 00660 ./.hg/store/undo
82 00660 ./.hg/store/undo
83 00660 ./.hg/store/undo.phaseroots
83 00660 ./.hg/store/undo.phaseroots
84 00660 ./.hg/undo.bookmarks
84 00660 ./.hg/undo.bookmarks
85 00660 ./.hg/undo.branch
85 00660 ./.hg/undo.branch
86 00660 ./.hg/undo.desc
86 00660 ./.hg/undo.desc
87 00660 ./.hg/undo.dirstate
87 00660 ./.hg/undo.dirstate
88 00700 ./dir/
88 00700 ./dir/
89 00600 ./dir/bar
89 00600 ./dir/bar
90 00600 ./foo
90 00600 ./foo
91
91
92 $ umask 007
92 $ umask 007
93 $ hg init ../push
93 $ hg init ../push
94
94
95 before push
95 before push
96 group can write everything
96 group can write everything
97
97
98 $ python ../printmodes.py ../push
98 $ python ../printmodes.py ../push
99 00770 ../push/.hg/
99 00770 ../push/.hg/
100 00660 ../push/.hg/00changelog.i
100 00660 ../push/.hg/00changelog.i
101 00660 ../push/.hg/requires
101 00660 ../push/.hg/requires
102 00770 ../push/.hg/store/
102 00770 ../push/.hg/store/
103
103
104 $ umask 077
104 $ umask 077
105 $ hg -q push ../push
105 $ hg -q push ../push
106
106
107 after push
107 after push
108 group can still write everything
108 group can still write everything
109
109
110 $ python ../printmodes.py ../push
110 $ python ../printmodes.py ../push
111 00770 ../push/.hg/
111 00770 ../push/.hg/
112 00660 ../push/.hg/00changelog.i
112 00660 ../push/.hg/00changelog.i
113 00770 ../push/.hg/cache/
113 00770 ../push/.hg/cache/
114 00660 ../push/.hg/cache/branchheads-impactable
114 00660 ../push/.hg/cache/branchheads-base
115 00660 ../push/.hg/requires
115 00660 ../push/.hg/requires
116 00770 ../push/.hg/store/
116 00770 ../push/.hg/store/
117 00660 ../push/.hg/store/00changelog.i
117 00660 ../push/.hg/store/00changelog.i
118 00660 ../push/.hg/store/00manifest.i
118 00660 ../push/.hg/store/00manifest.i
119 00770 ../push/.hg/store/data/
119 00770 ../push/.hg/store/data/
120 00770 ../push/.hg/store/data/dir/
120 00770 ../push/.hg/store/data/dir/
121 00660 ../push/.hg/store/data/dir/bar.i
121 00660 ../push/.hg/store/data/dir/bar.i
122 00660 ../push/.hg/store/data/foo.i
122 00660 ../push/.hg/store/data/foo.i
123 00660 ../push/.hg/store/fncache
123 00660 ../push/.hg/store/fncache
124 00660 ../push/.hg/store/phaseroots
124 00660 ../push/.hg/store/phaseroots
125 00660 ../push/.hg/store/undo
125 00660 ../push/.hg/store/undo
126 00660 ../push/.hg/store/undo.phaseroots
126 00660 ../push/.hg/store/undo.phaseroots
127 00660 ../push/.hg/undo.bookmarks
127 00660 ../push/.hg/undo.bookmarks
128 00660 ../push/.hg/undo.branch
128 00660 ../push/.hg/undo.branch
129 00660 ../push/.hg/undo.desc
129 00660 ../push/.hg/undo.desc
130 00660 ../push/.hg/undo.dirstate
130 00660 ../push/.hg/undo.dirstate
131
131
132
132
133 Test that we don't lose the setgid bit when we call chmod.
133 Test that we don't lose the setgid bit when we call chmod.
134 Not all systems support setgid directories (e.g. HFS+), so
134 Not all systems support setgid directories (e.g. HFS+), so
135 just check that directories have the same mode.
135 just check that directories have the same mode.
136
136
137 $ cd ..
137 $ cd ..
138 $ hg init setgid
138 $ hg init setgid
139 $ cd setgid
139 $ cd setgid
140 $ chmod g+rwx .hg/store
140 $ chmod g+rwx .hg/store
141 $ chmod g+s .hg/store 2> /dev/null || true
141 $ chmod g+s .hg/store 2> /dev/null || true
142 $ mkdir dir
142 $ mkdir dir
143 $ touch dir/file
143 $ touch dir/file
144 $ hg ci -qAm 'add dir/file'
144 $ hg ci -qAm 'add dir/file'
145 $ storemode=`python ../mode.py .hg/store`
145 $ storemode=`python ../mode.py .hg/store`
146 $ dirmode=`python ../mode.py .hg/store/data/dir`
146 $ dirmode=`python ../mode.py .hg/store/data/dir`
147 $ if [ "$storemode" != "$dirmode" ]; then
147 $ if [ "$storemode" != "$dirmode" ]; then
148 > echo "$storemode != $dirmode"
148 > echo "$storemode != $dirmode"
149 > fi
149 > fi
150 $ cd ..
150 $ cd ..
151
151
152 $ cd .. # g-s dir
152 $ cd .. # g-s dir
@@ -1,1155 +1,1155
1 $ cat <<EOF >> $HGRCPATH
1 $ cat <<EOF >> $HGRCPATH
2 > [extensions]
2 > [extensions]
3 > keyword =
3 > keyword =
4 > mq =
4 > mq =
5 > notify =
5 > notify =
6 > record =
6 > record =
7 > transplant =
7 > transplant =
8 > [ui]
8 > [ui]
9 > interactive = true
9 > interactive = true
10 > EOF
10 > EOF
11
11
12 hide outer repo
12 hide outer repo
13 $ hg init
13 $ hg init
14
14
15 Run kwdemo before [keyword] files are set up
15 Run kwdemo before [keyword] files are set up
16 as it would succeed without uisetup otherwise
16 as it would succeed without uisetup otherwise
17
17
18 $ hg --quiet kwdemo
18 $ hg --quiet kwdemo
19 [extensions]
19 [extensions]
20 keyword =
20 keyword =
21 [keyword]
21 [keyword]
22 demo.txt =
22 demo.txt =
23 [keywordset]
23 [keywordset]
24 svn = False
24 svn = False
25 [keywordmaps]
25 [keywordmaps]
26 Author = {author|user}
26 Author = {author|user}
27 Date = {date|utcdate}
27 Date = {date|utcdate}
28 Header = {root}/{file},v {node|short} {date|utcdate} {author|user}
28 Header = {root}/{file},v {node|short} {date|utcdate} {author|user}
29 Id = {file|basename},v {node|short} {date|utcdate} {author|user}
29 Id = {file|basename},v {node|short} {date|utcdate} {author|user}
30 RCSFile = {file|basename},v
30 RCSFile = {file|basename},v
31 RCSfile = {file|basename},v
31 RCSfile = {file|basename},v
32 Revision = {node|short}
32 Revision = {node|short}
33 Source = {root}/{file},v
33 Source = {root}/{file},v
34 $Author: test $
34 $Author: test $
35 $Date: ????/??/?? ??:??:?? $ (glob)
35 $Date: ????/??/?? ??:??:?? $ (glob)
36 $Header: */demo.txt,v ???????????? ????/??/?? ??:??:?? test $ (glob)
36 $Header: */demo.txt,v ???????????? ????/??/?? ??:??:?? test $ (glob)
37 $Id: demo.txt,v ???????????? ????/??/?? ??:??:?? test $ (glob)
37 $Id: demo.txt,v ???????????? ????/??/?? ??:??:?? test $ (glob)
38 $RCSFile: demo.txt,v $
38 $RCSFile: demo.txt,v $
39 $RCSfile: demo.txt,v $
39 $RCSfile: demo.txt,v $
40 $Revision: ???????????? $ (glob)
40 $Revision: ???????????? $ (glob)
41 $Source: */demo.txt,v $ (glob)
41 $Source: */demo.txt,v $ (glob)
42
42
43 $ hg --quiet kwdemo "Branch = {branches}"
43 $ hg --quiet kwdemo "Branch = {branches}"
44 [extensions]
44 [extensions]
45 keyword =
45 keyword =
46 [keyword]
46 [keyword]
47 demo.txt =
47 demo.txt =
48 [keywordset]
48 [keywordset]
49 svn = False
49 svn = False
50 [keywordmaps]
50 [keywordmaps]
51 Branch = {branches}
51 Branch = {branches}
52 $Branch: demobranch $
52 $Branch: demobranch $
53
53
54 $ cat <<EOF >> $HGRCPATH
54 $ cat <<EOF >> $HGRCPATH
55 > [keyword]
55 > [keyword]
56 > ** =
56 > ** =
57 > b = ignore
57 > b = ignore
58 > i = ignore
58 > i = ignore
59 > [hooks]
59 > [hooks]
60 > EOF
60 > EOF
61 $ cp $HGRCPATH $HGRCPATH.nohooks
61 $ cp $HGRCPATH $HGRCPATH.nohooks
62 > cat <<EOF >> $HGRCPATH
62 > cat <<EOF >> $HGRCPATH
63 > commit=
63 > commit=
64 > commit.test=cp a hooktest
64 > commit.test=cp a hooktest
65 > EOF
65 > EOF
66
66
67 $ hg init Test-bndl
67 $ hg init Test-bndl
68 $ cd Test-bndl
68 $ cd Test-bndl
69
69
70 kwshrink should exit silently in empty/invalid repo
70 kwshrink should exit silently in empty/invalid repo
71
71
72 $ hg kwshrink
72 $ hg kwshrink
73
73
74 Symlinks cannot be created on Windows.
74 Symlinks cannot be created on Windows.
75 A bundle to test this was made with:
75 A bundle to test this was made with:
76 hg init t
76 hg init t
77 cd t
77 cd t
78 echo a > a
78 echo a > a
79 ln -s a sym
79 ln -s a sym
80 hg add sym
80 hg add sym
81 hg ci -m addsym -u mercurial
81 hg ci -m addsym -u mercurial
82 hg bundle --base null ../test-keyword.hg
82 hg bundle --base null ../test-keyword.hg
83
83
84 $ hg pull -u "$TESTDIR"/bundles/test-keyword.hg
84 $ hg pull -u "$TESTDIR"/bundles/test-keyword.hg
85 pulling from *test-keyword.hg (glob)
85 pulling from *test-keyword.hg (glob)
86 requesting all changes
86 requesting all changes
87 adding changesets
87 adding changesets
88 adding manifests
88 adding manifests
89 adding file changes
89 adding file changes
90 added 1 changesets with 1 changes to 1 files
90 added 1 changesets with 1 changes to 1 files
91 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
92
92
93 $ echo 'expand $Id$' > a
93 $ echo 'expand $Id$' > a
94 $ echo 'do not process $Id:' >> a
94 $ echo 'do not process $Id:' >> a
95 $ echo 'xxx $' >> a
95 $ echo 'xxx $' >> a
96 $ echo 'ignore $Id$' > b
96 $ echo 'ignore $Id$' > b
97
97
98 Output files as they were created
98 Output files as they were created
99
99
100 $ cat a b
100 $ cat a b
101 expand $Id$
101 expand $Id$
102 do not process $Id:
102 do not process $Id:
103 xxx $
103 xxx $
104 ignore $Id$
104 ignore $Id$
105
105
106 no kwfiles
106 no kwfiles
107
107
108 $ hg kwfiles
108 $ hg kwfiles
109
109
110 untracked candidates
110 untracked candidates
111
111
112 $ hg -v kwfiles --unknown
112 $ hg -v kwfiles --unknown
113 k a
113 k a
114
114
115 Add files and check status
115 Add files and check status
116
116
117 $ hg addremove
117 $ hg addremove
118 adding a
118 adding a
119 adding b
119 adding b
120 $ hg status
120 $ hg status
121 A a
121 A a
122 A b
122 A b
123
123
124
124
125 Default keyword expansion including commit hook
125 Default keyword expansion including commit hook
126 Interrupted commit should not change state or run commit hook
126 Interrupted commit should not change state or run commit hook
127
127
128 $ hg --debug commit
128 $ hg --debug commit
129 abort: empty commit message
129 abort: empty commit message
130 [255]
130 [255]
131 $ hg status
131 $ hg status
132 A a
132 A a
133 A b
133 A b
134
134
135 Commit with several checks
135 Commit with several checks
136
136
137 $ hg --debug commit -mabsym -u 'User Name <user@example.com>'
137 $ hg --debug commit -mabsym -u 'User Name <user@example.com>'
138 a
138 a
139 b
139 b
140 overwriting a expanding keywords
140 overwriting a expanding keywords
141 running hook commit.test: cp a hooktest
141 running hook commit.test: cp a hooktest
142 committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9
142 committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9
143 $ hg status
143 $ hg status
144 ? hooktest
144 ? hooktest
145 $ hg debugrebuildstate
145 $ hg debugrebuildstate
146 $ hg --quiet identify
146 $ hg --quiet identify
147 ef63ca68695b
147 ef63ca68695b
148
148
149 cat files in working directory with keywords expanded
149 cat files in working directory with keywords expanded
150
150
151 $ cat a b
151 $ cat a b
152 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
152 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
153 do not process $Id:
153 do not process $Id:
154 xxx $
154 xxx $
155 ignore $Id$
155 ignore $Id$
156
156
157 hg cat files and symlink, no expansion
157 hg cat files and symlink, no expansion
158
158
159 $ hg cat sym a b && echo
159 $ hg cat sym a b && echo
160 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
160 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
161 do not process $Id:
161 do not process $Id:
162 xxx $
162 xxx $
163 ignore $Id$
163 ignore $Id$
164 a
164 a
165
165
166 $ diff a hooktest
166 $ diff a hooktest
167
167
168 $ cp $HGRCPATH.nohooks $HGRCPATH
168 $ cp $HGRCPATH.nohooks $HGRCPATH
169 $ rm hooktest
169 $ rm hooktest
170
170
171 hg status of kw-ignored binary file starting with '\1\n'
171 hg status of kw-ignored binary file starting with '\1\n'
172
172
173 >>> open("i", "wb").write("\1\nfoo")
173 >>> open("i", "wb").write("\1\nfoo")
174 $ hg -q commit -Am metasep i
174 $ hg -q commit -Am metasep i
175 $ hg status
175 $ hg status
176 >>> open("i", "wb").write("\1\nbar")
176 >>> open("i", "wb").write("\1\nbar")
177 $ hg status
177 $ hg status
178 M i
178 M i
179 $ hg -q commit -m "modify metasep" i
179 $ hg -q commit -m "modify metasep" i
180 $ hg status --rev 2:3
180 $ hg status --rev 2:3
181 M i
181 M i
182 $ touch empty
182 $ touch empty
183 $ hg -q commit -A -m "another file"
183 $ hg -q commit -A -m "another file"
184 $ hg status -A --rev 3:4 i
184 $ hg status -A --rev 3:4 i
185 C i
185 C i
186
186
187 $ hg -q strip -n 2
187 $ hg -q strip -n 2
188
188
189 Test hook execution
189 Test hook execution
190
190
191 bundle
191 bundle
192
192
193 $ hg bundle --base null ../kw.hg
193 $ hg bundle --base null ../kw.hg
194 2 changesets found
194 2 changesets found
195 $ cd ..
195 $ cd ..
196 $ hg init Test
196 $ hg init Test
197 $ cd Test
197 $ cd Test
198
198
199 Notify on pull to check whether keywords stay as is in email
199 Notify on pull to check whether keywords stay as is in email
200 ie. if patch.diff wrapper acts as it should
200 ie. if patch.diff wrapper acts as it should
201
201
202 $ cat <<EOF >> $HGRCPATH
202 $ cat <<EOF >> $HGRCPATH
203 > [hooks]
203 > [hooks]
204 > incoming.notify = python:hgext.notify.hook
204 > incoming.notify = python:hgext.notify.hook
205 > [notify]
205 > [notify]
206 > sources = pull
206 > sources = pull
207 > diffstat = False
207 > diffstat = False
208 > maxsubject = 15
208 > maxsubject = 15
209 > [reposubs]
209 > [reposubs]
210 > * = Test
210 > * = Test
211 > EOF
211 > EOF
212
212
213 Pull from bundle and trigger notify
213 Pull from bundle and trigger notify
214
214
215 $ hg pull -u ../kw.hg
215 $ hg pull -u ../kw.hg
216 pulling from ../kw.hg
216 pulling from ../kw.hg
217 requesting all changes
217 requesting all changes
218 adding changesets
218 adding changesets
219 adding manifests
219 adding manifests
220 adding file changes
220 adding file changes
221 added 2 changesets with 3 changes to 3 files
221 added 2 changesets with 3 changes to 3 files
222 Content-Type: text/plain; charset="us-ascii"
222 Content-Type: text/plain; charset="us-ascii"
223 MIME-Version: 1.0
223 MIME-Version: 1.0
224 Content-Transfer-Encoding: 7bit
224 Content-Transfer-Encoding: 7bit
225 Date: * (glob)
225 Date: * (glob)
226 Subject: changeset in...
226 Subject: changeset in...
227 From: mercurial
227 From: mercurial
228 X-Hg-Notification: changeset a2392c293916
228 X-Hg-Notification: changeset a2392c293916
229 Message-Id: <hg.a2392c293916*> (glob)
229 Message-Id: <hg.a2392c293916*> (glob)
230 To: Test
230 To: Test
231
231
232 changeset a2392c293916 in $TESTTMP/Test (glob)
232 changeset a2392c293916 in $TESTTMP/Test (glob)
233 details: $TESTTMP/Test?cmd=changeset;node=a2392c293916
233 details: $TESTTMP/Test?cmd=changeset;node=a2392c293916
234 description:
234 description:
235 addsym
235 addsym
236
236
237 diffs (6 lines):
237 diffs (6 lines):
238
238
239 diff -r 000000000000 -r a2392c293916 sym
239 diff -r 000000000000 -r a2392c293916 sym
240 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
240 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
241 +++ b/sym Sat Feb 09 20:25:47 2008 +0100
241 +++ b/sym Sat Feb 09 20:25:47 2008 +0100
242 @@ -0,0 +1,1 @@
242 @@ -0,0 +1,1 @@
243 +a
243 +a
244 \ No newline at end of file
244 \ No newline at end of file
245 Content-Type: text/plain; charset="us-ascii"
245 Content-Type: text/plain; charset="us-ascii"
246 MIME-Version: 1.0
246 MIME-Version: 1.0
247 Content-Transfer-Encoding: 7bit
247 Content-Transfer-Encoding: 7bit
248 Date:* (glob)
248 Date:* (glob)
249 Subject: changeset in...
249 Subject: changeset in...
250 From: User Name <user@example.com>
250 From: User Name <user@example.com>
251 X-Hg-Notification: changeset ef63ca68695b
251 X-Hg-Notification: changeset ef63ca68695b
252 Message-Id: <hg.ef63ca68695b*> (glob)
252 Message-Id: <hg.ef63ca68695b*> (glob)
253 To: Test
253 To: Test
254
254
255 changeset ef63ca68695b in $TESTTMP/Test (glob)
255 changeset ef63ca68695b in $TESTTMP/Test (glob)
256 details: $TESTTMP/Test?cmd=changeset;node=ef63ca68695b
256 details: $TESTTMP/Test?cmd=changeset;node=ef63ca68695b
257 description:
257 description:
258 absym
258 absym
259
259
260 diffs (12 lines):
260 diffs (12 lines):
261
261
262 diff -r a2392c293916 -r ef63ca68695b a
262 diff -r a2392c293916 -r ef63ca68695b a
263 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
263 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
264 +++ b/a Thu Jan 01 00:00:00 1970 +0000
264 +++ b/a Thu Jan 01 00:00:00 1970 +0000
265 @@ -0,0 +1,3 @@
265 @@ -0,0 +1,3 @@
266 +expand $Id$
266 +expand $Id$
267 +do not process $Id:
267 +do not process $Id:
268 +xxx $
268 +xxx $
269 diff -r a2392c293916 -r ef63ca68695b b
269 diff -r a2392c293916 -r ef63ca68695b b
270 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
270 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
271 +++ b/b Thu Jan 01 00:00:00 1970 +0000
271 +++ b/b Thu Jan 01 00:00:00 1970 +0000
272 @@ -0,0 +1,1 @@
272 @@ -0,0 +1,1 @@
273 +ignore $Id$
273 +ignore $Id$
274 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
274 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
275
275
276 $ cp $HGRCPATH.nohooks $HGRCPATH
276 $ cp $HGRCPATH.nohooks $HGRCPATH
277
277
278 Touch files and check with status
278 Touch files and check with status
279
279
280 $ touch a b
280 $ touch a b
281 $ hg status
281 $ hg status
282
282
283 Update and expand
283 Update and expand
284
284
285 $ rm sym a b
285 $ rm sym a b
286 $ hg update -C
286 $ hg update -C
287 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
287 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
288 $ cat a b
288 $ cat a b
289 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
289 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
290 do not process $Id:
290 do not process $Id:
291 xxx $
291 xxx $
292 ignore $Id$
292 ignore $Id$
293
293
294 Check whether expansion is filewise and file mode is preserved
294 Check whether expansion is filewise and file mode is preserved
295
295
296 $ echo '$Id$' > c
296 $ echo '$Id$' > c
297 $ echo 'tests for different changenodes' >> c
297 $ echo 'tests for different changenodes' >> c
298 #if unix-permissions
298 #if unix-permissions
299 $ chmod 600 c
299 $ chmod 600 c
300 $ ls -l c | cut -b 1-10
300 $ ls -l c | cut -b 1-10
301 -rw-------
301 -rw-------
302 #endif
302 #endif
303
303
304 commit file c
304 commit file c
305
305
306 $ hg commit -A -mcndiff -d '1 0' -u 'User Name <user@example.com>'
306 $ hg commit -A -mcndiff -d '1 0' -u 'User Name <user@example.com>'
307 adding c
307 adding c
308 #if unix-permissions
308 #if unix-permissions
309 $ ls -l c | cut -b 1-10
309 $ ls -l c | cut -b 1-10
310 -rw-------
310 -rw-------
311 #endif
311 #endif
312
312
313 force expansion
313 force expansion
314
314
315 $ hg -v kwexpand
315 $ hg -v kwexpand
316 overwriting a expanding keywords
316 overwriting a expanding keywords
317 overwriting c expanding keywords
317 overwriting c expanding keywords
318
318
319 compare changenodes in a and c
319 compare changenodes in a and c
320
320
321 $ cat a c
321 $ cat a c
322 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
322 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
323 do not process $Id:
323 do not process $Id:
324 xxx $
324 xxx $
325 $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $
325 $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $
326 tests for different changenodes
326 tests for different changenodes
327
327
328 record
328 record
329
329
330 $ echo '$Id$' > r
330 $ echo '$Id$' > r
331 $ hg add r
331 $ hg add r
332
332
333 record chunk
333 record chunk
334
334
335 >>> lines = open('a', 'rb').readlines()
335 >>> lines = open('a', 'rb').readlines()
336 >>> lines.insert(1, 'foo\n')
336 >>> lines.insert(1, 'foo\n')
337 >>> lines.append('bar\n')
337 >>> lines.append('bar\n')
338 >>> open('a', 'wb').writelines(lines)
338 >>> open('a', 'wb').writelines(lines)
339 $ hg record -d '10 1' -m rectest a<<EOF
339 $ hg record -d '10 1' -m rectest a<<EOF
340 > y
340 > y
341 > y
341 > y
342 > n
342 > n
343 > EOF
343 > EOF
344 diff --git a/a b/a
344 diff --git a/a b/a
345 2 hunks, 2 lines changed
345 2 hunks, 2 lines changed
346 examine changes to 'a'? [Ynesfdaq?]
346 examine changes to 'a'? [Ynesfdaq?]
347 @@ -1,3 +1,4 @@
347 @@ -1,3 +1,4 @@
348 expand $Id$
348 expand $Id$
349 +foo
349 +foo
350 do not process $Id:
350 do not process $Id:
351 xxx $
351 xxx $
352 record change 1/2 to 'a'? [Ynesfdaq?]
352 record change 1/2 to 'a'? [Ynesfdaq?]
353 @@ -2,2 +3,3 @@
353 @@ -2,2 +3,3 @@
354 do not process $Id:
354 do not process $Id:
355 xxx $
355 xxx $
356 +bar
356 +bar
357 record change 2/2 to 'a'? [Ynesfdaq?]
357 record change 2/2 to 'a'? [Ynesfdaq?]
358
358
359 $ hg identify
359 $ hg identify
360 5f5eb23505c3+ tip
360 5f5eb23505c3+ tip
361 $ hg status
361 $ hg status
362 M a
362 M a
363 A r
363 A r
364
364
365 Cat modified file a
365 Cat modified file a
366
366
367 $ cat a
367 $ cat a
368 expand $Id: a,v 5f5eb23505c3 1970/01/01 00:00:10 test $
368 expand $Id: a,v 5f5eb23505c3 1970/01/01 00:00:10 test $
369 foo
369 foo
370 do not process $Id:
370 do not process $Id:
371 xxx $
371 xxx $
372 bar
372 bar
373
373
374 Diff remaining chunk
374 Diff remaining chunk
375
375
376 $ hg diff a
376 $ hg diff a
377 diff -r 5f5eb23505c3 a
377 diff -r 5f5eb23505c3 a
378 --- a/a Thu Jan 01 00:00:09 1970 -0000
378 --- a/a Thu Jan 01 00:00:09 1970 -0000
379 +++ b/a * (glob)
379 +++ b/a * (glob)
380 @@ -2,3 +2,4 @@
380 @@ -2,3 +2,4 @@
381 foo
381 foo
382 do not process $Id:
382 do not process $Id:
383 xxx $
383 xxx $
384 +bar
384 +bar
385
385
386 $ hg rollback
386 $ hg rollback
387 repository tip rolled back to revision 2 (undo commit)
387 repository tip rolled back to revision 2 (undo commit)
388 working directory now based on revision 2
388 working directory now based on revision 2
389
389
390 Record all chunks in file a
390 Record all chunks in file a
391
391
392 $ echo foo > msg
392 $ echo foo > msg
393
393
394 - do not use "hg record -m" here!
394 - do not use "hg record -m" here!
395
395
396 $ hg record -l msg -d '11 1' a<<EOF
396 $ hg record -l msg -d '11 1' a<<EOF
397 > y
397 > y
398 > y
398 > y
399 > y
399 > y
400 > EOF
400 > EOF
401 diff --git a/a b/a
401 diff --git a/a b/a
402 2 hunks, 2 lines changed
402 2 hunks, 2 lines changed
403 examine changes to 'a'? [Ynesfdaq?]
403 examine changes to 'a'? [Ynesfdaq?]
404 @@ -1,3 +1,4 @@
404 @@ -1,3 +1,4 @@
405 expand $Id$
405 expand $Id$
406 +foo
406 +foo
407 do not process $Id:
407 do not process $Id:
408 xxx $
408 xxx $
409 record change 1/2 to 'a'? [Ynesfdaq?]
409 record change 1/2 to 'a'? [Ynesfdaq?]
410 @@ -2,2 +3,3 @@
410 @@ -2,2 +3,3 @@
411 do not process $Id:
411 do not process $Id:
412 xxx $
412 xxx $
413 +bar
413 +bar
414 record change 2/2 to 'a'? [Ynesfdaq?]
414 record change 2/2 to 'a'? [Ynesfdaq?]
415
415
416 File a should be clean
416 File a should be clean
417
417
418 $ hg status -A a
418 $ hg status -A a
419 C a
419 C a
420
420
421 rollback and revert expansion
421 rollback and revert expansion
422
422
423 $ cat a
423 $ cat a
424 expand $Id: a,v 78e0a02d76aa 1970/01/01 00:00:11 test $
424 expand $Id: a,v 78e0a02d76aa 1970/01/01 00:00:11 test $
425 foo
425 foo
426 do not process $Id:
426 do not process $Id:
427 xxx $
427 xxx $
428 bar
428 bar
429 $ hg --verbose rollback
429 $ hg --verbose rollback
430 repository tip rolled back to revision 2 (undo commit)
430 repository tip rolled back to revision 2 (undo commit)
431 working directory now based on revision 2
431 working directory now based on revision 2
432 overwriting a expanding keywords
432 overwriting a expanding keywords
433 $ hg status a
433 $ hg status a
434 M a
434 M a
435 $ cat a
435 $ cat a
436 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
436 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
437 foo
437 foo
438 do not process $Id:
438 do not process $Id:
439 xxx $
439 xxx $
440 bar
440 bar
441 $ echo '$Id$' > y
441 $ echo '$Id$' > y
442 $ echo '$Id$' > z
442 $ echo '$Id$' > z
443 $ hg add y
443 $ hg add y
444 $ hg commit -Am "rollback only" z
444 $ hg commit -Am "rollback only" z
445 $ cat z
445 $ cat z
446 $Id: z,v 45a5d3adce53 1970/01/01 00:00:00 test $
446 $Id: z,v 45a5d3adce53 1970/01/01 00:00:00 test $
447 $ hg --verbose rollback
447 $ hg --verbose rollback
448 repository tip rolled back to revision 2 (undo commit)
448 repository tip rolled back to revision 2 (undo commit)
449 working directory now based on revision 2
449 working directory now based on revision 2
450 overwriting z shrinking keywords
450 overwriting z shrinking keywords
451
451
452 Only z should be overwritten
452 Only z should be overwritten
453
453
454 $ hg status a y z
454 $ hg status a y z
455 M a
455 M a
456 A y
456 A y
457 A z
457 A z
458 $ cat z
458 $ cat z
459 $Id$
459 $Id$
460 $ hg forget y z
460 $ hg forget y z
461 $ rm y z
461 $ rm y z
462
462
463 record added file alone
463 record added file alone
464
464
465 $ hg -v record -l msg -d '12 2' r<<EOF
465 $ hg -v record -l msg -d '12 2' r<<EOF
466 > y
466 > y
467 > EOF
467 > EOF
468 diff --git a/r b/r
468 diff --git a/r b/r
469 new file mode 100644
469 new file mode 100644
470 examine changes to 'r'? [Ynesfdaq?]
470 examine changes to 'r'? [Ynesfdaq?]
471 r
471 r
472 committed changeset 3:82a2f715724d
472 committed changeset 3:82a2f715724d
473 overwriting r expanding keywords
473 overwriting r expanding keywords
474 - status call required for dirstate.normallookup() check
474 - status call required for dirstate.normallookup() check
475 $ hg status r
475 $ hg status r
476 $ hg --verbose rollback
476 $ hg --verbose rollback
477 repository tip rolled back to revision 2 (undo commit)
477 repository tip rolled back to revision 2 (undo commit)
478 working directory now based on revision 2
478 working directory now based on revision 2
479 overwriting r shrinking keywords
479 overwriting r shrinking keywords
480 $ hg forget r
480 $ hg forget r
481 $ rm msg r
481 $ rm msg r
482 $ hg update -C
482 $ hg update -C
483 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
483 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
484
484
485 record added keyword ignored file
485 record added keyword ignored file
486
486
487 $ echo '$Id$' > i
487 $ echo '$Id$' > i
488 $ hg add i
488 $ hg add i
489 $ hg --verbose record -d '13 1' -m recignored<<EOF
489 $ hg --verbose record -d '13 1' -m recignored<<EOF
490 > y
490 > y
491 > EOF
491 > EOF
492 diff --git a/i b/i
492 diff --git a/i b/i
493 new file mode 100644
493 new file mode 100644
494 examine changes to 'i'? [Ynesfdaq?]
494 examine changes to 'i'? [Ynesfdaq?]
495 i
495 i
496 committed changeset 3:9f40ceb5a072
496 committed changeset 3:9f40ceb5a072
497 $ cat i
497 $ cat i
498 $Id$
498 $Id$
499 $ hg -q rollback
499 $ hg -q rollback
500 $ hg forget i
500 $ hg forget i
501 $ rm i
501 $ rm i
502
502
503 amend
503 amend
504
504
505 $ echo amend >> a
505 $ echo amend >> a
506 $ echo amend >> b
506 $ echo amend >> b
507 $ hg -q commit -d '14 1' -m 'prepare amend'
507 $ hg -q commit -d '14 1' -m 'prepare amend'
508
508
509 $ hg --debug commit --amend -d '15 1' -m 'amend without changes' | grep keywords
509 $ hg --debug commit --amend -d '15 1' -m 'amend without changes' | grep keywords
510 overwriting a expanding keywords
510 overwriting a expanding keywords
511 $ hg -q id
511 $ hg -q id
512 67d8c481a6be
512 67d8c481a6be
513 $ head -1 a
513 $ head -1 a
514 expand $Id: a,v 67d8c481a6be 1970/01/01 00:00:15 test $
514 expand $Id: a,v 67d8c481a6be 1970/01/01 00:00:15 test $
515
515
516 $ hg -q strip -n tip
516 $ hg -q strip -n tip
517
517
518 Test patch queue repo
518 Test patch queue repo
519
519
520 $ hg init --mq
520 $ hg init --mq
521 $ hg qimport -r tip -n mqtest.diff
521 $ hg qimport -r tip -n mqtest.diff
522 $ hg commit --mq -m mqtest
522 $ hg commit --mq -m mqtest
523
523
524 Keywords should not be expanded in patch
524 Keywords should not be expanded in patch
525
525
526 $ cat .hg/patches/mqtest.diff
526 $ cat .hg/patches/mqtest.diff
527 # HG changeset patch
527 # HG changeset patch
528 # User User Name <user@example.com>
528 # User User Name <user@example.com>
529 # Date 1 0
529 # Date 1 0
530 # Node ID 40a904bbbe4cd4ab0a1f28411e35db26341a40ad
530 # Node ID 40a904bbbe4cd4ab0a1f28411e35db26341a40ad
531 # Parent ef63ca68695bc9495032c6fda1350c71e6d256e9
531 # Parent ef63ca68695bc9495032c6fda1350c71e6d256e9
532 cndiff
532 cndiff
533
533
534 diff -r ef63ca68695b -r 40a904bbbe4c c
534 diff -r ef63ca68695b -r 40a904bbbe4c c
535 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
535 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
536 +++ b/c Thu Jan 01 00:00:01 1970 +0000
536 +++ b/c Thu Jan 01 00:00:01 1970 +0000
537 @@ -0,0 +1,2 @@
537 @@ -0,0 +1,2 @@
538 +$Id$
538 +$Id$
539 +tests for different changenodes
539 +tests for different changenodes
540
540
541 $ hg qpop
541 $ hg qpop
542 popping mqtest.diff
542 popping mqtest.diff
543 patch queue now empty
543 patch queue now empty
544
544
545 qgoto, implying qpush, should expand
545 qgoto, implying qpush, should expand
546
546
547 $ hg qgoto mqtest.diff
547 $ hg qgoto mqtest.diff
548 applying mqtest.diff
548 applying mqtest.diff
549 now at: mqtest.diff
549 now at: mqtest.diff
550 $ cat c
550 $ cat c
551 $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $
551 $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $
552 tests for different changenodes
552 tests for different changenodes
553 $ hg cat c
553 $ hg cat c
554 $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $
554 $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $
555 tests for different changenodes
555 tests for different changenodes
556
556
557 Keywords should not be expanded in filelog
557 Keywords should not be expanded in filelog
558
558
559 $ hg --config 'extensions.keyword=!' cat c
559 $ hg --config 'extensions.keyword=!' cat c
560 $Id$
560 $Id$
561 tests for different changenodes
561 tests for different changenodes
562
562
563 qpop and move on
563 qpop and move on
564
564
565 $ hg qpop
565 $ hg qpop
566 popping mqtest.diff
566 popping mqtest.diff
567 patch queue now empty
567 patch queue now empty
568
568
569 Copy and show added kwfiles
569 Copy and show added kwfiles
570
570
571 $ hg cp a c
571 $ hg cp a c
572 $ hg kwfiles
572 $ hg kwfiles
573 a
573 a
574 c
574 c
575
575
576 Commit and show expansion in original and copy
576 Commit and show expansion in original and copy
577
577
578 $ hg --debug commit -ma2c -d '1 0' -u 'User Name <user@example.com>'
578 $ hg --debug commit -ma2c -d '1 0' -u 'User Name <user@example.com>'
579 invalid branchheads cache (unserved): tip differs
579 invalid branchheads cache (served): tip differs
580 c
580 c
581 c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292
581 c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292
582 overwriting c expanding keywords
582 overwriting c expanding keywords
583 committed changeset 2:25736cf2f5cbe41f6be4e6784ef6ecf9f3bbcc7d
583 committed changeset 2:25736cf2f5cbe41f6be4e6784ef6ecf9f3bbcc7d
584 $ cat a c
584 $ cat a c
585 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
585 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
586 do not process $Id:
586 do not process $Id:
587 xxx $
587 xxx $
588 expand $Id: c,v 25736cf2f5cb 1970/01/01 00:00:01 user $
588 expand $Id: c,v 25736cf2f5cb 1970/01/01 00:00:01 user $
589 do not process $Id:
589 do not process $Id:
590 xxx $
590 xxx $
591
591
592 Touch copied c and check its status
592 Touch copied c and check its status
593
593
594 $ touch c
594 $ touch c
595 $ hg status
595 $ hg status
596
596
597 Copy kwfile to keyword ignored file unexpanding keywords
597 Copy kwfile to keyword ignored file unexpanding keywords
598
598
599 $ hg --verbose copy a i
599 $ hg --verbose copy a i
600 copying a to i
600 copying a to i
601 overwriting i shrinking keywords
601 overwriting i shrinking keywords
602 $ head -n 1 i
602 $ head -n 1 i
603 expand $Id$
603 expand $Id$
604 $ hg forget i
604 $ hg forget i
605 $ rm i
605 $ rm i
606
606
607 Copy ignored file to ignored file: no overwriting
607 Copy ignored file to ignored file: no overwriting
608
608
609 $ hg --verbose copy b i
609 $ hg --verbose copy b i
610 copying b to i
610 copying b to i
611 $ hg forget i
611 $ hg forget i
612 $ rm i
612 $ rm i
613
613
614 cp symlink file; hg cp -A symlink file (part1)
614 cp symlink file; hg cp -A symlink file (part1)
615 - copied symlink points to kwfile: overwrite
615 - copied symlink points to kwfile: overwrite
616
616
617 #if symlink
617 #if symlink
618 $ cp sym i
618 $ cp sym i
619 $ ls -l i
619 $ ls -l i
620 -rw-r--r--* (glob)
620 -rw-r--r--* (glob)
621 $ head -1 i
621 $ head -1 i
622 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
622 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
623 $ hg copy --after --verbose sym i
623 $ hg copy --after --verbose sym i
624 copying sym to i
624 copying sym to i
625 overwriting i shrinking keywords
625 overwriting i shrinking keywords
626 $ head -1 i
626 $ head -1 i
627 expand $Id$
627 expand $Id$
628 $ hg forget i
628 $ hg forget i
629 $ rm i
629 $ rm i
630 #endif
630 #endif
631
631
632 Test different options of hg kwfiles
632 Test different options of hg kwfiles
633
633
634 $ hg kwfiles
634 $ hg kwfiles
635 a
635 a
636 c
636 c
637 $ hg -v kwfiles --ignore
637 $ hg -v kwfiles --ignore
638 I b
638 I b
639 I sym
639 I sym
640 $ hg kwfiles --all
640 $ hg kwfiles --all
641 K a
641 K a
642 K c
642 K c
643 I b
643 I b
644 I sym
644 I sym
645
645
646 Diff specific revision
646 Diff specific revision
647
647
648 $ hg diff --rev 1
648 $ hg diff --rev 1
649 diff -r ef63ca68695b c
649 diff -r ef63ca68695b c
650 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
650 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
651 +++ b/c * (glob)
651 +++ b/c * (glob)
652 @@ -0,0 +1,3 @@
652 @@ -0,0 +1,3 @@
653 +expand $Id$
653 +expand $Id$
654 +do not process $Id:
654 +do not process $Id:
655 +xxx $
655 +xxx $
656
656
657 Status after rollback:
657 Status after rollback:
658
658
659 $ hg rollback
659 $ hg rollback
660 repository tip rolled back to revision 1 (undo commit)
660 repository tip rolled back to revision 1 (undo commit)
661 working directory now based on revision 1
661 working directory now based on revision 1
662 $ hg status
662 $ hg status
663 A c
663 A c
664 $ hg update --clean
664 $ hg update --clean
665 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
665 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
666
666
667 #if symlink
667 #if symlink
668
668
669 cp symlink file; hg cp -A symlink file (part2)
669 cp symlink file; hg cp -A symlink file (part2)
670 - copied symlink points to kw ignored file: do not overwrite
670 - copied symlink points to kw ignored file: do not overwrite
671
671
672 $ cat a > i
672 $ cat a > i
673 $ ln -s i symignored
673 $ ln -s i symignored
674 $ hg commit -Am 'fake expansion in ignored and symlink' i symignored
674 $ hg commit -Am 'fake expansion in ignored and symlink' i symignored
675 $ cp symignored x
675 $ cp symignored x
676 $ hg copy --after --verbose symignored x
676 $ hg copy --after --verbose symignored x
677 copying symignored to x
677 copying symignored to x
678 $ head -n 1 x
678 $ head -n 1 x
679 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
679 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
680 $ hg forget x
680 $ hg forget x
681 $ rm x
681 $ rm x
682
682
683 $ hg rollback
683 $ hg rollback
684 repository tip rolled back to revision 1 (undo commit)
684 repository tip rolled back to revision 1 (undo commit)
685 working directory now based on revision 1
685 working directory now based on revision 1
686 $ hg update --clean
686 $ hg update --clean
687 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
687 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
688 $ rm i symignored
688 $ rm i symignored
689
689
690 #endif
690 #endif
691
691
692 Custom keywordmaps as argument to kwdemo
692 Custom keywordmaps as argument to kwdemo
693
693
694 $ hg --quiet kwdemo "Xinfo = {author}: {desc}"
694 $ hg --quiet kwdemo "Xinfo = {author}: {desc}"
695 [extensions]
695 [extensions]
696 keyword =
696 keyword =
697 [keyword]
697 [keyword]
698 ** =
698 ** =
699 b = ignore
699 b = ignore
700 demo.txt =
700 demo.txt =
701 i = ignore
701 i = ignore
702 [keywordset]
702 [keywordset]
703 svn = False
703 svn = False
704 [keywordmaps]
704 [keywordmaps]
705 Xinfo = {author}: {desc}
705 Xinfo = {author}: {desc}
706 $Xinfo: test: hg keyword configuration and expansion example $
706 $Xinfo: test: hg keyword configuration and expansion example $
707
707
708 Configure custom keywordmaps
708 Configure custom keywordmaps
709
709
710 $ cat <<EOF >>$HGRCPATH
710 $ cat <<EOF >>$HGRCPATH
711 > [keywordmaps]
711 > [keywordmaps]
712 > Id = {file} {node|short} {date|rfc822date} {author|user}
712 > Id = {file} {node|short} {date|rfc822date} {author|user}
713 > Xinfo = {author}: {desc}
713 > Xinfo = {author}: {desc}
714 > EOF
714 > EOF
715
715
716 Cat and hg cat files before custom expansion
716 Cat and hg cat files before custom expansion
717
717
718 $ cat a b
718 $ cat a b
719 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
719 expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $
720 do not process $Id:
720 do not process $Id:
721 xxx $
721 xxx $
722 ignore $Id$
722 ignore $Id$
723 $ hg cat sym a b && echo
723 $ hg cat sym a b && echo
724 expand $Id: a ef63ca68695b Thu, 01 Jan 1970 00:00:00 +0000 user $
724 expand $Id: a ef63ca68695b Thu, 01 Jan 1970 00:00:00 +0000 user $
725 do not process $Id:
725 do not process $Id:
726 xxx $
726 xxx $
727 ignore $Id$
727 ignore $Id$
728 a
728 a
729
729
730 Write custom keyword and prepare multi-line commit message
730 Write custom keyword and prepare multi-line commit message
731
731
732 $ echo '$Xinfo$' >> a
732 $ echo '$Xinfo$' >> a
733 $ cat <<EOF >> log
733 $ cat <<EOF >> log
734 > firstline
734 > firstline
735 > secondline
735 > secondline
736 > EOF
736 > EOF
737
737
738 Interrupted commit should not change state
738 Interrupted commit should not change state
739
739
740 $ hg commit
740 $ hg commit
741 abort: empty commit message
741 abort: empty commit message
742 [255]
742 [255]
743 $ hg status
743 $ hg status
744 M a
744 M a
745 ? c
745 ? c
746 ? log
746 ? log
747
747
748 Commit with multi-line message and custom expansion
748 Commit with multi-line message and custom expansion
749
749
750 |Note:
750 |Note:
751 |
751 |
752 | After the last rollback, the "unserved" branchheads cache became invalid, but
752 | After the last rollback, the "unserved" branchheads cache became invalid, but
753 | all changesets in the repo were public. For filtering this means:
753 | all changesets in the repo were public. For filtering this means:
754 | "mutable" == "unserved" == ΓΈ.
754 | "mutable" == "unserved" == ΓΈ.
755 |
755 |
756 | As the "unserved" cache is invalid, we fall back to the "mutable" cache. But
756 | As the "unserved" cache is invalid, we fall back to the "mutable" cache. But
757 | no update is needed between "mutable" and "unserved" and the "unserved" cache
757 | no update is needed between "mutable" and "unserved" and the "unserved" cache
758 | is not updated on disk. The on-disk version therefore stays invalid for some
758 | is not updated on disk. The on-disk version therefore stays invalid for some
759 | time. This explains why the "unserved" branchheads cache is detected as
759 | time. This explains why the "unserved" branchheads cache is detected as
760 | invalid here.
760 | invalid here.
761
761
762 $ hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>'
762 $ hg --debug commit -l log -d '2 0' -u 'User Name <user@example.com>'
763 invalid branchheads cache (unserved): tip differs
763 invalid branchheads cache (served): tip differs
764 a
764 a
765 invalid branchheads cache: tip differs
765 invalid branchheads cache: tip differs
766 invalid branchheads cache (unserved): tip differs
766 invalid branchheads cache (served): tip differs
767 overwriting a expanding keywords
767 overwriting a expanding keywords
768 committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83
768 committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83
769 $ rm log
769 $ rm log
770
770
771 Stat, verify and show custom expansion (firstline)
771 Stat, verify and show custom expansion (firstline)
772
772
773 $ hg status
773 $ hg status
774 ? c
774 ? c
775 $ hg verify
775 $ hg verify
776 checking changesets
776 checking changesets
777 checking manifests
777 checking manifests
778 crosschecking files in changesets and manifests
778 crosschecking files in changesets and manifests
779 checking files
779 checking files
780 3 files, 3 changesets, 4 total revisions
780 3 files, 3 changesets, 4 total revisions
781 $ cat a b
781 $ cat a b
782 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
782 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
783 do not process $Id:
783 do not process $Id:
784 xxx $
784 xxx $
785 $Xinfo: User Name <user@example.com>: firstline $
785 $Xinfo: User Name <user@example.com>: firstline $
786 ignore $Id$
786 ignore $Id$
787 $ hg cat sym a b && echo
787 $ hg cat sym a b && echo
788 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
788 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
789 do not process $Id:
789 do not process $Id:
790 xxx $
790 xxx $
791 $Xinfo: User Name <user@example.com>: firstline $
791 $Xinfo: User Name <user@example.com>: firstline $
792 ignore $Id$
792 ignore $Id$
793 a
793 a
794
794
795 annotate
795 annotate
796
796
797 $ hg annotate a
797 $ hg annotate a
798 1: expand $Id$
798 1: expand $Id$
799 1: do not process $Id:
799 1: do not process $Id:
800 1: xxx $
800 1: xxx $
801 2: $Xinfo$
801 2: $Xinfo$
802
802
803 remove with status checks
803 remove with status checks
804
804
805 $ hg debugrebuildstate
805 $ hg debugrebuildstate
806 $ hg remove a
806 $ hg remove a
807 $ hg --debug commit -m rma
807 $ hg --debug commit -m rma
808 invalid branchheads cache: tip differs
808 invalid branchheads cache: tip differs
809 committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012
809 committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012
810 $ hg status
810 $ hg status
811 ? c
811 ? c
812
812
813 Rollback, revert, and check expansion
813 Rollback, revert, and check expansion
814
814
815 $ hg rollback
815 $ hg rollback
816 repository tip rolled back to revision 2 (undo commit)
816 repository tip rolled back to revision 2 (undo commit)
817 working directory now based on revision 2
817 working directory now based on revision 2
818 $ hg status
818 $ hg status
819 R a
819 R a
820 ? c
820 ? c
821 $ hg revert --no-backup --rev tip a
821 $ hg revert --no-backup --rev tip a
822 $ cat a
822 $ cat a
823 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
823 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
824 do not process $Id:
824 do not process $Id:
825 xxx $
825 xxx $
826 $Xinfo: User Name <user@example.com>: firstline $
826 $Xinfo: User Name <user@example.com>: firstline $
827
827
828 Clone to test global and local configurations
828 Clone to test global and local configurations
829
829
830 $ cd ..
830 $ cd ..
831
831
832 Expansion in destination with global configuration
832 Expansion in destination with global configuration
833
833
834 $ hg --quiet clone Test globalconf
834 $ hg --quiet clone Test globalconf
835 $ cat globalconf/a
835 $ cat globalconf/a
836 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
836 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
837 do not process $Id:
837 do not process $Id:
838 xxx $
838 xxx $
839 $Xinfo: User Name <user@example.com>: firstline $
839 $Xinfo: User Name <user@example.com>: firstline $
840
840
841 No expansion in destination with local configuration in origin only
841 No expansion in destination with local configuration in origin only
842
842
843 $ hg --quiet --config 'keyword.**=ignore' clone Test localconf
843 $ hg --quiet --config 'keyword.**=ignore' clone Test localconf
844 $ cat localconf/a
844 $ cat localconf/a
845 expand $Id$
845 expand $Id$
846 do not process $Id:
846 do not process $Id:
847 xxx $
847 xxx $
848 $Xinfo$
848 $Xinfo$
849
849
850 Clone to test incoming
850 Clone to test incoming
851
851
852 $ hg clone -r1 Test Test-a
852 $ hg clone -r1 Test Test-a
853 adding changesets
853 adding changesets
854 adding manifests
854 adding manifests
855 adding file changes
855 adding file changes
856 added 2 changesets with 3 changes to 3 files
856 added 2 changesets with 3 changes to 3 files
857 updating to branch default
857 updating to branch default
858 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
858 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
859 $ cd Test-a
859 $ cd Test-a
860 $ cat <<EOF >> .hg/hgrc
860 $ cat <<EOF >> .hg/hgrc
861 > [paths]
861 > [paths]
862 > default = ../Test
862 > default = ../Test
863 > EOF
863 > EOF
864 $ hg incoming
864 $ hg incoming
865 comparing with $TESTTMP/Test (glob)
865 comparing with $TESTTMP/Test (glob)
866 searching for changes
866 searching for changes
867 changeset: 2:bb948857c743
867 changeset: 2:bb948857c743
868 tag: tip
868 tag: tip
869 user: User Name <user@example.com>
869 user: User Name <user@example.com>
870 date: Thu Jan 01 00:00:02 1970 +0000
870 date: Thu Jan 01 00:00:02 1970 +0000
871 summary: firstline
871 summary: firstline
872
872
873 Imported patch should not be rejected
873 Imported patch should not be rejected
874
874
875 >>> import re
875 >>> import re
876 >>> text = re.sub(r'(Id.*)', r'\1 rejecttest', open('a').read())
876 >>> text = re.sub(r'(Id.*)', r'\1 rejecttest', open('a').read())
877 >>> open('a', 'wb').write(text)
877 >>> open('a', 'wb').write(text)
878 $ hg --debug commit -m'rejects?' -d '3 0' -u 'User Name <user@example.com>'
878 $ hg --debug commit -m'rejects?' -d '3 0' -u 'User Name <user@example.com>'
879 a
879 a
880 overwriting a expanding keywords
880 overwriting a expanding keywords
881 committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082
881 committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082
882 $ hg export -o ../rejecttest.diff tip
882 $ hg export -o ../rejecttest.diff tip
883 $ cd ../Test
883 $ cd ../Test
884 $ hg import ../rejecttest.diff
884 $ hg import ../rejecttest.diff
885 applying ../rejecttest.diff
885 applying ../rejecttest.diff
886 $ cat a b
886 $ cat a b
887 expand $Id: a 4e0994474d25 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest
887 expand $Id: a 4e0994474d25 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest
888 do not process $Id: rejecttest
888 do not process $Id: rejecttest
889 xxx $
889 xxx $
890 $Xinfo: User Name <user@example.com>: rejects? $
890 $Xinfo: User Name <user@example.com>: rejects? $
891 ignore $Id$
891 ignore $Id$
892
892
893 $ hg rollback
893 $ hg rollback
894 repository tip rolled back to revision 2 (undo import)
894 repository tip rolled back to revision 2 (undo import)
895 working directory now based on revision 2
895 working directory now based on revision 2
896 $ hg update --clean
896 $ hg update --clean
897 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
897 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
898
898
899 kwexpand/kwshrink on selected files
899 kwexpand/kwshrink on selected files
900
900
901 $ mkdir x
901 $ mkdir x
902 $ hg copy a x/a
902 $ hg copy a x/a
903 $ hg --verbose kwshrink a
903 $ hg --verbose kwshrink a
904 overwriting a shrinking keywords
904 overwriting a shrinking keywords
905 - sleep required for dirstate.normal() check
905 - sleep required for dirstate.normal() check
906 $ sleep 1
906 $ sleep 1
907 $ hg status a
907 $ hg status a
908 $ hg --verbose kwexpand a
908 $ hg --verbose kwexpand a
909 overwriting a expanding keywords
909 overwriting a expanding keywords
910 $ hg status a
910 $ hg status a
911
911
912 kwexpand x/a should abort
912 kwexpand x/a should abort
913
913
914 $ hg --verbose kwexpand x/a
914 $ hg --verbose kwexpand x/a
915 abort: outstanding uncommitted changes
915 abort: outstanding uncommitted changes
916 [255]
916 [255]
917 $ cd x
917 $ cd x
918 $ hg --debug commit -m xa -d '3 0' -u 'User Name <user@example.com>'
918 $ hg --debug commit -m xa -d '3 0' -u 'User Name <user@example.com>'
919 x/a
919 x/a
920 x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e
920 x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e
921 invalid branchheads cache: tip differs
921 invalid branchheads cache: tip differs
922 overwriting x/a expanding keywords
922 overwriting x/a expanding keywords
923 committed changeset 3:b4560182a3f9a358179fd2d835c15e9da379c1e4
923 committed changeset 3:b4560182a3f9a358179fd2d835c15e9da379c1e4
924 $ cat a
924 $ cat a
925 expand $Id: x/a b4560182a3f9 Thu, 01 Jan 1970 00:00:03 +0000 user $
925 expand $Id: x/a b4560182a3f9 Thu, 01 Jan 1970 00:00:03 +0000 user $
926 do not process $Id:
926 do not process $Id:
927 xxx $
927 xxx $
928 $Xinfo: User Name <user@example.com>: xa $
928 $Xinfo: User Name <user@example.com>: xa $
929
929
930 kwshrink a inside directory x
930 kwshrink a inside directory x
931
931
932 $ hg --verbose kwshrink a
932 $ hg --verbose kwshrink a
933 overwriting x/a shrinking keywords
933 overwriting x/a shrinking keywords
934 $ cat a
934 $ cat a
935 expand $Id$
935 expand $Id$
936 do not process $Id:
936 do not process $Id:
937 xxx $
937 xxx $
938 $Xinfo$
938 $Xinfo$
939 $ cd ..
939 $ cd ..
940
940
941 kwexpand nonexistent
941 kwexpand nonexistent
942
942
943 $ hg kwexpand nonexistent
943 $ hg kwexpand nonexistent
944 nonexistent:* (glob)
944 nonexistent:* (glob)
945
945
946
946
947 #if serve
947 #if serve
948 hg serve
948 hg serve
949 - expand with hgweb file
949 - expand with hgweb file
950 - no expansion with hgweb annotate/changeset/filediff
950 - no expansion with hgweb annotate/changeset/filediff
951 - check errors
951 - check errors
952
952
953 $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
953 $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
954 $ cat hg.pid >> $DAEMON_PIDS
954 $ cat hg.pid >> $DAEMON_PIDS
955 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT 'file/tip/a/?style=raw'
955 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT 'file/tip/a/?style=raw'
956 200 Script output follows
956 200 Script output follows
957
957
958 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
958 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
959 do not process $Id:
959 do not process $Id:
960 xxx $
960 xxx $
961 $Xinfo: User Name <user@example.com>: firstline $
961 $Xinfo: User Name <user@example.com>: firstline $
962 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT 'annotate/tip/a/?style=raw'
962 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT 'annotate/tip/a/?style=raw'
963 200 Script output follows
963 200 Script output follows
964
964
965
965
966 user@1: expand $Id$
966 user@1: expand $Id$
967 user@1: do not process $Id:
967 user@1: do not process $Id:
968 user@1: xxx $
968 user@1: xxx $
969 user@2: $Xinfo$
969 user@2: $Xinfo$
970
970
971
971
972
972
973
973
974 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT 'rev/tip/?style=raw'
974 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT 'rev/tip/?style=raw'
975 200 Script output follows
975 200 Script output follows
976
976
977
977
978 # HG changeset patch
978 # HG changeset patch
979 # User User Name <user@example.com>
979 # User User Name <user@example.com>
980 # Date 3 0
980 # Date 3 0
981 # Node ID b4560182a3f9a358179fd2d835c15e9da379c1e4
981 # Node ID b4560182a3f9a358179fd2d835c15e9da379c1e4
982 # Parent bb948857c743469b22bbf51f7ec8112279ca5d83
982 # Parent bb948857c743469b22bbf51f7ec8112279ca5d83
983 xa
983 xa
984
984
985 diff -r bb948857c743 -r b4560182a3f9 x/a
985 diff -r bb948857c743 -r b4560182a3f9 x/a
986 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
986 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
987 +++ b/x/a Thu Jan 01 00:00:03 1970 +0000
987 +++ b/x/a Thu Jan 01 00:00:03 1970 +0000
988 @@ -0,0 +1,4 @@
988 @@ -0,0 +1,4 @@
989 +expand $Id$
989 +expand $Id$
990 +do not process $Id:
990 +do not process $Id:
991 +xxx $
991 +xxx $
992 +$Xinfo$
992 +$Xinfo$
993
993
994 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT 'diff/bb948857c743/a?style=raw'
994 $ "$TESTDIR/get-with-headers.py" localhost:$HGPORT 'diff/bb948857c743/a?style=raw'
995 200 Script output follows
995 200 Script output follows
996
996
997
997
998 diff -r ef63ca68695b -r bb948857c743 a
998 diff -r ef63ca68695b -r bb948857c743 a
999 --- a/a Thu Jan 01 00:00:00 1970 +0000
999 --- a/a Thu Jan 01 00:00:00 1970 +0000
1000 +++ b/a Thu Jan 01 00:00:02 1970 +0000
1000 +++ b/a Thu Jan 01 00:00:02 1970 +0000
1001 @@ -1,3 +1,4 @@
1001 @@ -1,3 +1,4 @@
1002 expand $Id$
1002 expand $Id$
1003 do not process $Id:
1003 do not process $Id:
1004 xxx $
1004 xxx $
1005 +$Xinfo$
1005 +$Xinfo$
1006
1006
1007
1007
1008
1008
1009
1009
1010 $ cat errors.log
1010 $ cat errors.log
1011 #endif
1011 #endif
1012
1012
1013 Prepare merge and resolve tests
1013 Prepare merge and resolve tests
1014
1014
1015 $ echo '$Id$' > m
1015 $ echo '$Id$' > m
1016 $ hg add m
1016 $ hg add m
1017 $ hg commit -m 4kw
1017 $ hg commit -m 4kw
1018 $ echo foo >> m
1018 $ echo foo >> m
1019 $ hg commit -m 5foo
1019 $ hg commit -m 5foo
1020
1020
1021 simplemerge
1021 simplemerge
1022
1022
1023 $ hg update 4
1023 $ hg update 4
1024 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1024 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1025 $ echo foo >> m
1025 $ echo foo >> m
1026 $ hg commit -m 6foo
1026 $ hg commit -m 6foo
1027 created new head
1027 created new head
1028 $ hg merge
1028 $ hg merge
1029 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1029 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1030 (branch merge, don't forget to commit)
1030 (branch merge, don't forget to commit)
1031 $ hg commit -m simplemerge
1031 $ hg commit -m simplemerge
1032 $ cat m
1032 $ cat m
1033 $Id: m 27d48ee14f67 Thu, 01 Jan 1970 00:00:00 +0000 test $
1033 $Id: m 27d48ee14f67 Thu, 01 Jan 1970 00:00:00 +0000 test $
1034 foo
1034 foo
1035
1035
1036 conflict: keyword should stay outside conflict zone
1036 conflict: keyword should stay outside conflict zone
1037
1037
1038 $ hg update 4
1038 $ hg update 4
1039 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1039 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1040 $ echo bar >> m
1040 $ echo bar >> m
1041 $ hg commit -m 8bar
1041 $ hg commit -m 8bar
1042 created new head
1042 created new head
1043 $ hg merge
1043 $ hg merge
1044 merging m
1044 merging m
1045 warning: conflicts during merge.
1045 warning: conflicts during merge.
1046 merging m incomplete! (edit conflicts, then use 'hg resolve --mark')
1046 merging m incomplete! (edit conflicts, then use 'hg resolve --mark')
1047 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
1047 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
1048 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
1048 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
1049 [1]
1049 [1]
1050 $ cat m
1050 $ cat m
1051 $Id$
1051 $Id$
1052 <<<<<<< local
1052 <<<<<<< local
1053 bar
1053 bar
1054 =======
1054 =======
1055 foo
1055 foo
1056 >>>>>>> other
1056 >>>>>>> other
1057
1057
1058 resolve to local
1058 resolve to local
1059
1059
1060 $ HGMERGE=internal:local hg resolve -a
1060 $ HGMERGE=internal:local hg resolve -a
1061 $ hg commit -m localresolve
1061 $ hg commit -m localresolve
1062 $ cat m
1062 $ cat m
1063 $Id: m 800511b3a22d Thu, 01 Jan 1970 00:00:00 +0000 test $
1063 $Id: m 800511b3a22d Thu, 01 Jan 1970 00:00:00 +0000 test $
1064 bar
1064 bar
1065
1065
1066 Test restricted mode with transplant -b
1066 Test restricted mode with transplant -b
1067
1067
1068 $ hg update 6
1068 $ hg update 6
1069 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1069 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1070 $ hg branch foo
1070 $ hg branch foo
1071 marked working directory as branch foo
1071 marked working directory as branch foo
1072 (branches are permanent and global, did you want a bookmark?)
1072 (branches are permanent and global, did you want a bookmark?)
1073 $ mv a a.bak
1073 $ mv a a.bak
1074 $ echo foobranch > a
1074 $ echo foobranch > a
1075 $ cat a.bak >> a
1075 $ cat a.bak >> a
1076 $ rm a.bak
1076 $ rm a.bak
1077 $ hg commit -m 9foobranch
1077 $ hg commit -m 9foobranch
1078 $ hg update default
1078 $ hg update default
1079 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1079 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1080 $ hg -y transplant -b foo tip
1080 $ hg -y transplant -b foo tip
1081 applying 4aa30d025d50
1081 applying 4aa30d025d50
1082 4aa30d025d50 transplanted to e00abbf63521
1082 4aa30d025d50 transplanted to e00abbf63521
1083
1083
1084 Expansion in changeset but not in file
1084 Expansion in changeset but not in file
1085
1085
1086 $ hg tip -p
1086 $ hg tip -p
1087 changeset: 11:e00abbf63521
1087 changeset: 11:e00abbf63521
1088 tag: tip
1088 tag: tip
1089 parent: 9:800511b3a22d
1089 parent: 9:800511b3a22d
1090 user: test
1090 user: test
1091 date: Thu Jan 01 00:00:00 1970 +0000
1091 date: Thu Jan 01 00:00:00 1970 +0000
1092 summary: 9foobranch
1092 summary: 9foobranch
1093
1093
1094 diff -r 800511b3a22d -r e00abbf63521 a
1094 diff -r 800511b3a22d -r e00abbf63521 a
1095 --- a/a Thu Jan 01 00:00:00 1970 +0000
1095 --- a/a Thu Jan 01 00:00:00 1970 +0000
1096 +++ b/a Thu Jan 01 00:00:00 1970 +0000
1096 +++ b/a Thu Jan 01 00:00:00 1970 +0000
1097 @@ -1,3 +1,4 @@
1097 @@ -1,3 +1,4 @@
1098 +foobranch
1098 +foobranch
1099 expand $Id$
1099 expand $Id$
1100 do not process $Id:
1100 do not process $Id:
1101 xxx $
1101 xxx $
1102
1102
1103 $ head -n 2 a
1103 $ head -n 2 a
1104 foobranch
1104 foobranch
1105 expand $Id: a e00abbf63521 Thu, 01 Jan 1970 00:00:00 +0000 test $
1105 expand $Id: a e00abbf63521 Thu, 01 Jan 1970 00:00:00 +0000 test $
1106
1106
1107 Turn off expansion
1107 Turn off expansion
1108
1108
1109 $ hg -q rollback
1109 $ hg -q rollback
1110 $ hg -q update -C
1110 $ hg -q update -C
1111
1111
1112 kwshrink with unknown file u
1112 kwshrink with unknown file u
1113
1113
1114 $ cp a u
1114 $ cp a u
1115 $ hg --verbose kwshrink
1115 $ hg --verbose kwshrink
1116 overwriting a shrinking keywords
1116 overwriting a shrinking keywords
1117 overwriting m shrinking keywords
1117 overwriting m shrinking keywords
1118 overwriting x/a shrinking keywords
1118 overwriting x/a shrinking keywords
1119
1119
1120 Keywords shrunk in working directory, but not yet disabled
1120 Keywords shrunk in working directory, but not yet disabled
1121 - cat shows unexpanded keywords
1121 - cat shows unexpanded keywords
1122 - hg cat shows expanded keywords
1122 - hg cat shows expanded keywords
1123
1123
1124 $ cat a b
1124 $ cat a b
1125 expand $Id$
1125 expand $Id$
1126 do not process $Id:
1126 do not process $Id:
1127 xxx $
1127 xxx $
1128 $Xinfo$
1128 $Xinfo$
1129 ignore $Id$
1129 ignore $Id$
1130 $ hg cat sym a b && echo
1130 $ hg cat sym a b && echo
1131 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
1131 expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $
1132 do not process $Id:
1132 do not process $Id:
1133 xxx $
1133 xxx $
1134 $Xinfo: User Name <user@example.com>: firstline $
1134 $Xinfo: User Name <user@example.com>: firstline $
1135 ignore $Id$
1135 ignore $Id$
1136 a
1136 a
1137
1137
1138 Now disable keyword expansion
1138 Now disable keyword expansion
1139
1139
1140 $ rm "$HGRCPATH"
1140 $ rm "$HGRCPATH"
1141 $ cat a b
1141 $ cat a b
1142 expand $Id$
1142 expand $Id$
1143 do not process $Id:
1143 do not process $Id:
1144 xxx $
1144 xxx $
1145 $Xinfo$
1145 $Xinfo$
1146 ignore $Id$
1146 ignore $Id$
1147 $ hg cat sym a b && echo
1147 $ hg cat sym a b && echo
1148 expand $Id$
1148 expand $Id$
1149 do not process $Id:
1149 do not process $Id:
1150 xxx $
1150 xxx $
1151 $Xinfo$
1151 $Xinfo$
1152 ignore $Id$
1152 ignore $Id$
1153 a
1153 a
1154
1154
1155 $ cd ..
1155 $ cd ..
@@ -1,344 +1,344
1 $ branchcache=.hg/cache/branchheads
1 $ branchcache=.hg/cache/branchheads
2
2
3 $ listbranchcaches() {
3 $ listbranchcaches() {
4 > for f in .hg/cache/branchheads*;
4 > for f in .hg/cache/branchheads*;
5 > do echo === $f ===;
5 > do echo === $f ===;
6 > cat $f;
6 > cat $f;
7 > done;
7 > done;
8 > }
8 > }
9 $ purgebranchcaches() {
9 $ purgebranchcaches() {
10 > rm .hg/cache/branchheads*
10 > rm .hg/cache/branchheads*
11 > }
11 > }
12
12
13 $ hg init t
13 $ hg init t
14 $ cd t
14 $ cd t
15
15
16 $ hg branches
16 $ hg branches
17 $ echo foo > a
17 $ echo foo > a
18 $ hg add a
18 $ hg add a
19 $ hg ci -m "initial"
19 $ hg ci -m "initial"
20 $ hg branch foo
20 $ hg branch foo
21 marked working directory as branch foo
21 marked working directory as branch foo
22 (branches are permanent and global, did you want a bookmark?)
22 (branches are permanent and global, did you want a bookmark?)
23 $ hg branch
23 $ hg branch
24 foo
24 foo
25 $ hg ci -m "add branch name"
25 $ hg ci -m "add branch name"
26 $ hg branch bar
26 $ hg branch bar
27 marked working directory as branch bar
27 marked working directory as branch bar
28 (branches are permanent and global, did you want a bookmark?)
28 (branches are permanent and global, did you want a bookmark?)
29 $ hg ci -m "change branch name"
29 $ hg ci -m "change branch name"
30
30
31 Branch shadowing:
31 Branch shadowing:
32
32
33 $ hg branch default
33 $ hg branch default
34 abort: a branch of the same name already exists
34 abort: a branch of the same name already exists
35 (use 'hg update' to switch to it)
35 (use 'hg update' to switch to it)
36 [255]
36 [255]
37
37
38 $ hg branch -f default
38 $ hg branch -f default
39 marked working directory as branch default
39 marked working directory as branch default
40 (branches are permanent and global, did you want a bookmark?)
40 (branches are permanent and global, did you want a bookmark?)
41
41
42 $ hg ci -m "clear branch name"
42 $ hg ci -m "clear branch name"
43 created new head
43 created new head
44
44
45 There should be only one default branch head
45 There should be only one default branch head
46
46
47 $ hg heads .
47 $ hg heads .
48 changeset: 3:1c28f494dae6
48 changeset: 3:1c28f494dae6
49 tag: tip
49 tag: tip
50 user: test
50 user: test
51 date: Thu Jan 01 00:00:00 1970 +0000
51 date: Thu Jan 01 00:00:00 1970 +0000
52 summary: clear branch name
52 summary: clear branch name
53
53
54
54
55 $ hg co foo
55 $ hg co foo
56 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
57 $ hg branch
57 $ hg branch
58 foo
58 foo
59 $ echo bleah > a
59 $ echo bleah > a
60 $ hg ci -m "modify a branch"
60 $ hg ci -m "modify a branch"
61
61
62 $ hg merge default
62 $ hg merge default
63 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
63 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
64 (branch merge, don't forget to commit)
64 (branch merge, don't forget to commit)
65
65
66 $ hg branch
66 $ hg branch
67 foo
67 foo
68 $ hg ci -m "merge"
68 $ hg ci -m "merge"
69
69
70 $ hg log
70 $ hg log
71 changeset: 5:530046499edf
71 changeset: 5:530046499edf
72 branch: foo
72 branch: foo
73 tag: tip
73 tag: tip
74 parent: 4:adf1a74a7f7b
74 parent: 4:adf1a74a7f7b
75 parent: 3:1c28f494dae6
75 parent: 3:1c28f494dae6
76 user: test
76 user: test
77 date: Thu Jan 01 00:00:00 1970 +0000
77 date: Thu Jan 01 00:00:00 1970 +0000
78 summary: merge
78 summary: merge
79
79
80 changeset: 4:adf1a74a7f7b
80 changeset: 4:adf1a74a7f7b
81 branch: foo
81 branch: foo
82 parent: 1:6c0e42da283a
82 parent: 1:6c0e42da283a
83 user: test
83 user: test
84 date: Thu Jan 01 00:00:00 1970 +0000
84 date: Thu Jan 01 00:00:00 1970 +0000
85 summary: modify a branch
85 summary: modify a branch
86
86
87 changeset: 3:1c28f494dae6
87 changeset: 3:1c28f494dae6
88 user: test
88 user: test
89 date: Thu Jan 01 00:00:00 1970 +0000
89 date: Thu Jan 01 00:00:00 1970 +0000
90 summary: clear branch name
90 summary: clear branch name
91
91
92 changeset: 2:c21617b13b22
92 changeset: 2:c21617b13b22
93 branch: bar
93 branch: bar
94 user: test
94 user: test
95 date: Thu Jan 01 00:00:00 1970 +0000
95 date: Thu Jan 01 00:00:00 1970 +0000
96 summary: change branch name
96 summary: change branch name
97
97
98 changeset: 1:6c0e42da283a
98 changeset: 1:6c0e42da283a
99 branch: foo
99 branch: foo
100 user: test
100 user: test
101 date: Thu Jan 01 00:00:00 1970 +0000
101 date: Thu Jan 01 00:00:00 1970 +0000
102 summary: add branch name
102 summary: add branch name
103
103
104 changeset: 0:db01e8ea3388
104 changeset: 0:db01e8ea3388
105 user: test
105 user: test
106 date: Thu Jan 01 00:00:00 1970 +0000
106 date: Thu Jan 01 00:00:00 1970 +0000
107 summary: initial
107 summary: initial
108
108
109 $ hg branches
109 $ hg branches
110 foo 5:530046499edf
110 foo 5:530046499edf
111 default 3:1c28f494dae6 (inactive)
111 default 3:1c28f494dae6 (inactive)
112 bar 2:c21617b13b22 (inactive)
112 bar 2:c21617b13b22 (inactive)
113
113
114 $ hg branches -q
114 $ hg branches -q
115 foo
115 foo
116 default
116 default
117 bar
117 bar
118
118
119 Test for invalid branch cache:
119 Test for invalid branch cache:
120
120
121 $ hg rollback
121 $ hg rollback
122 repository tip rolled back to revision 4 (undo commit)
122 repository tip rolled back to revision 4 (undo commit)
123 working directory now based on revisions 4 and 3
123 working directory now based on revisions 4 and 3
124
124
125 $ cp ${branchcache}-unserved .hg/bc-invalid
125 $ cp ${branchcache}-served .hg/bc-invalid
126
126
127 $ hg log -r foo
127 $ hg log -r foo
128 changeset: 4:adf1a74a7f7b
128 changeset: 4:adf1a74a7f7b
129 branch: foo
129 branch: foo
130 tag: tip
130 tag: tip
131 parent: 1:6c0e42da283a
131 parent: 1:6c0e42da283a
132 user: test
132 user: test
133 date: Thu Jan 01 00:00:00 1970 +0000
133 date: Thu Jan 01 00:00:00 1970 +0000
134 summary: modify a branch
134 summary: modify a branch
135
135
136 $ cp .hg/bc-invalid $branchcache
136 $ cp .hg/bc-invalid $branchcache
137
137
138 $ hg --debug log -r foo
138 $ hg --debug log -r foo
139 changeset: 4:adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6
139 changeset: 4:adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6
140 branch: foo
140 branch: foo
141 tag: tip
141 tag: tip
142 phase: draft
142 phase: draft
143 parent: 1:6c0e42da283a56b5edc5b4fadb491365ec7f5fa8
143 parent: 1:6c0e42da283a56b5edc5b4fadb491365ec7f5fa8
144 parent: -1:0000000000000000000000000000000000000000
144 parent: -1:0000000000000000000000000000000000000000
145 manifest: 1:8c342a37dfba0b3d3ce073562a00d8a813c54ffe
145 manifest: 1:8c342a37dfba0b3d3ce073562a00d8a813c54ffe
146 user: test
146 user: test
147 date: Thu Jan 01 00:00:00 1970 +0000
147 date: Thu Jan 01 00:00:00 1970 +0000
148 files: a
148 files: a
149 extra: branch=foo
149 extra: branch=foo
150 description:
150 description:
151 modify a branch
151 modify a branch
152
152
153
153
154 $ purgebranchcaches
154 $ purgebranchcaches
155 $ echo corrupted > $branchcache
155 $ echo corrupted > $branchcache
156
156
157 $ hg log -qr foo
157 $ hg log -qr foo
158 4:adf1a74a7f7b
158 4:adf1a74a7f7b
159
159
160 $ listbranchcaches
160 $ listbranchcaches
161 === .hg/cache/branchheads ===
161 === .hg/cache/branchheads ===
162 corrupted
162 corrupted
163 === .hg/cache/branchheads-unserved ===
163 === .hg/cache/branchheads-served ===
164 adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 4
164 adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 4
165 c21617b13b220988e7a2e26290fbe4325ffa7139 bar
165 c21617b13b220988e7a2e26290fbe4325ffa7139 bar
166 1c28f494dae69a2f8fc815059d257eccf3fcfe75 default
166 1c28f494dae69a2f8fc815059d257eccf3fcfe75 default
167 adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 foo
167 adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 foo
168
168
169 Push should update the branch cache:
169 Push should update the branch cache:
170
170
171 $ hg init ../target
171 $ hg init ../target
172
172
173 Pushing just rev 0:
173 Pushing just rev 0:
174
174
175 $ hg push -qr 0 ../target
175 $ hg push -qr 0 ../target
176
176
177 $ (cd ../target/; listbranchcaches)
177 $ (cd ../target/; listbranchcaches)
178 === .hg/cache/branchheads-impactable ===
178 === .hg/cache/branchheads-base ===
179 db01e8ea3388fd3c7c94e1436ea2bd6a53d581c5 0
179 db01e8ea3388fd3c7c94e1436ea2bd6a53d581c5 0
180 db01e8ea3388fd3c7c94e1436ea2bd6a53d581c5 default
180 db01e8ea3388fd3c7c94e1436ea2bd6a53d581c5 default
181
181
182 Pushing everything:
182 Pushing everything:
183
183
184 $ hg push -qf ../target
184 $ hg push -qf ../target
185
185
186 $ (cd ../target/; listbranchcaches)
186 $ (cd ../target/; listbranchcaches)
187 === .hg/cache/branchheads-impactable ===
187 === .hg/cache/branchheads-base ===
188 adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 4
188 adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 4
189 c21617b13b220988e7a2e26290fbe4325ffa7139 bar
189 c21617b13b220988e7a2e26290fbe4325ffa7139 bar
190 1c28f494dae69a2f8fc815059d257eccf3fcfe75 default
190 1c28f494dae69a2f8fc815059d257eccf3fcfe75 default
191 adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 foo
191 adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 foo
192
192
193 Update with no arguments: tipmost revision of the current branch:
193 Update with no arguments: tipmost revision of the current branch:
194
194
195 $ hg up -q -C 0
195 $ hg up -q -C 0
196 $ hg up -q
196 $ hg up -q
197 $ hg id
197 $ hg id
198 1c28f494dae6
198 1c28f494dae6
199
199
200 $ hg up -q 1
200 $ hg up -q 1
201 $ hg up -q
201 $ hg up -q
202 $ hg id
202 $ hg id
203 adf1a74a7f7b (foo) tip
203 adf1a74a7f7b (foo) tip
204
204
205 $ hg branch foobar
205 $ hg branch foobar
206 marked working directory as branch foobar
206 marked working directory as branch foobar
207 (branches are permanent and global, did you want a bookmark?)
207 (branches are permanent and global, did you want a bookmark?)
208
208
209 $ hg up
209 $ hg up
210 abort: branch foobar not found
210 abort: branch foobar not found
211 [255]
211 [255]
212
212
213 Fastforward merge:
213 Fastforward merge:
214
214
215 $ hg branch ff
215 $ hg branch ff
216 marked working directory as branch ff
216 marked working directory as branch ff
217 (branches are permanent and global, did you want a bookmark?)
217 (branches are permanent and global, did you want a bookmark?)
218
218
219 $ echo ff > ff
219 $ echo ff > ff
220 $ hg ci -Am'fast forward'
220 $ hg ci -Am'fast forward'
221 adding ff
221 adding ff
222
222
223 $ hg up foo
223 $ hg up foo
224 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
224 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
225
225
226 $ hg merge ff
226 $ hg merge ff
227 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
227 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
228 (branch merge, don't forget to commit)
228 (branch merge, don't forget to commit)
229
229
230 $ hg branch
230 $ hg branch
231 foo
231 foo
232 $ hg commit -m'Merge ff into foo'
232 $ hg commit -m'Merge ff into foo'
233 $ hg parents
233 $ hg parents
234 changeset: 6:185ffbfefa30
234 changeset: 6:185ffbfefa30
235 branch: foo
235 branch: foo
236 tag: tip
236 tag: tip
237 parent: 4:adf1a74a7f7b
237 parent: 4:adf1a74a7f7b
238 parent: 5:1a3c27dc5e11
238 parent: 5:1a3c27dc5e11
239 user: test
239 user: test
240 date: Thu Jan 01 00:00:00 1970 +0000
240 date: Thu Jan 01 00:00:00 1970 +0000
241 summary: Merge ff into foo
241 summary: Merge ff into foo
242
242
243 $ hg manifest
243 $ hg manifest
244 a
244 a
245 ff
245 ff
246
246
247
247
248 Test merging, add 3 default heads and one test head:
248 Test merging, add 3 default heads and one test head:
249
249
250 $ cd ..
250 $ cd ..
251 $ hg init merges
251 $ hg init merges
252 $ cd merges
252 $ cd merges
253 $ echo a > a
253 $ echo a > a
254 $ hg ci -Ama
254 $ hg ci -Ama
255 adding a
255 adding a
256
256
257 $ echo b > b
257 $ echo b > b
258 $ hg ci -Amb
258 $ hg ci -Amb
259 adding b
259 adding b
260
260
261 $ hg up 0
261 $ hg up 0
262 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
262 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
263 $ echo c > c
263 $ echo c > c
264 $ hg ci -Amc
264 $ hg ci -Amc
265 adding c
265 adding c
266 created new head
266 created new head
267
267
268 $ hg up 0
268 $ hg up 0
269 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
269 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
270 $ echo d > d
270 $ echo d > d
271 $ hg ci -Amd
271 $ hg ci -Amd
272 adding d
272 adding d
273 created new head
273 created new head
274
274
275 $ hg up 0
275 $ hg up 0
276 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
276 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
277 $ hg branch test
277 $ hg branch test
278 marked working directory as branch test
278 marked working directory as branch test
279 (branches are permanent and global, did you want a bookmark?)
279 (branches are permanent and global, did you want a bookmark?)
280 $ echo e >> e
280 $ echo e >> e
281 $ hg ci -Ame
281 $ hg ci -Ame
282 adding e
282 adding e
283
283
284 $ hg log
284 $ hg log
285 changeset: 4:3a1e01ed1df4
285 changeset: 4:3a1e01ed1df4
286 branch: test
286 branch: test
287 tag: tip
287 tag: tip
288 parent: 0:cb9a9f314b8b
288 parent: 0:cb9a9f314b8b
289 user: test
289 user: test
290 date: Thu Jan 01 00:00:00 1970 +0000
290 date: Thu Jan 01 00:00:00 1970 +0000
291 summary: e
291 summary: e
292
292
293 changeset: 3:980f7dc84c29
293 changeset: 3:980f7dc84c29
294 parent: 0:cb9a9f314b8b
294 parent: 0:cb9a9f314b8b
295 user: test
295 user: test
296 date: Thu Jan 01 00:00:00 1970 +0000
296 date: Thu Jan 01 00:00:00 1970 +0000
297 summary: d
297 summary: d
298
298
299 changeset: 2:d36c0562f908
299 changeset: 2:d36c0562f908
300 parent: 0:cb9a9f314b8b
300 parent: 0:cb9a9f314b8b
301 user: test
301 user: test
302 date: Thu Jan 01 00:00:00 1970 +0000
302 date: Thu Jan 01 00:00:00 1970 +0000
303 summary: c
303 summary: c
304
304
305 changeset: 1:d2ae7f538514
305 changeset: 1:d2ae7f538514
306 user: test
306 user: test
307 date: Thu Jan 01 00:00:00 1970 +0000
307 date: Thu Jan 01 00:00:00 1970 +0000
308 summary: b
308 summary: b
309
309
310 changeset: 0:cb9a9f314b8b
310 changeset: 0:cb9a9f314b8b
311 user: test
311 user: test
312 date: Thu Jan 01 00:00:00 1970 +0000
312 date: Thu Jan 01 00:00:00 1970 +0000
313 summary: a
313 summary: a
314
314
315 Implicit merge with test branch as parent:
315 Implicit merge with test branch as parent:
316
316
317 $ hg merge
317 $ hg merge
318 abort: branch 'test' has one head - please merge with an explicit rev
318 abort: branch 'test' has one head - please merge with an explicit rev
319 (run 'hg heads' to see all heads)
319 (run 'hg heads' to see all heads)
320 [255]
320 [255]
321 $ hg up -C default
321 $ hg up -C default
322 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
322 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
323
323
324 Implicit merge with default branch as parent:
324 Implicit merge with default branch as parent:
325
325
326 $ hg merge
326 $ hg merge
327 abort: branch 'default' has 3 heads - please merge with an explicit rev
327 abort: branch 'default' has 3 heads - please merge with an explicit rev
328 (run 'hg heads .' to see heads)
328 (run 'hg heads .' to see heads)
329 [255]
329 [255]
330
330
331 3 branch heads, explicit merge required:
331 3 branch heads, explicit merge required:
332
332
333 $ hg merge 2
333 $ hg merge 2
334 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
334 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
335 (branch merge, don't forget to commit)
335 (branch merge, don't forget to commit)
336 $ hg ci -m merge
336 $ hg ci -m merge
337
337
338 2 branch heads, implicit merge works:
338 2 branch heads, implicit merge works:
339
339
340 $ hg merge
340 $ hg merge
341 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
341 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
342 (branch merge, don't forget to commit)
342 (branch merge, don't forget to commit)
343
343
344 $ cd ..
344 $ cd ..
@@ -1,447 +1,447
1 Test file dedicated to testing the divergent troubles from obsolete changeset.
1 Test file dedicated to testing the divergent troubles from obsolete changeset.
2
2
3 This is the most complexe troubles from far so we isolate it in a dedicated
3 This is the most complexe troubles from far so we isolate it in a dedicated
4 file.
4 file.
5
5
6 Enable obsolete
6 Enable obsolete
7
7
8 $ cat > obs.py << EOF
8 $ cat > obs.py << EOF
9 > import mercurial.obsolete
9 > import mercurial.obsolete
10 > mercurial.obsolete._enabled = True
10 > mercurial.obsolete._enabled = True
11 > EOF
11 > EOF
12 $ cat >> $HGRCPATH << EOF
12 $ cat >> $HGRCPATH << EOF
13 > [ui]
13 > [ui]
14 > logtemplate = {rev}:{node|short} {desc}\n
14 > logtemplate = {rev}:{node|short} {desc}\n
15 > [extensions]
15 > [extensions]
16 > obs=${TESTTMP}/obs.py
16 > obs=${TESTTMP}/obs.py
17 > [alias]
17 > [alias]
18 > debugobsolete = debugobsolete -d '0 0'
18 > debugobsolete = debugobsolete -d '0 0'
19 > [phases]
19 > [phases]
20 > publish=False
20 > publish=False
21 > EOF
21 > EOF
22
22
23
23
24 $ mkcommit() {
24 $ mkcommit() {
25 > echo "$1" > "$1"
25 > echo "$1" > "$1"
26 > hg add "$1"
26 > hg add "$1"
27 > hg ci -m "$1"
27 > hg ci -m "$1"
28 > }
28 > }
29 $ getid() {
29 $ getid() {
30 > hg id --debug --hidden -ir "desc('$1')"
30 > hg id --debug --hidden -ir "desc('$1')"
31 > }
31 > }
32
32
33 setup repo
33 setup repo
34
34
35 $ hg init reference
35 $ hg init reference
36 $ cd reference
36 $ cd reference
37 $ mkcommit base
37 $ mkcommit base
38 $ mkcommit A_0
38 $ mkcommit A_0
39 $ hg up 0
39 $ hg up 0
40 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
40 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
41 $ mkcommit A_1
41 $ mkcommit A_1
42 created new head
42 created new head
43 $ hg up 0
43 $ hg up 0
44 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
44 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
45 $ mkcommit A_2
45 $ mkcommit A_2
46 created new head
46 created new head
47 $ hg up 0
47 $ hg up 0
48 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
48 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
49 $ cd ..
49 $ cd ..
50
50
51
51
52 $ newcase() {
52 $ newcase() {
53 > hg clone -u 0 -q reference $1
53 > hg clone -u 0 -q reference $1
54 > cd $1
54 > cd $1
55 > }
55 > }
56
56
57 direct divergence
57 direct divergence
58 -----------------
58 -----------------
59
59
60 A_1 have two direct and divergent successors A_1 and A_1
60 A_1 have two direct and divergent successors A_1 and A_1
61
61
62 $ newcase direct
62 $ newcase direct
63 $ hg debugobsolete `getid A_0` `getid A_1`
63 $ hg debugobsolete `getid A_0` `getid A_1`
64 $ hg debugobsolete `getid A_0` `getid A_2`
64 $ hg debugobsolete `getid A_0` `getid A_2`
65 invalid branchheads cache (unserved): tip differs
65 invalid branchheads cache (served): tip differs
66 $ hg log -G --hidden
66 $ hg log -G --hidden
67 o 3:392fd25390da A_2
67 o 3:392fd25390da A_2
68 |
68 |
69 | o 2:82623d38b9ba A_1
69 | o 2:82623d38b9ba A_1
70 |/
70 |/
71 | x 1:007dc284c1f8 A_0
71 | x 1:007dc284c1f8 A_0
72 |/
72 |/
73 @ 0:d20a80d4def3 base
73 @ 0:d20a80d4def3 base
74
74
75 $ hg debugsuccessorssets --hidden 'all()'
75 $ hg debugsuccessorssets --hidden 'all()'
76 d20a80d4def3
76 d20a80d4def3
77 d20a80d4def3
77 d20a80d4def3
78 007dc284c1f8
78 007dc284c1f8
79 82623d38b9ba
79 82623d38b9ba
80 392fd25390da
80 392fd25390da
81 82623d38b9ba
81 82623d38b9ba
82 82623d38b9ba
82 82623d38b9ba
83 392fd25390da
83 392fd25390da
84 392fd25390da
84 392fd25390da
85 $ hg log -r 'divergent()'
85 $ hg log -r 'divergent()'
86 2:82623d38b9ba A_1
86 2:82623d38b9ba A_1
87 3:392fd25390da A_2
87 3:392fd25390da A_2
88
88
89 check that mercurial refuse to push
89 check that mercurial refuse to push
90
90
91 $ hg init ../other
91 $ hg init ../other
92 $ hg push ../other
92 $ hg push ../other
93 pushing to ../other
93 pushing to ../other
94 searching for changes
94 searching for changes
95 abort: push includes divergent changeset: 392fd25390da!
95 abort: push includes divergent changeset: 392fd25390da!
96 [255]
96 [255]
97
97
98 $ cd ..
98 $ cd ..
99
99
100
100
101 indirect divergence with known changeset
101 indirect divergence with known changeset
102 -------------------------------------------
102 -------------------------------------------
103
103
104 $ newcase indirect_known
104 $ newcase indirect_known
105 $ hg debugobsolete `getid A_0` `getid A_1`
105 $ hg debugobsolete `getid A_0` `getid A_1`
106 $ hg debugobsolete `getid A_0` `getid A_2`
106 $ hg debugobsolete `getid A_0` `getid A_2`
107 invalid branchheads cache (unserved): tip differs
107 invalid branchheads cache (served): tip differs
108 $ mkcommit A_3
108 $ mkcommit A_3
109 created new head
109 created new head
110 $ hg debugobsolete `getid A_2` `getid A_3`
110 $ hg debugobsolete `getid A_2` `getid A_3`
111 $ hg log -G --hidden
111 $ hg log -G --hidden
112 @ 4:01f36c5a8fda A_3
112 @ 4:01f36c5a8fda A_3
113 |
113 |
114 | x 3:392fd25390da A_2
114 | x 3:392fd25390da A_2
115 |/
115 |/
116 | o 2:82623d38b9ba A_1
116 | o 2:82623d38b9ba A_1
117 |/
117 |/
118 | x 1:007dc284c1f8 A_0
118 | x 1:007dc284c1f8 A_0
119 |/
119 |/
120 o 0:d20a80d4def3 base
120 o 0:d20a80d4def3 base
121
121
122 $ hg debugsuccessorssets --hidden 'all()'
122 $ hg debugsuccessorssets --hidden 'all()'
123 d20a80d4def3
123 d20a80d4def3
124 d20a80d4def3
124 d20a80d4def3
125 007dc284c1f8
125 007dc284c1f8
126 82623d38b9ba
126 82623d38b9ba
127 01f36c5a8fda
127 01f36c5a8fda
128 82623d38b9ba
128 82623d38b9ba
129 82623d38b9ba
129 82623d38b9ba
130 392fd25390da
130 392fd25390da
131 01f36c5a8fda
131 01f36c5a8fda
132 01f36c5a8fda
132 01f36c5a8fda
133 01f36c5a8fda
133 01f36c5a8fda
134 $ hg log -r 'divergent()'
134 $ hg log -r 'divergent()'
135 2:82623d38b9ba A_1
135 2:82623d38b9ba A_1
136 4:01f36c5a8fda A_3
136 4:01f36c5a8fda A_3
137 $ cd ..
137 $ cd ..
138
138
139
139
140 indirect divergence with known changeset
140 indirect divergence with known changeset
141 -------------------------------------------
141 -------------------------------------------
142
142
143 $ newcase indirect_unknown
143 $ newcase indirect_unknown
144 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
144 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
145 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
145 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
146 invalid branchheads cache (unserved): tip differs
146 invalid branchheads cache (served): tip differs
147 $ hg debugobsolete `getid A_0` `getid A_2`
147 $ hg debugobsolete `getid A_0` `getid A_2`
148 $ hg log -G --hidden
148 $ hg log -G --hidden
149 o 3:392fd25390da A_2
149 o 3:392fd25390da A_2
150 |
150 |
151 | o 2:82623d38b9ba A_1
151 | o 2:82623d38b9ba A_1
152 |/
152 |/
153 | x 1:007dc284c1f8 A_0
153 | x 1:007dc284c1f8 A_0
154 |/
154 |/
155 @ 0:d20a80d4def3 base
155 @ 0:d20a80d4def3 base
156
156
157 $ hg debugsuccessorssets --hidden 'all()'
157 $ hg debugsuccessorssets --hidden 'all()'
158 d20a80d4def3
158 d20a80d4def3
159 d20a80d4def3
159 d20a80d4def3
160 007dc284c1f8
160 007dc284c1f8
161 82623d38b9ba
161 82623d38b9ba
162 392fd25390da
162 392fd25390da
163 82623d38b9ba
163 82623d38b9ba
164 82623d38b9ba
164 82623d38b9ba
165 392fd25390da
165 392fd25390da
166 392fd25390da
166 392fd25390da
167 $ hg log -r 'divergent()'
167 $ hg log -r 'divergent()'
168 2:82623d38b9ba A_1
168 2:82623d38b9ba A_1
169 3:392fd25390da A_2
169 3:392fd25390da A_2
170 $ cd ..
170 $ cd ..
171
171
172 do not take unknown node in account if they are final
172 do not take unknown node in account if they are final
173 -----------------------------------------------------
173 -----------------------------------------------------
174
174
175 $ newcase final-unknown
175 $ newcase final-unknown
176 $ hg debugobsolete `getid A_0` `getid A_1`
176 $ hg debugobsolete `getid A_0` `getid A_1`
177 $ hg debugobsolete `getid A_1` `getid A_2`
177 $ hg debugobsolete `getid A_1` `getid A_2`
178 invalid branchheads cache (unserved): tip differs
178 invalid branchheads cache (served): tip differs
179 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
179 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
180 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
180 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
181 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
181 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
182
182
183 $ hg debugsuccessorssets --hidden 'desc('A_0')'
183 $ hg debugsuccessorssets --hidden 'desc('A_0')'
184 007dc284c1f8
184 007dc284c1f8
185 392fd25390da
185 392fd25390da
186
186
187 $ cd ..
187 $ cd ..
188
188
189 divergence that converge again is not divergence anymore
189 divergence that converge again is not divergence anymore
190 -----------------------------------------------------
190 -----------------------------------------------------
191
191
192 $ newcase converged_divergence
192 $ newcase converged_divergence
193 $ hg debugobsolete `getid A_0` `getid A_1`
193 $ hg debugobsolete `getid A_0` `getid A_1`
194 $ hg debugobsolete `getid A_0` `getid A_2`
194 $ hg debugobsolete `getid A_0` `getid A_2`
195 invalid branchheads cache (unserved): tip differs
195 invalid branchheads cache (served): tip differs
196 $ mkcommit A_3
196 $ mkcommit A_3
197 created new head
197 created new head
198 $ hg debugobsolete `getid A_1` `getid A_3`
198 $ hg debugobsolete `getid A_1` `getid A_3`
199 $ hg debugobsolete `getid A_2` `getid A_3`
199 $ hg debugobsolete `getid A_2` `getid A_3`
200 $ hg log -G --hidden
200 $ hg log -G --hidden
201 @ 4:01f36c5a8fda A_3
201 @ 4:01f36c5a8fda A_3
202 |
202 |
203 | x 3:392fd25390da A_2
203 | x 3:392fd25390da A_2
204 |/
204 |/
205 | x 2:82623d38b9ba A_1
205 | x 2:82623d38b9ba A_1
206 |/
206 |/
207 | x 1:007dc284c1f8 A_0
207 | x 1:007dc284c1f8 A_0
208 |/
208 |/
209 o 0:d20a80d4def3 base
209 o 0:d20a80d4def3 base
210
210
211 $ hg debugsuccessorssets --hidden 'all()'
211 $ hg debugsuccessorssets --hidden 'all()'
212 d20a80d4def3
212 d20a80d4def3
213 d20a80d4def3
213 d20a80d4def3
214 007dc284c1f8
214 007dc284c1f8
215 01f36c5a8fda
215 01f36c5a8fda
216 82623d38b9ba
216 82623d38b9ba
217 01f36c5a8fda
217 01f36c5a8fda
218 392fd25390da
218 392fd25390da
219 01f36c5a8fda
219 01f36c5a8fda
220 01f36c5a8fda
220 01f36c5a8fda
221 01f36c5a8fda
221 01f36c5a8fda
222 $ hg log -r 'divergent()'
222 $ hg log -r 'divergent()'
223 $ cd ..
223 $ cd ..
224
224
225 split is not divergences
225 split is not divergences
226 -----------------------------
226 -----------------------------
227
227
228 $ newcase split
228 $ newcase split
229 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
229 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
230 $ hg log -G --hidden
230 $ hg log -G --hidden
231 o 3:392fd25390da A_2
231 o 3:392fd25390da A_2
232 |
232 |
233 | o 2:82623d38b9ba A_1
233 | o 2:82623d38b9ba A_1
234 |/
234 |/
235 | x 1:007dc284c1f8 A_0
235 | x 1:007dc284c1f8 A_0
236 |/
236 |/
237 @ 0:d20a80d4def3 base
237 @ 0:d20a80d4def3 base
238
238
239 $ hg debugsuccessorssets --hidden 'all()'
239 $ hg debugsuccessorssets --hidden 'all()'
240 d20a80d4def3
240 d20a80d4def3
241 d20a80d4def3
241 d20a80d4def3
242 007dc284c1f8
242 007dc284c1f8
243 82623d38b9ba 392fd25390da
243 82623d38b9ba 392fd25390da
244 82623d38b9ba
244 82623d38b9ba
245 82623d38b9ba
245 82623d38b9ba
246 392fd25390da
246 392fd25390da
247 392fd25390da
247 392fd25390da
248 $ hg log -r 'divergent()'
248 $ hg log -r 'divergent()'
249
249
250 Even when subsequente rewriting happen
250 Even when subsequente rewriting happen
251
251
252 $ mkcommit A_3
252 $ mkcommit A_3
253 created new head
253 created new head
254 $ hg debugobsolete `getid A_1` `getid A_3`
254 $ hg debugobsolete `getid A_1` `getid A_3`
255 $ hg up 0
255 $ hg up 0
256 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
256 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
257 $ mkcommit A_4
257 $ mkcommit A_4
258 created new head
258 created new head
259 $ hg debugobsolete `getid A_2` `getid A_4`
259 $ hg debugobsolete `getid A_2` `getid A_4`
260 $ hg up 0
260 $ hg up 0
261 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
261 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
262 $ mkcommit A_5
262 $ mkcommit A_5
263 created new head
263 created new head
264 $ hg debugobsolete `getid A_4` `getid A_5`
264 $ hg debugobsolete `getid A_4` `getid A_5`
265 $ hg log -G --hidden
265 $ hg log -G --hidden
266 @ 6:e442cfc57690 A_5
266 @ 6:e442cfc57690 A_5
267 |
267 |
268 | x 5:6a411f0d7a0a A_4
268 | x 5:6a411f0d7a0a A_4
269 |/
269 |/
270 | o 4:01f36c5a8fda A_3
270 | o 4:01f36c5a8fda A_3
271 |/
271 |/
272 | x 3:392fd25390da A_2
272 | x 3:392fd25390da A_2
273 |/
273 |/
274 | x 2:82623d38b9ba A_1
274 | x 2:82623d38b9ba A_1
275 |/
275 |/
276 | x 1:007dc284c1f8 A_0
276 | x 1:007dc284c1f8 A_0
277 |/
277 |/
278 o 0:d20a80d4def3 base
278 o 0:d20a80d4def3 base
279
279
280 $ hg debugsuccessorssets --hidden 'all()'
280 $ hg debugsuccessorssets --hidden 'all()'
281 d20a80d4def3
281 d20a80d4def3
282 d20a80d4def3
282 d20a80d4def3
283 007dc284c1f8
283 007dc284c1f8
284 01f36c5a8fda e442cfc57690
284 01f36c5a8fda e442cfc57690
285 82623d38b9ba
285 82623d38b9ba
286 01f36c5a8fda
286 01f36c5a8fda
287 392fd25390da
287 392fd25390da
288 e442cfc57690
288 e442cfc57690
289 01f36c5a8fda
289 01f36c5a8fda
290 01f36c5a8fda
290 01f36c5a8fda
291 6a411f0d7a0a
291 6a411f0d7a0a
292 e442cfc57690
292 e442cfc57690
293 e442cfc57690
293 e442cfc57690
294 e442cfc57690
294 e442cfc57690
295 $ hg log -r 'divergent()'
295 $ hg log -r 'divergent()'
296
296
297 Check more complexe obsolescence graft (with divergence)
297 Check more complexe obsolescence graft (with divergence)
298
298
299 $ mkcommit B_0; hg up 0
299 $ mkcommit B_0; hg up 0
300 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
300 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
301 $ hg debugobsolete `getid B_0` `getid A_2`
301 $ hg debugobsolete `getid B_0` `getid A_2`
302 $ mkcommit A_7; hg up 0
302 $ mkcommit A_7; hg up 0
303 created new head
303 created new head
304 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
304 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
305 $ mkcommit A_8; hg up 0
305 $ mkcommit A_8; hg up 0
306 created new head
306 created new head
307 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
307 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
308 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
308 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
309 $ mkcommit A_9; hg up 0
309 $ mkcommit A_9; hg up 0
310 created new head
310 created new head
311 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
311 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
312 $ hg debugobsolete `getid A_5` `getid A_9`
312 $ hg debugobsolete `getid A_5` `getid A_9`
313 $ hg log -G --hidden
313 $ hg log -G --hidden
314 o 10:bed64f5d2f5a A_9
314 o 10:bed64f5d2f5a A_9
315 |
315 |
316 | o 9:14608b260df8 A_8
316 | o 9:14608b260df8 A_8
317 |/
317 |/
318 | o 8:7ae126973a96 A_7
318 | o 8:7ae126973a96 A_7
319 |/
319 |/
320 | x 7:3750ebee865d B_0
320 | x 7:3750ebee865d B_0
321 | |
321 | |
322 | x 6:e442cfc57690 A_5
322 | x 6:e442cfc57690 A_5
323 |/
323 |/
324 | x 5:6a411f0d7a0a A_4
324 | x 5:6a411f0d7a0a A_4
325 |/
325 |/
326 | o 4:01f36c5a8fda A_3
326 | o 4:01f36c5a8fda A_3
327 |/
327 |/
328 | x 3:392fd25390da A_2
328 | x 3:392fd25390da A_2
329 |/
329 |/
330 | x 2:82623d38b9ba A_1
330 | x 2:82623d38b9ba A_1
331 |/
331 |/
332 | x 1:007dc284c1f8 A_0
332 | x 1:007dc284c1f8 A_0
333 |/
333 |/
334 @ 0:d20a80d4def3 base
334 @ 0:d20a80d4def3 base
335
335
336 $ hg debugsuccessorssets --hidden 'all()'
336 $ hg debugsuccessorssets --hidden 'all()'
337 d20a80d4def3
337 d20a80d4def3
338 d20a80d4def3
338 d20a80d4def3
339 007dc284c1f8
339 007dc284c1f8
340 01f36c5a8fda bed64f5d2f5a
340 01f36c5a8fda bed64f5d2f5a
341 01f36c5a8fda 7ae126973a96 14608b260df8
341 01f36c5a8fda 7ae126973a96 14608b260df8
342 82623d38b9ba
342 82623d38b9ba
343 01f36c5a8fda
343 01f36c5a8fda
344 392fd25390da
344 392fd25390da
345 bed64f5d2f5a
345 bed64f5d2f5a
346 7ae126973a96 14608b260df8
346 7ae126973a96 14608b260df8
347 01f36c5a8fda
347 01f36c5a8fda
348 01f36c5a8fda
348 01f36c5a8fda
349 6a411f0d7a0a
349 6a411f0d7a0a
350 bed64f5d2f5a
350 bed64f5d2f5a
351 7ae126973a96 14608b260df8
351 7ae126973a96 14608b260df8
352 e442cfc57690
352 e442cfc57690
353 bed64f5d2f5a
353 bed64f5d2f5a
354 7ae126973a96 14608b260df8
354 7ae126973a96 14608b260df8
355 3750ebee865d
355 3750ebee865d
356 bed64f5d2f5a
356 bed64f5d2f5a
357 7ae126973a96 14608b260df8
357 7ae126973a96 14608b260df8
358 7ae126973a96
358 7ae126973a96
359 7ae126973a96
359 7ae126973a96
360 14608b260df8
360 14608b260df8
361 14608b260df8
361 14608b260df8
362 bed64f5d2f5a
362 bed64f5d2f5a
363 bed64f5d2f5a
363 bed64f5d2f5a
364 $ hg log -r 'divergent()'
364 $ hg log -r 'divergent()'
365 4:01f36c5a8fda A_3
365 4:01f36c5a8fda A_3
366 8:7ae126973a96 A_7
366 8:7ae126973a96 A_7
367 9:14608b260df8 A_8
367 9:14608b260df8 A_8
368 10:bed64f5d2f5a A_9
368 10:bed64f5d2f5a A_9
369
369
370 fix the divergence
370 fix the divergence
371
371
372 $ mkcommit A_A; hg up 0
372 $ mkcommit A_A; hg up 0
373 created new head
373 created new head
374 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
374 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
375 $ hg debugobsolete `getid A_9` `getid A_A`
375 $ hg debugobsolete `getid A_9` `getid A_A`
376 $ hg debugobsolete `getid A_7` `getid A_A`
376 $ hg debugobsolete `getid A_7` `getid A_A`
377 $ hg debugobsolete `getid A_8` `getid A_A`
377 $ hg debugobsolete `getid A_8` `getid A_A`
378 $ hg log -G --hidden
378 $ hg log -G --hidden
379 o 11:a139f71be9da A_A
379 o 11:a139f71be9da A_A
380 |
380 |
381 | x 10:bed64f5d2f5a A_9
381 | x 10:bed64f5d2f5a A_9
382 |/
382 |/
383 | x 9:14608b260df8 A_8
383 | x 9:14608b260df8 A_8
384 |/
384 |/
385 | x 8:7ae126973a96 A_7
385 | x 8:7ae126973a96 A_7
386 |/
386 |/
387 | x 7:3750ebee865d B_0
387 | x 7:3750ebee865d B_0
388 | |
388 | |
389 | x 6:e442cfc57690 A_5
389 | x 6:e442cfc57690 A_5
390 |/
390 |/
391 | x 5:6a411f0d7a0a A_4
391 | x 5:6a411f0d7a0a A_4
392 |/
392 |/
393 | o 4:01f36c5a8fda A_3
393 | o 4:01f36c5a8fda A_3
394 |/
394 |/
395 | x 3:392fd25390da A_2
395 | x 3:392fd25390da A_2
396 |/
396 |/
397 | x 2:82623d38b9ba A_1
397 | x 2:82623d38b9ba A_1
398 |/
398 |/
399 | x 1:007dc284c1f8 A_0
399 | x 1:007dc284c1f8 A_0
400 |/
400 |/
401 @ 0:d20a80d4def3 base
401 @ 0:d20a80d4def3 base
402
402
403 $ hg debugsuccessorssets --hidden 'all()'
403 $ hg debugsuccessorssets --hidden 'all()'
404 d20a80d4def3
404 d20a80d4def3
405 d20a80d4def3
405 d20a80d4def3
406 007dc284c1f8
406 007dc284c1f8
407 01f36c5a8fda a139f71be9da
407 01f36c5a8fda a139f71be9da
408 82623d38b9ba
408 82623d38b9ba
409 01f36c5a8fda
409 01f36c5a8fda
410 392fd25390da
410 392fd25390da
411 a139f71be9da
411 a139f71be9da
412 01f36c5a8fda
412 01f36c5a8fda
413 01f36c5a8fda
413 01f36c5a8fda
414 6a411f0d7a0a
414 6a411f0d7a0a
415 a139f71be9da
415 a139f71be9da
416 e442cfc57690
416 e442cfc57690
417 a139f71be9da
417 a139f71be9da
418 3750ebee865d
418 3750ebee865d
419 a139f71be9da
419 a139f71be9da
420 7ae126973a96
420 7ae126973a96
421 a139f71be9da
421 a139f71be9da
422 14608b260df8
422 14608b260df8
423 a139f71be9da
423 a139f71be9da
424 bed64f5d2f5a
424 bed64f5d2f5a
425 a139f71be9da
425 a139f71be9da
426 a139f71be9da
426 a139f71be9da
427 a139f71be9da
427 a139f71be9da
428 $ hg log -r 'divergent()'
428 $ hg log -r 'divergent()'
429
429
430 $ cd ..
430 $ cd ..
431
431
432
432
433 Subset does not diverge
433 Subset does not diverge
434 ------------------------------
434 ------------------------------
435
435
436 Do not report divergent successors-set if it is a subset of another
436 Do not report divergent successors-set if it is a subset of another
437 successors-set. (report [A,B] not [A] + [A,B])
437 successors-set. (report [A,B] not [A] + [A,B])
438
438
439 $ newcase subset
439 $ newcase subset
440 $ hg debugobsolete `getid A_0` `getid A_2`
440 $ hg debugobsolete `getid A_0` `getid A_2`
441 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
441 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
442 invalid branchheads cache (unserved): tip differs
442 invalid branchheads cache (served): tip differs
443 $ hg debugsuccessorssets --hidden 'desc('A_0')'
443 $ hg debugsuccessorssets --hidden 'desc('A_0')'
444 007dc284c1f8
444 007dc284c1f8
445 82623d38b9ba 392fd25390da
445 82623d38b9ba 392fd25390da
446
446
447 $ cd ..
447 $ cd ..
@@ -1,503 +1,503
1 $ hglog() { hg log --template "{rev} {phaseidx} {desc}\n" $*; }
1 $ hglog() { hg log --template "{rev} {phaseidx} {desc}\n" $*; }
2 $ mkcommit() {
2 $ mkcommit() {
3 > echo "$1" > "$1"
3 > echo "$1" > "$1"
4 > hg add "$1"
4 > hg add "$1"
5 > message="$1"
5 > message="$1"
6 > shift
6 > shift
7 > hg ci -m "$message" $*
7 > hg ci -m "$message" $*
8 > }
8 > }
9
9
10 $ hg init initialrepo
10 $ hg init initialrepo
11 $ cd initialrepo
11 $ cd initialrepo
12
12
13 Cannot change null revision phase
13 Cannot change null revision phase
14
14
15 $ hg phase --force --secret null
15 $ hg phase --force --secret null
16 abort: cannot change null revision phase
16 abort: cannot change null revision phase
17 [255]
17 [255]
18 $ hg phase null
18 $ hg phase null
19 -1: public
19 -1: public
20
20
21 $ mkcommit A
21 $ mkcommit A
22
22
23 New commit are draft by default
23 New commit are draft by default
24
24
25 $ hglog
25 $ hglog
26 0 1 A
26 0 1 A
27
27
28 Following commit are draft too
28 Following commit are draft too
29
29
30 $ mkcommit B
30 $ mkcommit B
31
31
32 $ hglog
32 $ hglog
33 1 1 B
33 1 1 B
34 0 1 A
34 0 1 A
35
35
36 Draft commit are properly created over public one:
36 Draft commit are properly created over public one:
37
37
38 $ hg phase --public .
38 $ hg phase --public .
39 $ hglog
39 $ hglog
40 1 0 B
40 1 0 B
41 0 0 A
41 0 0 A
42
42
43 $ mkcommit C
43 $ mkcommit C
44 $ mkcommit D
44 $ mkcommit D
45
45
46 $ hglog
46 $ hglog
47 3 1 D
47 3 1 D
48 2 1 C
48 2 1 C
49 1 0 B
49 1 0 B
50 0 0 A
50 0 0 A
51
51
52 Test creating changeset as secret
52 Test creating changeset as secret
53
53
54 $ mkcommit E --config phases.new-commit='secret'
54 $ mkcommit E --config phases.new-commit='secret'
55 $ hglog
55 $ hglog
56 4 2 E
56 4 2 E
57 3 1 D
57 3 1 D
58 2 1 C
58 2 1 C
59 1 0 B
59 1 0 B
60 0 0 A
60 0 0 A
61
61
62 Test the secret property is inherited
62 Test the secret property is inherited
63
63
64 $ mkcommit H
64 $ mkcommit H
65 $ hglog
65 $ hglog
66 5 2 H
66 5 2 H
67 4 2 E
67 4 2 E
68 3 1 D
68 3 1 D
69 2 1 C
69 2 1 C
70 1 0 B
70 1 0 B
71 0 0 A
71 0 0 A
72
72
73 Even on merge
73 Even on merge
74
74
75 $ hg up -q 1
75 $ hg up -q 1
76 $ mkcommit "B'"
76 $ mkcommit "B'"
77 created new head
77 created new head
78 $ hglog
78 $ hglog
79 6 1 B'
79 6 1 B'
80 5 2 H
80 5 2 H
81 4 2 E
81 4 2 E
82 3 1 D
82 3 1 D
83 2 1 C
83 2 1 C
84 1 0 B
84 1 0 B
85 0 0 A
85 0 0 A
86 $ hg merge 4 # E
86 $ hg merge 4 # E
87 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
87 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
88 (branch merge, don't forget to commit)
88 (branch merge, don't forget to commit)
89 $ hg ci -m "merge B' and E"
89 $ hg ci -m "merge B' and E"
90 $ hglog
90 $ hglog
91 7 2 merge B' and E
91 7 2 merge B' and E
92 6 1 B'
92 6 1 B'
93 5 2 H
93 5 2 H
94 4 2 E
94 4 2 E
95 3 1 D
95 3 1 D
96 2 1 C
96 2 1 C
97 1 0 B
97 1 0 B
98 0 0 A
98 0 0 A
99
99
100 Test secret changeset are not pushed
100 Test secret changeset are not pushed
101
101
102 $ hg init ../push-dest
102 $ hg init ../push-dest
103 $ cat > ../push-dest/.hg/hgrc << EOF
103 $ cat > ../push-dest/.hg/hgrc << EOF
104 > [phases]
104 > [phases]
105 > publish=False
105 > publish=False
106 > EOF
106 > EOF
107 $ hg outgoing ../push-dest --template='{rev} {phase} {desc|firstline}\n'
107 $ hg outgoing ../push-dest --template='{rev} {phase} {desc|firstline}\n'
108 comparing with ../push-dest
108 comparing with ../push-dest
109 searching for changes
109 searching for changes
110 0 public A
110 0 public A
111 1 public B
111 1 public B
112 2 draft C
112 2 draft C
113 3 draft D
113 3 draft D
114 6 draft B'
114 6 draft B'
115 $ hg outgoing -r 'branch(default)' ../push-dest --template='{rev} {phase} {desc|firstline}\n'
115 $ hg outgoing -r 'branch(default)' ../push-dest --template='{rev} {phase} {desc|firstline}\n'
116 comparing with ../push-dest
116 comparing with ../push-dest
117 searching for changes
117 searching for changes
118 0 public A
118 0 public A
119 1 public B
119 1 public B
120 2 draft C
120 2 draft C
121 3 draft D
121 3 draft D
122 6 draft B'
122 6 draft B'
123
123
124 $ hg push ../push-dest -f # force because we push multiple heads
124 $ hg push ../push-dest -f # force because we push multiple heads
125 pushing to ../push-dest
125 pushing to ../push-dest
126 searching for changes
126 searching for changes
127 adding changesets
127 adding changesets
128 adding manifests
128 adding manifests
129 adding file changes
129 adding file changes
130 added 5 changesets with 5 changes to 5 files (+1 heads)
130 added 5 changesets with 5 changes to 5 files (+1 heads)
131 $ hglog
131 $ hglog
132 7 2 merge B' and E
132 7 2 merge B' and E
133 6 1 B'
133 6 1 B'
134 5 2 H
134 5 2 H
135 4 2 E
135 4 2 E
136 3 1 D
136 3 1 D
137 2 1 C
137 2 1 C
138 1 0 B
138 1 0 B
139 0 0 A
139 0 0 A
140 $ cd ../push-dest
140 $ cd ../push-dest
141 $ hglog
141 $ hglog
142 4 1 B'
142 4 1 B'
143 3 1 D
143 3 1 D
144 2 1 C
144 2 1 C
145 1 0 B
145 1 0 B
146 0 0 A
146 0 0 A
147
147
148 (Issue3303)
148 (Issue3303)
149 Check that remote secret changeset are ignore when checking creation of remote heads
149 Check that remote secret changeset are ignore when checking creation of remote heads
150
150
151 We add a secret head into the push destination. This secreat head shadow a
151 We add a secret head into the push destination. This secreat head shadow a
152 visible shared between the initial repo and the push destination.
152 visible shared between the initial repo and the push destination.
153
153
154 $ hg up -q 4 # B'
154 $ hg up -q 4 # B'
155 $ mkcommit Z --config phases.new-commit=secret
155 $ mkcommit Z --config phases.new-commit=secret
156 $ hg phase .
156 $ hg phase .
157 5: secret
157 5: secret
158
158
159 # We now try to push a new public changeset that descend from the common public
159 # We now try to push a new public changeset that descend from the common public
160 # head shadowed by the remote secret head.
160 # head shadowed by the remote secret head.
161
161
162 $ cd ../initialrepo
162 $ cd ../initialrepo
163 $ hg up -q 6 #B'
163 $ hg up -q 6 #B'
164 $ mkcommit I
164 $ mkcommit I
165 created new head
165 created new head
166 $ hg push ../push-dest
166 $ hg push ../push-dest
167 pushing to ../push-dest
167 pushing to ../push-dest
168 searching for changes
168 searching for changes
169 adding changesets
169 adding changesets
170 adding manifests
170 adding manifests
171 adding file changes
171 adding file changes
172 added 1 changesets with 1 changes to 1 files (+1 heads)
172 added 1 changesets with 1 changes to 1 files (+1 heads)
173
173
174 :note: The "(+1 heads)" is wrong as we do not had any visible head
174 :note: The "(+1 heads)" is wrong as we do not had any visible head
175
175
176 check that branch cache with "unserved" filter are properly computed and stored
176 check that branch cache with "unserved" filter are properly computed and stored
177
177
178 $ ls ../push-dest/.hg/cache/branchheads*
178 $ ls ../push-dest/.hg/cache/branchheads*
179 ../push-dest/.hg/cache/branchheads-hidden
179 ../push-dest/.hg/cache/branchheads-served
180 ../push-dest/.hg/cache/branchheads-unserved
180 ../push-dest/.hg/cache/branchheads-visible
181 $ cat ../push-dest/.hg/cache/branchheads-hidden
181 $ cat ../push-dest/.hg/cache/branchheads-visible
182 6d6770faffce199f1fddd1cf87f6f026138cf061 6
182 6d6770faffce199f1fddd1cf87f6f026138cf061 6
183 b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e default
183 b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e default
184 2713879da13d6eea1ff22b442a5a87cb31a7ce6a default
184 2713879da13d6eea1ff22b442a5a87cb31a7ce6a default
185 6d6770faffce199f1fddd1cf87f6f026138cf061 default
185 6d6770faffce199f1fddd1cf87f6f026138cf061 default
186 $ cat ../push-dest/.hg/cache/branchheads-unserved
186 $ cat ../push-dest/.hg/cache/branchheads-served
187 cf9fe039dfd67e829edf6522a45de057b5c86519 4
187 cf9fe039dfd67e829edf6522a45de057b5c86519 4
188 b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e default
188 b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e default
189 cf9fe039dfd67e829edf6522a45de057b5c86519 default
189 cf9fe039dfd67e829edf6522a45de057b5c86519 default
190
190
191
191
192 Restore condition prior extra insertion.
192 Restore condition prior extra insertion.
193 $ hg -q --config extensions.mq= strip .
193 $ hg -q --config extensions.mq= strip .
194 $ hg up -q 7
194 $ hg up -q 7
195 $ cd ..
195 $ cd ..
196
196
197 Test secret changeset are not pull
197 Test secret changeset are not pull
198
198
199 $ hg init pull-dest
199 $ hg init pull-dest
200 $ cd pull-dest
200 $ cd pull-dest
201 $ hg pull ../initialrepo
201 $ hg pull ../initialrepo
202 pulling from ../initialrepo
202 pulling from ../initialrepo
203 requesting all changes
203 requesting all changes
204 adding changesets
204 adding changesets
205 adding manifests
205 adding manifests
206 adding file changes
206 adding file changes
207 added 5 changesets with 5 changes to 5 files (+1 heads)
207 added 5 changesets with 5 changes to 5 files (+1 heads)
208 (run 'hg heads' to see heads, 'hg merge' to merge)
208 (run 'hg heads' to see heads, 'hg merge' to merge)
209 $ hglog
209 $ hglog
210 4 0 B'
210 4 0 B'
211 3 0 D
211 3 0 D
212 2 0 C
212 2 0 C
213 1 0 B
213 1 0 B
214 0 0 A
214 0 0 A
215 $ cd ..
215 $ cd ..
216
216
217 But secret can still be bundled explicitly
217 But secret can still be bundled explicitly
218
218
219 $ cd initialrepo
219 $ cd initialrepo
220 $ hg bundle --base '4^' -r 'children(4)' ../secret-bundle.hg
220 $ hg bundle --base '4^' -r 'children(4)' ../secret-bundle.hg
221 4 changesets found
221 4 changesets found
222 $ cd ..
222 $ cd ..
223
223
224 Test secret changeset are not cloned
224 Test secret changeset are not cloned
225 (during local clone)
225 (during local clone)
226
226
227 $ hg clone -qU initialrepo clone-dest
227 $ hg clone -qU initialrepo clone-dest
228 $ hglog -R clone-dest
228 $ hglog -R clone-dest
229 4 0 B'
229 4 0 B'
230 3 0 D
230 3 0 D
231 2 0 C
231 2 0 C
232 1 0 B
232 1 0 B
233 0 0 A
233 0 0 A
234
234
235 Test revset
235 Test revset
236
236
237 $ cd initialrepo
237 $ cd initialrepo
238 $ hglog -r 'public()'
238 $ hglog -r 'public()'
239 0 0 A
239 0 0 A
240 1 0 B
240 1 0 B
241 $ hglog -r 'draft()'
241 $ hglog -r 'draft()'
242 2 1 C
242 2 1 C
243 3 1 D
243 3 1 D
244 6 1 B'
244 6 1 B'
245 $ hglog -r 'secret()'
245 $ hglog -r 'secret()'
246 4 2 E
246 4 2 E
247 5 2 H
247 5 2 H
248 7 2 merge B' and E
248 7 2 merge B' and E
249
249
250 test that phase are displayed in log at debug level
250 test that phase are displayed in log at debug level
251
251
252 $ hg log --debug
252 $ hg log --debug
253 changeset: 7:17a481b3bccb796c0521ae97903d81c52bfee4af
253 changeset: 7:17a481b3bccb796c0521ae97903d81c52bfee4af
254 tag: tip
254 tag: tip
255 phase: secret
255 phase: secret
256 parent: 6:cf9fe039dfd67e829edf6522a45de057b5c86519
256 parent: 6:cf9fe039dfd67e829edf6522a45de057b5c86519
257 parent: 4:a603bfb5a83e312131cebcd05353c217d4d21dde
257 parent: 4:a603bfb5a83e312131cebcd05353c217d4d21dde
258 manifest: 7:5e724ffacba267b2ab726c91fc8b650710deaaa8
258 manifest: 7:5e724ffacba267b2ab726c91fc8b650710deaaa8
259 user: test
259 user: test
260 date: Thu Jan 01 00:00:00 1970 +0000
260 date: Thu Jan 01 00:00:00 1970 +0000
261 files+: C D E
261 files+: C D E
262 extra: branch=default
262 extra: branch=default
263 description:
263 description:
264 merge B' and E
264 merge B' and E
265
265
266
266
267 changeset: 6:cf9fe039dfd67e829edf6522a45de057b5c86519
267 changeset: 6:cf9fe039dfd67e829edf6522a45de057b5c86519
268 phase: draft
268 phase: draft
269 parent: 1:27547f69f25460a52fff66ad004e58da7ad3fb56
269 parent: 1:27547f69f25460a52fff66ad004e58da7ad3fb56
270 parent: -1:0000000000000000000000000000000000000000
270 parent: -1:0000000000000000000000000000000000000000
271 manifest: 6:ab8bfef2392903058bf4ebb9e7746e8d7026b27a
271 manifest: 6:ab8bfef2392903058bf4ebb9e7746e8d7026b27a
272 user: test
272 user: test
273 date: Thu Jan 01 00:00:00 1970 +0000
273 date: Thu Jan 01 00:00:00 1970 +0000
274 files+: B'
274 files+: B'
275 extra: branch=default
275 extra: branch=default
276 description:
276 description:
277 B'
277 B'
278
278
279
279
280 changeset: 5:a030c6be5127abc010fcbff1851536552e6951a8
280 changeset: 5:a030c6be5127abc010fcbff1851536552e6951a8
281 phase: secret
281 phase: secret
282 parent: 4:a603bfb5a83e312131cebcd05353c217d4d21dde
282 parent: 4:a603bfb5a83e312131cebcd05353c217d4d21dde
283 parent: -1:0000000000000000000000000000000000000000
283 parent: -1:0000000000000000000000000000000000000000
284 manifest: 5:5c710aa854874fe3d5fa7192e77bdb314cc08b5a
284 manifest: 5:5c710aa854874fe3d5fa7192e77bdb314cc08b5a
285 user: test
285 user: test
286 date: Thu Jan 01 00:00:00 1970 +0000
286 date: Thu Jan 01 00:00:00 1970 +0000
287 files+: H
287 files+: H
288 extra: branch=default
288 extra: branch=default
289 description:
289 description:
290 H
290 H
291
291
292
292
293 changeset: 4:a603bfb5a83e312131cebcd05353c217d4d21dde
293 changeset: 4:a603bfb5a83e312131cebcd05353c217d4d21dde
294 phase: secret
294 phase: secret
295 parent: 3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e
295 parent: 3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e
296 parent: -1:0000000000000000000000000000000000000000
296 parent: -1:0000000000000000000000000000000000000000
297 manifest: 4:7173fd1c27119750b959e3a0f47ed78abe75d6dc
297 manifest: 4:7173fd1c27119750b959e3a0f47ed78abe75d6dc
298 user: test
298 user: test
299 date: Thu Jan 01 00:00:00 1970 +0000
299 date: Thu Jan 01 00:00:00 1970 +0000
300 files+: E
300 files+: E
301 extra: branch=default
301 extra: branch=default
302 description:
302 description:
303 E
303 E
304
304
305
305
306 changeset: 3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e
306 changeset: 3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e
307 phase: draft
307 phase: draft
308 parent: 2:f838bfaca5c7226600ebcfd84f3c3c13a28d3757
308 parent: 2:f838bfaca5c7226600ebcfd84f3c3c13a28d3757
309 parent: -1:0000000000000000000000000000000000000000
309 parent: -1:0000000000000000000000000000000000000000
310 manifest: 3:6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c
310 manifest: 3:6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c
311 user: test
311 user: test
312 date: Thu Jan 01 00:00:00 1970 +0000
312 date: Thu Jan 01 00:00:00 1970 +0000
313 files+: D
313 files+: D
314 extra: branch=default
314 extra: branch=default
315 description:
315 description:
316 D
316 D
317
317
318
318
319 changeset: 2:f838bfaca5c7226600ebcfd84f3c3c13a28d3757
319 changeset: 2:f838bfaca5c7226600ebcfd84f3c3c13a28d3757
320 phase: draft
320 phase: draft
321 parent: 1:27547f69f25460a52fff66ad004e58da7ad3fb56
321 parent: 1:27547f69f25460a52fff66ad004e58da7ad3fb56
322 parent: -1:0000000000000000000000000000000000000000
322 parent: -1:0000000000000000000000000000000000000000
323 manifest: 2:66a5a01817fdf5239c273802b5b7618d051c89e4
323 manifest: 2:66a5a01817fdf5239c273802b5b7618d051c89e4
324 user: test
324 user: test
325 date: Thu Jan 01 00:00:00 1970 +0000
325 date: Thu Jan 01 00:00:00 1970 +0000
326 files+: C
326 files+: C
327 extra: branch=default
327 extra: branch=default
328 description:
328 description:
329 C
329 C
330
330
331
331
332 changeset: 1:27547f69f25460a52fff66ad004e58da7ad3fb56
332 changeset: 1:27547f69f25460a52fff66ad004e58da7ad3fb56
333 parent: 0:4a2df7238c3b48766b5e22fafbb8a2f506ec8256
333 parent: 0:4a2df7238c3b48766b5e22fafbb8a2f506ec8256
334 parent: -1:0000000000000000000000000000000000000000
334 parent: -1:0000000000000000000000000000000000000000
335 manifest: 1:cb5cbbc1bfbf24cc34b9e8c16914e9caa2d2a7fd
335 manifest: 1:cb5cbbc1bfbf24cc34b9e8c16914e9caa2d2a7fd
336 user: test
336 user: test
337 date: Thu Jan 01 00:00:00 1970 +0000
337 date: Thu Jan 01 00:00:00 1970 +0000
338 files+: B
338 files+: B
339 extra: branch=default
339 extra: branch=default
340 description:
340 description:
341 B
341 B
342
342
343
343
344 changeset: 0:4a2df7238c3b48766b5e22fafbb8a2f506ec8256
344 changeset: 0:4a2df7238c3b48766b5e22fafbb8a2f506ec8256
345 parent: -1:0000000000000000000000000000000000000000
345 parent: -1:0000000000000000000000000000000000000000
346 parent: -1:0000000000000000000000000000000000000000
346 parent: -1:0000000000000000000000000000000000000000
347 manifest: 0:007d8c9d88841325f5c6b06371b35b4e8a2b1a83
347 manifest: 0:007d8c9d88841325f5c6b06371b35b4e8a2b1a83
348 user: test
348 user: test
349 date: Thu Jan 01 00:00:00 1970 +0000
349 date: Thu Jan 01 00:00:00 1970 +0000
350 files+: A
350 files+: A
351 extra: branch=default
351 extra: branch=default
352 description:
352 description:
353 A
353 A
354
354
355
355
356
356
357
357
358 (Issue3707)
358 (Issue3707)
359 test invalid phase name
359 test invalid phase name
360
360
361 $ mkcommit I --config phases.new-commit='babar'
361 $ mkcommit I --config phases.new-commit='babar'
362 transaction abort!
362 transaction abort!
363 rollback completed
363 rollback completed
364 abort: phases.new-commit: not a valid phase name ('babar')
364 abort: phases.new-commit: not a valid phase name ('babar')
365 [255]
365 [255]
366 Test phase command
366 Test phase command
367 ===================
367 ===================
368
368
369 initial picture
369 initial picture
370
370
371 $ cat >> $HGRCPATH << EOF
371 $ cat >> $HGRCPATH << EOF
372 > [extensions]
372 > [extensions]
373 > hgext.graphlog=
373 > hgext.graphlog=
374 > EOF
374 > EOF
375 $ hg log -G --template "{rev} {phase} {desc}\n"
375 $ hg log -G --template "{rev} {phase} {desc}\n"
376 @ 7 secret merge B' and E
376 @ 7 secret merge B' and E
377 |\
377 |\
378 | o 6 draft B'
378 | o 6 draft B'
379 | |
379 | |
380 +---o 5 secret H
380 +---o 5 secret H
381 | |
381 | |
382 o | 4 secret E
382 o | 4 secret E
383 | |
383 | |
384 o | 3 draft D
384 o | 3 draft D
385 | |
385 | |
386 o | 2 draft C
386 o | 2 draft C
387 |/
387 |/
388 o 1 public B
388 o 1 public B
389 |
389 |
390 o 0 public A
390 o 0 public A
391
391
392
392
393 display changesets phase
393 display changesets phase
394
394
395 (mixing -r and plain rev specification)
395 (mixing -r and plain rev specification)
396
396
397 $ hg phase 1::4 -r 7
397 $ hg phase 1::4 -r 7
398 1: public
398 1: public
399 2: draft
399 2: draft
400 3: draft
400 3: draft
401 4: secret
401 4: secret
402 7: secret
402 7: secret
403
403
404
404
405 move changeset forward
405 move changeset forward
406
406
407 (with -r option)
407 (with -r option)
408
408
409 $ hg phase --public -r 2
409 $ hg phase --public -r 2
410 $ hg log -G --template "{rev} {phase} {desc}\n"
410 $ hg log -G --template "{rev} {phase} {desc}\n"
411 @ 7 secret merge B' and E
411 @ 7 secret merge B' and E
412 |\
412 |\
413 | o 6 draft B'
413 | o 6 draft B'
414 | |
414 | |
415 +---o 5 secret H
415 +---o 5 secret H
416 | |
416 | |
417 o | 4 secret E
417 o | 4 secret E
418 | |
418 | |
419 o | 3 draft D
419 o | 3 draft D
420 | |
420 | |
421 o | 2 public C
421 o | 2 public C
422 |/
422 |/
423 o 1 public B
423 o 1 public B
424 |
424 |
425 o 0 public A
425 o 0 public A
426
426
427
427
428 move changeset backward
428 move changeset backward
429
429
430 (without -r option)
430 (without -r option)
431
431
432 $ hg phase --draft --force 2
432 $ hg phase --draft --force 2
433 $ hg log -G --template "{rev} {phase} {desc}\n"
433 $ hg log -G --template "{rev} {phase} {desc}\n"
434 @ 7 secret merge B' and E
434 @ 7 secret merge B' and E
435 |\
435 |\
436 | o 6 draft B'
436 | o 6 draft B'
437 | |
437 | |
438 +---o 5 secret H
438 +---o 5 secret H
439 | |
439 | |
440 o | 4 secret E
440 o | 4 secret E
441 | |
441 | |
442 o | 3 draft D
442 o | 3 draft D
443 | |
443 | |
444 o | 2 draft C
444 o | 2 draft C
445 |/
445 |/
446 o 1 public B
446 o 1 public B
447 |
447 |
448 o 0 public A
448 o 0 public A
449
449
450
450
451 move changeset forward and backward
451 move changeset forward and backward
452
452
453 $ hg phase --draft --force 1::4
453 $ hg phase --draft --force 1::4
454 $ hg log -G --template "{rev} {phase} {desc}\n"
454 $ hg log -G --template "{rev} {phase} {desc}\n"
455 @ 7 secret merge B' and E
455 @ 7 secret merge B' and E
456 |\
456 |\
457 | o 6 draft B'
457 | o 6 draft B'
458 | |
458 | |
459 +---o 5 secret H
459 +---o 5 secret H
460 | |
460 | |
461 o | 4 draft E
461 o | 4 draft E
462 | |
462 | |
463 o | 3 draft D
463 o | 3 draft D
464 | |
464 | |
465 o | 2 draft C
465 o | 2 draft C
466 |/
466 |/
467 o 1 draft B
467 o 1 draft B
468 |
468 |
469 o 0 public A
469 o 0 public A
470
470
471 test partial failure
471 test partial failure
472
472
473 $ hg phase --public 7
473 $ hg phase --public 7
474 $ hg phase --draft '5 or 7'
474 $ hg phase --draft '5 or 7'
475 cannot move 1 changesets to a more permissive phase, use --force
475 cannot move 1 changesets to a more permissive phase, use --force
476 phase changed for 1 changesets
476 phase changed for 1 changesets
477 [1]
477 [1]
478 $ hg log -G --template "{rev} {phase} {desc}\n"
478 $ hg log -G --template "{rev} {phase} {desc}\n"
479 @ 7 public merge B' and E
479 @ 7 public merge B' and E
480 |\
480 |\
481 | o 6 public B'
481 | o 6 public B'
482 | |
482 | |
483 +---o 5 draft H
483 +---o 5 draft H
484 | |
484 | |
485 o | 4 public E
485 o | 4 public E
486 | |
486 | |
487 o | 3 public D
487 o | 3 public D
488 | |
488 | |
489 o | 2 public C
489 o | 2 public C
490 |/
490 |/
491 o 1 public B
491 o 1 public B
492 |
492 |
493 o 0 public A
493 o 0 public A
494
494
495
495
496 test complete failure
496 test complete failure
497
497
498 $ hg phase --draft 7
498 $ hg phase --draft 7
499 cannot move 1 changesets to a more permissive phase, use --force
499 cannot move 1 changesets to a more permissive phase, use --force
500 no phases changed
500 no phases changed
501 [1]
501 [1]
502
502
503 $ cd ..
503 $ cd ..
@@ -1,722 +1,722
1 $ cat >> $HGRCPATH <<EOF
1 $ cat >> $HGRCPATH <<EOF
2 > [extensions]
2 > [extensions]
3 > graphlog=
3 > graphlog=
4 > rebase=
4 > rebase=
5 > mq=
5 > mq=
6 >
6 >
7 > [phases]
7 > [phases]
8 > publish=False
8 > publish=False
9 >
9 >
10 > [alias]
10 > [alias]
11 > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
11 > tglog = log -G --template "{rev}: '{desc}' {branches}\n"
12 > tglogp = log -G --template "{rev}:{phase} '{desc}' {branches}\n"
12 > tglogp = log -G --template "{rev}:{phase} '{desc}' {branches}\n"
13 > EOF
13 > EOF
14
14
15 Create repo a:
15 Create repo a:
16
16
17 $ hg init a
17 $ hg init a
18 $ cd a
18 $ cd a
19 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
19 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
20 adding changesets
20 adding changesets
21 adding manifests
21 adding manifests
22 adding file changes
22 adding file changes
23 added 8 changesets with 7 changes to 7 files (+2 heads)
23 added 8 changesets with 7 changes to 7 files (+2 heads)
24 (run 'hg heads' to see heads, 'hg merge' to merge)
24 (run 'hg heads' to see heads, 'hg merge' to merge)
25 $ hg up tip
25 $ hg up tip
26 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
26 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
27
27
28 $ hg tglog
28 $ hg tglog
29 @ 7: 'H'
29 @ 7: 'H'
30 |
30 |
31 | o 6: 'G'
31 | o 6: 'G'
32 |/|
32 |/|
33 o | 5: 'F'
33 o | 5: 'F'
34 | |
34 | |
35 | o 4: 'E'
35 | o 4: 'E'
36 |/
36 |/
37 | o 3: 'D'
37 | o 3: 'D'
38 | |
38 | |
39 | o 2: 'C'
39 | o 2: 'C'
40 | |
40 | |
41 | o 1: 'B'
41 | o 1: 'B'
42 |/
42 |/
43 o 0: 'A'
43 o 0: 'A'
44
44
45 $ cd ..
45 $ cd ..
46
46
47
47
48 Rebasing B onto H and collapsing changesets with different phases:
48 Rebasing B onto H and collapsing changesets with different phases:
49
49
50
50
51 $ hg clone -q -u 3 a a1
51 $ hg clone -q -u 3 a a1
52 $ cd a1
52 $ cd a1
53
53
54 $ hg phase --force --secret 3
54 $ hg phase --force --secret 3
55
55
56 $ hg rebase --collapse --keepbranches
56 $ hg rebase --collapse --keepbranches
57 saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
57 saved backup bundle to $TESTTMP/a1/.hg/strip-backup/*-backup.hg (glob)
58
58
59 $ hg tglogp
59 $ hg tglogp
60 @ 5:secret 'Collapsed revision
60 @ 5:secret 'Collapsed revision
61 | * B
61 | * B
62 | * C
62 | * C
63 | * D'
63 | * D'
64 o 4:draft 'H'
64 o 4:draft 'H'
65 |
65 |
66 | o 3:draft 'G'
66 | o 3:draft 'G'
67 |/|
67 |/|
68 o | 2:draft 'F'
68 o | 2:draft 'F'
69 | |
69 | |
70 | o 1:draft 'E'
70 | o 1:draft 'E'
71 |/
71 |/
72 o 0:draft 'A'
72 o 0:draft 'A'
73
73
74 $ hg manifest
74 $ hg manifest
75 A
75 A
76 B
76 B
77 C
77 C
78 D
78 D
79 F
79 F
80 H
80 H
81
81
82 $ cd ..
82 $ cd ..
83
83
84
84
85 Rebasing E onto H:
85 Rebasing E onto H:
86
86
87 $ hg clone -q -u . a a2
87 $ hg clone -q -u . a a2
88 $ cd a2
88 $ cd a2
89
89
90 $ hg phase --force --secret 6
90 $ hg phase --force --secret 6
91 $ hg rebase --source 4 --collapse
91 $ hg rebase --source 4 --collapse
92 saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
92 saved backup bundle to $TESTTMP/a2/.hg/strip-backup/*-backup.hg (glob)
93
93
94 $ hg tglog
94 $ hg tglog
95 @ 6: 'Collapsed revision
95 @ 6: 'Collapsed revision
96 | * E
96 | * E
97 | * G'
97 | * G'
98 o 5: 'H'
98 o 5: 'H'
99 |
99 |
100 o 4: 'F'
100 o 4: 'F'
101 |
101 |
102 | o 3: 'D'
102 | o 3: 'D'
103 | |
103 | |
104 | o 2: 'C'
104 | o 2: 'C'
105 | |
105 | |
106 | o 1: 'B'
106 | o 1: 'B'
107 |/
107 |/
108 o 0: 'A'
108 o 0: 'A'
109
109
110 $ hg manifest
110 $ hg manifest
111 A
111 A
112 E
112 E
113 F
113 F
114 H
114 H
115
115
116 $ cd ..
116 $ cd ..
117
117
118 Rebasing G onto H with custom message:
118 Rebasing G onto H with custom message:
119
119
120 $ hg clone -q -u . a a3
120 $ hg clone -q -u . a a3
121 $ cd a3
121 $ cd a3
122
122
123 $ hg rebase --base 6 -m 'custom message'
123 $ hg rebase --base 6 -m 'custom message'
124 abort: message can only be specified with collapse
124 abort: message can only be specified with collapse
125 [255]
125 [255]
126
126
127 $ hg rebase --source 4 --collapse -m 'custom message'
127 $ hg rebase --source 4 --collapse -m 'custom message'
128 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
128 saved backup bundle to $TESTTMP/a3/.hg/strip-backup/*-backup.hg (glob)
129
129
130 $ hg tglog
130 $ hg tglog
131 @ 6: 'custom message'
131 @ 6: 'custom message'
132 |
132 |
133 o 5: 'H'
133 o 5: 'H'
134 |
134 |
135 o 4: 'F'
135 o 4: 'F'
136 |
136 |
137 | o 3: 'D'
137 | o 3: 'D'
138 | |
138 | |
139 | o 2: 'C'
139 | o 2: 'C'
140 | |
140 | |
141 | o 1: 'B'
141 | o 1: 'B'
142 |/
142 |/
143 o 0: 'A'
143 o 0: 'A'
144
144
145 $ hg manifest
145 $ hg manifest
146 A
146 A
147 E
147 E
148 F
148 F
149 H
149 H
150
150
151 $ cd ..
151 $ cd ..
152
152
153 Create repo b:
153 Create repo b:
154
154
155 $ hg init b
155 $ hg init b
156 $ cd b
156 $ cd b
157
157
158 $ echo A > A
158 $ echo A > A
159 $ hg ci -Am A
159 $ hg ci -Am A
160 adding A
160 adding A
161 $ echo B > B
161 $ echo B > B
162 $ hg ci -Am B
162 $ hg ci -Am B
163 adding B
163 adding B
164
164
165 $ hg up -q 0
165 $ hg up -q 0
166
166
167 $ echo C > C
167 $ echo C > C
168 $ hg ci -Am C
168 $ hg ci -Am C
169 adding C
169 adding C
170 created new head
170 created new head
171
171
172 $ hg merge
172 $ hg merge
173 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
173 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
174 (branch merge, don't forget to commit)
174 (branch merge, don't forget to commit)
175
175
176 $ echo D > D
176 $ echo D > D
177 $ hg ci -Am D
177 $ hg ci -Am D
178 adding D
178 adding D
179
179
180 $ hg up -q 1
180 $ hg up -q 1
181
181
182 $ echo E > E
182 $ echo E > E
183 $ hg ci -Am E
183 $ hg ci -Am E
184 adding E
184 adding E
185 created new head
185 created new head
186
186
187 $ echo F > F
187 $ echo F > F
188 $ hg ci -Am F
188 $ hg ci -Am F
189 adding F
189 adding F
190
190
191 $ hg merge
191 $ hg merge
192 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
192 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
193 (branch merge, don't forget to commit)
193 (branch merge, don't forget to commit)
194 $ hg ci -m G
194 $ hg ci -m G
195
195
196 $ hg up -q 0
196 $ hg up -q 0
197
197
198 $ echo H > H
198 $ echo H > H
199 $ hg ci -Am H
199 $ hg ci -Am H
200 adding H
200 adding H
201 created new head
201 created new head
202
202
203 $ hg tglog
203 $ hg tglog
204 @ 7: 'H'
204 @ 7: 'H'
205 |
205 |
206 | o 6: 'G'
206 | o 6: 'G'
207 | |\
207 | |\
208 | | o 5: 'F'
208 | | o 5: 'F'
209 | | |
209 | | |
210 | | o 4: 'E'
210 | | o 4: 'E'
211 | | |
211 | | |
212 | o | 3: 'D'
212 | o | 3: 'D'
213 | |\|
213 | |\|
214 | o | 2: 'C'
214 | o | 2: 'C'
215 |/ /
215 |/ /
216 | o 1: 'B'
216 | o 1: 'B'
217 |/
217 |/
218 o 0: 'A'
218 o 0: 'A'
219
219
220 $ cd ..
220 $ cd ..
221
221
222
222
223 Rebase and collapse - more than one external (fail):
223 Rebase and collapse - more than one external (fail):
224
224
225 $ hg clone -q -u . b b1
225 $ hg clone -q -u . b b1
226 $ cd b1
226 $ cd b1
227
227
228 $ hg rebase -s 2 --collapse
228 $ hg rebase -s 2 --collapse
229 abort: unable to collapse, there is more than one external parent
229 abort: unable to collapse, there is more than one external parent
230 [255]
230 [255]
231
231
232 Rebase and collapse - E onto H:
232 Rebase and collapse - E onto H:
233
233
234 $ hg rebase -s 4 --collapse # root (4) is not a merge
234 $ hg rebase -s 4 --collapse # root (4) is not a merge
235 saved backup bundle to $TESTTMP/b1/.hg/strip-backup/*-backup.hg (glob)
235 saved backup bundle to $TESTTMP/b1/.hg/strip-backup/*-backup.hg (glob)
236
236
237 $ hg tglog
237 $ hg tglog
238 @ 5: 'Collapsed revision
238 @ 5: 'Collapsed revision
239 |\ * E
239 |\ * E
240 | | * F
240 | | * F
241 | | * G'
241 | | * G'
242 | o 4: 'H'
242 | o 4: 'H'
243 | |
243 | |
244 o | 3: 'D'
244 o | 3: 'D'
245 |\ \
245 |\ \
246 | o | 2: 'C'
246 | o | 2: 'C'
247 | |/
247 | |/
248 o / 1: 'B'
248 o / 1: 'B'
249 |/
249 |/
250 o 0: 'A'
250 o 0: 'A'
251
251
252 $ hg manifest
252 $ hg manifest
253 A
253 A
254 C
254 C
255 D
255 D
256 E
256 E
257 F
257 F
258 H
258 H
259
259
260 $ cd ..
260 $ cd ..
261
261
262
262
263
263
264
264
265 Test that branchheads cache is updated correctly when doing a strip in which
265 Test that branchheads cache is updated correctly when doing a strip in which
266 the parent of the ancestor node to be stripped does not become a head and
266 the parent of the ancestor node to be stripped does not become a head and
267 also, the parent of a node that is a child of the node stripped becomes a head
267 also, the parent of a node that is a child of the node stripped becomes a head
268 (node 3).
268 (node 3).
269
269
270 $ hg clone -q -u . b b2
270 $ hg clone -q -u . b b2
271 $ cd b2
271 $ cd b2
272
272
273 $ hg heads --template="{rev}:{node} {branch}\n"
273 $ hg heads --template="{rev}:{node} {branch}\n"
274 7:c65502d4178782309ce0574c5ae6ee9485a9bafa default
274 7:c65502d4178782309ce0574c5ae6ee9485a9bafa default
275 6:c772a8b2dc17629cec88a19d09c926c4814b12c7 default
275 6:c772a8b2dc17629cec88a19d09c926c4814b12c7 default
276
276
277 $ cat $TESTTMP/b2/.hg/cache/branchheads-unserved
277 $ cat $TESTTMP/b2/.hg/cache/branchheads-served
278 c65502d4178782309ce0574c5ae6ee9485a9bafa 7
278 c65502d4178782309ce0574c5ae6ee9485a9bafa 7
279 c772a8b2dc17629cec88a19d09c926c4814b12c7 default
279 c772a8b2dc17629cec88a19d09c926c4814b12c7 default
280 c65502d4178782309ce0574c5ae6ee9485a9bafa default
280 c65502d4178782309ce0574c5ae6ee9485a9bafa default
281
281
282 $ hg strip 4
282 $ hg strip 4
283 saved backup bundle to $TESTTMP/b2/.hg/strip-backup/8a5212ebc852-backup.hg (glob)
283 saved backup bundle to $TESTTMP/b2/.hg/strip-backup/8a5212ebc852-backup.hg (glob)
284
284
285 $ cat $TESTTMP/b2/.hg/cache/branchheads
285 $ cat $TESTTMP/b2/.hg/cache/branchheads
286 c65502d4178782309ce0574c5ae6ee9485a9bafa 4
286 c65502d4178782309ce0574c5ae6ee9485a9bafa 4
287 2870ad076e541e714f3c2bc32826b5c6a6e5b040 default
287 2870ad076e541e714f3c2bc32826b5c6a6e5b040 default
288 c65502d4178782309ce0574c5ae6ee9485a9bafa default
288 c65502d4178782309ce0574c5ae6ee9485a9bafa default
289
289
290 $ hg heads --template="{rev}:{node} {branch}\n"
290 $ hg heads --template="{rev}:{node} {branch}\n"
291 4:c65502d4178782309ce0574c5ae6ee9485a9bafa default
291 4:c65502d4178782309ce0574c5ae6ee9485a9bafa default
292 3:2870ad076e541e714f3c2bc32826b5c6a6e5b040 default
292 3:2870ad076e541e714f3c2bc32826b5c6a6e5b040 default
293
293
294 $ cd ..
294 $ cd ..
295
295
296
296
297
297
298
298
299
299
300
300
301 Create repo c:
301 Create repo c:
302
302
303 $ hg init c
303 $ hg init c
304 $ cd c
304 $ cd c
305
305
306 $ echo A > A
306 $ echo A > A
307 $ hg ci -Am A
307 $ hg ci -Am A
308 adding A
308 adding A
309 $ echo B > B
309 $ echo B > B
310 $ hg ci -Am B
310 $ hg ci -Am B
311 adding B
311 adding B
312
312
313 $ hg up -q 0
313 $ hg up -q 0
314
314
315 $ echo C > C
315 $ echo C > C
316 $ hg ci -Am C
316 $ hg ci -Am C
317 adding C
317 adding C
318 created new head
318 created new head
319
319
320 $ hg merge
320 $ hg merge
321 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
321 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
322 (branch merge, don't forget to commit)
322 (branch merge, don't forget to commit)
323
323
324 $ echo D > D
324 $ echo D > D
325 $ hg ci -Am D
325 $ hg ci -Am D
326 adding D
326 adding D
327
327
328 $ hg up -q 1
328 $ hg up -q 1
329
329
330 $ echo E > E
330 $ echo E > E
331 $ hg ci -Am E
331 $ hg ci -Am E
332 adding E
332 adding E
333 created new head
333 created new head
334 $ echo F > E
334 $ echo F > E
335 $ hg ci -m 'F'
335 $ hg ci -m 'F'
336
336
337 $ echo G > G
337 $ echo G > G
338 $ hg ci -Am G
338 $ hg ci -Am G
339 adding G
339 adding G
340
340
341 $ hg merge
341 $ hg merge
342 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
342 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
343 (branch merge, don't forget to commit)
343 (branch merge, don't forget to commit)
344
344
345 $ hg ci -m H
345 $ hg ci -m H
346
346
347 $ hg up -q 0
347 $ hg up -q 0
348
348
349 $ echo I > I
349 $ echo I > I
350 $ hg ci -Am I
350 $ hg ci -Am I
351 adding I
351 adding I
352 created new head
352 created new head
353
353
354 $ hg tglog
354 $ hg tglog
355 @ 8: 'I'
355 @ 8: 'I'
356 |
356 |
357 | o 7: 'H'
357 | o 7: 'H'
358 | |\
358 | |\
359 | | o 6: 'G'
359 | | o 6: 'G'
360 | | |
360 | | |
361 | | o 5: 'F'
361 | | o 5: 'F'
362 | | |
362 | | |
363 | | o 4: 'E'
363 | | o 4: 'E'
364 | | |
364 | | |
365 | o | 3: 'D'
365 | o | 3: 'D'
366 | |\|
366 | |\|
367 | o | 2: 'C'
367 | o | 2: 'C'
368 |/ /
368 |/ /
369 | o 1: 'B'
369 | o 1: 'B'
370 |/
370 |/
371 o 0: 'A'
371 o 0: 'A'
372
372
373 $ cd ..
373 $ cd ..
374
374
375
375
376 Rebase and collapse - E onto I:
376 Rebase and collapse - E onto I:
377
377
378 $ hg clone -q -u . c c1
378 $ hg clone -q -u . c c1
379 $ cd c1
379 $ cd c1
380
380
381 $ hg rebase -s 4 --collapse # root (4) is not a merge
381 $ hg rebase -s 4 --collapse # root (4) is not a merge
382 merging E
382 merging E
383 saved backup bundle to $TESTTMP/c1/.hg/strip-backup/*-backup.hg (glob)
383 saved backup bundle to $TESTTMP/c1/.hg/strip-backup/*-backup.hg (glob)
384
384
385 $ hg tglog
385 $ hg tglog
386 @ 5: 'Collapsed revision
386 @ 5: 'Collapsed revision
387 |\ * E
387 |\ * E
388 | | * F
388 | | * F
389 | | * G
389 | | * G
390 | | * H'
390 | | * H'
391 | o 4: 'I'
391 | o 4: 'I'
392 | |
392 | |
393 o | 3: 'D'
393 o | 3: 'D'
394 |\ \
394 |\ \
395 | o | 2: 'C'
395 | o | 2: 'C'
396 | |/
396 | |/
397 o / 1: 'B'
397 o / 1: 'B'
398 |/
398 |/
399 o 0: 'A'
399 o 0: 'A'
400
400
401 $ hg manifest
401 $ hg manifest
402 A
402 A
403 C
403 C
404 D
404 D
405 E
405 E
406 G
406 G
407 I
407 I
408
408
409 $ cat E
409 $ cat E
410 F
410 F
411
411
412 $ cd ..
412 $ cd ..
413
413
414
414
415 Create repo d:
415 Create repo d:
416
416
417 $ hg init d
417 $ hg init d
418 $ cd d
418 $ cd d
419
419
420 $ echo A > A
420 $ echo A > A
421 $ hg ci -Am A
421 $ hg ci -Am A
422 adding A
422 adding A
423 $ echo B > B
423 $ echo B > B
424 $ hg ci -Am B
424 $ hg ci -Am B
425 adding B
425 adding B
426 $ echo C > C
426 $ echo C > C
427 $ hg ci -Am C
427 $ hg ci -Am C
428 adding C
428 adding C
429
429
430 $ hg up -q 1
430 $ hg up -q 1
431
431
432 $ echo D > D
432 $ echo D > D
433 $ hg ci -Am D
433 $ hg ci -Am D
434 adding D
434 adding D
435 created new head
435 created new head
436 $ hg merge
436 $ hg merge
437 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
437 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
438 (branch merge, don't forget to commit)
438 (branch merge, don't forget to commit)
439
439
440 $ hg ci -m E
440 $ hg ci -m E
441
441
442 $ hg up -q 0
442 $ hg up -q 0
443
443
444 $ echo F > F
444 $ echo F > F
445 $ hg ci -Am F
445 $ hg ci -Am F
446 adding F
446 adding F
447 created new head
447 created new head
448
448
449 $ hg tglog
449 $ hg tglog
450 @ 5: 'F'
450 @ 5: 'F'
451 |
451 |
452 | o 4: 'E'
452 | o 4: 'E'
453 | |\
453 | |\
454 | | o 3: 'D'
454 | | o 3: 'D'
455 | | |
455 | | |
456 | o | 2: 'C'
456 | o | 2: 'C'
457 | |/
457 | |/
458 | o 1: 'B'
458 | o 1: 'B'
459 |/
459 |/
460 o 0: 'A'
460 o 0: 'A'
461
461
462 $ cd ..
462 $ cd ..
463
463
464
464
465 Rebase and collapse - B onto F:
465 Rebase and collapse - B onto F:
466
466
467 $ hg clone -q -u . d d1
467 $ hg clone -q -u . d d1
468 $ cd d1
468 $ cd d1
469
469
470 $ hg rebase -s 1 --collapse
470 $ hg rebase -s 1 --collapse
471 saved backup bundle to $TESTTMP/d1/.hg/strip-backup/*-backup.hg (glob)
471 saved backup bundle to $TESTTMP/d1/.hg/strip-backup/*-backup.hg (glob)
472
472
473 $ hg tglog
473 $ hg tglog
474 @ 2: 'Collapsed revision
474 @ 2: 'Collapsed revision
475 | * B
475 | * B
476 | * C
476 | * C
477 | * D
477 | * D
478 | * E'
478 | * E'
479 o 1: 'F'
479 o 1: 'F'
480 |
480 |
481 o 0: 'A'
481 o 0: 'A'
482
482
483 $ hg manifest
483 $ hg manifest
484 A
484 A
485 B
485 B
486 C
486 C
487 D
487 D
488 F
488 F
489
489
490 Interactions between collapse and keepbranches
490 Interactions between collapse and keepbranches
491 $ cd ..
491 $ cd ..
492 $ hg init e
492 $ hg init e
493 $ cd e
493 $ cd e
494 $ echo 'a' > a
494 $ echo 'a' > a
495 $ hg ci -Am 'A'
495 $ hg ci -Am 'A'
496 adding a
496 adding a
497
497
498 $ hg branch '1'
498 $ hg branch '1'
499 marked working directory as branch 1
499 marked working directory as branch 1
500 (branches are permanent and global, did you want a bookmark?)
500 (branches are permanent and global, did you want a bookmark?)
501 $ echo 'b' > b
501 $ echo 'b' > b
502 $ hg ci -Am 'B'
502 $ hg ci -Am 'B'
503 adding b
503 adding b
504
504
505 $ hg branch '2'
505 $ hg branch '2'
506 marked working directory as branch 2
506 marked working directory as branch 2
507 (branches are permanent and global, did you want a bookmark?)
507 (branches are permanent and global, did you want a bookmark?)
508 $ echo 'c' > c
508 $ echo 'c' > c
509 $ hg ci -Am 'C'
509 $ hg ci -Am 'C'
510 adding c
510 adding c
511
511
512 $ hg up -q 0
512 $ hg up -q 0
513 $ echo 'd' > d
513 $ echo 'd' > d
514 $ hg ci -Am 'D'
514 $ hg ci -Am 'D'
515 adding d
515 adding d
516
516
517 $ hg tglog
517 $ hg tglog
518 @ 3: 'D'
518 @ 3: 'D'
519 |
519 |
520 | o 2: 'C' 2
520 | o 2: 'C' 2
521 | |
521 | |
522 | o 1: 'B' 1
522 | o 1: 'B' 1
523 |/
523 |/
524 o 0: 'A'
524 o 0: 'A'
525
525
526 $ hg rebase --keepbranches --collapse -s 1 -d 3
526 $ hg rebase --keepbranches --collapse -s 1 -d 3
527 abort: cannot collapse multiple named branches
527 abort: cannot collapse multiple named branches
528 [255]
528 [255]
529
529
530 $ repeatchange() {
530 $ repeatchange() {
531 > hg checkout $1
531 > hg checkout $1
532 > hg cp d z
532 > hg cp d z
533 > echo blah >> z
533 > echo blah >> z
534 > hg commit -Am "$2" --user "$3"
534 > hg commit -Am "$2" --user "$3"
535 > }
535 > }
536 $ repeatchange 3 "E" "user1"
536 $ repeatchange 3 "E" "user1"
537 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
537 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
538 $ repeatchange 3 "E" "user2"
538 $ repeatchange 3 "E" "user2"
539 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
539 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
540 created new head
540 created new head
541 $ hg tglog
541 $ hg tglog
542 @ 5: 'E'
542 @ 5: 'E'
543 |
543 |
544 | o 4: 'E'
544 | o 4: 'E'
545 |/
545 |/
546 o 3: 'D'
546 o 3: 'D'
547 |
547 |
548 | o 2: 'C' 2
548 | o 2: 'C' 2
549 | |
549 | |
550 | o 1: 'B' 1
550 | o 1: 'B' 1
551 |/
551 |/
552 o 0: 'A'
552 o 0: 'A'
553
553
554 $ hg rebase -s 5 -d 4
554 $ hg rebase -s 5 -d 4
555 saved backup bundle to $TESTTMP/e/.hg/strip-backup/*-backup.hg (glob)
555 saved backup bundle to $TESTTMP/e/.hg/strip-backup/*-backup.hg (glob)
556 $ hg tglog
556 $ hg tglog
557 @ 4: 'E'
557 @ 4: 'E'
558 |
558 |
559 o 3: 'D'
559 o 3: 'D'
560 |
560 |
561 | o 2: 'C' 2
561 | o 2: 'C' 2
562 | |
562 | |
563 | o 1: 'B' 1
563 | o 1: 'B' 1
564 |/
564 |/
565 o 0: 'A'
565 o 0: 'A'
566
566
567 $ hg export tip
567 $ hg export tip
568 # HG changeset patch
568 # HG changeset patch
569 # User user1
569 # User user1
570 # Date 0 0
570 # Date 0 0
571 # Node ID f338eb3c2c7cc5b5915676a2376ba7ac558c5213
571 # Node ID f338eb3c2c7cc5b5915676a2376ba7ac558c5213
572 # Parent 41acb9dca9eb976e84cd21fcb756b4afa5a35c09
572 # Parent 41acb9dca9eb976e84cd21fcb756b4afa5a35c09
573 E
573 E
574
574
575 diff -r 41acb9dca9eb -r f338eb3c2c7c z
575 diff -r 41acb9dca9eb -r f338eb3c2c7c z
576 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
576 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
577 +++ b/z Thu Jan 01 00:00:00 1970 +0000
577 +++ b/z Thu Jan 01 00:00:00 1970 +0000
578 @@ -0,0 +1,2 @@
578 @@ -0,0 +1,2 @@
579 +d
579 +d
580 +blah
580 +blah
581
581
582 $ cd ..
582 $ cd ..
583
583
584 Rebase, collapse and copies
584 Rebase, collapse and copies
585
585
586 $ hg init copies
586 $ hg init copies
587 $ cd copies
587 $ cd copies
588 $ hg unbundle "$TESTDIR/bundles/renames.hg"
588 $ hg unbundle "$TESTDIR/bundles/renames.hg"
589 adding changesets
589 adding changesets
590 adding manifests
590 adding manifests
591 adding file changes
591 adding file changes
592 added 4 changesets with 11 changes to 7 files (+1 heads)
592 added 4 changesets with 11 changes to 7 files (+1 heads)
593 (run 'hg heads' to see heads, 'hg merge' to merge)
593 (run 'hg heads' to see heads, 'hg merge' to merge)
594 $ hg up -q tip
594 $ hg up -q tip
595 $ hg tglog
595 $ hg tglog
596 @ 3: 'move2'
596 @ 3: 'move2'
597 |
597 |
598 o 2: 'move1'
598 o 2: 'move1'
599 |
599 |
600 | o 1: 'change'
600 | o 1: 'change'
601 |/
601 |/
602 o 0: 'add'
602 o 0: 'add'
603
603
604 $ hg rebase --collapse -d 1
604 $ hg rebase --collapse -d 1
605 merging a and d to d
605 merging a and d to d
606 merging b and e to e
606 merging b and e to e
607 merging c and f to f
607 merging c and f to f
608 merging e and g to g
608 merging e and g to g
609 merging f and c to c
609 merging f and c to c
610 saved backup bundle to $TESTTMP/copies/.hg/strip-backup/*-backup.hg (glob)
610 saved backup bundle to $TESTTMP/copies/.hg/strip-backup/*-backup.hg (glob)
611 $ hg st
611 $ hg st
612 $ hg st --copies --change .
612 $ hg st --copies --change .
613 A d
613 A d
614 a
614 a
615 A g
615 A g
616 b
616 b
617 R b
617 R b
618 $ cat c
618 $ cat c
619 c
619 c
620 c
620 c
621 $ cat d
621 $ cat d
622 a
622 a
623 a
623 a
624 $ cat g
624 $ cat g
625 b
625 b
626 b
626 b
627 $ hg log -r . --template "{file_copies}\n"
627 $ hg log -r . --template "{file_copies}\n"
628 d (a)g (b)
628 d (a)g (b)
629
629
630 Test collapsing a middle revision in-place
630 Test collapsing a middle revision in-place
631
631
632 $ hg tglog
632 $ hg tglog
633 @ 2: 'Collapsed revision
633 @ 2: 'Collapsed revision
634 | * move1
634 | * move1
635 | * move2'
635 | * move2'
636 o 1: 'change'
636 o 1: 'change'
637 |
637 |
638 o 0: 'add'
638 o 0: 'add'
639
639
640 $ hg rebase --collapse -r 1 -d 0
640 $ hg rebase --collapse -r 1 -d 0
641 abort: can't remove original changesets with unrebased descendants
641 abort: can't remove original changesets with unrebased descendants
642 (use --keep to keep original changesets)
642 (use --keep to keep original changesets)
643 [255]
643 [255]
644
644
645 Test collapsing in place
645 Test collapsing in place
646
646
647 $ hg rebase --collapse -b . -d 0
647 $ hg rebase --collapse -b . -d 0
648 saved backup bundle to $TESTTMP/copies/.hg/strip-backup/*-backup.hg (glob)
648 saved backup bundle to $TESTTMP/copies/.hg/strip-backup/*-backup.hg (glob)
649 $ hg st --change . --copies
649 $ hg st --change . --copies
650 M a
650 M a
651 M c
651 M c
652 A d
652 A d
653 a
653 a
654 A g
654 A g
655 b
655 b
656 R b
656 R b
657 $ cat a
657 $ cat a
658 a
658 a
659 a
659 a
660 $ cat c
660 $ cat c
661 c
661 c
662 c
662 c
663 $ cat d
663 $ cat d
664 a
664 a
665 a
665 a
666 $ cat g
666 $ cat g
667 b
667 b
668 b
668 b
669 $ cd ..
669 $ cd ..
670
670
671
671
672 Test stripping a revision with another child
672 Test stripping a revision with another child
673
673
674 $ hg init f
674 $ hg init f
675 $ cd f
675 $ cd f
676
676
677 $ echo A > A
677 $ echo A > A
678 $ hg ci -Am A
678 $ hg ci -Am A
679 adding A
679 adding A
680 $ echo B > B
680 $ echo B > B
681 $ hg ci -Am B
681 $ hg ci -Am B
682 adding B
682 adding B
683
683
684 $ hg up -q 0
684 $ hg up -q 0
685
685
686 $ echo C > C
686 $ echo C > C
687 $ hg ci -Am C
687 $ hg ci -Am C
688 adding C
688 adding C
689 created new head
689 created new head
690
690
691 $ hg tglog
691 $ hg tglog
692 @ 2: 'C'
692 @ 2: 'C'
693 |
693 |
694 | o 1: 'B'
694 | o 1: 'B'
695 |/
695 |/
696 o 0: 'A'
696 o 0: 'A'
697
697
698
698
699
699
700 $ hg heads --template="{rev}:{node} {branch}: {desc}\n"
700 $ hg heads --template="{rev}:{node} {branch}: {desc}\n"
701 2:c5cefa58fd557f84b72b87f970135984337acbc5 default: C
701 2:c5cefa58fd557f84b72b87f970135984337acbc5 default: C
702 1:27547f69f25460a52fff66ad004e58da7ad3fb56 default: B
702 1:27547f69f25460a52fff66ad004e58da7ad3fb56 default: B
703
703
704 $ hg strip 2
704 $ hg strip 2
705 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
705 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
706 saved backup bundle to $TESTTMP/f/.hg/strip-backup/*-backup.hg (glob)
706 saved backup bundle to $TESTTMP/f/.hg/strip-backup/*-backup.hg (glob)
707
707
708 $ hg tglog
708 $ hg tglog
709 o 1: 'B'
709 o 1: 'B'
710 |
710 |
711 @ 0: 'A'
711 @ 0: 'A'
712
712
713
713
714
714
715 $ hg heads --template="{rev}:{node} {branch}: {desc}\n"
715 $ hg heads --template="{rev}:{node} {branch}: {desc}\n"
716 1:27547f69f25460a52fff66ad004e58da7ad3fb56 default: B
716 1:27547f69f25460a52fff66ad004e58da7ad3fb56 default: B
717
717
718 $ cd ..
718 $ cd ..
719
719
720
720
721
721
722
722
General Comments 0
You need to be logged in to leave comments. Login now