##// END OF EJS Templates
linkrev: take a revision number rather than a hash
Matt Mackall -
r7361:9fe97eea default
parent child Browse files
Show More
@@ -1,25 +1,25 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # Dump revlogs as raw data stream
2 # Dump revlogs as raw data stream
3 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
3 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
4
4
5 import sys
5 import sys
6 from mercurial import revlog, node, util
6 from mercurial import revlog, node, util
7
7
8 for fp in (sys.stdin, sys.stdout, sys.stderr):
8 for fp in (sys.stdin, sys.stdout, sys.stderr):
9 util.set_binary(fp)
9 util.set_binary(fp)
10
10
11 for f in sys.argv[1:]:
11 for f in sys.argv[1:]:
12 binopen = lambda fn: open(fn, 'rb')
12 binopen = lambda fn: open(fn, 'rb')
13 r = revlog.revlog(binopen, f)
13 r = revlog.revlog(binopen, f)
14 print "file:", f
14 print "file:", f
15 for i in r:
15 for i in r:
16 n = r.node(i)
16 n = r.node(i)
17 p = r.parents(n)
17 p = r.parents(n)
18 d = r.revision(n)
18 d = r.revision(n)
19 print "node:", node.hex(n)
19 print "node:", node.hex(n)
20 print "linkrev:", r.linkrev(n)
20 print "linkrev:", r.linkrev(i)
21 print "parents:", node.hex(p[0]), node.hex(p[1])
21 print "parents:", node.hex(p[0]), node.hex(p[1])
22 print "length:", len(d)
22 print "length:", len(d)
23 print "-start-"
23 print "-start-"
24 print d
24 print d
25 print "-end-"
25 print "-end-"
@@ -1,1193 +1,1193 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat
10 import os, sys, bisect, stat
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18 class AmbiguousCommand(Exception):
18 class AmbiguousCommand(Exception):
19 """Exception raised if command shortcut matches more than one command."""
19 """Exception raised if command shortcut matches more than one command."""
20
20
21 def findpossible(cmd, table, strict=False):
21 def findpossible(cmd, table, strict=False):
22 """
22 """
23 Return cmd -> (aliases, command table entry)
23 Return cmd -> (aliases, command table entry)
24 for each matching command.
24 for each matching command.
25 Return debug commands (or their aliases) only if no normal command matches.
25 Return debug commands (or their aliases) only if no normal command matches.
26 """
26 """
27 choice = {}
27 choice = {}
28 debugchoice = {}
28 debugchoice = {}
29 for e in table.keys():
29 for e in table.keys():
30 aliases = e.lstrip("^").split("|")
30 aliases = e.lstrip("^").split("|")
31 found = None
31 found = None
32 if cmd in aliases:
32 if cmd in aliases:
33 found = cmd
33 found = cmd
34 elif not strict:
34 elif not strict:
35 for a in aliases:
35 for a in aliases:
36 if a.startswith(cmd):
36 if a.startswith(cmd):
37 found = a
37 found = a
38 break
38 break
39 if found is not None:
39 if found is not None:
40 if aliases[0].startswith("debug") or found.startswith("debug"):
40 if aliases[0].startswith("debug") or found.startswith("debug"):
41 debugchoice[found] = (aliases, table[e])
41 debugchoice[found] = (aliases, table[e])
42 else:
42 else:
43 choice[found] = (aliases, table[e])
43 choice[found] = (aliases, table[e])
44
44
45 if not choice and debugchoice:
45 if not choice and debugchoice:
46 choice = debugchoice
46 choice = debugchoice
47
47
48 return choice
48 return choice
49
49
50 def findcmd(cmd, table, strict=True):
50 def findcmd(cmd, table, strict=True):
51 """Return (aliases, command table entry) for command string."""
51 """Return (aliases, command table entry) for command string."""
52 choice = findpossible(cmd, table, strict)
52 choice = findpossible(cmd, table, strict)
53
53
54 if cmd in choice:
54 if cmd in choice:
55 return choice[cmd]
55 return choice[cmd]
56
56
57 if len(choice) > 1:
57 if len(choice) > 1:
58 clist = choice.keys()
58 clist = choice.keys()
59 clist.sort()
59 clist.sort()
60 raise AmbiguousCommand(cmd, clist)
60 raise AmbiguousCommand(cmd, clist)
61
61
62 if choice:
62 if choice:
63 return choice.values()[0]
63 return choice.values()[0]
64
64
65 raise UnknownCommand(cmd)
65 raise UnknownCommand(cmd)
66
66
67 def bail_if_changed(repo):
67 def bail_if_changed(repo):
68 if repo.dirstate.parents()[1] != nullid:
68 if repo.dirstate.parents()[1] != nullid:
69 raise util.Abort(_('outstanding uncommitted merge'))
69 raise util.Abort(_('outstanding uncommitted merge'))
70 modified, added, removed, deleted = repo.status()[:4]
70 modified, added, removed, deleted = repo.status()[:4]
71 if modified or added or removed or deleted:
71 if modified or added or removed or deleted:
72 raise util.Abort(_("outstanding uncommitted changes"))
72 raise util.Abort(_("outstanding uncommitted changes"))
73
73
74 def logmessage(opts):
74 def logmessage(opts):
75 """ get the log message according to -m and -l option """
75 """ get the log message according to -m and -l option """
76 message = opts['message']
76 message = opts['message']
77 logfile = opts['logfile']
77 logfile = opts['logfile']
78
78
79 if message and logfile:
79 if message and logfile:
80 raise util.Abort(_('options --message and --logfile are mutually '
80 raise util.Abort(_('options --message and --logfile are mutually '
81 'exclusive'))
81 'exclusive'))
82 if not message and logfile:
82 if not message and logfile:
83 try:
83 try:
84 if logfile == '-':
84 if logfile == '-':
85 message = sys.stdin.read()
85 message = sys.stdin.read()
86 else:
86 else:
87 message = open(logfile).read()
87 message = open(logfile).read()
88 except IOError, inst:
88 except IOError, inst:
89 raise util.Abort(_("can't read commit message '%s': %s") %
89 raise util.Abort(_("can't read commit message '%s': %s") %
90 (logfile, inst.strerror))
90 (logfile, inst.strerror))
91 return message
91 return message
92
92
93 def loglimit(opts):
93 def loglimit(opts):
94 """get the log limit according to option -l/--limit"""
94 """get the log limit according to option -l/--limit"""
95 limit = opts.get('limit')
95 limit = opts.get('limit')
96 if limit:
96 if limit:
97 try:
97 try:
98 limit = int(limit)
98 limit = int(limit)
99 except ValueError:
99 except ValueError:
100 raise util.Abort(_('limit must be a positive integer'))
100 raise util.Abort(_('limit must be a positive integer'))
101 if limit <= 0: raise util.Abort(_('limit must be positive'))
101 if limit <= 0: raise util.Abort(_('limit must be positive'))
102 else:
102 else:
103 limit = sys.maxint
103 limit = sys.maxint
104 return limit
104 return limit
105
105
106 def setremoteconfig(ui, opts):
106 def setremoteconfig(ui, opts):
107 "copy remote options to ui tree"
107 "copy remote options to ui tree"
108 if opts.get('ssh'):
108 if opts.get('ssh'):
109 ui.setconfig("ui", "ssh", opts['ssh'])
109 ui.setconfig("ui", "ssh", opts['ssh'])
110 if opts.get('remotecmd'):
110 if opts.get('remotecmd'):
111 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
111 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
112
112
113 def revpair(repo, revs):
113 def revpair(repo, revs):
114 '''return pair of nodes, given list of revisions. second item can
114 '''return pair of nodes, given list of revisions. second item can
115 be None, meaning use working dir.'''
115 be None, meaning use working dir.'''
116
116
117 def revfix(repo, val, defval):
117 def revfix(repo, val, defval):
118 if not val and val != 0 and defval is not None:
118 if not val and val != 0 and defval is not None:
119 val = defval
119 val = defval
120 return repo.lookup(val)
120 return repo.lookup(val)
121
121
122 if not revs:
122 if not revs:
123 return repo.dirstate.parents()[0], None
123 return repo.dirstate.parents()[0], None
124 end = None
124 end = None
125 if len(revs) == 1:
125 if len(revs) == 1:
126 if revrangesep in revs[0]:
126 if revrangesep in revs[0]:
127 start, end = revs[0].split(revrangesep, 1)
127 start, end = revs[0].split(revrangesep, 1)
128 start = revfix(repo, start, 0)
128 start = revfix(repo, start, 0)
129 end = revfix(repo, end, len(repo) - 1)
129 end = revfix(repo, end, len(repo) - 1)
130 else:
130 else:
131 start = revfix(repo, revs[0], None)
131 start = revfix(repo, revs[0], None)
132 elif len(revs) == 2:
132 elif len(revs) == 2:
133 if revrangesep in revs[0] or revrangesep in revs[1]:
133 if revrangesep in revs[0] or revrangesep in revs[1]:
134 raise util.Abort(_('too many revisions specified'))
134 raise util.Abort(_('too many revisions specified'))
135 start = revfix(repo, revs[0], None)
135 start = revfix(repo, revs[0], None)
136 end = revfix(repo, revs[1], None)
136 end = revfix(repo, revs[1], None)
137 else:
137 else:
138 raise util.Abort(_('too many revisions specified'))
138 raise util.Abort(_('too many revisions specified'))
139 return start, end
139 return start, end
140
140
141 def revrange(repo, revs):
141 def revrange(repo, revs):
142 """Yield revision as strings from a list of revision specifications."""
142 """Yield revision as strings from a list of revision specifications."""
143
143
144 def revfix(repo, val, defval):
144 def revfix(repo, val, defval):
145 if not val and val != 0 and defval is not None:
145 if not val and val != 0 and defval is not None:
146 return defval
146 return defval
147 return repo.changelog.rev(repo.lookup(val))
147 return repo.changelog.rev(repo.lookup(val))
148
148
149 seen, l = {}, []
149 seen, l = {}, []
150 for spec in revs:
150 for spec in revs:
151 if revrangesep in spec:
151 if revrangesep in spec:
152 start, end = spec.split(revrangesep, 1)
152 start, end = spec.split(revrangesep, 1)
153 start = revfix(repo, start, 0)
153 start = revfix(repo, start, 0)
154 end = revfix(repo, end, len(repo) - 1)
154 end = revfix(repo, end, len(repo) - 1)
155 step = start > end and -1 or 1
155 step = start > end and -1 or 1
156 for rev in xrange(start, end+step, step):
156 for rev in xrange(start, end+step, step):
157 if rev in seen:
157 if rev in seen:
158 continue
158 continue
159 seen[rev] = 1
159 seen[rev] = 1
160 l.append(rev)
160 l.append(rev)
161 else:
161 else:
162 rev = revfix(repo, spec, None)
162 rev = revfix(repo, spec, None)
163 if rev in seen:
163 if rev in seen:
164 continue
164 continue
165 seen[rev] = 1
165 seen[rev] = 1
166 l.append(rev)
166 l.append(rev)
167
167
168 return l
168 return l
169
169
170 def make_filename(repo, pat, node,
170 def make_filename(repo, pat, node,
171 total=None, seqno=None, revwidth=None, pathname=None):
171 total=None, seqno=None, revwidth=None, pathname=None):
172 node_expander = {
172 node_expander = {
173 'H': lambda: hex(node),
173 'H': lambda: hex(node),
174 'R': lambda: str(repo.changelog.rev(node)),
174 'R': lambda: str(repo.changelog.rev(node)),
175 'h': lambda: short(node),
175 'h': lambda: short(node),
176 }
176 }
177 expander = {
177 expander = {
178 '%': lambda: '%',
178 '%': lambda: '%',
179 'b': lambda: os.path.basename(repo.root),
179 'b': lambda: os.path.basename(repo.root),
180 }
180 }
181
181
182 try:
182 try:
183 if node:
183 if node:
184 expander.update(node_expander)
184 expander.update(node_expander)
185 if node:
185 if node:
186 expander['r'] = (lambda:
186 expander['r'] = (lambda:
187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
187 str(repo.changelog.rev(node)).zfill(revwidth or 0))
188 if total is not None:
188 if total is not None:
189 expander['N'] = lambda: str(total)
189 expander['N'] = lambda: str(total)
190 if seqno is not None:
190 if seqno is not None:
191 expander['n'] = lambda: str(seqno)
191 expander['n'] = lambda: str(seqno)
192 if total is not None and seqno is not None:
192 if total is not None and seqno is not None:
193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
193 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
194 if pathname is not None:
194 if pathname is not None:
195 expander['s'] = lambda: os.path.basename(pathname)
195 expander['s'] = lambda: os.path.basename(pathname)
196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
196 expander['d'] = lambda: os.path.dirname(pathname) or '.'
197 expander['p'] = lambda: pathname
197 expander['p'] = lambda: pathname
198
198
199 newname = []
199 newname = []
200 patlen = len(pat)
200 patlen = len(pat)
201 i = 0
201 i = 0
202 while i < patlen:
202 while i < patlen:
203 c = pat[i]
203 c = pat[i]
204 if c == '%':
204 if c == '%':
205 i += 1
205 i += 1
206 c = pat[i]
206 c = pat[i]
207 c = expander[c]()
207 c = expander[c]()
208 newname.append(c)
208 newname.append(c)
209 i += 1
209 i += 1
210 return ''.join(newname)
210 return ''.join(newname)
211 except KeyError, inst:
211 except KeyError, inst:
212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
212 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
213 inst.args[0])
213 inst.args[0])
214
214
215 def make_file(repo, pat, node=None,
215 def make_file(repo, pat, node=None,
216 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
216 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
217
217
218 writable = 'w' in mode or 'a' in mode
218 writable = 'w' in mode or 'a' in mode
219
219
220 if not pat or pat == '-':
220 if not pat or pat == '-':
221 return writable and sys.stdout or sys.stdin
221 return writable and sys.stdout or sys.stdin
222 if hasattr(pat, 'write') and writable:
222 if hasattr(pat, 'write') and writable:
223 return pat
223 return pat
224 if hasattr(pat, 'read') and 'r' in mode:
224 if hasattr(pat, 'read') and 'r' in mode:
225 return pat
225 return pat
226 return open(make_filename(repo, pat, node, total, seqno, revwidth,
226 return open(make_filename(repo, pat, node, total, seqno, revwidth,
227 pathname),
227 pathname),
228 mode)
228 mode)
229
229
230 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
230 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
231 if not globbed and default == 'relpath':
231 if not globbed and default == 'relpath':
232 pats = util.expand_glob(pats or [])
232 pats = util.expand_glob(pats or [])
233 m = _match.match(repo.root, repo.getcwd(), pats,
233 m = _match.match(repo.root, repo.getcwd(), pats,
234 opts.get('include'), opts.get('exclude'), default)
234 opts.get('include'), opts.get('exclude'), default)
235 def badfn(f, msg):
235 def badfn(f, msg):
236 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
236 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
237 return False
237 return False
238 m.bad = badfn
238 m.bad = badfn
239 return m
239 return m
240
240
241 def matchall(repo):
241 def matchall(repo):
242 return _match.always(repo.root, repo.getcwd())
242 return _match.always(repo.root, repo.getcwd())
243
243
244 def matchfiles(repo, files):
244 def matchfiles(repo, files):
245 return _match.exact(repo.root, repo.getcwd(), files)
245 return _match.exact(repo.root, repo.getcwd(), files)
246
246
247 def findrenames(repo, added=None, removed=None, threshold=0.5):
247 def findrenames(repo, added=None, removed=None, threshold=0.5):
248 '''find renamed files -- yields (before, after, score) tuples'''
248 '''find renamed files -- yields (before, after, score) tuples'''
249 if added is None or removed is None:
249 if added is None or removed is None:
250 added, removed = repo.status()[1:3]
250 added, removed = repo.status()[1:3]
251 ctx = repo['.']
251 ctx = repo['.']
252 for a in added:
252 for a in added:
253 aa = repo.wread(a)
253 aa = repo.wread(a)
254 bestname, bestscore = None, threshold
254 bestname, bestscore = None, threshold
255 for r in removed:
255 for r in removed:
256 rr = ctx.filectx(r).data()
256 rr = ctx.filectx(r).data()
257
257
258 # bdiff.blocks() returns blocks of matching lines
258 # bdiff.blocks() returns blocks of matching lines
259 # count the number of bytes in each
259 # count the number of bytes in each
260 equal = 0
260 equal = 0
261 alines = mdiff.splitnewlines(aa)
261 alines = mdiff.splitnewlines(aa)
262 matches = bdiff.blocks(aa, rr)
262 matches = bdiff.blocks(aa, rr)
263 for x1,x2,y1,y2 in matches:
263 for x1,x2,y1,y2 in matches:
264 for line in alines[x1:x2]:
264 for line in alines[x1:x2]:
265 equal += len(line)
265 equal += len(line)
266
266
267 lengths = len(aa) + len(rr)
267 lengths = len(aa) + len(rr)
268 if lengths:
268 if lengths:
269 myscore = equal*2.0 / lengths
269 myscore = equal*2.0 / lengths
270 if myscore >= bestscore:
270 if myscore >= bestscore:
271 bestname, bestscore = r, myscore
271 bestname, bestscore = r, myscore
272 if bestname:
272 if bestname:
273 yield bestname, a, bestscore
273 yield bestname, a, bestscore
274
274
275 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
275 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
276 if dry_run is None:
276 if dry_run is None:
277 dry_run = opts.get('dry_run')
277 dry_run = opts.get('dry_run')
278 if similarity is None:
278 if similarity is None:
279 similarity = float(opts.get('similarity') or 0)
279 similarity = float(opts.get('similarity') or 0)
280 add, remove = [], []
280 add, remove = [], []
281 mapping = {}
281 mapping = {}
282 audit_path = util.path_auditor(repo.root)
282 audit_path = util.path_auditor(repo.root)
283 m = match(repo, pats, opts)
283 m = match(repo, pats, opts)
284 for abs in repo.walk(m):
284 for abs in repo.walk(m):
285 target = repo.wjoin(abs)
285 target = repo.wjoin(abs)
286 good = True
286 good = True
287 try:
287 try:
288 audit_path(abs)
288 audit_path(abs)
289 except:
289 except:
290 good = False
290 good = False
291 rel = m.rel(abs)
291 rel = m.rel(abs)
292 exact = m.exact(abs)
292 exact = m.exact(abs)
293 if good and abs not in repo.dirstate:
293 if good and abs not in repo.dirstate:
294 add.append(abs)
294 add.append(abs)
295 mapping[abs] = rel, m.exact(abs)
295 mapping[abs] = rel, m.exact(abs)
296 if repo.ui.verbose or not exact:
296 if repo.ui.verbose or not exact:
297 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
297 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
298 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
298 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
299 or (os.path.isdir(target) and not os.path.islink(target))):
299 or (os.path.isdir(target) and not os.path.islink(target))):
300 remove.append(abs)
300 remove.append(abs)
301 mapping[abs] = rel, exact
301 mapping[abs] = rel, exact
302 if repo.ui.verbose or not exact:
302 if repo.ui.verbose or not exact:
303 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
303 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
304 if not dry_run:
304 if not dry_run:
305 repo.remove(remove)
305 repo.remove(remove)
306 repo.add(add)
306 repo.add(add)
307 if similarity > 0:
307 if similarity > 0:
308 for old, new, score in findrenames(repo, add, remove, similarity):
308 for old, new, score in findrenames(repo, add, remove, similarity):
309 oldrel, oldexact = mapping[old]
309 oldrel, oldexact = mapping[old]
310 newrel, newexact = mapping[new]
310 newrel, newexact = mapping[new]
311 if repo.ui.verbose or not oldexact or not newexact:
311 if repo.ui.verbose or not oldexact or not newexact:
312 repo.ui.status(_('recording removal of %s as rename to %s '
312 repo.ui.status(_('recording removal of %s as rename to %s '
313 '(%d%% similar)\n') %
313 '(%d%% similar)\n') %
314 (oldrel, newrel, score * 100))
314 (oldrel, newrel, score * 100))
315 if not dry_run:
315 if not dry_run:
316 repo.copy(old, new)
316 repo.copy(old, new)
317
317
318 def copy(ui, repo, pats, opts, rename=False):
318 def copy(ui, repo, pats, opts, rename=False):
319 # called with the repo lock held
319 # called with the repo lock held
320 #
320 #
321 # hgsep => pathname that uses "/" to separate directories
321 # hgsep => pathname that uses "/" to separate directories
322 # ossep => pathname that uses os.sep to separate directories
322 # ossep => pathname that uses os.sep to separate directories
323 cwd = repo.getcwd()
323 cwd = repo.getcwd()
324 targets = {}
324 targets = {}
325 after = opts.get("after")
325 after = opts.get("after")
326 dryrun = opts.get("dry_run")
326 dryrun = opts.get("dry_run")
327
327
328 def walkpat(pat):
328 def walkpat(pat):
329 srcs = []
329 srcs = []
330 m = match(repo, [pat], opts, globbed=True)
330 m = match(repo, [pat], opts, globbed=True)
331 for abs in repo.walk(m):
331 for abs in repo.walk(m):
332 state = repo.dirstate[abs]
332 state = repo.dirstate[abs]
333 rel = m.rel(abs)
333 rel = m.rel(abs)
334 exact = m.exact(abs)
334 exact = m.exact(abs)
335 if state in '?r':
335 if state in '?r':
336 if exact and state == '?':
336 if exact and state == '?':
337 ui.warn(_('%s: not copying - file is not managed\n') % rel)
337 ui.warn(_('%s: not copying - file is not managed\n') % rel)
338 if exact and state == 'r':
338 if exact and state == 'r':
339 ui.warn(_('%s: not copying - file has been marked for'
339 ui.warn(_('%s: not copying - file has been marked for'
340 ' remove\n') % rel)
340 ' remove\n') % rel)
341 continue
341 continue
342 # abs: hgsep
342 # abs: hgsep
343 # rel: ossep
343 # rel: ossep
344 srcs.append((abs, rel, exact))
344 srcs.append((abs, rel, exact))
345 return srcs
345 return srcs
346
346
347 # abssrc: hgsep
347 # abssrc: hgsep
348 # relsrc: ossep
348 # relsrc: ossep
349 # otarget: ossep
349 # otarget: ossep
350 def copyfile(abssrc, relsrc, otarget, exact):
350 def copyfile(abssrc, relsrc, otarget, exact):
351 abstarget = util.canonpath(repo.root, cwd, otarget)
351 abstarget = util.canonpath(repo.root, cwd, otarget)
352 reltarget = repo.pathto(abstarget, cwd)
352 reltarget = repo.pathto(abstarget, cwd)
353 target = repo.wjoin(abstarget)
353 target = repo.wjoin(abstarget)
354 src = repo.wjoin(abssrc)
354 src = repo.wjoin(abssrc)
355 state = repo.dirstate[abstarget]
355 state = repo.dirstate[abstarget]
356
356
357 # check for collisions
357 # check for collisions
358 prevsrc = targets.get(abstarget)
358 prevsrc = targets.get(abstarget)
359 if prevsrc is not None:
359 if prevsrc is not None:
360 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
360 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
361 (reltarget, repo.pathto(abssrc, cwd),
361 (reltarget, repo.pathto(abssrc, cwd),
362 repo.pathto(prevsrc, cwd)))
362 repo.pathto(prevsrc, cwd)))
363 return
363 return
364
364
365 # check for overwrites
365 # check for overwrites
366 exists = os.path.exists(target)
366 exists = os.path.exists(target)
367 if (not after and exists or after and state in 'mn'):
367 if (not after and exists or after and state in 'mn'):
368 if not opts['force']:
368 if not opts['force']:
369 ui.warn(_('%s: not overwriting - file exists\n') %
369 ui.warn(_('%s: not overwriting - file exists\n') %
370 reltarget)
370 reltarget)
371 return
371 return
372
372
373 if after:
373 if after:
374 if not exists:
374 if not exists:
375 return
375 return
376 elif not dryrun:
376 elif not dryrun:
377 try:
377 try:
378 if exists:
378 if exists:
379 os.unlink(target)
379 os.unlink(target)
380 targetdir = os.path.dirname(target) or '.'
380 targetdir = os.path.dirname(target) or '.'
381 if not os.path.isdir(targetdir):
381 if not os.path.isdir(targetdir):
382 os.makedirs(targetdir)
382 os.makedirs(targetdir)
383 util.copyfile(src, target)
383 util.copyfile(src, target)
384 except IOError, inst:
384 except IOError, inst:
385 if inst.errno == errno.ENOENT:
385 if inst.errno == errno.ENOENT:
386 ui.warn(_('%s: deleted in working copy\n') % relsrc)
386 ui.warn(_('%s: deleted in working copy\n') % relsrc)
387 else:
387 else:
388 ui.warn(_('%s: cannot copy - %s\n') %
388 ui.warn(_('%s: cannot copy - %s\n') %
389 (relsrc, inst.strerror))
389 (relsrc, inst.strerror))
390 return True # report a failure
390 return True # report a failure
391
391
392 if ui.verbose or not exact:
392 if ui.verbose or not exact:
393 action = rename and "moving" or "copying"
393 action = rename and "moving" or "copying"
394 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
394 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
395
395
396 targets[abstarget] = abssrc
396 targets[abstarget] = abssrc
397
397
398 # fix up dirstate
398 # fix up dirstate
399 origsrc = repo.dirstate.copied(abssrc) or abssrc
399 origsrc = repo.dirstate.copied(abssrc) or abssrc
400 if abstarget == origsrc: # copying back a copy?
400 if abstarget == origsrc: # copying back a copy?
401 if state not in 'mn' and not dryrun:
401 if state not in 'mn' and not dryrun:
402 repo.dirstate.normallookup(abstarget)
402 repo.dirstate.normallookup(abstarget)
403 else:
403 else:
404 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
404 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
405 if not ui.quiet:
405 if not ui.quiet:
406 ui.warn(_("%s has not been committed yet, so no copy "
406 ui.warn(_("%s has not been committed yet, so no copy "
407 "data will be stored for %s.\n")
407 "data will be stored for %s.\n")
408 % (repo.pathto(origsrc, cwd), reltarget))
408 % (repo.pathto(origsrc, cwd), reltarget))
409 if repo.dirstate[abstarget] in '?r' and not dryrun:
409 if repo.dirstate[abstarget] in '?r' and not dryrun:
410 repo.add([abstarget])
410 repo.add([abstarget])
411 elif not dryrun:
411 elif not dryrun:
412 repo.copy(origsrc, abstarget)
412 repo.copy(origsrc, abstarget)
413
413
414 if rename and not dryrun:
414 if rename and not dryrun:
415 repo.remove([abssrc], not after)
415 repo.remove([abssrc], not after)
416
416
417 # pat: ossep
417 # pat: ossep
418 # dest ossep
418 # dest ossep
419 # srcs: list of (hgsep, hgsep, ossep, bool)
419 # srcs: list of (hgsep, hgsep, ossep, bool)
420 # return: function that takes hgsep and returns ossep
420 # return: function that takes hgsep and returns ossep
421 def targetpathfn(pat, dest, srcs):
421 def targetpathfn(pat, dest, srcs):
422 if os.path.isdir(pat):
422 if os.path.isdir(pat):
423 abspfx = util.canonpath(repo.root, cwd, pat)
423 abspfx = util.canonpath(repo.root, cwd, pat)
424 abspfx = util.localpath(abspfx)
424 abspfx = util.localpath(abspfx)
425 if destdirexists:
425 if destdirexists:
426 striplen = len(os.path.split(abspfx)[0])
426 striplen = len(os.path.split(abspfx)[0])
427 else:
427 else:
428 striplen = len(abspfx)
428 striplen = len(abspfx)
429 if striplen:
429 if striplen:
430 striplen += len(os.sep)
430 striplen += len(os.sep)
431 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
431 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
432 elif destdirexists:
432 elif destdirexists:
433 res = lambda p: os.path.join(dest,
433 res = lambda p: os.path.join(dest,
434 os.path.basename(util.localpath(p)))
434 os.path.basename(util.localpath(p)))
435 else:
435 else:
436 res = lambda p: dest
436 res = lambda p: dest
437 return res
437 return res
438
438
439 # pat: ossep
439 # pat: ossep
440 # dest ossep
440 # dest ossep
441 # srcs: list of (hgsep, hgsep, ossep, bool)
441 # srcs: list of (hgsep, hgsep, ossep, bool)
442 # return: function that takes hgsep and returns ossep
442 # return: function that takes hgsep and returns ossep
443 def targetpathafterfn(pat, dest, srcs):
443 def targetpathafterfn(pat, dest, srcs):
444 if util.patkind(pat, None)[0]:
444 if util.patkind(pat, None)[0]:
445 # a mercurial pattern
445 # a mercurial pattern
446 res = lambda p: os.path.join(dest,
446 res = lambda p: os.path.join(dest,
447 os.path.basename(util.localpath(p)))
447 os.path.basename(util.localpath(p)))
448 else:
448 else:
449 abspfx = util.canonpath(repo.root, cwd, pat)
449 abspfx = util.canonpath(repo.root, cwd, pat)
450 if len(abspfx) < len(srcs[0][0]):
450 if len(abspfx) < len(srcs[0][0]):
451 # A directory. Either the target path contains the last
451 # A directory. Either the target path contains the last
452 # component of the source path or it does not.
452 # component of the source path or it does not.
453 def evalpath(striplen):
453 def evalpath(striplen):
454 score = 0
454 score = 0
455 for s in srcs:
455 for s in srcs:
456 t = os.path.join(dest, util.localpath(s[0])[striplen:])
456 t = os.path.join(dest, util.localpath(s[0])[striplen:])
457 if os.path.exists(t):
457 if os.path.exists(t):
458 score += 1
458 score += 1
459 return score
459 return score
460
460
461 abspfx = util.localpath(abspfx)
461 abspfx = util.localpath(abspfx)
462 striplen = len(abspfx)
462 striplen = len(abspfx)
463 if striplen:
463 if striplen:
464 striplen += len(os.sep)
464 striplen += len(os.sep)
465 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
465 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
466 score = evalpath(striplen)
466 score = evalpath(striplen)
467 striplen1 = len(os.path.split(abspfx)[0])
467 striplen1 = len(os.path.split(abspfx)[0])
468 if striplen1:
468 if striplen1:
469 striplen1 += len(os.sep)
469 striplen1 += len(os.sep)
470 if evalpath(striplen1) > score:
470 if evalpath(striplen1) > score:
471 striplen = striplen1
471 striplen = striplen1
472 res = lambda p: os.path.join(dest,
472 res = lambda p: os.path.join(dest,
473 util.localpath(p)[striplen:])
473 util.localpath(p)[striplen:])
474 else:
474 else:
475 # a file
475 # a file
476 if destdirexists:
476 if destdirexists:
477 res = lambda p: os.path.join(dest,
477 res = lambda p: os.path.join(dest,
478 os.path.basename(util.localpath(p)))
478 os.path.basename(util.localpath(p)))
479 else:
479 else:
480 res = lambda p: dest
480 res = lambda p: dest
481 return res
481 return res
482
482
483
483
484 pats = util.expand_glob(pats)
484 pats = util.expand_glob(pats)
485 if not pats:
485 if not pats:
486 raise util.Abort(_('no source or destination specified'))
486 raise util.Abort(_('no source or destination specified'))
487 if len(pats) == 1:
487 if len(pats) == 1:
488 raise util.Abort(_('no destination specified'))
488 raise util.Abort(_('no destination specified'))
489 dest = pats.pop()
489 dest = pats.pop()
490 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
490 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
491 if not destdirexists:
491 if not destdirexists:
492 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
492 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
493 raise util.Abort(_('with multiple sources, destination must be an '
493 raise util.Abort(_('with multiple sources, destination must be an '
494 'existing directory'))
494 'existing directory'))
495 if util.endswithsep(dest):
495 if util.endswithsep(dest):
496 raise util.Abort(_('destination %s is not a directory') % dest)
496 raise util.Abort(_('destination %s is not a directory') % dest)
497
497
498 tfn = targetpathfn
498 tfn = targetpathfn
499 if after:
499 if after:
500 tfn = targetpathafterfn
500 tfn = targetpathafterfn
501 copylist = []
501 copylist = []
502 for pat in pats:
502 for pat in pats:
503 srcs = walkpat(pat)
503 srcs = walkpat(pat)
504 if not srcs:
504 if not srcs:
505 continue
505 continue
506 copylist.append((tfn(pat, dest, srcs), srcs))
506 copylist.append((tfn(pat, dest, srcs), srcs))
507 if not copylist:
507 if not copylist:
508 raise util.Abort(_('no files to copy'))
508 raise util.Abort(_('no files to copy'))
509
509
510 errors = 0
510 errors = 0
511 for targetpath, srcs in copylist:
511 for targetpath, srcs in copylist:
512 for abssrc, relsrc, exact in srcs:
512 for abssrc, relsrc, exact in srcs:
513 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
513 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
514 errors += 1
514 errors += 1
515
515
516 if errors:
516 if errors:
517 ui.warn(_('(consider using --after)\n'))
517 ui.warn(_('(consider using --after)\n'))
518
518
519 return errors
519 return errors
520
520
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.

    opts - option dict; reads 'daemon', 'daemon_pipefds' and 'pid_file'
    parentfn - called in the parent with the child's pid after spawning
    initfn - called in the (possibly daemonized) worker before detaching
    runfn - the actual service body; its return value is returned
    '''

    # Parent side of daemonization: re-spawn ourselves detached, passing
    # a pipe through which the child signals that its setup is complete.
    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        # Don't pass --cwd to the child process, because we've already
        # changed directory.
        for i in xrange(1,len(args)):
            if args[i].startswith('--cwd='):
                del args[i]
                break
            elif args[i].startswith('--cwd'):
                # option and its value are two separate argv entries
                del args[i:i+2]
                break
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        # block until the child writes one byte: setup finished
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    # Child side of daemonization: detach from the controlling terminal,
    # notify the waiting parent over the pipe, then point the standard
    # streams at the null device.
    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            # setsid is unavailable on some platforms (e.g. Windows)
            pass
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()
573
573
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        # patch: false, or a match object selecting files to diff
        self.patch = patch
        # per-revision buffered output, filled by show() when buffered
        self.header = {}
        self.hunk = {}
        # last header written, to avoid repeating identical headers
        self.lastheader = None

    def flush(self, rev):
        """Write any buffered output for rev; return 1 if something
        was flushed, 0 otherwise."""
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        """Display one changeset, buffering the output when requested."""
        if self.buffered:
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # callers may pass either the revision number or the node;
        # derive whichever one is missing
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        # full hashes in debug mode, short hashes otherwise
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            # debug: show modified/added/removed separately via status
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            for key, value in util.sort(extra.items()):
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                # terse: only the first line of the commit message
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        """Write the diff against the first parent if patch output
        was requested (self.patch is a match object)."""
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            chunks = patch.diff(self.repo, prev, node, match=self.patch,
                                opts=patch.diffopts(self.ui))
            for chunk in chunks:
                self.ui.write(chunk)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
699
699
700
700
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        filters = templatefilters.filters.copy()
        # full node hashes in debug mode, short 12-char form otherwise
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # callers may pass either the revision number or the node;
        # derive whichever one is missing
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                # no template for this key: emit raw values
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                # merge v into the template arguments; v may be a dict,
                # a list of pairs, or a plain value
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            branch = changes[5].get("branch")
            # the default branch is never shown
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            for key, value in util.sort(changes[5].items()):
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        # lazily computed (modified, added, removed) lists, shared by the
        # three show* helpers below so status() runs at most once
        files = []
        def getfiles():
            if not files:
                files[:] = self.repo.status(
                    log.parents(changenode)[0], changenode)[:3]
            return files
        def showfiles(**args):
            return showlist('file', changes[3], **args)
        def showmods(**args):
            return showlist('file_mod', getfiles()[0], **args)
        def showadds(**args):
            return showlist('file_add', getfiles()[1], **args)
        def showdels(**args):
            return showlist('file_del', getfiles()[2], **args)
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                             node=hex(changes[0])))
            return self.t('manifest', **args)

        # default template keywords; caller-supplied props take effect
        # via the update below, with defaults overriding same-named keys
        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'file_mods': showmods,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        props = props.copy()
        props.update(defprops)

        try:
            # pick the header template matching the current verbosity
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            # pick the changeset template matching the current verbosity
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
886
886
887 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
887 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
888 """show one changeset using template or regular display.
888 """show one changeset using template or regular display.
889
889
890 Display format will be the first non-empty hit of:
890 Display format will be the first non-empty hit of:
891 1. option 'template'
891 1. option 'template'
892 2. option 'style'
892 2. option 'style'
893 3. [ui] setting 'logtemplate'
893 3. [ui] setting 'logtemplate'
894 4. [ui] setting 'style'
894 4. [ui] setting 'style'
895 If all of these values are either the unset or the empty string,
895 If all of these values are either the unset or the empty string,
896 regular display via changeset_printer() is done.
896 regular display via changeset_printer() is done.
897 """
897 """
898 # options
898 # options
899 patch = False
899 patch = False
900 if opts.get('patch'):
900 if opts.get('patch'):
901 patch = matchfn or matchall(repo)
901 patch = matchfn or matchall(repo)
902
902
903 tmpl = opts.get('template')
903 tmpl = opts.get('template')
904 mapfile = None
904 mapfile = None
905 if tmpl:
905 if tmpl:
906 tmpl = templater.parsestring(tmpl, quoted=False)
906 tmpl = templater.parsestring(tmpl, quoted=False)
907 else:
907 else:
908 mapfile = opts.get('style')
908 mapfile = opts.get('style')
909 # ui settings
909 # ui settings
910 if not mapfile:
910 if not mapfile:
911 tmpl = ui.config('ui', 'logtemplate')
911 tmpl = ui.config('ui', 'logtemplate')
912 if tmpl:
912 if tmpl:
913 tmpl = templater.parsestring(tmpl)
913 tmpl = templater.parsestring(tmpl)
914 else:
914 else:
915 mapfile = ui.config('ui', 'style')
915 mapfile = ui.config('ui', 'style')
916
916
917 if tmpl or mapfile:
917 if tmpl or mapfile:
918 if mapfile:
918 if mapfile:
919 if not os.path.split(mapfile)[0]:
919 if not os.path.split(mapfile)[0]:
920 mapname = (templater.templatepath('map-cmdline.' + mapfile)
920 mapname = (templater.templatepath('map-cmdline.' + mapfile)
921 or templater.templatepath(mapfile))
921 or templater.templatepath(mapfile))
922 if mapname: mapfile = mapname
922 if mapname: mapfile = mapname
923 try:
923 try:
924 t = changeset_templater(ui, repo, patch, mapfile, buffered)
924 t = changeset_templater(ui, repo, patch, mapfile, buffered)
925 except SyntaxError, inst:
925 except SyntaxError, inst:
926 raise util.Abort(inst.args[0])
926 raise util.Abort(inst.args[0])
927 if tmpl: t.use_template(tmpl)
927 if tmpl: t.use_template(tmpl)
928 return t
928 return t
929 return changeset_printer(ui, repo, patch, buffered)
929 return changeset_printer(ui, repo, patch, buffered)
930
930
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    # predicate built from the user-supplied date specification
    datematch = util.matchdate(date)
    changeset = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], changeset, {'rev':None})
    # gather matching revisions during the 'add' pass, then report the
    # first hit seen during the in-order 'iter' pass
    matched = {}
    for state, rev, fns in changeiter:
        if state == 'add':
            when = changeset(rev)[2]
            if datematch(when[0]):
                matched[rev] = when
        elif state == 'iter' and rev in matched:
            ui.status(_("Found revision %s from %s\n") %
                      (rev, util.datestr(matched[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))
949
949
950 def walkchangerevs(ui, repo, pats, change, opts):
950 def walkchangerevs(ui, repo, pats, change, opts):
951 '''Iterate over files and the revs they changed in.
951 '''Iterate over files and the revs they changed in.
952
952
953 Callers most commonly need to iterate backwards over the history
953 Callers most commonly need to iterate backwards over the history
954 it is interested in. Doing so has awful (quadratic-looking)
954 it is interested in. Doing so has awful (quadratic-looking)
955 performance, so we use iterators in a "windowed" way.
955 performance, so we use iterators in a "windowed" way.
956
956
957 We walk a window of revisions in the desired order. Within the
957 We walk a window of revisions in the desired order. Within the
958 window, we first walk forwards to gather data, then in the desired
958 window, we first walk forwards to gather data, then in the desired
959 order (usually backwards) to display it.
959 order (usually backwards) to display it.
960
960
961 This function returns an (iterator, matchfn) tuple. The iterator
961 This function returns an (iterator, matchfn) tuple. The iterator
962 yields 3-tuples. They will be of one of the following forms:
962 yields 3-tuples. They will be of one of the following forms:
963
963
964 "window", incrementing, lastrev: stepping through a window,
964 "window", incrementing, lastrev: stepping through a window,
965 positive if walking forwards through revs, last rev in the
965 positive if walking forwards through revs, last rev in the
966 sequence iterated over - use to reset state for the current window
966 sequence iterated over - use to reset state for the current window
967
967
968 "add", rev, fns: out-of-order traversal of the given file names
968 "add", rev, fns: out-of-order traversal of the given file names
969 fns, which changed during revision rev - use to gather data for
969 fns, which changed during revision rev - use to gather data for
970 possible display
970 possible display
971
971
972 "iter", rev, None: in-order traversal of the revs earlier iterated
972 "iter", rev, None: in-order traversal of the revs earlier iterated
973 over with "add" - use to display data'''
973 over with "add" - use to display data'''
974
974
975 def increasing_windows(start, end, windowsize=8, sizelimit=512):
975 def increasing_windows(start, end, windowsize=8, sizelimit=512):
976 if start < end:
976 if start < end:
977 while start < end:
977 while start < end:
978 yield start, min(windowsize, end-start)
978 yield start, min(windowsize, end-start)
979 start += windowsize
979 start += windowsize
980 if windowsize < sizelimit:
980 if windowsize < sizelimit:
981 windowsize *= 2
981 windowsize *= 2
982 else:
982 else:
983 while start > end:
983 while start > end:
984 yield start, min(windowsize, start-end-1)
984 yield start, min(windowsize, start-end-1)
985 start -= windowsize
985 start -= windowsize
986 if windowsize < sizelimit:
986 if windowsize < sizelimit:
987 windowsize *= 2
987 windowsize *= 2
988
988
989 m = match(repo, pats, opts)
989 m = match(repo, pats, opts)
990 follow = opts.get('follow') or opts.get('follow_first')
990 follow = opts.get('follow') or opts.get('follow_first')
991
991
992 if not len(repo):
992 if not len(repo):
993 return [], m
993 return [], m
994
994
995 if follow:
995 if follow:
996 defrange = '%s:0' % repo['.'].rev()
996 defrange = '%s:0' % repo['.'].rev()
997 else:
997 else:
998 defrange = '-1:0'
998 defrange = '-1:0'
999 revs = revrange(repo, opts['rev'] or [defrange])
999 revs = revrange(repo, opts['rev'] or [defrange])
1000 wanted = {}
1000 wanted = {}
1001 slowpath = m.anypats() or opts.get('removed')
1001 slowpath = m.anypats() or opts.get('removed')
1002 fncache = {}
1002 fncache = {}
1003
1003
1004 if not slowpath and not m.files():
1004 if not slowpath and not m.files():
1005 # No files, no patterns. Display all revs.
1005 # No files, no patterns. Display all revs.
1006 wanted = dict.fromkeys(revs)
1006 wanted = dict.fromkeys(revs)
1007 copies = []
1007 copies = []
1008 if not slowpath:
1008 if not slowpath:
1009 # Only files, no patterns. Check the history of each file.
1009 # Only files, no patterns. Check the history of each file.
1010 def filerevgen(filelog, node):
1010 def filerevgen(filelog, node):
1011 cl_count = len(repo)
1011 cl_count = len(repo)
1012 if node is None:
1012 if node is None:
1013 last = len(filelog) - 1
1013 last = len(filelog) - 1
1014 else:
1014 else:
1015 last = filelog.rev(node)
1015 last = filelog.rev(node)
1016 for i, window in increasing_windows(last, nullrev):
1016 for i, window in increasing_windows(last, nullrev):
1017 revs = []
1017 revs = []
1018 for j in xrange(i - window, i + 1):
1018 for j in xrange(i - window, i + 1):
1019 n = filelog.node(j)
1019 n = filelog.node(j)
1020 revs.append((filelog.linkrev(n),
1020 revs.append((filelog.linkrev(j),
1021 follow and filelog.renamed(n)))
1021 follow and filelog.renamed(n)))
1022 revs.reverse()
1022 revs.reverse()
1023 for rev in revs:
1023 for rev in revs:
1024 # only yield rev for which we have the changelog, it can
1024 # only yield rev for which we have the changelog, it can
1025 # happen while doing "hg log" during a pull or commit
1025 # happen while doing "hg log" during a pull or commit
1026 if rev[0] < cl_count:
1026 if rev[0] < cl_count:
1027 yield rev
1027 yield rev
1028 def iterfiles():
1028 def iterfiles():
1029 for filename in m.files():
1029 for filename in m.files():
1030 yield filename, None
1030 yield filename, None
1031 for filename_node in copies:
1031 for filename_node in copies:
1032 yield filename_node
1032 yield filename_node
1033 minrev, maxrev = min(revs), max(revs)
1033 minrev, maxrev = min(revs), max(revs)
1034 for file_, node in iterfiles():
1034 for file_, node in iterfiles():
1035 filelog = repo.file(file_)
1035 filelog = repo.file(file_)
1036 if not len(filelog):
1036 if not len(filelog):
1037 if node is None:
1037 if node is None:
1038 # A zero count may be a directory or deleted file, so
1038 # A zero count may be a directory or deleted file, so
1039 # try to find matching entries on the slow path.
1039 # try to find matching entries on the slow path.
1040 slowpath = True
1040 slowpath = True
1041 break
1041 break
1042 else:
1042 else:
1043 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1043 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1044 % (file_, short(node)))
1044 % (file_, short(node)))
1045 continue
1045 continue
1046 for rev, copied in filerevgen(filelog, node):
1046 for rev, copied in filerevgen(filelog, node):
1047 if rev <= maxrev:
1047 if rev <= maxrev:
1048 if rev < minrev:
1048 if rev < minrev:
1049 break
1049 break
1050 fncache.setdefault(rev, [])
1050 fncache.setdefault(rev, [])
1051 fncache[rev].append(file_)
1051 fncache[rev].append(file_)
1052 wanted[rev] = 1
1052 wanted[rev] = 1
1053 if follow and copied:
1053 if follow and copied:
1054 copies.append(copied)
1054 copies.append(copied)
1055 if slowpath:
1055 if slowpath:
1056 if follow:
1056 if follow:
1057 raise util.Abort(_('can only follow copies/renames for explicit '
1057 raise util.Abort(_('can only follow copies/renames for explicit '
1058 'file names'))
1058 'file names'))
1059
1059
1060 # The slow path checks files modified in every changeset.
1060 # The slow path checks files modified in every changeset.
1061 def changerevgen():
1061 def changerevgen():
1062 for i, window in increasing_windows(len(repo) - 1, nullrev):
1062 for i, window in increasing_windows(len(repo) - 1, nullrev):
1063 for j in xrange(i - window, i + 1):
1063 for j in xrange(i - window, i + 1):
1064 yield j, change(j)[3]
1064 yield j, change(j)[3]
1065
1065
1066 for rev, changefiles in changerevgen():
1066 for rev, changefiles in changerevgen():
1067 matches = filter(m, changefiles)
1067 matches = filter(m, changefiles)
1068 if matches:
1068 if matches:
1069 fncache[rev] = matches
1069 fncache[rev] = matches
1070 wanted[rev] = 1
1070 wanted[rev] = 1
1071
1071
1072 class followfilter:
1072 class followfilter:
1073 def __init__(self, onlyfirst=False):
1073 def __init__(self, onlyfirst=False):
1074 self.startrev = nullrev
1074 self.startrev = nullrev
1075 self.roots = []
1075 self.roots = []
1076 self.onlyfirst = onlyfirst
1076 self.onlyfirst = onlyfirst
1077
1077
1078 def match(self, rev):
1078 def match(self, rev):
1079 def realparents(rev):
1079 def realparents(rev):
1080 if self.onlyfirst:
1080 if self.onlyfirst:
1081 return repo.changelog.parentrevs(rev)[0:1]
1081 return repo.changelog.parentrevs(rev)[0:1]
1082 else:
1082 else:
1083 return filter(lambda x: x != nullrev,
1083 return filter(lambda x: x != nullrev,
1084 repo.changelog.parentrevs(rev))
1084 repo.changelog.parentrevs(rev))
1085
1085
1086 if self.startrev == nullrev:
1086 if self.startrev == nullrev:
1087 self.startrev = rev
1087 self.startrev = rev
1088 return True
1088 return True
1089
1089
1090 if rev > self.startrev:
1090 if rev > self.startrev:
1091 # forward: all descendants
1091 # forward: all descendants
1092 if not self.roots:
1092 if not self.roots:
1093 self.roots.append(self.startrev)
1093 self.roots.append(self.startrev)
1094 for parent in realparents(rev):
1094 for parent in realparents(rev):
1095 if parent in self.roots:
1095 if parent in self.roots:
1096 self.roots.append(rev)
1096 self.roots.append(rev)
1097 return True
1097 return True
1098 else:
1098 else:
1099 # backwards: all parents
1099 # backwards: all parents
1100 if not self.roots:
1100 if not self.roots:
1101 self.roots.extend(realparents(self.startrev))
1101 self.roots.extend(realparents(self.startrev))
1102 if rev in self.roots:
1102 if rev in self.roots:
1103 self.roots.remove(rev)
1103 self.roots.remove(rev)
1104 self.roots.extend(realparents(rev))
1104 self.roots.extend(realparents(rev))
1105 return True
1105 return True
1106
1106
1107 return False
1107 return False
1108
1108
1109 # it might be worthwhile to do this in the iterator if the rev range
1109 # it might be worthwhile to do this in the iterator if the rev range
1110 # is descending and the prune args are all within that range
1110 # is descending and the prune args are all within that range
1111 for rev in opts.get('prune', ()):
1111 for rev in opts.get('prune', ()):
1112 rev = repo.changelog.rev(repo.lookup(rev))
1112 rev = repo.changelog.rev(repo.lookup(rev))
1113 ff = followfilter()
1113 ff = followfilter()
1114 stop = min(revs[0], revs[-1])
1114 stop = min(revs[0], revs[-1])
1115 for x in xrange(rev, stop-1, -1):
1115 for x in xrange(rev, stop-1, -1):
1116 if ff.match(x) and x in wanted:
1116 if ff.match(x) and x in wanted:
1117 del wanted[x]
1117 del wanted[x]
1118
1118
1119 def iterate():
1119 def iterate():
1120 if follow and not m.files():
1120 if follow and not m.files():
1121 ff = followfilter(onlyfirst=opts.get('follow_first'))
1121 ff = followfilter(onlyfirst=opts.get('follow_first'))
1122 def want(rev):
1122 def want(rev):
1123 if ff.match(rev) and rev in wanted:
1123 if ff.match(rev) and rev in wanted:
1124 return True
1124 return True
1125 return False
1125 return False
1126 else:
1126 else:
1127 def want(rev):
1127 def want(rev):
1128 return rev in wanted
1128 return rev in wanted
1129
1129
1130 for i, window in increasing_windows(0, len(revs)):
1130 for i, window in increasing_windows(0, len(revs)):
1131 yield 'window', revs[0] < revs[-1], revs[-1]
1131 yield 'window', revs[0] < revs[-1], revs[-1]
1132 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1132 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1133 for rev in util.sort(list(nrevs)):
1133 for rev in util.sort(list(nrevs)):
1134 fns = fncache.get(rev)
1134 fns = fncache.get(rev)
1135 if not fns:
1135 if not fns:
1136 def fns_generator():
1136 def fns_generator():
1137 for f in change(rev)[3]:
1137 for f in change(rev)[3]:
1138 if m(f):
1138 if m(f):
1139 yield f
1139 yield f
1140 fns = fns_generator()
1140 fns = fns_generator()
1141 yield 'add', rev, fns
1141 yield 'add', rev, fns
1142 for rev in nrevs:
1142 for rev in nrevs:
1143 yield 'iter', rev, None
1143 yield 'iter', rev, None
1144 return iterate(), m
1144 return iterate(), m
1145
1145
1146 def commit(ui, repo, commitfunc, pats, opts):
1146 def commit(ui, repo, commitfunc, pats, opts):
1147 '''commit the specified files or all outstanding changes'''
1147 '''commit the specified files or all outstanding changes'''
1148 date = opts.get('date')
1148 date = opts.get('date')
1149 if date:
1149 if date:
1150 opts['date'] = util.parsedate(date)
1150 opts['date'] = util.parsedate(date)
1151 message = logmessage(opts)
1151 message = logmessage(opts)
1152
1152
1153 # extract addremove carefully -- this function can be called from a command
1153 # extract addremove carefully -- this function can be called from a command
1154 # that doesn't support addremove
1154 # that doesn't support addremove
1155 if opts.get('addremove'):
1155 if opts.get('addremove'):
1156 addremove(repo, pats, opts)
1156 addremove(repo, pats, opts)
1157
1157
1158 m = match(repo, pats, opts)
1158 m = match(repo, pats, opts)
1159 if pats:
1159 if pats:
1160 modified, added, removed = repo.status(match=m)[:3]
1160 modified, added, removed = repo.status(match=m)[:3]
1161 files = util.sort(modified + added + removed)
1161 files = util.sort(modified + added + removed)
1162
1162
1163 def is_dir(f):
1163 def is_dir(f):
1164 name = f + '/'
1164 name = f + '/'
1165 i = bisect.bisect(files, name)
1165 i = bisect.bisect(files, name)
1166 return i < len(files) and files[i].startswith(name)
1166 return i < len(files) and files[i].startswith(name)
1167
1167
1168 for f in m.files():
1168 for f in m.files():
1169 if f == '.':
1169 if f == '.':
1170 continue
1170 continue
1171 if f not in files:
1171 if f not in files:
1172 rf = repo.wjoin(f)
1172 rf = repo.wjoin(f)
1173 rel = repo.pathto(f)
1173 rel = repo.pathto(f)
1174 try:
1174 try:
1175 mode = os.lstat(rf)[stat.ST_MODE]
1175 mode = os.lstat(rf)[stat.ST_MODE]
1176 except OSError:
1176 except OSError:
1177 if is_dir(f): # deleted directory ?
1177 if is_dir(f): # deleted directory ?
1178 continue
1178 continue
1179 raise util.Abort(_("file %s not found!") % rel)
1179 raise util.Abort(_("file %s not found!") % rel)
1180 if stat.S_ISDIR(mode):
1180 if stat.S_ISDIR(mode):
1181 if not is_dir(f):
1181 if not is_dir(f):
1182 raise util.Abort(_("no match under directory %s!")
1182 raise util.Abort(_("no match under directory %s!")
1183 % rel)
1183 % rel)
1184 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1184 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1185 raise util.Abort(_("can't commit %s: "
1185 raise util.Abort(_("can't commit %s: "
1186 "unsupported file type!") % rel)
1186 "unsupported file type!") % rel)
1187 elif f not in repo.dirstate:
1187 elif f not in repo.dirstate:
1188 raise util.Abort(_("file %s not tracked!") % rel)
1188 raise util.Abort(_("file %s not tracked!") % rel)
1189 m = matchfiles(repo, files)
1189 m = matchfiles(repo, files)
1190 try:
1190 try:
1191 return commitfunc(ui, repo, message, m, opts)
1191 return commitfunc(ui, repo, message, m, opts)
1192 except ValueError, inst:
1192 except ValueError, inst:
1193 raise util.Abort(str(inst))
1193 raise util.Abort(str(inst))
@@ -1,3384 +1,3384 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from repo import RepoError, NoCapability
9 from repo import RepoError, NoCapability
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys
11 import os, re, sys
12 import hg, util, revlog, bundlerepo, extensions, copies
12 import hg, util, revlog, bundlerepo, extensions, copies
13 import difflib, patch, time, help, mdiff, tempfile, url
13 import difflib, patch, time, help, mdiff, tempfile, url
14 import version
14 import version
15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
16 import merge as merge_
16 import merge as merge_
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
20 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
21 """add the specified files on the next commit
21 """add the specified files on the next commit
22
22
23 Schedule files to be version controlled and added to the repository.
23 Schedule files to be version controlled and added to the repository.
24
24
25 The files will be added to the repository at the next commit. To
25 The files will be added to the repository at the next commit. To
26 undo an add before that, see hg revert.
26 undo an add before that, see hg revert.
27
27
28 If no names are given, add all files in the repository.
28 If no names are given, add all files in the repository.
29 """
29 """
30
30
31 rejected = None
31 rejected = None
32 exacts = {}
32 exacts = {}
33 names = []
33 names = []
34 m = cmdutil.match(repo, pats, opts)
34 m = cmdutil.match(repo, pats, opts)
35 m.bad = lambda x,y: True
35 m.bad = lambda x,y: True
36 for abs in repo.walk(m):
36 for abs in repo.walk(m):
37 if m.exact(abs):
37 if m.exact(abs):
38 if ui.verbose:
38 if ui.verbose:
39 ui.status(_('adding %s\n') % m.rel(abs))
39 ui.status(_('adding %s\n') % m.rel(abs))
40 names.append(abs)
40 names.append(abs)
41 exacts[abs] = 1
41 exacts[abs] = 1
42 elif abs not in repo.dirstate:
42 elif abs not in repo.dirstate:
43 ui.status(_('adding %s\n') % m.rel(abs))
43 ui.status(_('adding %s\n') % m.rel(abs))
44 names.append(abs)
44 names.append(abs)
45 if not opts.get('dry_run'):
45 if not opts.get('dry_run'):
46 rejected = repo.add(names)
46 rejected = repo.add(names)
47 rejected = [p for p in rejected if p in exacts]
47 rejected = [p for p in rejected if p in exacts]
48 return rejected and 1 or 0
48 return rejected and 1 or 0
49
49
50 def addremove(ui, repo, *pats, **opts):
50 def addremove(ui, repo, *pats, **opts):
51 """add all new files, delete all missing files
51 """add all new files, delete all missing files
52
52
53 Add all new files and remove all missing files from the repository.
53 Add all new files and remove all missing files from the repository.
54
54
55 New files are ignored if they match any of the patterns in .hgignore. As
55 New files are ignored if they match any of the patterns in .hgignore. As
56 with add, these changes take effect at the next commit.
56 with add, these changes take effect at the next commit.
57
57
58 Use the -s option to detect renamed files. With a parameter > 0,
58 Use the -s option to detect renamed files. With a parameter > 0,
59 this compares every removed file with every added file and records
59 this compares every removed file with every added file and records
60 those similar enough as renames. This option takes a percentage
60 those similar enough as renames. This option takes a percentage
61 between 0 (disabled) and 100 (files must be identical) as its
61 between 0 (disabled) and 100 (files must be identical) as its
62 parameter. Detecting renamed files this way can be expensive.
62 parameter. Detecting renamed files this way can be expensive.
63 """
63 """
64 try:
64 try:
65 sim = float(opts.get('similarity') or 0)
65 sim = float(opts.get('similarity') or 0)
66 except ValueError:
66 except ValueError:
67 raise util.Abort(_('similarity must be a number'))
67 raise util.Abort(_('similarity must be a number'))
68 if sim < 0 or sim > 100:
68 if sim < 0 or sim > 100:
69 raise util.Abort(_('similarity must be between 0 and 100'))
69 raise util.Abort(_('similarity must be between 0 and 100'))
70 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
70 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
71
71
72 def annotate(ui, repo, *pats, **opts):
72 def annotate(ui, repo, *pats, **opts):
73 """show changeset information per file line
73 """show changeset information per file line
74
74
75 List changes in files, showing the revision id responsible for each line
75 List changes in files, showing the revision id responsible for each line
76
76
77 This command is useful to discover who did a change or when a change took
77 This command is useful to discover who did a change or when a change took
78 place.
78 place.
79
79
80 Without the -a option, annotate will avoid processing files it
80 Without the -a option, annotate will avoid processing files it
81 detects as binary. With -a, annotate will generate an annotation
81 detects as binary. With -a, annotate will generate an annotation
82 anyway, probably with undesirable results.
82 anyway, probably with undesirable results.
83 """
83 """
84 datefunc = ui.quiet and util.shortdate or util.datestr
84 datefunc = ui.quiet and util.shortdate or util.datestr
85 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
85 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
86
86
87 if not pats:
87 if not pats:
88 raise util.Abort(_('at least one file name or pattern required'))
88 raise util.Abort(_('at least one file name or pattern required'))
89
89
90 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
90 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
91 ('number', lambda x: str(x[0].rev())),
91 ('number', lambda x: str(x[0].rev())),
92 ('changeset', lambda x: short(x[0].node())),
92 ('changeset', lambda x: short(x[0].node())),
93 ('date', getdate),
93 ('date', getdate),
94 ('follow', lambda x: x[0].path()),
94 ('follow', lambda x: x[0].path()),
95 ]
95 ]
96
96
97 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
97 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
98 and not opts.get('follow')):
98 and not opts.get('follow')):
99 opts['number'] = 1
99 opts['number'] = 1
100
100
101 linenumber = opts.get('line_number') is not None
101 linenumber = opts.get('line_number') is not None
102 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
102 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
103 raise util.Abort(_('at least one of -n/-c is required for -l'))
103 raise util.Abort(_('at least one of -n/-c is required for -l'))
104
104
105 funcmap = [func for op, func in opmap if opts.get(op)]
105 funcmap = [func for op, func in opmap if opts.get(op)]
106 if linenumber:
106 if linenumber:
107 lastfunc = funcmap[-1]
107 lastfunc = funcmap[-1]
108 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
108 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
109
109
110 ctx = repo[opts.get('rev')]
110 ctx = repo[opts.get('rev')]
111
111
112 m = cmdutil.match(repo, pats, opts)
112 m = cmdutil.match(repo, pats, opts)
113 for abs in ctx.walk(m):
113 for abs in ctx.walk(m):
114 fctx = ctx[abs]
114 fctx = ctx[abs]
115 if not opts.get('text') and util.binary(fctx.data()):
115 if not opts.get('text') and util.binary(fctx.data()):
116 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
116 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
117 continue
117 continue
118
118
119 lines = fctx.annotate(follow=opts.get('follow'),
119 lines = fctx.annotate(follow=opts.get('follow'),
120 linenumber=linenumber)
120 linenumber=linenumber)
121 pieces = []
121 pieces = []
122
122
123 for f in funcmap:
123 for f in funcmap:
124 l = [f(n) for n, dummy in lines]
124 l = [f(n) for n, dummy in lines]
125 if l:
125 if l:
126 ml = max(map(len, l))
126 ml = max(map(len, l))
127 pieces.append(["%*s" % (ml, x) for x in l])
127 pieces.append(["%*s" % (ml, x) for x in l])
128
128
129 if pieces:
129 if pieces:
130 for p, l in zip(zip(*pieces), lines):
130 for p, l in zip(zip(*pieces), lines):
131 ui.write("%s: %s" % (" ".join(p), l[1]))
131 ui.write("%s: %s" % (" ".join(p), l[1]))
132
132
133 def archive(ui, repo, dest, **opts):
133 def archive(ui, repo, dest, **opts):
134 '''create unversioned archive of a repository revision
134 '''create unversioned archive of a repository revision
135
135
136 By default, the revision used is the parent of the working
136 By default, the revision used is the parent of the working
137 directory; use "-r" to specify a different revision.
137 directory; use "-r" to specify a different revision.
138
138
139 To specify the type of archive to create, use "-t". Valid
139 To specify the type of archive to create, use "-t". Valid
140 types are:
140 types are:
141
141
142 "files" (default): a directory full of files
142 "files" (default): a directory full of files
143 "tar": tar archive, uncompressed
143 "tar": tar archive, uncompressed
144 "tbz2": tar archive, compressed using bzip2
144 "tbz2": tar archive, compressed using bzip2
145 "tgz": tar archive, compressed using gzip
145 "tgz": tar archive, compressed using gzip
146 "uzip": zip archive, uncompressed
146 "uzip": zip archive, uncompressed
147 "zip": zip archive, compressed using deflate
147 "zip": zip archive, compressed using deflate
148
148
149 The exact name of the destination archive or directory is given
149 The exact name of the destination archive or directory is given
150 using a format string; see "hg help export" for details.
150 using a format string; see "hg help export" for details.
151
151
152 Each member added to an archive file has a directory prefix
152 Each member added to an archive file has a directory prefix
153 prepended. Use "-p" to specify a format string for the prefix.
153 prepended. Use "-p" to specify a format string for the prefix.
154 The default is the basename of the archive, with suffixes removed.
154 The default is the basename of the archive, with suffixes removed.
155 '''
155 '''
156
156
157 ctx = repo[opts.get('rev')]
157 ctx = repo[opts.get('rev')]
158 if not ctx:
158 if not ctx:
159 raise util.Abort(_('repository has no revisions'))
159 raise util.Abort(_('repository has no revisions'))
160 node = ctx.node()
160 node = ctx.node()
161 dest = cmdutil.make_filename(repo, dest, node)
161 dest = cmdutil.make_filename(repo, dest, node)
162 if os.path.realpath(dest) == repo.root:
162 if os.path.realpath(dest) == repo.root:
163 raise util.Abort(_('repository root cannot be destination'))
163 raise util.Abort(_('repository root cannot be destination'))
164 matchfn = cmdutil.match(repo, [], opts)
164 matchfn = cmdutil.match(repo, [], opts)
165 kind = opts.get('type') or 'files'
165 kind = opts.get('type') or 'files'
166 prefix = opts.get('prefix')
166 prefix = opts.get('prefix')
167 if dest == '-':
167 if dest == '-':
168 if kind == 'files':
168 if kind == 'files':
169 raise util.Abort(_('cannot archive plain files to stdout'))
169 raise util.Abort(_('cannot archive plain files to stdout'))
170 dest = sys.stdout
170 dest = sys.stdout
171 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
171 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
172 prefix = cmdutil.make_filename(repo, prefix, node)
172 prefix = cmdutil.make_filename(repo, prefix, node)
173 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
173 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
174 matchfn, prefix)
174 matchfn, prefix)
175
175
176 def backout(ui, repo, node=None, rev=None, **opts):
176 def backout(ui, repo, node=None, rev=None, **opts):
177 '''reverse effect of earlier changeset
177 '''reverse effect of earlier changeset
178
178
179 Commit the backed out changes as a new changeset. The new
179 Commit the backed out changes as a new changeset. The new
180 changeset is a child of the backed out changeset.
180 changeset is a child of the backed out changeset.
181
181
182 If you back out a changeset other than the tip, a new head is
182 If you back out a changeset other than the tip, a new head is
183 created. This head will be the new tip and you should merge this
183 created. This head will be the new tip and you should merge this
184 backout changeset with another head (current one by default).
184 backout changeset with another head (current one by default).
185
185
186 The --merge option remembers the parent of the working directory
186 The --merge option remembers the parent of the working directory
187 before starting the backout, then merges the new head with that
187 before starting the backout, then merges the new head with that
188 changeset afterwards. This saves you from doing the merge by
188 changeset afterwards. This saves you from doing the merge by
189 hand. The result of this merge is not committed, as for a normal
189 hand. The result of this merge is not committed, as for a normal
190 merge.
190 merge.
191
191
192 See \'hg help dates\' for a list of formats valid for -d/--date.
192 See \'hg help dates\' for a list of formats valid for -d/--date.
193 '''
193 '''
194 if rev and node:
194 if rev and node:
195 raise util.Abort(_("please specify just one revision"))
195 raise util.Abort(_("please specify just one revision"))
196
196
197 if not rev:
197 if not rev:
198 rev = node
198 rev = node
199
199
200 if not rev:
200 if not rev:
201 raise util.Abort(_("please specify a revision to backout"))
201 raise util.Abort(_("please specify a revision to backout"))
202
202
203 date = opts.get('date')
203 date = opts.get('date')
204 if date:
204 if date:
205 opts['date'] = util.parsedate(date)
205 opts['date'] = util.parsedate(date)
206
206
207 cmdutil.bail_if_changed(repo)
207 cmdutil.bail_if_changed(repo)
208 node = repo.lookup(rev)
208 node = repo.lookup(rev)
209
209
210 op1, op2 = repo.dirstate.parents()
210 op1, op2 = repo.dirstate.parents()
211 a = repo.changelog.ancestor(op1, node)
211 a = repo.changelog.ancestor(op1, node)
212 if a != node:
212 if a != node:
213 raise util.Abort(_('cannot back out change on a different branch'))
213 raise util.Abort(_('cannot back out change on a different branch'))
214
214
215 p1, p2 = repo.changelog.parents(node)
215 p1, p2 = repo.changelog.parents(node)
216 if p1 == nullid:
216 if p1 == nullid:
217 raise util.Abort(_('cannot back out a change with no parents'))
217 raise util.Abort(_('cannot back out a change with no parents'))
218 if p2 != nullid:
218 if p2 != nullid:
219 if not opts.get('parent'):
219 if not opts.get('parent'):
220 raise util.Abort(_('cannot back out a merge changeset without '
220 raise util.Abort(_('cannot back out a merge changeset without '
221 '--parent'))
221 '--parent'))
222 p = repo.lookup(opts['parent'])
222 p = repo.lookup(opts['parent'])
223 if p not in (p1, p2):
223 if p not in (p1, p2):
224 raise util.Abort(_('%s is not a parent of %s') %
224 raise util.Abort(_('%s is not a parent of %s') %
225 (short(p), short(node)))
225 (short(p), short(node)))
226 parent = p
226 parent = p
227 else:
227 else:
228 if opts.get('parent'):
228 if opts.get('parent'):
229 raise util.Abort(_('cannot use --parent on non-merge changeset'))
229 raise util.Abort(_('cannot use --parent on non-merge changeset'))
230 parent = p1
230 parent = p1
231
231
232 # the backout should appear on the same branch
232 # the backout should appear on the same branch
233 branch = repo.dirstate.branch()
233 branch = repo.dirstate.branch()
234 hg.clean(repo, node, show_stats=False)
234 hg.clean(repo, node, show_stats=False)
235 repo.dirstate.setbranch(branch)
235 repo.dirstate.setbranch(branch)
236 revert_opts = opts.copy()
236 revert_opts = opts.copy()
237 revert_opts['date'] = None
237 revert_opts['date'] = None
238 revert_opts['all'] = True
238 revert_opts['all'] = True
239 revert_opts['rev'] = hex(parent)
239 revert_opts['rev'] = hex(parent)
240 revert_opts['no_backup'] = None
240 revert_opts['no_backup'] = None
241 revert(ui, repo, **revert_opts)
241 revert(ui, repo, **revert_opts)
242 commit_opts = opts.copy()
242 commit_opts = opts.copy()
243 commit_opts['addremove'] = False
243 commit_opts['addremove'] = False
244 if not commit_opts['message'] and not commit_opts['logfile']:
244 if not commit_opts['message'] and not commit_opts['logfile']:
245 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
245 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
246 commit_opts['force_editor'] = True
246 commit_opts['force_editor'] = True
247 commit(ui, repo, **commit_opts)
247 commit(ui, repo, **commit_opts)
248 def nice(node):
248 def nice(node):
249 return '%d:%s' % (repo.changelog.rev(node), short(node))
249 return '%d:%s' % (repo.changelog.rev(node), short(node))
250 ui.status(_('changeset %s backs out changeset %s\n') %
250 ui.status(_('changeset %s backs out changeset %s\n') %
251 (nice(repo.changelog.tip()), nice(node)))
251 (nice(repo.changelog.tip()), nice(node)))
252 if op1 != node:
252 if op1 != node:
253 hg.clean(repo, op1, show_stats=False)
253 hg.clean(repo, op1, show_stats=False)
254 if opts.get('merge'):
254 if opts.get('merge'):
255 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
255 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
256 hg.merge(repo, hex(repo.changelog.tip()))
256 hg.merge(repo, hex(repo.changelog.tip()))
257 else:
257 else:
258 ui.status(_('the backout changeset is a new head - '
258 ui.status(_('the backout changeset is a new head - '
259 'do not forget to merge\n'))
259 'do not forget to merge\n'))
260 ui.status(_('(use "backout --merge" '
260 ui.status(_('(use "backout --merge" '
261 'if you want to auto-merge)\n'))
261 'if you want to auto-merge)\n'))
262
262
263 def bisect(ui, repo, rev=None, extra=None, command=None,
263 def bisect(ui, repo, rev=None, extra=None, command=None,
264 reset=None, good=None, bad=None, skip=None, noupdate=None):
264 reset=None, good=None, bad=None, skip=None, noupdate=None):
265 """subdivision search of changesets
265 """subdivision search of changesets
266
266
267 This command helps to find changesets which introduce problems.
267 This command helps to find changesets which introduce problems.
268 To use, mark the earliest changeset you know exhibits the problem
268 To use, mark the earliest changeset you know exhibits the problem
269 as bad, then mark the latest changeset which is free from the
269 as bad, then mark the latest changeset which is free from the
270 problem as good. Bisect will update your working directory to a
270 problem as good. Bisect will update your working directory to a
271 revision for testing (unless the --noupdate option is specified).
271 revision for testing (unless the --noupdate option is specified).
272 Once you have performed tests, mark the working directory as bad
272 Once you have performed tests, mark the working directory as bad
273 or good and bisect will either update to another candidate changeset
273 or good and bisect will either update to another candidate changeset
274 or announce that it has found the bad revision.
274 or announce that it has found the bad revision.
275
275
276 As a shortcut, you can also use the revision argument to mark a
276 As a shortcut, you can also use the revision argument to mark a
277 revision as good or bad without checking it out first.
277 revision as good or bad without checking it out first.
278
278
279 If you supply a command it will be used for automatic bisection. Its exit
279 If you supply a command it will be used for automatic bisection. Its exit
280 status will be used as flag to mark revision as bad or good. In case exit
280 status will be used as flag to mark revision as bad or good. In case exit
281 status is 0 the revision is marked as good, 125 - skipped, 127 (command not
281 status is 0 the revision is marked as good, 125 - skipped, 127 (command not
282 found) - bisection will be aborted and any other status bigger than 0 will
282 found) - bisection will be aborted and any other status bigger than 0 will
283 mark revision as bad.
283 mark revision as bad.
284 """
284 """
285 def print_result(nodes, good):
285 def print_result(nodes, good):
286 displayer = cmdutil.show_changeset(ui, repo, {})
286 displayer = cmdutil.show_changeset(ui, repo, {})
287 transition = (good and "good" or "bad")
287 transition = (good and "good" or "bad")
288 if len(nodes) == 1:
288 if len(nodes) == 1:
289 # narrowed it down to a single revision
289 # narrowed it down to a single revision
290 ui.write(_("The first %s revision is:\n") % transition)
290 ui.write(_("The first %s revision is:\n") % transition)
291 displayer.show(changenode=nodes[0])
291 displayer.show(changenode=nodes[0])
292 else:
292 else:
293 # multiple possible revisions
293 # multiple possible revisions
294 ui.write(_("Due to skipped revisions, the first "
294 ui.write(_("Due to skipped revisions, the first "
295 "%s revision could be any of:\n") % transition)
295 "%s revision could be any of:\n") % transition)
296 for n in nodes:
296 for n in nodes:
297 displayer.show(changenode=n)
297 displayer.show(changenode=n)
298
298
299 def check_state(state, interactive=True):
299 def check_state(state, interactive=True):
300 if not state['good'] or not state['bad']:
300 if not state['good'] or not state['bad']:
301 if (good or bad or skip or reset) and interactive:
301 if (good or bad or skip or reset) and interactive:
302 return
302 return
303 if not state['good']:
303 if not state['good']:
304 raise util.Abort(_('cannot bisect (no known good revisions)'))
304 raise util.Abort(_('cannot bisect (no known good revisions)'))
305 else:
305 else:
306 raise util.Abort(_('cannot bisect (no known bad revisions)'))
306 raise util.Abort(_('cannot bisect (no known bad revisions)'))
307 return True
307 return True
308
308
309 # backward compatibility
309 # backward compatibility
310 if rev in "good bad reset init".split():
310 if rev in "good bad reset init".split():
311 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
311 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
312 cmd, rev, extra = rev, extra, None
312 cmd, rev, extra = rev, extra, None
313 if cmd == "good":
313 if cmd == "good":
314 good = True
314 good = True
315 elif cmd == "bad":
315 elif cmd == "bad":
316 bad = True
316 bad = True
317 else:
317 else:
318 reset = True
318 reset = True
319 elif extra or good + bad + skip + reset + bool(command) > 1:
319 elif extra or good + bad + skip + reset + bool(command) > 1:
320 raise util.Abort(_('incompatible arguments'))
320 raise util.Abort(_('incompatible arguments'))
321
321
322 if reset:
322 if reset:
323 p = repo.join("bisect.state")
323 p = repo.join("bisect.state")
324 if os.path.exists(p):
324 if os.path.exists(p):
325 os.unlink(p)
325 os.unlink(p)
326 return
326 return
327
327
328 state = hbisect.load_state(repo)
328 state = hbisect.load_state(repo)
329
329
330 if command:
330 if command:
331 changesets = 1
331 changesets = 1
332 while changesets:
332 while changesets:
333 # update state
333 # update state
334 status = os.spawnlp(os.P_WAIT, command)
334 status = os.spawnlp(os.P_WAIT, command)
335 node = repo.lookup(rev or '.')
335 node = repo.lookup(rev or '.')
336 if status == 125:
336 if status == 125:
337 transition = "skip"
337 transition = "skip"
338 elif status == 0:
338 elif status == 0:
339 transition = "good"
339 transition = "good"
340 # status < 0 means process was killed
340 # status < 0 means process was killed
341 elif status == 127 or status < 0:
341 elif status == 127 or status < 0:
342 break
342 break
343 else:
343 else:
344 transition = "bad"
344 transition = "bad"
345 state[transition].append(node)
345 state[transition].append(node)
346 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
346 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
347 check_state(state, interactive=False)
347 check_state(state, interactive=False)
348 # bisect
348 # bisect
349 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
349 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
350 # update to next check
350 # update to next check
351 cmdutil.bail_if_changed(repo)
351 cmdutil.bail_if_changed(repo)
352 hg.clean(repo, nodes[0], show_stats=False)
352 hg.clean(repo, nodes[0], show_stats=False)
353 hbisect.save_state(repo, state)
353 hbisect.save_state(repo, state)
354 return print_result(nodes, not status)
354 return print_result(nodes, not status)
355
355
356 # update state
356 # update state
357 node = repo.lookup(rev or '.')
357 node = repo.lookup(rev or '.')
358 if good:
358 if good:
359 state['good'].append(node)
359 state['good'].append(node)
360 elif bad:
360 elif bad:
361 state['bad'].append(node)
361 state['bad'].append(node)
362 elif skip:
362 elif skip:
363 state['skip'].append(node)
363 state['skip'].append(node)
364
364
365 hbisect.save_state(repo, state)
365 hbisect.save_state(repo, state)
366
366
367 if not check_state(state):
367 if not check_state(state):
368 return
368 return
369
369
370 # actually bisect
370 # actually bisect
371 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
371 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
372 if changesets == 0:
372 if changesets == 0:
373 print_result(nodes, good)
373 print_result(nodes, good)
374 else:
374 else:
375 assert len(nodes) == 1 # only a single node can be tested next
375 assert len(nodes) == 1 # only a single node can be tested next
376 node = nodes[0]
376 node = nodes[0]
377 # compute the approximate number of remaining tests
377 # compute the approximate number of remaining tests
378 tests, size = 0, 2
378 tests, size = 0, 2
379 while size <= changesets:
379 while size <= changesets:
380 tests, size = tests + 1, size * 2
380 tests, size = tests + 1, size * 2
381 rev = repo.changelog.rev(node)
381 rev = repo.changelog.rev(node)
382 ui.write(_("Testing changeset %s:%s "
382 ui.write(_("Testing changeset %s:%s "
383 "(%s changesets remaining, ~%s tests)\n")
383 "(%s changesets remaining, ~%s tests)\n")
384 % (rev, short(node), changesets, tests))
384 % (rev, short(node), changesets, tests))
385 if not noupdate:
385 if not noupdate:
386 cmdutil.bail_if_changed(repo)
386 cmdutil.bail_if_changed(repo)
387 return hg.clean(repo, node)
387 return hg.clean(repo, node)
388
388
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use --clean to reset the working directory branch to that of the
    parent of the working directory, negating a previous branch change.

    Use the command 'hg update' to switch to an existing branch.
    """

    if opts.get('clean'):
        # revert to the branch of the working directory's first parent
        label = repo[None].parents()[0].branch()
        repo.dirstate.setbranch(label)
        ui.status(_('reset working directory to branch %s\n') % label)
        return
    if not label:
        # no argument and no --clean: just report the current branch
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return
    # refuse to shadow an existing branch unless --force is given or we
    # are already on a parent of that branch
    if not opts.get('force') and label in repo.branchtags():
        if label not in [p.branch() for p in repo.parents()]:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
418
418
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains repository heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    hexfunc = ui.debugflag and hex or short
    activebranches = [util.tolocal(repo[n].branch())
                      for n in repo.heads()]
    # sort by (is-active, revision number, name) and show newest first
    entries = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
                         for tag, node in repo.branchtags().items()])
    entries.reverse()

    for isactive, revnum, tag in entries:
        if active and not isactive:
            continue
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            # right-align the revision number in a fixed-width column
            revstr = str(revnum).rjust(31 - util.locallen(tag))
            if isactive:
                note = ''
            else:
                note = " (inactive)"
            ui.write("%s %s:%s%s\n"
                     % (tag, revstr, hexfunc(repo.lookup(revnum)), note))
445
445
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by one or more --base
    parameters. To create a bundle containing all changesets, use
    --all (or --base null). To change the compression method applied,
    use the -t option (by default, bundles are compressed using bz2).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            # fix: error message previously misspelled "specifiying"
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        for n in base:
            # everything reachable from a base node is already present
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        # walk ancestors until we hit the "has" frontier; roots whose
        # parents are all in "has" become the outgoing bases
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: compare against the destination repository
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts.get('force'))

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)
518
518
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
    """
    ctx = repo[opts.get('rev')]
    m = cmdutil.match(repo, (file1,) + pats, opts)
    found = False
    for abs in ctx.walk(m):
        fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(),
                               pathname=abs)
        data = ctx[abs].data()
        if opts.get('decode'):
            # apply the repository's decode filters on the way out
            data = repo.wwritedata(abs, data)
        fp.write(data)
        found = True
    # exit status: 0 if at least one file matched, 1 otherwise
    return int(not found)
545
545
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    In some cases, you can clone repositories and checked out files
    using full hardlinks with

    $ cp -al REPO REPOCLONE

    This is the fastest way to clone, but it is not always safe. The
    operation is not atomic (making sure REPO is not modified during
    the operation is up to you) and you have to make sure your editor
    breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
    this is not compatible with certain extensions that place their
    metadata under the .hg directory, such as mq.

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    If the -U option is used, the new clone will contain only a repository
    (.hg) and no working copy (the working copy parent is the null revision).

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    # -U/--noupdate suppresses the working-copy checkout
    do_update = not opts.get('noupdate')
    hg.clone(ui, source, dest,
             pull=opts.get('pull'),
             stream=opts.get('uncompressed'),
             rev=opts.get('rev'),
             update=do_update)
596
596
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If you are committing the result of a merge, do not provide any
    file names or -I/-X filters.

    If no commit message is specified, the configured editor is started to
    enter a message.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    def commitfunc(ui, repo, message, match, opts):
        # delegate the actual commit to the repository object
        return repo.commit(match.files(), message, opts.get('user'),
                           opts.get('date'), match,
                           force_editor=opts.get('force_editor'))

    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
    if not node:
        return
    cl = repo.changelog
    rev = cl.rev(node)
    parents = cl.parentrevs(rev)
    if rev - 1 not in parents:
        # neither parent was the previous tip: we may have made a new head
        p1, p2 = parents
        if (parents == (nullrev, nullrev) or
            len(cl.heads(cl.node(p1))) > 1 and
            (p2 == nullrev or len(cl.heads(cl.node(p2))) > 1)):
            ui.status(_('created new head\n'))

    if ui.debugflag:
        ui.write(_('committed changeset %d:%s\n') % (rev, hex(node)))
    elif ui.verbose:
        ui.write(_('committed changeset %d:%s\n') % (rev, short(node)))
635
635
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # take the working-directory lock non-blockingly held for the
    # duration of the copy; dropping the reference releases it
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        del wlock
655
655
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) not in (2, 3):
        raise util.Abort(_('either two or three arguments required'))
    if len(args) == 3:
        # explicit index file: operate outside any repository
        index, rev1, rev2 = args
        r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
        lookup = r.lookup
    else:
        if not repo:
            raise util.Abort(_("There is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))
673
673
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # list option names (short and long) instead of command names
        otables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            otables.append(entry[1])
        options = []
        for t in otables:
            for o in t:
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(cmd, table))))
692
692
def debugfsinfo(ui, path = "."):
    """probe filesystem capabilities (exec bit, symlinks, case
    sensitivity) using a temporary '.debugfsinfo' file"""
    # fix: create and close the probe file explicitly instead of
    # `file(...).write('')`, which leaked the handle on interpreters
    # without immediate refcount-based finalization
    fp = open('.debugfsinfo', 'w')
    try:
        fp.write('')
    finally:
        fp.close()
    ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
    ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
    ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
                                       and 'yes' or 'no'))
    os.unlink('.debugfsinfo')
700
700
def debugrebuildstate(ui, repo, rev="tip"):
    """rebuild the dirstate as it would look like for the given revision"""
    ctx = repo[rev]
    # hold the working-directory lock while rewriting the dirstate
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(ctx.node(), ctx.manifest())
    finally:
        # dropping the last reference releases the lock
        del wlock
709
709
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # cross-check every tracked file against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and the reverse direction: every manifest entry must be tracked
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            # fix: this warning was missing its trailing newline,
            # unlike every other warning emitted above
            ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
736
736
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values and len([v for v in values if '.' in v]) > 1:
        raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # section match: show the full name=value pair
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # exact section.name match: show the bare value
                ui.write(value, '\n')
762
762
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # default the second parent to the null revision
    rev2 = rev2 or hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        del wlock
778
778
779 def debugstate(ui, repo, nodates=None):
779 def debugstate(ui, repo, nodates=None):
780 """show the contents of the current dirstate"""
780 """show the contents of the current dirstate"""
781 timestr = ""
781 timestr = ""
782 showdate = not nodates
782 showdate = not nodates
783 for file_, ent in util.sort(repo.dirstate._map.items()):
783 for file_, ent in util.sort(repo.dirstate._map.items()):
784 if showdate:
784 if showdate:
785 if ent[3] == -1:
785 if ent[3] == -1:
786 # Pad or slice to locale representation
786 # Pad or slice to locale representation
787 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
787 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
788 timestr = 'unset'
788 timestr = 'unset'
789 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
789 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
790 else:
790 else:
791 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
791 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
792 if ent[1] & 020000:
792 if ent[1] & 020000:
793 mode = 'lnk'
793 mode = 'lnk'
794 else:
794 else:
795 mode = '%3o' % (ent[1] & 0777)
795 mode = '%3o' % (ent[1] & 0777)
796 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
796 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
797 for f in repo.dirstate.copies():
797 for f in repo.dirstate.copies():
798 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
798 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
799
799
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # the caller names the ".d" data file; the revlog is opened via the
    # corresponding ".i" index file (strip the 2-char extension)
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
807
807
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        # also accept the extended (less common) date formats
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    # d is a (unixtime, offset) pair — both members are printed
    ui.write("internal: %s %s\n" % d)
    ui.write("standard: %s\n" % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write("match: %s\n" % m(d[0]))
819
819
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    # column header spacing reconstructed — the rendered source collapsed
    # whitespace; widths match the format string below
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    for i in r:
        node = r.node(i)
        try:
            pp = r.parents(node)
        except:
            # damaged entries still get a row, with null parents
            pp = [nullid, nullid]
        # linkrev takes a revision number, not a node hash
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
                short(node), short(pp[0]), short(pp[1])))
834
834
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        # one edge per parent; the second parent is emitted only when real
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
846
846
847 def debuginstall(ui):
847 def debuginstall(ui):
848 '''test Mercurial installation'''
848 '''test Mercurial installation'''
849
849
850 def writetemp(contents):
850 def writetemp(contents):
851 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
851 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
852 f = os.fdopen(fd, "wb")
852 f = os.fdopen(fd, "wb")
853 f.write(contents)
853 f.write(contents)
854 f.close()
854 f.close()
855 return name
855 return name
856
856
857 problems = 0
857 problems = 0
858
858
859 # encoding
859 # encoding
860 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
860 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
861 try:
861 try:
862 util.fromlocal("test")
862 util.fromlocal("test")
863 except util.Abort, inst:
863 except util.Abort, inst:
864 ui.write(" %s\n" % inst)
864 ui.write(" %s\n" % inst)
865 ui.write(_(" (check that your locale is properly set)\n"))
865 ui.write(_(" (check that your locale is properly set)\n"))
866 problems += 1
866 problems += 1
867
867
868 # compiled modules
868 # compiled modules
869 ui.status(_("Checking extensions...\n"))
869 ui.status(_("Checking extensions...\n"))
870 try:
870 try:
871 import bdiff, mpatch, base85
871 import bdiff, mpatch, base85
872 except Exception, inst:
872 except Exception, inst:
873 ui.write(" %s\n" % inst)
873 ui.write(" %s\n" % inst)
874 ui.write(_(" One or more extensions could not be found"))
874 ui.write(_(" One or more extensions could not be found"))
875 ui.write(_(" (check that you compiled the extensions)\n"))
875 ui.write(_(" (check that you compiled the extensions)\n"))
876 problems += 1
876 problems += 1
877
877
878 # templates
878 # templates
879 ui.status(_("Checking templates...\n"))
879 ui.status(_("Checking templates...\n"))
880 try:
880 try:
881 import templater
881 import templater
882 t = templater.templater(templater.templatepath("map-cmdline.default"))
882 t = templater.templater(templater.templatepath("map-cmdline.default"))
883 except Exception, inst:
883 except Exception, inst:
884 ui.write(" %s\n" % inst)
884 ui.write(" %s\n" % inst)
885 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
885 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
886 problems += 1
886 problems += 1
887
887
888 # patch
888 # patch
889 ui.status(_("Checking patch...\n"))
889 ui.status(_("Checking patch...\n"))
890 patchproblems = 0
890 patchproblems = 0
891 a = "1\n2\n3\n4\n"
891 a = "1\n2\n3\n4\n"
892 b = "1\n2\n3\ninsert\n4\n"
892 b = "1\n2\n3\ninsert\n4\n"
893 fa = writetemp(a)
893 fa = writetemp(a)
894 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
894 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
895 os.path.basename(fa))
895 os.path.basename(fa))
896 fd = writetemp(d)
896 fd = writetemp(d)
897
897
898 files = {}
898 files = {}
899 try:
899 try:
900 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
900 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
901 except util.Abort, e:
901 except util.Abort, e:
902 ui.write(_(" patch call failed:\n"))
902 ui.write(_(" patch call failed:\n"))
903 ui.write(" " + str(e) + "\n")
903 ui.write(" " + str(e) + "\n")
904 patchproblems += 1
904 patchproblems += 1
905 else:
905 else:
906 if list(files) != [os.path.basename(fa)]:
906 if list(files) != [os.path.basename(fa)]:
907 ui.write(_(" unexpected patch output!\n"))
907 ui.write(_(" unexpected patch output!\n"))
908 patchproblems += 1
908 patchproblems += 1
909 a = file(fa).read()
909 a = file(fa).read()
910 if a != b:
910 if a != b:
911 ui.write(_(" patch test failed!\n"))
911 ui.write(_(" patch test failed!\n"))
912 patchproblems += 1
912 patchproblems += 1
913
913
914 if patchproblems:
914 if patchproblems:
915 if ui.config('ui', 'patch'):
915 if ui.config('ui', 'patch'):
916 ui.write(_(" (Current patch tool may be incompatible with patch,"
916 ui.write(_(" (Current patch tool may be incompatible with patch,"
917 " or misconfigured. Please check your .hgrc file)\n"))
917 " or misconfigured. Please check your .hgrc file)\n"))
918 else:
918 else:
919 ui.write(_(" Internal patcher failure, please report this error"
919 ui.write(_(" Internal patcher failure, please report this error"
920 " to http://www.selenic.com/mercurial/bts\n"))
920 " to http://www.selenic.com/mercurial/bts\n"))
921 problems += patchproblems
921 problems += patchproblems
922
922
923 os.unlink(fa)
923 os.unlink(fa)
924 os.unlink(fd)
924 os.unlink(fd)
925
925
926 # editor
926 # editor
927 ui.status(_("Checking commit editor...\n"))
927 ui.status(_("Checking commit editor...\n"))
928 editor = ui.geteditor()
928 editor = ui.geteditor()
929 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
929 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
930 if not cmdpath:
930 if not cmdpath:
931 if editor == 'vi':
931 if editor == 'vi':
932 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
932 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
933 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
933 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
934 else:
934 else:
935 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
935 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
936 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
936 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
937 problems += 1
937 problems += 1
938
938
939 # check username
939 # check username
940 ui.status(_("Checking username...\n"))
940 ui.status(_("Checking username...\n"))
941 user = os.environ.get("HGUSER")
941 user = os.environ.get("HGUSER")
942 if user is None:
942 if user is None:
943 user = ui.config("ui", "username")
943 user = ui.config("ui", "username")
944 if user is None:
944 if user is None:
945 user = os.environ.get("EMAIL")
945 user = os.environ.get("EMAIL")
946 if not user:
946 if not user:
947 ui.warn(" ")
947 ui.warn(" ")
948 ui.username()
948 ui.username()
949 ui.write(_(" (specify a username in your .hgrc file)\n"))
949 ui.write(_(" (specify a username in your .hgrc file)\n"))
950
950
951 if not problems:
951 if not problems:
952 ui.status(_("No problems detected\n"))
952 ui.status(_("No problems detected\n"))
953 else:
953 else:
954 ui.write(_("%s problems detected,"
954 ui.write(_("%s problems detected,"
955 " please check your install!\n") % problems)
955 " please check your install!\n") % problems)
956
956
957 return problems
957 return problems
958
958
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo[opts.get('rev')]
    m = cmdutil.match(repo, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() returns (source path, source filenode) or a false value
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
972
972
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = cmdutil.match(repo, pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    # column widths sized to the longest absolute and relative paths;
    # spacing reconstructed — the rendered source collapsed whitespace
    fmt = 'f  %%-%ds  %%-%ds  %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
985
985
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.

    Use the --git option to generate diffs in the git extended diff
    format. Read the gitdiffs help topic for more information.
    """
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    m = cmdutil.match(repo, pats, opts)
    # patch.diff yields raw diff chunks; stream them straight out
    it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
    for chunk in it:
        repo.ui.write(chunk)
1016
1016
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    Use the --git option to generate diffs in the git extended diff
    format. Read the gitdiffs help topic for more information.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs, template=opts.get('output'),
                 switch_parent=opts.get('switch_parent'),
                 opts=patch.diffopts(ui, opts))
1060
1060
1061 def grep(ui, repo, pattern, *pats, **opts):
1061 def grep(ui, repo, pattern, *pats, **opts):
1062 """search for a pattern in specified files and revisions
1062 """search for a pattern in specified files and revisions
1063
1063
1064 Search revisions of files for a regular expression.
1064 Search revisions of files for a regular expression.
1065
1065
1066 This command behaves differently than Unix grep. It only accepts
1066 This command behaves differently than Unix grep. It only accepts
1067 Python/Perl regexps. It searches repository history, not the
1067 Python/Perl regexps. It searches repository history, not the
1068 working directory. It always prints the revision number in which
1068 working directory. It always prints the revision number in which
1069 a match appears.
1069 a match appears.
1070
1070
1071 By default, grep only prints output for the first revision of a
1071 By default, grep only prints output for the first revision of a
1072 file in which it finds a match. To get it to print every revision
1072 file in which it finds a match. To get it to print every revision
1073 that contains a change in match status ("-" for a match that
1073 that contains a change in match status ("-" for a match that
1074 becomes a non-match, or "+" for a non-match that becomes a match),
1074 becomes a non-match, or "+" for a non-match that becomes a match),
1075 use the --all flag.
1075 use the --all flag.
1076 """
1076 """
1077 reflags = 0
1077 reflags = 0
1078 if opts.get('ignore_case'):
1078 if opts.get('ignore_case'):
1079 reflags |= re.I
1079 reflags |= re.I
1080 try:
1080 try:
1081 regexp = re.compile(pattern, reflags)
1081 regexp = re.compile(pattern, reflags)
1082 except Exception, inst:
1082 except Exception, inst:
1083 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1083 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1084 return None
1084 return None
1085 sep, eol = ':', '\n'
1085 sep, eol = ':', '\n'
1086 if opts.get('print0'):
1086 if opts.get('print0'):
1087 sep = eol = '\0'
1087 sep = eol = '\0'
1088
1088
1089 fcache = {}
1089 fcache = {}
1090 def getfile(fn):
1090 def getfile(fn):
1091 if fn not in fcache:
1091 if fn not in fcache:
1092 fcache[fn] = repo.file(fn)
1092 fcache[fn] = repo.file(fn)
1093 return fcache[fn]
1093 return fcache[fn]
1094
1094
1095 def matchlines(body):
1095 def matchlines(body):
1096 begin = 0
1096 begin = 0
1097 linenum = 0
1097 linenum = 0
1098 while True:
1098 while True:
1099 match = regexp.search(body, begin)
1099 match = regexp.search(body, begin)
1100 if not match:
1100 if not match:
1101 break
1101 break
1102 mstart, mend = match.span()
1102 mstart, mend = match.span()
1103 linenum += body.count('\n', begin, mstart) + 1
1103 linenum += body.count('\n', begin, mstart) + 1
1104 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1104 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1105 begin = body.find('\n', mend) + 1 or len(body)
1105 begin = body.find('\n', mend) + 1 or len(body)
1106 lend = begin - 1
1106 lend = begin - 1
1107 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1107 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1108
1108
1109 class linestate(object):
1109 class linestate(object):
1110 def __init__(self, line, linenum, colstart, colend):
1110 def __init__(self, line, linenum, colstart, colend):
1111 self.line = line
1111 self.line = line
1112 self.linenum = linenum
1112 self.linenum = linenum
1113 self.colstart = colstart
1113 self.colstart = colstart
1114 self.colend = colend
1114 self.colend = colend
1115
1115
1116 def __hash__(self):
1116 def __hash__(self):
1117 return hash((self.linenum, self.line))
1117 return hash((self.linenum, self.line))
1118
1118
1119 def __eq__(self, other):
1119 def __eq__(self, other):
1120 return self.line == other.line
1120 return self.line == other.line
1121
1121
1122 matches = {}
1122 matches = {}
1123 copies = {}
1123 copies = {}
1124 def grepbody(fn, rev, body):
1124 def grepbody(fn, rev, body):
1125 matches[rev].setdefault(fn, [])
1125 matches[rev].setdefault(fn, [])
1126 m = matches[rev][fn]
1126 m = matches[rev][fn]
1127 for lnum, cstart, cend, line in matchlines(body):
1127 for lnum, cstart, cend, line in matchlines(body):
1128 s = linestate(line, lnum, cstart, cend)
1128 s = linestate(line, lnum, cstart, cend)
1129 m.append(s)
1129 m.append(s)
1130
1130
1131 def difflinestates(a, b):
1131 def difflinestates(a, b):
1132 sm = difflib.SequenceMatcher(None, a, b)
1132 sm = difflib.SequenceMatcher(None, a, b)
1133 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1133 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1134 if tag == 'insert':
1134 if tag == 'insert':
1135 for i in xrange(blo, bhi):
1135 for i in xrange(blo, bhi):
1136 yield ('+', b[i])
1136 yield ('+', b[i])
1137 elif tag == 'delete':
1137 elif tag == 'delete':
1138 for i in xrange(alo, ahi):
1138 for i in xrange(alo, ahi):
1139 yield ('-', a[i])
1139 yield ('-', a[i])
1140 elif tag == 'replace':
1140 elif tag == 'replace':
1141 for i in xrange(alo, ahi):
1141 for i in xrange(alo, ahi):
1142 yield ('-', a[i])
1142 yield ('-', a[i])
1143 for i in xrange(blo, bhi):
1143 for i in xrange(blo, bhi):
1144 yield ('+', b[i])
1144 yield ('+', b[i])
1145
1145
1146 prev = {}
1146 prev = {}
1147 def display(fn, rev, states, prevstates):
1147 def display(fn, rev, states, prevstates):
1148 datefunc = ui.quiet and util.shortdate or util.datestr
1148 datefunc = ui.quiet and util.shortdate or util.datestr
1149 found = False
1149 found = False
1150 filerevmatches = {}
1150 filerevmatches = {}
1151 r = prev.get(fn, -1)
1151 r = prev.get(fn, -1)
1152 if opts.get('all'):
1152 if opts.get('all'):
1153 iter = difflinestates(states, prevstates)
1153 iter = difflinestates(states, prevstates)
1154 else:
1154 else:
1155 iter = [('', l) for l in prevstates]
1155 iter = [('', l) for l in prevstates]
1156 for change, l in iter:
1156 for change, l in iter:
1157 cols = [fn, str(r)]
1157 cols = [fn, str(r)]
1158 if opts.get('line_number'):
1158 if opts.get('line_number'):
1159 cols.append(str(l.linenum))
1159 cols.append(str(l.linenum))
1160 if opts.get('all'):
1160 if opts.get('all'):
1161 cols.append(change)
1161 cols.append(change)
1162 if opts.get('user'):
1162 if opts.get('user'):
1163 cols.append(ui.shortuser(get(r)[1]))
1163 cols.append(ui.shortuser(get(r)[1]))
1164 if opts.get('date'):
1164 if opts.get('date'):
1165 cols.append(datefunc(get(r)[2]))
1165 cols.append(datefunc(get(r)[2]))
1166 if opts.get('files_with_matches'):
1166 if opts.get('files_with_matches'):
1167 c = (fn, r)
1167 c = (fn, r)
1168 if c in filerevmatches:
1168 if c in filerevmatches:
1169 continue
1169 continue
1170 filerevmatches[c] = 1
1170 filerevmatches[c] = 1
1171 else:
1171 else:
1172 cols.append(l.line)
1172 cols.append(l.line)
1173 ui.write(sep.join(cols), eol)
1173 ui.write(sep.join(cols), eol)
1174 found = True
1174 found = True
1175 return found
1175 return found
1176
1176
1177 fstate = {}
1177 fstate = {}
1178 skip = {}
1178 skip = {}
1179 get = util.cachefunc(lambda r: repo[r].changeset())
1179 get = util.cachefunc(lambda r: repo[r].changeset())
1180 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1180 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1181 found = False
1181 found = False
1182 follow = opts.get('follow')
1182 follow = opts.get('follow')
1183 for st, rev, fns in changeiter:
1183 for st, rev, fns in changeiter:
1184 if st == 'window':
1184 if st == 'window':
1185 matches.clear()
1185 matches.clear()
1186 elif st == 'add':
1186 elif st == 'add':
1187 ctx = repo[rev]
1187 ctx = repo[rev]
1188 matches[rev] = {}
1188 matches[rev] = {}
1189 for fn in fns:
1189 for fn in fns:
1190 if fn in skip:
1190 if fn in skip:
1191 continue
1191 continue
1192 try:
1192 try:
1193 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1193 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1194 fstate.setdefault(fn, [])
1194 fstate.setdefault(fn, [])
1195 if follow:
1195 if follow:
1196 copied = getfile(fn).renamed(ctx.filenode(fn))
1196 copied = getfile(fn).renamed(ctx.filenode(fn))
1197 if copied:
1197 if copied:
1198 copies.setdefault(rev, {})[fn] = copied[0]
1198 copies.setdefault(rev, {})[fn] = copied[0]
1199 except revlog.LookupError:
1199 except revlog.LookupError:
1200 pass
1200 pass
1201 elif st == 'iter':
1201 elif st == 'iter':
1202 for fn, m in util.sort(matches[rev].items()):
1202 for fn, m in util.sort(matches[rev].items()):
1203 copy = copies.get(rev, {}).get(fn)
1203 copy = copies.get(rev, {}).get(fn)
1204 if fn in skip:
1204 if fn in skip:
1205 if copy:
1205 if copy:
1206 skip[copy] = True
1206 skip[copy] = True
1207 continue
1207 continue
1208 if fn in prev or fstate[fn]:
1208 if fn in prev or fstate[fn]:
1209 r = display(fn, rev, m, fstate[fn])
1209 r = display(fn, rev, m, fstate[fn])
1210 found = found or r
1210 found = found or r
1211 if r and not opts.get('all'):
1211 if r and not opts.get('all'):
1212 skip[fn] = True
1212 skip[fn] = True
1213 if copy:
1213 if copy:
1214 skip[copy] = True
1214 skip[copy] = True
1215 fstate[fn] = m
1215 fstate[fn] = m
1216 if copy:
1216 if copy:
1217 fstate[copy] = m
1217 fstate[copy] = m
1218 prev[fn] = rev
1218 prev[fn] = rev
1219
1219
1220 for fn, state in util.sort(fstate.items()):
1220 for fn, state in util.sort(fstate.items()):
1221 if fn in skip:
1221 if fn in skip:
1222 continue
1222 continue
1223 if fn not in copies.get(prev[fn], {}):
1223 if fn not in copies.get(prev[fn], {}):
1224 found = display(fn, rev, {}, state) or found
1224 found = display(fn, rev, {}, state) or found
1225 return (not found and 1) or 0
1225 return (not found and 1) or 0
1226
1226
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    if opts.get('rev'):
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo[branchrev].branch()
            if branch in visitedset:
                # several arguments may name the same branch; report once
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts.get('rev')))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts.get('rev')))
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1274
1274
def help_(ui, name=None, with_version=False):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands and short help.

    Given a topic, extension, or command name, print help for that topic."""
    # Accumulates (title, options) pairs; rendered in one pass at the end so
    # the nested helpers below can contribute sections in order.
    option_lists = []

    def addglobalopts(aliases):
        # Append either the full global-options table (verbose) or a one-line
        # hint telling the user how to get more help.
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # Help for a single command: synopsis, aliases, docstring, options.
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, i = cmdutil.findcmd(name, table, False)
        except cmdutil.AmbiguousCommand, inst:
            # Prefix matched several commands: list all candidates instead.
            select = lambda c: c.lstrip('^').startswith(inst.args[0])
            helplist(_('list of commands:\n\n'), select)
            return

        # synopsis
        ui.write("%s\n" % i[2])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description (first docstring line only in quiet mode)
        doc = gettext(i[0].__doc__)
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        # Print a table of commands; `select` optionally filters by name.
        # h maps command name -> first docstring line; cmds maps name -> the
        # full "name|alias" spec for verbose output.
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            # Without a filter, the full list hides extension commands
            # (those defined outside this module).
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            # The short list only shows commands marked with a leading "^".
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            # debug* commands only appear under --debug.
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = gettext(e[0].__doc__)
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = util.sort(h)
        # Column width for aligned non-verbose output.
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        # Append a summary of enabled extensions (name + first doc line).
        exts = list(extensions.extensions())
        if exts and name != 'shortlist':
            ui.write(_('\nenabled extensions:\n\n'))
            maxlength = 0
            exthelps = []
            for ename, ext in exts:
                doc = (ext.__doc__ or _('(no help text available)'))
                ename = ename.split('.')[-1]
                maxlength = max(len(ename), maxlength)
                exthelps.append((ename, doc.splitlines(0)[0].strip()))
            for ename, text in exthelps:
                ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # Help for a named topic from help.helptable; raises UnknownCommand
        # so the caller can fall through to the next resolver.
        for names, header, doc in help.helptable:
            if name in names:
                break
        else:
            raise cmdutil.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(No help text available)")
        # Topic docs may be callables that produce the text lazily.
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # Help for an extension: its module docstring plus its command table.
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = gettext(mod.__doc__) or _('No help text available')
        doc = doc.splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        # Try command, then topic, then extension; re-raise the last
        # UnknownCommand only if every resolver failed.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            # Deprecated options are hidden unless --verbose.
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if not name:
        # Overview mode also lists the available help topics; each topic's
        # longest alias is shown (names sorted by descending length).
        ui.write(_("\nadditional help topics:\n\n"))
        topics = []
        for names, header, doc in help.helptable:
            names = [(-len(name), name) for name in names]
            names.sort()
            topics.append((names[0][1], header))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s %s\n" % (topics_len, t, desc))

    if opt_output:
        # Align option descriptions to the widest option spelling;
        # entries with a None description are section titles.
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1482
1482
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    # --debug shows full 40-char hashes; otherwise the short form.
    hexfunc = ui.debugflag and hex or short
    # "default" mode: no explicit output selector was requested.
    default = not (num or id or branch or tags)
    output = []

    if source:
        # Remote lookup: only the changeset id is available; the other
        # selectors would require local repo state and are rejected.
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # Working directory: report parent id(s), "+"-suffixed when the
        # working copy has uncommitted changes or deletions.
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        # Explicit local revision.
        ctx = repo[rev]
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    # In default local mode, also show a non-default branch name and tags.
    # NOTE: ctx is unbound on the `source` path, but that path either raised
    # above or leaves `source` set, so this branch is not reached.
    if not source and default and not ui.quiet:
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1552
1552
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # Fold the mandatory first patch into the tuple of remaining ones.
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --exact always requires a clean working directory; otherwise only
    # when -f/--force was not given.
    if opts.get('exact') or not opts.get('force'):
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    # Hold both the working-dir lock and the store lock for the whole run;
    # released via the final `del` (lock objects unlock on destruction).
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                pf = sys.stdin
            else:
                ui.status(_("applying %s\n") % p)
                pf = url.open(ui, pf)
            # extract() writes the diff to a temp file and pulls metadata
            # (commit message, user, date, branch, ids) out of the message.
            data = patch.extract(ui, pf)
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # Commit-message precedence: command line > patch > editor.
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.parents()
                if opts.get('exact'):
                    # --exact: position the working directory exactly on the
                    # parents recorded in the patch before applying.
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # Merge patch without --exact: adopt its second parent
                    # only if both parents resolve and p1 matches the
                    # current working-dir parent; otherwise best-effort.
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # Always sync the dirstate with whatever the patch
                    # touched, even if applying it failed partway.
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        # The recreated changeset must hash identically to
                        # the one recorded in the patch; otherwise undo it.
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                # Force a dirstate write so that the next transaction
                # backs up an up-to-date file.
                repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1666
1666
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    # Passwords embedded in the URL are masked before printing.
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # Nothing to show: remove any stale --bundle file, best-effort.
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    # Path of a temporary bundle to delete on exit (None = keep/none made).
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts.get('newest_first'):
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            # --no-merges: skip changesets with two real parents.
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(changenode=n)
    finally:
        # `other` may have been rebound to a bundlerepo; close if possible,
        # then delete the temporary bundle unless the user asked to keep it.
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1732
1732
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # Remote-related options (e.g. ssh command) must be applied before the
    # repository object is constructed, since dest may be an ssh:// URL.
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1747
1747
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 terminates each name with NUL for safe xargs piping.
    if opts.get('print0'):
        terminator = '\0'
    else:
        terminator = '\n'
    rev = opts.get('rev') or None

    exitcode = 1
    matcher = cmdutil.match(repo, pats, opts, default='relglob')
    # Silence complaints about names that match nothing.
    matcher.bad = lambda fname, msg: False
    for abspath in repo[rev].walk(matcher):
        # With no revision, restrict the walk to files Mercurial tracks.
        if not rev and abspath not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(os.path.join(repo.root, abspath), terminator)
        elif pats:
            ui.write(matcher.rel(abspath) or abspath, terminator)
        else:
            ui.write(abspath, terminator)
        # At least one file was printed: success.
        exitcode = 0

    return exitcode
1782
1782
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # Memoized accessor for changeset data; walkchangerevs and the
    # filters below may request the same rev repeatedly.
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # With --copies we only need rename data up to the highest requested
    # revision; otherwise scan the whole repo.
    if opts.get('copies') and opts.get('rev'):
        endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
    else:
        endrev = len(repo)
    rcache = {}  # fn -> {linkrev: renamed-info} cache
    ncache = {}  # fn -> {filenode: renamed-info} cache
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            # Walk the filelog once, indexing rename info by linkrev.
            # linkrev takes the filelog revision number (not a node).
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(i)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                # Past endrev nothing more can be requested; stop early.
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except revlog.LookupError:
            pass
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts.get('only_branch')

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # changeiter yields ('add', rev, files) when a rev enters the window
    # and ('iter', rev, None) when it should actually be emitted.
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            if opts.get('only_merges') and len(parents) != 2:
                continue

            if only_branches:
                # changeset()[5] is the extra dict; 'branch' names the branch.
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                # changes[2] is the (unixtime, offset) date tuple.
                if not df(changes[2][0]):
                    continue

            if opts.get('keyword'):
                changes = get(rev)
                miss = 0
                # Every keyword must appear in user, description or files.
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            if opts['user']:
                changes = get(rev)
                miss = 0
                for k in opts['user']:
                    if k != changes[1]:
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                # changes[3] is the list of files touched by this rev.
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            # flush returns true when the rev was actually displayed.
            if displayer.flush(rev):
                count += 1
1915
1915
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # --rev is just an alias for the positional node argument.
    if not node:
        node = rev

    # Map a file's flag character to the mode prefix shown in -v output.
    modemap = {'l': '644 @ ', 'x': '755 * ', '': '644 '}
    ctx = repo[node]
    for path in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[path]))
        if ui.verbose:
            ui.write(modemap[ctx.flags(path)])
        ui.write("%s\n" % path)
1944
1944
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    # --rev is an alias for the positional node argument.
    if not node:
        node = rev

    # No explicit target: try to pick the "obvious" other head of the
    # current branch, aborting with guidance when that is ambiguous.
    if not node:
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            # Only one head on this branch: nothing to merge with here,
            # but other branches having heads means an explicit rev could
            # still make sense.
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            # Suggest "hg update" when the working dir is not at the
            # branch tip.
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # Exactly two heads and we are on one of them: merge with the other.
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    return hg.merge(repo, node, force=force)
1988
1988
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    cmdutil.setremoteconfig(ui, opts)

    # Resolve any -r arguments to nodes in the local repo.
    if revs:
        revs = [repo.lookup(r) for r in revs]

    remote = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    roots = repo.findoutgoing(remote, force=opts.get('force'))
    if not roots:
        ui.status(_("no changes found\n"))
        return 1

    # Expand the outgoing roots into the full list of nodes to show.
    nodes = repo.changelog.nodesbetween(roots, revs)[0]
    if opts.get('newest_first'):
        nodes.reverse()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    shown = 0
    for n in nodes:
        if shown >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        shown += 1
        displayer.show(changenode=n)
2024
2024
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo[rev]
    else:
        # No --rev: use the working directory context.
        ctx = repo[None]

    if file_:
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                # File not present in this parent; skip it.
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        # linkrev takes a filelog revision number, so map each filenode
        # to its rev first, then look up the linked changeset.
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n)
2064
2064
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    # No name given: list every configured path.
    if not search:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, url.hidepassword(path)))
        return

    # Look up a single named path; passwords are masked in the output.
    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % url.hidepassword(path))
            return
    ui.warn(_("not found!\n"))
    return 1
2084
2084
def postincoming(ui, repo, modheads, optupdate, checkout):
    # Common follow-up after pull/unbundle: update the working dir when
    # requested and safe, otherwise hint at the next command to run.
    if modheads == 0:
        return
    if optupdate:
        if checkout or modheads <= 1:
            # Safe to update: either a specific checkout was requested
            # or no new head ambiguity was introduced.
            return hg.update(repo, checkout)
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2097
2097
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user[:pass]@]host[:port]/[path]
      https://[user[:pass]@]host[:port]/[path]
      ssh://[user[:pass]@]host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle').

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    cmdutil.setremoteconfig(ui, opts)

    remote = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % url.hidepassword(source))
    if revs:
        # -r names must be resolved by the remote; not every protocol
        # supports lookup.
        try:
            revs = [remote.lookup(r) for r in revs]
        except NoCapability:
            raise util.Abort(_("Other repository doesn't support revision "
                               "lookup, so a rev cannot be specified."))

    modheads = repo.pull(remote, heads=revs, force=opts.get('force'))
    return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2151
2151
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    the client has forgotten to pull and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user[:pass]@]host[:port]/[path]
      http://[user[:pass]@]host[:port]/[path]
      https://[user[:pass]@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push. If -r is used, the named changeset and all its
    ancestors will be pushed to the remote repository.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    cmdutil.setremoteconfig(ui, opts)

    remote = hg.repository(ui, dest)
    ui.status(_('pushing to %s\n') % url.hidepassword(dest))
    # Resolve -r arguments locally; push sends the named revs and
    # their ancestors.
    if revs:
        revs = [repo.lookup(r) for r in revs]
    result = repo.push(remote, opts.get('force'), revs=revs)
    # repo.push returns 0 on failure, so invert it into a shell-style
    # exit status.
    return result == 0
2193
2193
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = cmdutil.logmessage(opts)

    # Commit the named files/patterns plus any extra file names read
    # from the --files list (one path per line).
    files = cmdutil.match(repo, pats, opts).files()
    if opts.get('files'):
        files += open(opts['files']).read().splitlines()

    # --parent may be given multiple times to set both parents.
    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))
2221
2221
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # Nothing to recover (or recovery failed): signal an error status.
    if not repo.recover():
        return 1
    # A transaction was rolled back; verify the repository afterwards.
    return hg.verify(repo)
2233
2233
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the entire
    project history. -A can be used to remove only files that have already
    been deleted, -f can be used to force deletion, and -Af can be used
    to remove files from the next revision without deleting them.

    The following table details the behavior of remove for different file
    states (columns) and option combinations (rows). The file states are
    Added, Clean, Modified and Missing (as reported by hg status). The
    actions are Warn, Remove (from branch) and Delete (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after = opts.get('after')
    force = opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    matcher = cmdutil.match(repo, pats, opts)
    st = repo.status(match=matcher, clean=True)
    # status tuple: 0=modified 1=added 3=deleted 6=clean
    modified, added, deleted, clean = st[0], st[1], st[3], st[6]

    def warn(files, reason):
        # explain to the user why each listed file is being left alone
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (matcher.rel(f), reason))

    # partition the matched files per the option table in the docstring
    if force:
        toremove = modified + deleted + clean
        toforget = added
    elif after:
        toremove = deleted
        toforget = []
        warn(modified + added + clean, _('still exists'))
    else:
        toremove = deleted + clean
        toforget = []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in util.sort(toremove + toforget):
        if ui.verbose or not matcher.exact(f):
            ui.status(_('removing %s\n') % matcher.rel(f))

    repo.forget(toforget)
    # unlink from disk unless we're only recording an already-done deletion
    repo.remove(toremove, unlink=not after)
2288
2288
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # Hold the working-directory lock for the duration of the copy;
    # the 'del' in the finally clause is what releases it (lock
    # release via refcount is the convention in this codebase).
    wlock = repo.wlock(False)
    try:
        # rename is just copy with the rename flag set
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        del wlock
2308
2308
def resolve(ui, repo, *pats, **opts):
    """resolve file merges from a branch merge or update

    This command will attempt to resolve unresolved merges from the
    last update or merge command. This will use the local file
    revision preserved at the last update or merge to cleanly retry
    the file merge attempt. With no file or options specified, this
    command will attempt to resolve all unresolved files.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """

    # --list/--mark/--unmark are mutually exclusive
    enabled = [o for o in opts if opts[o]]
    if len(enabled) > 1:
        raise util.Abort(_("too many options specified"))

    ms = merge_.mergestate(repo)
    matcher = cmdutil.match(repo, pats, opts)

    for f in ms:
        if not matcher(f):
            continue
        if opts.get("list"):
            # print state code (U/R) and the file name
            ui.write("%s %s\n" % (ms[f].upper(), f))
        elif opts.get("mark"):
            ms.mark(f, "r")
        elif opts.get("unmark"):
            ms.mark(f, "u")
        else:
            # re-run the file merge against the second parent
            wctx = repo[None]
            mctx = wctx.parents()[-1]
            ms.resolve(f, wctx, mctx)
2341
2341
def revert(ui, repo, *pats, **opts):
    """restore individual files or dirs to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working dir parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of an earlier change.
    See 'hg help dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    # --date is translated to a --rev; the two are mutually exclusive
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts.get('all'):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo[opts.get('rev')]
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        # reverting to the first parent: its manifest is already at hand
        pmf = mf
    else:
        # parent manifest loaded lazily below, only if actually needed
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # maps abs path -> (relative path, was-matched-exactly)
    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.
        # (a dead "files = []" local that was never read has been removed)

        m = cmdutil.match(repo, pats, opts)
        # suppress "file not found" complaints during the dirstate walk
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            # only warn about a missing path if neither it nor anything
            # under it was already found in the dirstate walk
            if path in names:
                return False
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return False
            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
            return False

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        # dicts for O(1) membership tests in the dispatch loop below
        modified, added, removed, deleted = map(dict.fromkeys, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed[src] = None
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            # message depends on whether the file was only scheduled for add
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # each action is (list of files to act on, status message or callable)
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in util.sort(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # queue abs on the chosen action list, optionally saving a
                # .orig backup, and report what we are doing
                xlist[0].append(abs)
                if dobackup and not opts.get('no_backup') and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for-else: no dispatch row matched, file is unchanged
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                # write f's target-revision contents into the working dir
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    # scheduled add: just forget it, nothing on disk to undo
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    # file already gone from disk; only fix the dirstate
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        del wlock
2559
2559
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all of the actual work lives in the repository object
    repo.rollback()
2587
2587
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # emit the repository root followed by a newline
    ui.write("%s\n" % repo.root)
2594
2594
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio: speak the ssh wire protocol over stdin/stdout instead of
    # starting an HTTP server; requires an actual local repository.
    if opts["stdio"]:
        if repo is None:
            raise RepoError(_("There is no Mercurial repository here"
                              " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # push web-related command-line options into the [web] config section
    # of the outermost ui (and the repo ui if it is distinct)
    parentui = ui.parentui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    # without a repo we can still serve a multi-repo webdir_conf setup
    if repo is None and not ui.config("web", "webdir_conf"):
        raise RepoError(_("There is no Mercurial repository here"
                          " (.hg not found)"))

    # small adapter handed to cmdutil.service: init() builds the HTTP
    # server (and prints the listening address when verbose), run()
    # blocks serving requests
    class service:
        def init(self):
            util.set_signal_handler()
            self.httpd = hgweb.server.create_server(parentui, repo)

            if not ui.verbose: return

            # build a human-readable "listening at ..." line
            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                # default HTTP port: omit it from the displayed URL
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles daemonization options and invokes init/run
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2658
2658
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/-ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    # paths are printed relative to cwd only when patterns were given
    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''
    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    copymap = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    # which states to display, from explicit flags / --all / defaults
    show = [s for s in states if opts[s]]
    if opts.get('all'):
        if ui.quiet:
            show += states[:4] + ['clean']
        else:
            show += states
    if not show:
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    st = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                     'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', st)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        # collect copy/rename sources for files that were added
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = st[1]
        if node2 is None:
            added = st[0] + st[1] # merged?

        for a, b in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
            if a in added:
                copymap[a] = b
            elif b in added:
                copymap[b] = a

    for state, char, files in changestates:
        if state not in show:
            continue
        if opts.get('no_status'):
            fmt = "%%s%s" % end
        else:
            fmt = "%s %%s%s" % (char, end)

        for f in files:
            ui.write(fmt % repo.pathto(f, cwd))
            if f in copymap:
                # show the copy source on the following line
                ui.write(' %s%s' % (repo.pathto(copymap[f], cwd), end))
2729
2729
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    rev_ = "."
    names = (name1,) + names
    # reject duplicate tag names up front (dict.fromkeys de-duplicates)
    if len(names) != len(dict.fromkeys(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        # these names have built-in meanings and may never be tags
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts.get('rev') and opts.get('remove'):
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts.get('rev'):
        rev_ = opts['rev']
    message = opts.get('message')
    if opts.get('remove'):
        # a removal must target a tag of the matching kind (local/global)
        expectedtype = opts.get('local') and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                raise util.Abort(_('tag \'%s\' is not a %s tag') %
                                 (n, expectedtype))
        # pointing a tag at nullid records its removal
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts.get('force'):
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    # refuse to tag implicitly while an uncommitted merge is in progress
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2793
2793
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    l.reverse()
    # full hashes in debug mode, abbreviated ones otherwise
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # node not present in the changelog; show '?' in the rev
            # column (padded to line up with "%5d:" — confirm spacing)
            r = "    ?:%s" % hn
        else:
            spaces = " " * (30 - util.locallen(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2826
2826
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    # tip is always the highest local revision number: len(repo) - 1
    cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1)
2840
2840
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = None
    try:
        lock = repo.lock()
        for fname in fnames:
            f = url.open(ui, fname)
            gen = changegroup.readbundle(f, fname)
            # modheads from the last bundle applied determines the
            # post-incoming message/update below
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        # dropping the last reference releases the repository lock
        del lock

    return postincoming(ui, repo, modheads, opts.get('update'), None)
2860
2860
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the repository's working directory to the specified revision,
    or the tip of the current branch if none is specified. Use null as
    the revision to remove the working copy (like 'hg clone -U').

    When the working dir contains no uncommitted changes, it will be
    replaced by the state of the requested revision from the repo. When
    the requested revision is on a different branch, the working dir
    will additionally be switched to that branch.

    When there are uncommitted changes, use option -C to discard them,
    forcibly replacing the state of the working dir with the requested
    revision.

    When there are uncommitted changes and option -C is not used, and
    the parent revision and requested revision are on the same branch,
    and one of them is an ancestor of the other, then the new working
    directory will contain the requested revision merged with the
    uncommitted changes. Otherwise, the update will fail with a
    suggestion to use 'merge' or 'update -C' instead.

    If you want to update just one file to an older revision, use revert.

    See 'hg help dates' for a list of formats valid for --date.
    """
    # the revision may arrive as a positional arg (node) or via -r (rev);
    # accepting both at once would be ambiguous
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        # resolve --date to the latest revision matching that date
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    else:
        return hg.update(repo, rev)
2903
2903
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)
2915
2915
def version_(ui):
    """output version and copyright information"""
    # version line always shown; copyright notice suppressed by --quiet
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2926
2926
# Command options and aliases are listed here, alphabetically

# Each option is a tuple: (short flag, long flag, default value, help text).
# A default of None marks a boolean flag, '' a string option, [] a
# repeatable option. These lists are shared between commands via
# concatenation (e.g. walkopts + dryrunopts in the command table below).

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]
3000
3000
3001 table = {
3001 table = {
3002 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
3002 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
3003 "addremove":
3003 "addremove":
3004 (addremove,
3004 (addremove,
3005 [('s', 'similarity', '',
3005 [('s', 'similarity', '',
3006 _('guess renamed files by similarity (0<=s<=100)')),
3006 _('guess renamed files by similarity (0<=s<=100)')),
3007 ] + walkopts + dryrunopts,
3007 ] + walkopts + dryrunopts,
3008 _('hg addremove [OPTION]... [FILE]...')),
3008 _('hg addremove [OPTION]... [FILE]...')),
3009 "^annotate|blame":
3009 "^annotate|blame":
3010 (annotate,
3010 (annotate,
3011 [('r', 'rev', '', _('annotate the specified revision')),
3011 [('r', 'rev', '', _('annotate the specified revision')),
3012 ('f', 'follow', None, _('follow file copies and renames')),
3012 ('f', 'follow', None, _('follow file copies and renames')),
3013 ('a', 'text', None, _('treat all files as text')),
3013 ('a', 'text', None, _('treat all files as text')),
3014 ('u', 'user', None, _('list the author (long with -v)')),
3014 ('u', 'user', None, _('list the author (long with -v)')),
3015 ('d', 'date', None, _('list the date (short with -q)')),
3015 ('d', 'date', None, _('list the date (short with -q)')),
3016 ('n', 'number', None, _('list the revision number (default)')),
3016 ('n', 'number', None, _('list the revision number (default)')),
3017 ('c', 'changeset', None, _('list the changeset')),
3017 ('c', 'changeset', None, _('list the changeset')),
3018 ('l', 'line-number', None,
3018 ('l', 'line-number', None,
3019 _('show line number at the first appearance'))
3019 _('show line number at the first appearance'))
3020 ] + walkopts,
3020 ] + walkopts,
3021 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3021 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3022 "archive":
3022 "archive":
3023 (archive,
3023 (archive,
3024 [('', 'no-decode', None, _('do not pass files through decoders')),
3024 [('', 'no-decode', None, _('do not pass files through decoders')),
3025 ('p', 'prefix', '', _('directory prefix for files in archive')),
3025 ('p', 'prefix', '', _('directory prefix for files in archive')),
3026 ('r', 'rev', '', _('revision to distribute')),
3026 ('r', 'rev', '', _('revision to distribute')),
3027 ('t', 'type', '', _('type of distribution to create')),
3027 ('t', 'type', '', _('type of distribution to create')),
3028 ] + walkopts,
3028 ] + walkopts,
3029 _('hg archive [OPTION]... DEST')),
3029 _('hg archive [OPTION]... DEST')),
3030 "backout":
3030 "backout":
3031 (backout,
3031 (backout,
3032 [('', 'merge', None,
3032 [('', 'merge', None,
3033 _('merge with old dirstate parent after backout')),
3033 _('merge with old dirstate parent after backout')),
3034 ('', 'parent', '', _('parent to choose when backing out merge')),
3034 ('', 'parent', '', _('parent to choose when backing out merge')),
3035 ('r', 'rev', '', _('revision to backout')),
3035 ('r', 'rev', '', _('revision to backout')),
3036 ] + walkopts + commitopts + commitopts2,
3036 ] + walkopts + commitopts + commitopts2,
3037 _('hg backout [OPTION]... [-r] REV')),
3037 _('hg backout [OPTION]... [-r] REV')),
3038 "bisect":
3038 "bisect":
3039 (bisect,
3039 (bisect,
3040 [('r', 'reset', False, _('reset bisect state')),
3040 [('r', 'reset', False, _('reset bisect state')),
3041 ('g', 'good', False, _('mark changeset good')),
3041 ('g', 'good', False, _('mark changeset good')),
3042 ('b', 'bad', False, _('mark changeset bad')),
3042 ('b', 'bad', False, _('mark changeset bad')),
3043 ('s', 'skip', False, _('skip testing changeset')),
3043 ('s', 'skip', False, _('skip testing changeset')),
3044 ('c', 'command', '', _('Use command to check changeset state')),
3044 ('c', 'command', '', _('Use command to check changeset state')),
3045 ('U', 'noupdate', False, _('do not update to target'))],
3045 ('U', 'noupdate', False, _('do not update to target'))],
3046 _("hg bisect [-gbsr] [-c CMD] [REV]")),
3046 _("hg bisect [-gbsr] [-c CMD] [REV]")),
3047 "branch":
3047 "branch":
3048 (branch,
3048 (branch,
3049 [('f', 'force', None,
3049 [('f', 'force', None,
3050 _('set branch name even if it shadows an existing branch')),
3050 _('set branch name even if it shadows an existing branch')),
3051 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3051 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3052 _('hg branch [-fC] [NAME]')),
3052 _('hg branch [-fC] [NAME]')),
3053 "branches":
3053 "branches":
3054 (branches,
3054 (branches,
3055 [('a', 'active', False,
3055 [('a', 'active', False,
3056 _('show only branches that have unmerged heads'))],
3056 _('show only branches that have unmerged heads'))],
3057 _('hg branches [-a]')),
3057 _('hg branches [-a]')),
3058 "bundle":
3058 "bundle":
3059 (bundle,
3059 (bundle,
3060 [('f', 'force', None,
3060 [('f', 'force', None,
3061 _('run even when remote repository is unrelated')),
3061 _('run even when remote repository is unrelated')),
3062 ('r', 'rev', [],
3062 ('r', 'rev', [],
3063 _('a changeset up to which you would like to bundle')),
3063 _('a changeset up to which you would like to bundle')),
3064 ('', 'base', [],
3064 ('', 'base', [],
3065 _('a base changeset to specify instead of a destination')),
3065 _('a base changeset to specify instead of a destination')),
3066 ('a', 'all', None, _('bundle all changesets in the repository')),
3066 ('a', 'all', None, _('bundle all changesets in the repository')),
3067 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3067 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3068 ] + remoteopts,
3068 ] + remoteopts,
3069 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3069 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3070 "cat":
3070 "cat":
3071 (cat,
3071 (cat,
3072 [('o', 'output', '', _('print output to file with formatted name')),
3072 [('o', 'output', '', _('print output to file with formatted name')),
3073 ('r', 'rev', '', _('print the given revision')),
3073 ('r', 'rev', '', _('print the given revision')),
3074 ('', 'decode', None, _('apply any matching decode filter')),
3074 ('', 'decode', None, _('apply any matching decode filter')),
3075 ] + walkopts,
3075 ] + walkopts,
3076 _('hg cat [OPTION]... FILE...')),
3076 _('hg cat [OPTION]... FILE...')),
3077 "^clone":
3077 "^clone":
3078 (clone,
3078 (clone,
3079 [('U', 'noupdate', None,
3079 [('U', 'noupdate', None,
3080 _('the clone will only contain a repository (no working copy)')),
3080 _('the clone will only contain a repository (no working copy)')),
3081 ('r', 'rev', [],
3081 ('r', 'rev', [],
3082 _('a changeset you would like to have after cloning')),
3082 _('a changeset you would like to have after cloning')),
3083 ('', 'pull', None, _('use pull protocol to copy metadata')),
3083 ('', 'pull', None, _('use pull protocol to copy metadata')),
3084 ('', 'uncompressed', None,
3084 ('', 'uncompressed', None,
3085 _('use uncompressed transfer (fast over LAN)')),
3085 _('use uncompressed transfer (fast over LAN)')),
3086 ] + remoteopts,
3086 ] + remoteopts,
3087 _('hg clone [OPTION]... SOURCE [DEST]')),
3087 _('hg clone [OPTION]... SOURCE [DEST]')),
3088 "^commit|ci":
3088 "^commit|ci":
3089 (commit,
3089 (commit,
3090 [('A', 'addremove', None,
3090 [('A', 'addremove', None,
3091 _('mark new/missing files as added/removed before committing')),
3091 _('mark new/missing files as added/removed before committing')),
3092 ] + walkopts + commitopts + commitopts2,
3092 ] + walkopts + commitopts + commitopts2,
3093 _('hg commit [OPTION]... [FILE]...')),
3093 _('hg commit [OPTION]... [FILE]...')),
3094 "copy|cp":
3094 "copy|cp":
3095 (copy,
3095 (copy,
3096 [('A', 'after', None, _('record a copy that has already occurred')),
3096 [('A', 'after', None, _('record a copy that has already occurred')),
3097 ('f', 'force', None,
3097 ('f', 'force', None,
3098 _('forcibly copy over an existing managed file')),
3098 _('forcibly copy over an existing managed file')),
3099 ] + walkopts + dryrunopts,
3099 ] + walkopts + dryrunopts,
3100 _('hg copy [OPTION]... [SOURCE]... DEST')),
3100 _('hg copy [OPTION]... [SOURCE]... DEST')),
3101 "debugancestor": (debugancestor, [],
3101 "debugancestor": (debugancestor, [],
3102 _('hg debugancestor [INDEX] REV1 REV2')),
3102 _('hg debugancestor [INDEX] REV1 REV2')),
3103 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
3103 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
3104 "debugcomplete":
3104 "debugcomplete":
3105 (debugcomplete,
3105 (debugcomplete,
3106 [('o', 'options', None, _('show the command options'))],
3106 [('o', 'options', None, _('show the command options'))],
3107 _('hg debugcomplete [-o] CMD')),
3107 _('hg debugcomplete [-o] CMD')),
3108 "debugdate":
3108 "debugdate":
3109 (debugdate,
3109 (debugdate,
3110 [('e', 'extended', None, _('try extended date formats'))],
3110 [('e', 'extended', None, _('try extended date formats'))],
3111 _('hg debugdate [-e] DATE [RANGE]')),
3111 _('hg debugdate [-e] DATE [RANGE]')),
3112 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
3112 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
3113 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
3113 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
3114 "debugindex": (debugindex, [], _('hg debugindex FILE')),
3114 "debugindex": (debugindex, [], _('hg debugindex FILE')),
3115 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
3115 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
3116 "debuginstall": (debuginstall, [], _('hg debuginstall')),
3116 "debuginstall": (debuginstall, [], _('hg debuginstall')),
3117 "debugrawcommit|rawcommit":
3117 "debugrawcommit|rawcommit":
3118 (rawcommit,
3118 (rawcommit,
3119 [('p', 'parent', [], _('parent')),
3119 [('p', 'parent', [], _('parent')),
3120 ('F', 'files', '', _('file list'))
3120 ('F', 'files', '', _('file list'))
3121 ] + commitopts + commitopts2,
3121 ] + commitopts + commitopts2,
3122 _('hg debugrawcommit [OPTION]... [FILE]...')),
3122 _('hg debugrawcommit [OPTION]... [FILE]...')),
3123 "debugrebuildstate":
3123 "debugrebuildstate":
3124 (debugrebuildstate,
3124 (debugrebuildstate,
3125 [('r', 'rev', '', _('revision to rebuild to'))],
3125 [('r', 'rev', '', _('revision to rebuild to'))],
3126 _('hg debugrebuildstate [-r REV] [REV]')),
3126 _('hg debugrebuildstate [-r REV] [REV]')),
3127 "debugrename":
3127 "debugrename":
3128 (debugrename,
3128 (debugrename,
3129 [('r', 'rev', '', _('revision to debug'))],
3129 [('r', 'rev', '', _('revision to debug'))],
3130 _('hg debugrename [-r REV] FILE')),
3130 _('hg debugrename [-r REV] FILE')),
3131 "debugsetparents":
3131 "debugsetparents":
3132 (debugsetparents,
3132 (debugsetparents,
3133 [],
3133 [],
3134 _('hg debugsetparents REV1 [REV2]')),
3134 _('hg debugsetparents REV1 [REV2]')),
3135 "debugstate":
3135 "debugstate":
3136 (debugstate,
3136 (debugstate,
3137 [('', 'nodates', None, _('do not display the saved mtime'))],
3137 [('', 'nodates', None, _('do not display the saved mtime'))],
3138 _('hg debugstate [OPTION]...')),
3138 _('hg debugstate [OPTION]...')),
3139 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
3139 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
3140 "^diff":
3140 "^diff":
3141 (diff,
3141 (diff,
3142 [('r', 'rev', [], _('revision'))
3142 [('r', 'rev', [], _('revision'))
3143 ] + diffopts + diffopts2 + walkopts,
3143 ] + diffopts + diffopts2 + walkopts,
3144 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3144 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3145 "^export":
3145 "^export":
3146 (export,
3146 (export,
3147 [('o', 'output', '', _('print output to file with formatted name')),
3147 [('o', 'output', '', _('print output to file with formatted name')),
3148 ('', 'switch-parent', None, _('diff against the second parent'))
3148 ('', 'switch-parent', None, _('diff against the second parent'))
3149 ] + diffopts,
3149 ] + diffopts,
3150 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
3150 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
3151 "grep":
3151 "grep":
3152 (grep,
3152 (grep,
3153 [('0', 'print0', None, _('end fields with NUL')),
3153 [('0', 'print0', None, _('end fields with NUL')),
3154 ('', 'all', None, _('print all revisions that match')),
3154 ('', 'all', None, _('print all revisions that match')),
3155 ('f', 'follow', None,
3155 ('f', 'follow', None,
3156 _('follow changeset history, or file history across copies and renames')),
3156 _('follow changeset history, or file history across copies and renames')),
3157 ('i', 'ignore-case', None, _('ignore case when matching')),
3157 ('i', 'ignore-case', None, _('ignore case when matching')),
3158 ('l', 'files-with-matches', None,
3158 ('l', 'files-with-matches', None,
3159 _('print only filenames and revs that match')),
3159 _('print only filenames and revs that match')),
3160 ('n', 'line-number', None, _('print matching line numbers')),
3160 ('n', 'line-number', None, _('print matching line numbers')),
3161 ('r', 'rev', [], _('search in given revision range')),
3161 ('r', 'rev', [], _('search in given revision range')),
3162 ('u', 'user', None, _('list the author (long with -v)')),
3162 ('u', 'user', None, _('list the author (long with -v)')),
3163 ('d', 'date', None, _('list the date (short with -q)')),
3163 ('d', 'date', None, _('list the date (short with -q)')),
3164 ] + walkopts,
3164 ] + walkopts,
3165 _('hg grep [OPTION]... PATTERN [FILE]...')),
3165 _('hg grep [OPTION]... PATTERN [FILE]...')),
3166 "heads":
3166 "heads":
3167 (heads,
3167 (heads,
3168 [('r', 'rev', '', _('show only heads which are descendants of rev')),
3168 [('r', 'rev', '', _('show only heads which are descendants of rev')),
3169 ] + templateopts,
3169 ] + templateopts,
3170 _('hg heads [-r REV] [REV]...')),
3170 _('hg heads [-r REV] [REV]...')),
3171 "help": (help_, [], _('hg help [TOPIC]')),
3171 "help": (help_, [], _('hg help [TOPIC]')),
3172 "identify|id":
3172 "identify|id":
3173 (identify,
3173 (identify,
3174 [('r', 'rev', '', _('identify the specified rev')),
3174 [('r', 'rev', '', _('identify the specified rev')),
3175 ('n', 'num', None, _('show local revision number')),
3175 ('n', 'num', None, _('show local revision number')),
3176 ('i', 'id', None, _('show global revision id')),
3176 ('i', 'id', None, _('show global revision id')),
3177 ('b', 'branch', None, _('show branch')),
3177 ('b', 'branch', None, _('show branch')),
3178 ('t', 'tags', None, _('show tags'))],
3178 ('t', 'tags', None, _('show tags'))],
3179 _('hg identify [-nibt] [-r REV] [SOURCE]')),
3179 _('hg identify [-nibt] [-r REV] [SOURCE]')),
3180 "import|patch":
3180 "import|patch":
3181 (import_,
3181 (import_,
3182 [('p', 'strip', 1,
3182 [('p', 'strip', 1,
3183 _('directory strip option for patch. This has the same\n'
3183 _('directory strip option for patch. This has the same\n'
3184 'meaning as the corresponding patch option')),
3184 'meaning as the corresponding patch option')),
3185 ('b', 'base', '', _('base path')),
3185 ('b', 'base', '', _('base path')),
3186 ('f', 'force', None,
3186 ('f', 'force', None,
3187 _('skip check for outstanding uncommitted changes')),
3187 _('skip check for outstanding uncommitted changes')),
3188 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3188 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3189 ('', 'exact', None,
3189 ('', 'exact', None,
3190 _('apply patch to the nodes from which it was generated')),
3190 _('apply patch to the nodes from which it was generated')),
3191 ('', 'import-branch', None,
3191 ('', 'import-branch', None,
3192 _('Use any branch information in patch (implied by --exact)'))] +
3192 _('Use any branch information in patch (implied by --exact)'))] +
3193 commitopts + commitopts2,
3193 commitopts + commitopts2,
3194 _('hg import [OPTION]... PATCH...')),
3194 _('hg import [OPTION]... PATCH...')),
3195 "incoming|in":
3195 "incoming|in":
3196 (incoming,
3196 (incoming,
3197 [('f', 'force', None,
3197 [('f', 'force', None,
3198 _('run even when remote repository is unrelated')),
3198 _('run even when remote repository is unrelated')),
3199 ('n', 'newest-first', None, _('show newest record first')),
3199 ('n', 'newest-first', None, _('show newest record first')),
3200 ('', 'bundle', '', _('file to store the bundles into')),
3200 ('', 'bundle', '', _('file to store the bundles into')),
3201 ('r', 'rev', [],
3201 ('r', 'rev', [],
3202 _('a specific revision up to which you would like to pull')),
3202 _('a specific revision up to which you would like to pull')),
3203 ] + logopts + remoteopts,
3203 ] + logopts + remoteopts,
3204 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3204 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3205 ' [--bundle FILENAME] [SOURCE]')),
3205 ' [--bundle FILENAME] [SOURCE]')),
3206 "^init":
3206 "^init":
3207 (init,
3207 (init,
3208 remoteopts,
3208 remoteopts,
3209 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3209 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3210 "locate":
3210 "locate":
3211 (locate,
3211 (locate,
3212 [('r', 'rev', '', _('search the repository as it stood at rev')),
3212 [('r', 'rev', '', _('search the repository as it stood at rev')),
3213 ('0', 'print0', None,
3213 ('0', 'print0', None,
3214 _('end filenames with NUL, for use with xargs')),
3214 _('end filenames with NUL, for use with xargs')),
3215 ('f', 'fullpath', None,
3215 ('f', 'fullpath', None,
3216 _('print complete paths from the filesystem root')),
3216 _('print complete paths from the filesystem root')),
3217 ] + walkopts,
3217 ] + walkopts,
3218 _('hg locate [OPTION]... [PATTERN]...')),
3218 _('hg locate [OPTION]... [PATTERN]...')),
3219 "^log|history":
3219 "^log|history":
3220 (log,
3220 (log,
3221 [('f', 'follow', None,
3221 [('f', 'follow', None,
3222 _('follow changeset history, or file history across copies and renames')),
3222 _('follow changeset history, or file history across copies and renames')),
3223 ('', 'follow-first', None,
3223 ('', 'follow-first', None,
3224 _('only follow the first parent of merge changesets')),
3224 _('only follow the first parent of merge changesets')),
3225 ('d', 'date', '', _('show revs matching date spec')),
3225 ('d', 'date', '', _('show revs matching date spec')),
3226 ('C', 'copies', None, _('show copied files')),
3226 ('C', 'copies', None, _('show copied files')),
3227 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3227 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3228 ('r', 'rev', [], _('show the specified revision or range')),
3228 ('r', 'rev', [], _('show the specified revision or range')),
3229 ('', 'removed', None, _('include revs where files were removed')),
3229 ('', 'removed', None, _('include revs where files were removed')),
3230 ('m', 'only-merges', None, _('show only merges')),
3230 ('m', 'only-merges', None, _('show only merges')),
3231 ('u', 'user', [], _('revs committed by user')),
3231 ('u', 'user', [], _('revs committed by user')),
3232 ('b', 'only-branch', [],
3232 ('b', 'only-branch', [],
3233 _('show only changesets within the given named branch')),
3233 _('show only changesets within the given named branch')),
3234 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3234 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3235 ] + logopts + walkopts,
3235 ] + logopts + walkopts,
3236 _('hg log [OPTION]... [FILE]')),
3236 _('hg log [OPTION]... [FILE]')),
3237 "manifest":
3237 "manifest":
3238 (manifest,
3238 (manifest,
3239 [('r', 'rev', '', _('revision to display'))],
3239 [('r', 'rev', '', _('revision to display'))],
3240 _('hg manifest [-r REV]')),
3240 _('hg manifest [-r REV]')),
3241 "^merge":
3241 "^merge":
3242 (merge,
3242 (merge,
3243 [('f', 'force', None, _('force a merge with outstanding changes')),
3243 [('f', 'force', None, _('force a merge with outstanding changes')),
3244 ('r', 'rev', '', _('revision to merge')),
3244 ('r', 'rev', '', _('revision to merge')),
3245 ],
3245 ],
3246 _('hg merge [-f] [[-r] REV]')),
3246 _('hg merge [-f] [[-r] REV]')),
3247 "outgoing|out":
3247 "outgoing|out":
3248 (outgoing,
3248 (outgoing,
3249 [('f', 'force', None,
3249 [('f', 'force', None,
3250 _('run even when remote repository is unrelated')),
3250 _('run even when remote repository is unrelated')),
3251 ('r', 'rev', [],
3251 ('r', 'rev', [],
3252 _('a specific revision up to which you would like to push')),
3252 _('a specific revision up to which you would like to push')),
3253 ('n', 'newest-first', None, _('show newest record first')),
3253 ('n', 'newest-first', None, _('show newest record first')),
3254 ] + logopts + remoteopts,
3254 ] + logopts + remoteopts,
3255 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3255 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3256 "^parents":
3256 "^parents":
3257 (parents,
3257 (parents,
3258 [('r', 'rev', '', _('show parents from the specified rev')),
3258 [('r', 'rev', '', _('show parents from the specified rev')),
3259 ] + templateopts,
3259 ] + templateopts,
3260 _('hg parents [-r REV] [FILE]')),
3260 _('hg parents [-r REV] [FILE]')),
3261 "paths": (paths, [], _('hg paths [NAME]')),
3261 "paths": (paths, [], _('hg paths [NAME]')),
3262 "^pull":
3262 "^pull":
3263 (pull,
3263 (pull,
3264 [('u', 'update', None,
3264 [('u', 'update', None,
3265 _('update to new tip if changesets were pulled')),
3265 _('update to new tip if changesets were pulled')),
3266 ('f', 'force', None,
3266 ('f', 'force', None,
3267 _('run even when remote repository is unrelated')),
3267 _('run even when remote repository is unrelated')),
3268 ('r', 'rev', [],
3268 ('r', 'rev', [],
3269 _('a specific revision up to which you would like to pull')),
3269 _('a specific revision up to which you would like to pull')),
3270 ] + remoteopts,
3270 ] + remoteopts,
3271 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3271 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3272 "^push":
3272 "^push":
3273 (push,
3273 (push,
3274 [('f', 'force', None, _('force push')),
3274 [('f', 'force', None, _('force push')),
3275 ('r', 'rev', [],
3275 ('r', 'rev', [],
3276 _('a specific revision up to which you would like to push')),
3276 _('a specific revision up to which you would like to push')),
3277 ] + remoteopts,
3277 ] + remoteopts,
3278 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3278 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3279 "recover": (recover, [], _('hg recover')),
3279 "recover": (recover, [], _('hg recover')),
3280 "^remove|rm":
3280 "^remove|rm":
3281 (remove,
3281 (remove,
3282 [('A', 'after', None, _('record delete for missing files')),
3282 [('A', 'after', None, _('record delete for missing files')),
3283 ('f', 'force', None,
3283 ('f', 'force', None,
3284 _('remove (and delete) file even if added or modified')),
3284 _('remove (and delete) file even if added or modified')),
3285 ] + walkopts,
3285 ] + walkopts,
3286 _('hg remove [OPTION]... FILE...')),
3286 _('hg remove [OPTION]... FILE...')),
3287 "rename|mv":
3287 "rename|mv":
3288 (rename,
3288 (rename,
3289 [('A', 'after', None, _('record a rename that has already occurred')),
3289 [('A', 'after', None, _('record a rename that has already occurred')),
3290 ('f', 'force', None,
3290 ('f', 'force', None,
3291 _('forcibly copy over an existing managed file')),
3291 _('forcibly copy over an existing managed file')),
3292 ] + walkopts + dryrunopts,
3292 ] + walkopts + dryrunopts,
3293 _('hg rename [OPTION]... SOURCE... DEST')),
3293 _('hg rename [OPTION]... SOURCE... DEST')),
3294 "resolve":
3294 "resolve":
3295 (resolve,
3295 (resolve,
3296 [('l', 'list', None, _('list state of files needing merge')),
3296 [('l', 'list', None, _('list state of files needing merge')),
3297 ('m', 'mark', None, _('mark files as resolved')),
3297 ('m', 'mark', None, _('mark files as resolved')),
3298 ('u', 'unmark', None, _('unmark files as resolved'))],
3298 ('u', 'unmark', None, _('unmark files as resolved'))],
3299 _('hg resolve [OPTION]... [FILE]...')),
3299 _('hg resolve [OPTION]... [FILE]...')),
3300 "revert":
3300 "revert":
3301 (revert,
3301 (revert,
3302 [('a', 'all', None, _('revert all changes when no arguments given')),
3302 [('a', 'all', None, _('revert all changes when no arguments given')),
3303 ('d', 'date', '', _('tipmost revision matching date')),
3303 ('d', 'date', '', _('tipmost revision matching date')),
3304 ('r', 'rev', '', _('revision to revert to')),
3304 ('r', 'rev', '', _('revision to revert to')),
3305 ('', 'no-backup', None, _('do not save backup copies of files')),
3305 ('', 'no-backup', None, _('do not save backup copies of files')),
3306 ] + walkopts + dryrunopts,
3306 ] + walkopts + dryrunopts,
3307 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3307 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3308 "rollback": (rollback, [], _('hg rollback')),
3308 "rollback": (rollback, [], _('hg rollback')),
3309 "root": (root, [], _('hg root')),
3309 "root": (root, [], _('hg root')),
3310 "^serve":
3310 "^serve":
3311 (serve,
3311 (serve,
3312 [('A', 'accesslog', '', _('name of access log file to write to')),
3312 [('A', 'accesslog', '', _('name of access log file to write to')),
3313 ('d', 'daemon', None, _('run server in background')),
3313 ('d', 'daemon', None, _('run server in background')),
3314 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3314 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3315 ('E', 'errorlog', '', _('name of error log file to write to')),
3315 ('E', 'errorlog', '', _('name of error log file to write to')),
3316 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3316 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3317 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3317 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3318 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3318 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3319 ('n', 'name', '',
3319 ('n', 'name', '',
3320 _('name to show in web pages (default: working dir)')),
3320 _('name to show in web pages (default: working dir)')),
3321 ('', 'webdir-conf', '', _('name of the webdir config file'
3321 ('', 'webdir-conf', '', _('name of the webdir config file'
3322 ' (serve more than one repo)')),
3322 ' (serve more than one repo)')),
3323 ('', 'pid-file', '', _('name of file to write process ID to')),
3323 ('', 'pid-file', '', _('name of file to write process ID to')),
3324 ('', 'stdio', None, _('for remote clients')),
3324 ('', 'stdio', None, _('for remote clients')),
3325 ('t', 'templates', '', _('web templates to use')),
3325 ('t', 'templates', '', _('web templates to use')),
3326 ('', 'style', '', _('template style to use')),
3326 ('', 'style', '', _('template style to use')),
3327 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3327 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3328 ('', 'certificate', '', _('SSL certificate file'))],
3328 ('', 'certificate', '', _('SSL certificate file'))],
3329 _('hg serve [OPTION]...')),
3329 _('hg serve [OPTION]...')),
3330 "showconfig|debugconfig":
3330 "showconfig|debugconfig":
3331 (showconfig,
3331 (showconfig,
3332 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3332 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3333 _('hg showconfig [-u] [NAME]...')),
3333 _('hg showconfig [-u] [NAME]...')),
3334 "^status|st":
3334 "^status|st":
3335 (status,
3335 (status,
3336 [('A', 'all', None, _('show status of all files')),
3336 [('A', 'all', None, _('show status of all files')),
3337 ('m', 'modified', None, _('show only modified files')),
3337 ('m', 'modified', None, _('show only modified files')),
3338 ('a', 'added', None, _('show only added files')),
3338 ('a', 'added', None, _('show only added files')),
3339 ('r', 'removed', None, _('show only removed files')),
3339 ('r', 'removed', None, _('show only removed files')),
3340 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3340 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3341 ('c', 'clean', None, _('show only files without changes')),
3341 ('c', 'clean', None, _('show only files without changes')),
3342 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3342 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3343 ('i', 'ignored', None, _('show only ignored files')),
3343 ('i', 'ignored', None, _('show only ignored files')),
3344 ('n', 'no-status', None, _('hide status prefix')),
3344 ('n', 'no-status', None, _('hide status prefix')),
3345 ('C', 'copies', None, _('show source of copied files')),
3345 ('C', 'copies', None, _('show source of copied files')),
3346 ('0', 'print0', None,
3346 ('0', 'print0', None,
3347 _('end filenames with NUL, for use with xargs')),
3347 _('end filenames with NUL, for use with xargs')),
3348 ('', 'rev', [], _('show difference from revision')),
3348 ('', 'rev', [], _('show difference from revision')),
3349 ] + walkopts,
3349 ] + walkopts,
3350 _('hg status [OPTION]... [FILE]...')),
3350 _('hg status [OPTION]... [FILE]...')),
3351 "tag":
3351 "tag":
3352 (tag,
3352 (tag,
3353 [('f', 'force', None, _('replace existing tag')),
3353 [('f', 'force', None, _('replace existing tag')),
3354 ('l', 'local', None, _('make the tag local')),
3354 ('l', 'local', None, _('make the tag local')),
3355 ('r', 'rev', '', _('revision to tag')),
3355 ('r', 'rev', '', _('revision to tag')),
3356 ('', 'remove', None, _('remove a tag')),
3356 ('', 'remove', None, _('remove a tag')),
3357 # -l/--local is already there, commitopts cannot be used
3357 # -l/--local is already there, commitopts cannot be used
3358 ('m', 'message', '', _('use <text> as commit message')),
3358 ('m', 'message', '', _('use <text> as commit message')),
3359 ] + commitopts2,
3359 ] + commitopts2,
3360 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3360 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3361 "tags": (tags, [], _('hg tags')),
3361 "tags": (tags, [], _('hg tags')),
3362 "tip":
3362 "tip":
3363 (tip,
3363 (tip,
3364 [('p', 'patch', None, _('show patch')),
3364 [('p', 'patch', None, _('show patch')),
3365 ] + templateopts,
3365 ] + templateopts,
3366 _('hg tip [-p]')),
3366 _('hg tip [-p]')),
3367 "unbundle":
3367 "unbundle":
3368 (unbundle,
3368 (unbundle,
3369 [('u', 'update', None,
3369 [('u', 'update', None,
3370 _('update to new tip if changesets were unbundled'))],
3370 _('update to new tip if changesets were unbundled'))],
3371 _('hg unbundle [-u] FILE...')),
3371 _('hg unbundle [-u] FILE...')),
3372 "^update|up|checkout|co":
3372 "^update|up|checkout|co":
3373 (update,
3373 (update,
3374 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3374 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3375 ('d', 'date', '', _('tipmost revision matching date')),
3375 ('d', 'date', '', _('tipmost revision matching date')),
3376 ('r', 'rev', '', _('revision'))],
3376 ('r', 'rev', '', _('revision'))],
3377 _('hg update [-C] [-d DATE] [[-r] REV]')),
3377 _('hg update [-C] [-d DATE] [[-r] REV]')),
3378 "verify": (verify, [], _('hg verify')),
3378 "verify": (verify, [], _('hg verify')),
3379 "version": (version_, [], _('hg version')),
3379 "version": (version_, [], _('hg version')),
3380 }
3380 }
3381
3381
3382 norepo = ("clone init version help debugcomplete debugdata"
3382 norepo = ("clone init version help debugcomplete debugdata"
3383 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3383 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3384 optionalrepo = ("identify paths serve showconfig debugancestor")
3384 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,795 +1,795 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import nullid, nullrev, short, hex
8 from node import nullid, nullrev, short, hex
9 from i18n import _
9 from i18n import _
10 import ancestor, bdiff, revlog, util, os, errno
10 import ancestor, bdiff, revlog, util, os, errno
11
11
12 class changectx(object):
12 class changectx(object):
13 """A changecontext object makes access to data related to a particular
13 """A changecontext object makes access to data related to a particular
14 changeset convenient."""
14 changeset convenient."""
15 def __init__(self, repo, changeid=''):
15 def __init__(self, repo, changeid=''):
16 """changeid is a revision number, node, or tag"""
16 """changeid is a revision number, node, or tag"""
17 if changeid == '':
17 if changeid == '':
18 changeid = '.'
18 changeid = '.'
19 self._repo = repo
19 self._repo = repo
20 self._node = self._repo.lookup(changeid)
20 self._node = self._repo.lookup(changeid)
21 self._rev = self._repo.changelog.rev(self._node)
21 self._rev = self._repo.changelog.rev(self._node)
22
22
23 def __str__(self):
23 def __str__(self):
24 return short(self.node())
24 return short(self.node())
25
25
26 def __int__(self):
26 def __int__(self):
27 return self.rev()
27 return self.rev()
28
28
29 def __repr__(self):
29 def __repr__(self):
30 return "<changectx %s>" % str(self)
30 return "<changectx %s>" % str(self)
31
31
32 def __hash__(self):
32 def __hash__(self):
33 try:
33 try:
34 return hash(self._rev)
34 return hash(self._rev)
35 except AttributeError:
35 except AttributeError:
36 return id(self)
36 return id(self)
37
37
38 def __eq__(self, other):
38 def __eq__(self, other):
39 try:
39 try:
40 return self._rev == other._rev
40 return self._rev == other._rev
41 except AttributeError:
41 except AttributeError:
42 return False
42 return False
43
43
44 def __ne__(self, other):
44 def __ne__(self, other):
45 return not (self == other)
45 return not (self == other)
46
46
47 def __nonzero__(self):
47 def __nonzero__(self):
48 return self._rev != nullrev
48 return self._rev != nullrev
49
49
50 def __getattr__(self, name):
50 def __getattr__(self, name):
51 if name == '_changeset':
51 if name == '_changeset':
52 self._changeset = self._repo.changelog.read(self.node())
52 self._changeset = self._repo.changelog.read(self.node())
53 return self._changeset
53 return self._changeset
54 elif name == '_manifest':
54 elif name == '_manifest':
55 self._manifest = self._repo.manifest.read(self._changeset[0])
55 self._manifest = self._repo.manifest.read(self._changeset[0])
56 return self._manifest
56 return self._manifest
57 elif name == '_manifestdelta':
57 elif name == '_manifestdelta':
58 md = self._repo.manifest.readdelta(self._changeset[0])
58 md = self._repo.manifest.readdelta(self._changeset[0])
59 self._manifestdelta = md
59 self._manifestdelta = md
60 return self._manifestdelta
60 return self._manifestdelta
61 elif name == '_parents':
61 elif name == '_parents':
62 p = self._repo.changelog.parents(self._node)
62 p = self._repo.changelog.parents(self._node)
63 if p[1] == nullid:
63 if p[1] == nullid:
64 p = p[:-1]
64 p = p[:-1]
65 self._parents = [changectx(self._repo, x) for x in p]
65 self._parents = [changectx(self._repo, x) for x in p]
66 return self._parents
66 return self._parents
67 else:
67 else:
68 raise AttributeError(name)
68 raise AttributeError(name)
69
69
70 def __contains__(self, key):
70 def __contains__(self, key):
71 return key in self._manifest
71 return key in self._manifest
72
72
73 def __getitem__(self, key):
73 def __getitem__(self, key):
74 return self.filectx(key)
74 return self.filectx(key)
75
75
76 def __iter__(self):
76 def __iter__(self):
77 for f in util.sort(self._manifest):
77 for f in util.sort(self._manifest):
78 yield f
78 yield f
79
79
80 def changeset(self): return self._changeset
80 def changeset(self): return self._changeset
81 def manifest(self): return self._manifest
81 def manifest(self): return self._manifest
82
82
83 def rev(self): return self._rev
83 def rev(self): return self._rev
84 def node(self): return self._node
84 def node(self): return self._node
85 def hex(self): return hex(self._node)
85 def hex(self): return hex(self._node)
86 def user(self): return self._changeset[1]
86 def user(self): return self._changeset[1]
87 def date(self): return self._changeset[2]
87 def date(self): return self._changeset[2]
88 def files(self): return self._changeset[3]
88 def files(self): return self._changeset[3]
89 def description(self): return self._changeset[4]
89 def description(self): return self._changeset[4]
90 def branch(self): return self._changeset[5].get("branch")
90 def branch(self): return self._changeset[5].get("branch")
91 def extra(self): return self._changeset[5]
91 def extra(self): return self._changeset[5]
92 def tags(self): return self._repo.nodetags(self._node)
92 def tags(self): return self._repo.nodetags(self._node)
93
93
94 def parents(self):
94 def parents(self):
95 """return contexts for each parent changeset"""
95 """return contexts for each parent changeset"""
96 return self._parents
96 return self._parents
97
97
98 def children(self):
98 def children(self):
99 """return contexts for each child changeset"""
99 """return contexts for each child changeset"""
100 c = self._repo.changelog.children(self._node)
100 c = self._repo.changelog.children(self._node)
101 return [changectx(self._repo, x) for x in c]
101 return [changectx(self._repo, x) for x in c]
102
102
103 def ancestors(self):
103 def ancestors(self):
104 for a in self._repo.changelog.ancestors(self._rev):
104 for a in self._repo.changelog.ancestors(self._rev):
105 yield changectx(self._repo, a)
105 yield changectx(self._repo, a)
106
106
107 def descendants(self):
107 def descendants(self):
108 for d in self._repo.changelog.descendants(self._rev):
108 for d in self._repo.changelog.descendants(self._rev):
109 yield changectx(self._repo, d)
109 yield changectx(self._repo, d)
110
110
111 def _fileinfo(self, path):
111 def _fileinfo(self, path):
112 if '_manifest' in self.__dict__:
112 if '_manifest' in self.__dict__:
113 try:
113 try:
114 return self._manifest[path], self._manifest.flags(path)
114 return self._manifest[path], self._manifest.flags(path)
115 except KeyError:
115 except KeyError:
116 raise revlog.LookupError(self._node, path,
116 raise revlog.LookupError(self._node, path,
117 _('not found in manifest'))
117 _('not found in manifest'))
118 if '_manifestdelta' in self.__dict__ or path in self.files():
118 if '_manifestdelta' in self.__dict__ or path in self.files():
119 if path in self._manifestdelta:
119 if path in self._manifestdelta:
120 return self._manifestdelta[path], self._manifestdelta.flags(path)
120 return self._manifestdelta[path], self._manifestdelta.flags(path)
121 node, flag = self._repo.manifest.find(self._changeset[0], path)
121 node, flag = self._repo.manifest.find(self._changeset[0], path)
122 if not node:
122 if not node:
123 raise revlog.LookupError(self._node, path,
123 raise revlog.LookupError(self._node, path,
124 _('not found in manifest'))
124 _('not found in manifest'))
125
125
126 return node, flag
126 return node, flag
127
127
128 def filenode(self, path):
128 def filenode(self, path):
129 return self._fileinfo(path)[0]
129 return self._fileinfo(path)[0]
130
130
131 def flags(self, path):
131 def flags(self, path):
132 try:
132 try:
133 return self._fileinfo(path)[1]
133 return self._fileinfo(path)[1]
134 except revlog.LookupError:
134 except revlog.LookupError:
135 return ''
135 return ''
136
136
137 def filectx(self, path, fileid=None, filelog=None):
137 def filectx(self, path, fileid=None, filelog=None):
138 """get a file context from this changeset"""
138 """get a file context from this changeset"""
139 if fileid is None:
139 if fileid is None:
140 fileid = self.filenode(path)
140 fileid = self.filenode(path)
141 return filectx(self._repo, path, fileid=fileid,
141 return filectx(self._repo, path, fileid=fileid,
142 changectx=self, filelog=filelog)
142 changectx=self, filelog=filelog)
143
143
144 def ancestor(self, c2):
144 def ancestor(self, c2):
145 """
145 """
146 return the ancestor context of self and c2
146 return the ancestor context of self and c2
147 """
147 """
148 n = self._repo.changelog.ancestor(self._node, c2._node)
148 n = self._repo.changelog.ancestor(self._node, c2._node)
149 return changectx(self._repo, n)
149 return changectx(self._repo, n)
150
150
151 def walk(self, match):
151 def walk(self, match):
152 fdict = dict.fromkeys(match.files())
152 fdict = dict.fromkeys(match.files())
153 # for dirstate.walk, files=['.'] means "walk the whole tree".
153 # for dirstate.walk, files=['.'] means "walk the whole tree".
154 # follow that here, too
154 # follow that here, too
155 fdict.pop('.', None)
155 fdict.pop('.', None)
156 for fn in self:
156 for fn in self:
157 for ffn in fdict:
157 for ffn in fdict:
158 # match if the file is the exact name or a directory
158 # match if the file is the exact name or a directory
159 if ffn == fn or fn.startswith("%s/" % ffn):
159 if ffn == fn or fn.startswith("%s/" % ffn):
160 del fdict[ffn]
160 del fdict[ffn]
161 break
161 break
162 if match(fn):
162 if match(fn):
163 yield fn
163 yield fn
164 for fn in util.sort(fdict):
164 for fn in util.sort(fdict):
165 if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
165 if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
166 yield fn
166 yield fn
167
167
168 class filectx(object):
168 class filectx(object):
169 """A filecontext object makes access to data related to a particular
169 """A filecontext object makes access to data related to a particular
170 filerevision convenient."""
170 filerevision convenient."""
171 def __init__(self, repo, path, changeid=None, fileid=None,
171 def __init__(self, repo, path, changeid=None, fileid=None,
172 filelog=None, changectx=None):
172 filelog=None, changectx=None):
173 """changeid can be a changeset revision, node, or tag.
173 """changeid can be a changeset revision, node, or tag.
174 fileid can be a file revision or node."""
174 fileid can be a file revision or node."""
175 self._repo = repo
175 self._repo = repo
176 self._path = path
176 self._path = path
177
177
178 assert (changeid is not None
178 assert (changeid is not None
179 or fileid is not None
179 or fileid is not None
180 or changectx is not None)
180 or changectx is not None)
181
181
182 if filelog:
182 if filelog:
183 self._filelog = filelog
183 self._filelog = filelog
184
184
185 if changeid is not None:
185 if changeid is not None:
186 self._changeid = changeid
186 self._changeid = changeid
187 if changectx is not None:
187 if changectx is not None:
188 self._changectx = changectx
188 self._changectx = changectx
189 if fileid is not None:
189 if fileid is not None:
190 self._fileid = fileid
190 self._fileid = fileid
191
191
192 def __getattr__(self, name):
192 def __getattr__(self, name):
193 if name == '_changectx':
193 if name == '_changectx':
194 self._changectx = changectx(self._repo, self._changeid)
194 self._changectx = changectx(self._repo, self._changeid)
195 return self._changectx
195 return self._changectx
196 elif name == '_filelog':
196 elif name == '_filelog':
197 self._filelog = self._repo.file(self._path)
197 self._filelog = self._repo.file(self._path)
198 return self._filelog
198 return self._filelog
199 elif name == '_changeid':
199 elif name == '_changeid':
200 if '_changectx' in self.__dict__:
200 if '_changectx' in self.__dict__:
201 self._changeid = self._changectx.rev()
201 self._changeid = self._changectx.rev()
202 else:
202 else:
203 self._changeid = self._filelog.linkrev(self._filenode)
203 self._changeid = self._filelog.linkrev(self._filerev)
204 return self._changeid
204 return self._changeid
205 elif name == '_filenode':
205 elif name == '_filenode':
206 if '_fileid' in self.__dict__:
206 if '_fileid' in self.__dict__:
207 self._filenode = self._filelog.lookup(self._fileid)
207 self._filenode = self._filelog.lookup(self._fileid)
208 else:
208 else:
209 self._filenode = self._changectx.filenode(self._path)
209 self._filenode = self._changectx.filenode(self._path)
210 return self._filenode
210 return self._filenode
211 elif name == '_filerev':
211 elif name == '_filerev':
212 self._filerev = self._filelog.rev(self._filenode)
212 self._filerev = self._filelog.rev(self._filenode)
213 return self._filerev
213 return self._filerev
214 elif name == '_repopath':
214 elif name == '_repopath':
215 self._repopath = self._path
215 self._repopath = self._path
216 return self._repopath
216 return self._repopath
217 else:
217 else:
218 raise AttributeError(name)
218 raise AttributeError(name)
219
219
def __nonzero__(self):
    """Truth value: True when this file revision actually exists.

    Resolving _filenode raises revlog.LookupError when the file is
    missing from the revision, which we translate to False.
    """
    try:
        self._filenode
    except revlog.LookupError:
        # file is missing
        return False
    return True
227
227
def __str__(self):
    # Render as "path@shortnode", e.g. "foo/bar.c@0123456789ab".
    fname = self.path()
    nodeid = short(self.node())
    return "%s@%s" % (fname, nodeid)
230
230
def __repr__(self):
    # Debugging representation; %s delegates to __str__.
    return "<filectx %s>" % (self,)
233
233
def __hash__(self):
    """Hash on (path, fileid) when both are known.

    When _fileid is absent (it is not lazily computed), fall back to
    identity so the object stays usable as a dict key.
    """
    try:
        key = (self._path, self._fileid)
    except AttributeError:
        return id(self)
    return hash(key)
239
239
def __eq__(self, other):
    """Equal iff both path and fileid match.

    Any missing attribute (e.g. comparing against a non-filectx)
    means "not equal" rather than an error.
    """
    try:
        return (self._path, self._fileid) == (other._path, other._fileid)
    except AttributeError:
        return False
246
246
def __ne__(self, other):
    # defined in terms of equality so the two can never disagree
    return not (self == other)
249
249
def filectx(self, fileid):
    '''opens an arbitrary revision of the file without
    opening a new filelog'''
    # Reuse the already-open filelog so sibling revisions of the same
    # file share one underlying revlog object.
    return filectx(self._repo, self._path, fileid=fileid,
                   filelog=self._filelog)
255
255
# Trivial accessors.  _filerev/_filenode/_changectx are computed lazily
# by __getattr__ on first access.
def filerev(self): return self._filerev
def filenode(self): return self._filenode
def flags(self): return self._changectx.flags(self._path)
def filelog(self): return self._filelog
260
260
def rev(self):
    # Prefer an already-materialized changectx.
    if '_changectx' in self.__dict__:
        return self._changectx.rev()
    if '_changeid' in self.__dict__:
        # NOTE(review): _changeid is deliberately not returned directly:
        # it is presumably allowed to be a non-numeric identifier (node
        # or tag) when supplied by the caller, and going through the
        # changectx normalizes it to a revision number -- confirm
        # against __init__ (not visible here).
        return self._changectx.rev()
    # Fall back to the changeset that introduced this file revision.
    return self._filelog.linkrev(self._filerev)
267
267
# linkrev maps this file revision to the changelog revision that
# introduced it; all other changeset-level metadata is delegated to the
# associated changectx.
def linkrev(self): return self._filelog.linkrev(self._filerev)
def node(self): return self._changectx.node()
def user(self): return self._changectx.user()
def date(self): return self._changectx.date()
def files(self): return self._changectx.files()
def description(self): return self._changectx.description()
def branch(self): return self._changectx.branch()
def manifest(self): return self._changectx.manifest()
def changectx(self): return self._changectx
277
277
# Contents and size come straight from the filelog for this revision.
def data(self): return self._filelog.read(self._filenode)
def path(self): return self._path
def size(self): return self._filelog.size(self._filerev)
281
281
def cmp(self, text):
    """Compare *text* against this file revision.

    The result is whatever filelog.cmp() returns for this revision's
    node and the candidate text.
    """
    return self._filelog.cmp(self._filenode, text)
283
283
def renamed(self):
    """check if file was actually renamed in this changeset revision

    If rename logged in file revision, we report copy for changeset only
    if file revisions linkrev points back to the changeset in question
    or both changeset parents contain different file revisions.
    """

    renamed = self._filelog.renamed(self._filenode)
    if not renamed:
        return renamed

    if self.rev() == self.linkrev():
        # this context is exactly the changeset that introduced this
        # file revision, so the recorded copy metadata applies here
        return renamed

    name = self.path()
    fnode = self._filenode
    for p in self._changectx.parents():
        try:
            if fnode == p.filenode(name):
                # a parent already carries this very file revision, so
                # this changeset did not perform the copy itself
                return None
        except revlog.LookupError:
            # the file does not exist at all in that parent
            pass
    return renamed
308
308
def parents(self):
    # Parent filectxs come from the filelog; for a renamed file the
    # first parent is replaced by the copy source recorded in the
    # file revision.
    p = self._path
    fl = self._filelog
    pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]

    r = self._filelog.renamed(self._filenode)
    if r:
        # the copy source lives in a different filelog; filelog=None
        # presumably makes filectx look it up -- see filectx.__init__
        pl[0] = (r[0], r[1], None)

    # null parents are filtered out
    return [filectx(self._repo, p, fileid=n, filelog=l)
            for p,n,l in pl if n != nullid]
320
320
def children(self):
    """Return filectxs for the children of this file revision.

    Only children within the same filelog are found -- rename
    destinations are not followed ("hard for renames").
    """
    kids = self._filelog.children(self._filenode)
    return [filectx(self._repo, self._path, fileid=c,
                    filelog=self._filelog) for c in kids]
326
326
def annotate(self, follow=False, linenumber=None):
    '''returns a list of tuples of (ctx, line) for each line
    in the file, where ctx is the filectx of the node where
    that line was last changed.
    This returns tuples of ((ctx, linenumber), line) for each line,
    if "linenumber" parameter is NOT "None".
    In such tuples, linenumber means one at the first appearance
    in the managed file.
    To reduce annotation cost,
    this returns fixed value(False is used) as linenumber,
    if "linenumber" parameter is "False".'''

    # Decoration strategies: attach per-line metadata to a text blob as
    # (list-of-line-tags, text).
    def decorate_compat(text, rev):
        return ([rev] * len(text.splitlines()), text)

    def without_linenumber(text, rev):
        return ([(rev, False)] * len(text.splitlines()), text)

    def with_linenumber(text, rev):
        size = len(text.splitlines())
        return ([(rev, i) for i in xrange(1, size + 1)], text)

    decorate = (((linenumber is None) and decorate_compat) or
                (linenumber and with_linenumber) or
                without_linenumber)

    def pair(parent, child):
        # Copy line tags for unchanged regions from the parent's
        # decoration onto the child's.
        for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
            child[0][b1:b2] = parent[0][a1:a2]
        return child

    # Memoized constructors so repeated lookups reuse objects.
    getlog = util.cachefunc(lambda x: self._repo.file(x))
    def getctx(path, fileid):
        log = path == self._path and self._filelog or getlog(path)
        return filectx(self._repo, path, fileid=fileid, filelog=log)
    getctx = util.cachefunc(getctx)

    def parents(f):
        # we want to reuse filectx objects as much as possible
        p = f._path
        if f._filerev is None: # working dir
            pl = [(n.path(), n.filerev()) for n in f.parents()]
        else:
            pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]

        if follow:
            r = f.renamed()
            if r:
                # follow the copy: first parent becomes the source
                pl[0] = (r[0], getlog(r[0]).rev(r[1]))

        return [getctx(p, n) for p, n in pl if n != nullrev]

    # use linkrev to find the first changeset where self appeared
    if self.rev() != self.linkrev():
        base = self.filectx(self.filerev())
    else:
        base = self

    # find all ancestors
    needed = {base: 1}
    visit = [base]
    files = [base._path]
    while visit:
        f = visit.pop(0)
        for p in parents(f):
            if p not in needed:
                needed[p] = 1
                visit.append(p)
                if p._path not in files:
                    files.append(p._path)
            else:
                # count how many times we'll use this
                needed[p] += 1

    # sort by revision (per file) which is a topological order
    visit = []
    for f in files:
        fn = [(n.rev(), n) for n in needed if n._path == f]
        visit.extend(fn)

    # Walk ancestors oldest-first, merging parent decorations into each
    # child; reference counts in 'needed' let us drop finished entries.
    hist = {}
    for r, f in util.sort(visit):
        curr = decorate(f.data(), f)
        for p in parents(f):
            if p != nullid:
                curr = pair(hist[p], curr)
                # trim the history of unneeded revs
                needed[p] -= 1
                if not needed[p]:
                    del hist[p]
        hist[f] = curr

    return zip(hist[f][0], hist[f][1].splitlines(1))
420
420
def ancestor(self, fc2):
    """
    find the common ancestor file context, if any, of self, and fc2
    """

    # cache of vertex -> parent vertices, where a vertex is a
    # (path, filenode) pair
    acache = {}

    # prime the ancestor cache for the working directory
    for c in (self, fc2):
        # idiom fix: identity test instead of '== None'
        if c._filerev is None:
            pl = [(n.path(), n.filenode()) for n in c.parents()]
            acache[(c._path, None)] = pl

    # share already-open filelogs, keyed by repo-relative path
    flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
    def parents(vertex):
        # return (and memoize) the parent vertices of a vertex,
        # following a rename into the source file's log
        if vertex in acache:
            return acache[vertex]
        f, n = vertex
        if f not in flcache:
            flcache[f] = self._repo.file(f)
        fl = flcache[f]
        pl = [(f, p) for p in fl.parents(n) if p != nullid]
        re = fl.renamed(n)
        if re:
            pl.append(re)
        acache[vertex] = pl
        return pl

    a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
    v = ancestor.ancestor(a, b, parents)
    if v:
        f, n = v
        return filectx(self._repo, f, fileid=n, filelog=flcache[f])

    return None
456
456
class workingctx(changectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    parents - a pair of parent nodeids, or None to use the dirstate.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
    or None to use the repository status.
    """
    def __init__(self, repo, parents=None, text="", user=None, date=None,
                 extra=None, changes=None):
        self._repo = repo
        # the working directory has no committed revision/node yet
        self._rev = None
        self._node = None
        self._text = text
        # only pre-set what was explicitly supplied; everything else is
        # computed lazily by __getattr__
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if parents:
            self._parents = [changectx(self._repo, p) for p in parents]
        if changes:
            self._status = list(changes)

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            branch = self._repo.dirstate.branch()
            try:
                # round-trip decode to validate the branch name is UTF-8
                branch = branch.decode('UTF-8').encode('UTF-8')
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __str__(self):
        # first parent plus a "+" marking uncommitted changes
        return str(self._parents[0]) + "+"

    def __nonzero__(self):
        return True

    def __contains__(self, key):
        # a file is "in" the working context unless unknown ('?') or
        # removed ('r')
        # NOTE(review): _dirstate is never assigned in this class;
        # presumably provided elsewhere -- verify (self._repo.dirstate?)
        return self._dirstate[key] not in "?r"

    def __getattr__(self, name):
        # lazy attributes, computed on first access
        if name == '_status':
            self._status = self._repo.status(unknown=True)
            return self._status
        elif name == '_user':
            self._user = self._repo.ui.username()
            return self._user
        elif name == '_date':
            self._date = util.makedate()
            return self._date
        if name == '_manifest':
            self._buildmanifest()
            return self._manifest
        elif name == '_parents':
            p = self._repo.dirstate.parents()
            if p[1] == nullid:
                # drop the null second parent outside of a merge
                p = p[:-1]
            self._parents = [changectx(self._repo, x) for x in p]
            return self._parents
        else:
            raise AttributeError(name)

    def _buildmanifest(self):
        """generate a manifest corresponding to the working directory"""

        # start from the first parent's manifest and patch in the
        # working-directory status
        man = self._parents[0].manifest().copy()
        copied = self._repo.dirstate.copies()
        cf = lambda x: man.flags(copied.get(x, x))
        ff = self._repo.dirstate.flagfunc(cf)
        modified, added, removed, deleted, unknown = self._status[:5]
        for i, l in (("a", added), ("m", modified), ("u", unknown)):
            for f in l:
                # fake node id: parent's node (or copy source's)
                # suffixed with the status letter
                man[f] = man.get(copied.get(f, f), nullid) + i
                try:
                    man.set(f, ff(f))
                except OSError:
                    # file vanished from disk; keep old flags
                    pass

        for f in deleted + removed:
            if f in man:
                del man[f]

        self._manifest = man

    def manifest(self): return self._manifest

    def user(self): return self._user or self._repo.ui.username()
    def date(self): return self._date
    def description(self): return self._text
    def files(self):
        # all modified/added/removed files, sorted
        return util.sort(self._status[0] + self._status[1] + self._status[2])

    # status accessors; _status holds (modified, added, removed,
    # deleted, unknown, clean) as produced by localrepo.status()
    def modified(self): return self._status[0]
    def added(self): return self._status[1]
    def removed(self): return self._status[2]
    def deleted(self): return self._status[3]
    def unknown(self): return self._status[4]
    def clean(self): return self._status[5]
    def branch(self): return self._extra['branch']
    def extra(self): return self._extra

    def tags(self):
        # the working directory carries the tags of both parents
        t = []
        [t.extend(p.tags()) for p in self.parents()]
        return t

    def children(self):
        # nothing can descend from the working directory
        return []

    def flags(self, path):
        # prefer the already-built working-directory manifest
        if '_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        # otherwise look up the flag in the first parent's manifest
        # (honoring dirstate copy info) and re-check against the
        # on-disk file mode
        pnode = self._parents[0].changeset()[0]
        orig = self._repo.dirstate.copies().get(path, path)
        node, flag = self._repo.manifest.find(pnode, orig)
        try:
            ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
            return ff(path)
        except OSError:
            pass

        if not node or path in self.deleted() or path in self.removed():
            return ''
        return flag

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def ancestor(self, c2):
        """return the ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        # sorted names of the working-directory files matched by *match*
        return util.sort(self._repo.dirstate.walk(match, True, False).keys())
604
604
class workingfilectx(filectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path
        # the working directory has no committed identity yet
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog:
            self._filelog = filelog
        if workingctx:
            self._changectx = workingctx

    def __getattr__(self, name):
        # lazy attributes, computed on first access
        if name == '_changectx':
            self._changectx = workingctx(self._repo)
            return self._changectx
        elif name == '_repopath':
            # follow a pending copy/rename back to its source path
            self._repopath = (self._repo.dirstate.copied(self._path)
                              or self._path)
            return self._repopath
        elif name == '_filelog':
            self._filelog = self._repo.file(self._repopath)
            return self._filelog
        else:
            raise AttributeError(name)

    def __nonzero__(self):
        return True

    def __str__(self):
        return "%s@%s" % (self.path(), self._changectx)

    def filectx(self, fileid):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._repopath, fileid=fileid,
                       filelog=self._filelog)

    def rev(self):
        if '_changectx' in self.__dict__:
            return self._changectx.rev()
        return self._filelog.linkrev(self._filerev)

    # read the file from the working directory, not the filelog
    def data(self): return self._repo.wread(self._path)
    def renamed(self):
        # report a pending copy/rename as (source path, source filenode)
        rp = self._repopath
        if rp == self._path:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        p = self._path
        rp = self._repopath
        pcl = self._changectx._parents
        fl = self._filelog
        pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
        if len(pcl) > 1:
            if rp != p:
                # copied: the second parent tracks the file under its
                # own name, in a different filelog (None -> look up)
                fl = None
            pl.append((p, pcl[1]._manifest.get(p, nullid), fl))

        # null parents are filtered out
        return [filectx(self._repo, p, fileid=n, filelog=l)
                for p,n,l in pl if n != nullid]

    def children(self):
        return []

    # size and mtime come from the file on disk
    def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
    def date(self):
        # timezone of the parent changeset, but the on-disk mtime when
        # the file still exists
        t, tz = self._changectx.date()
        try:
            return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
        except OSError, err:
            if err.errno != errno.ENOENT: raise
            return (t, tz)

    # NOTE(review): returns True when the working copy *equals* text,
    # which looks like the opposite sense of filectx.cmp (which
    # delegates to filelog.cmp) -- confirm callers expect this.
    def cmp(self, text): return self._repo.wread(self._path) == text
687
687
688 class memctx(object):
688 class memctx(object):
689 """Use memctx to perform in-memory commits via localrepo.commitctx().
689 """Use memctx to perform in-memory commits via localrepo.commitctx().
690
690
691 Revision information is supplied at initialization time while
691 Revision information is supplied at initialization time while
692 related files data and is made available through a callback
692 related files data and is made available through a callback
693 mechanism. 'repo' is the current localrepo, 'parents' is a
693 mechanism. 'repo' is the current localrepo, 'parents' is a
694 sequence of two parent revisions identifiers (pass None for every
694 sequence of two parent revisions identifiers (pass None for every
695 missing parent), 'text' is the commit message and 'files' lists
695 missing parent), 'text' is the commit message and 'files' lists
696 names of files touched by the revision (normalized and relative to
696 names of files touched by the revision (normalized and relative to
697 repository root).
697 repository root).
698
698
699 filectxfn(repo, memctx, path) is a callable receiving the
699 filectxfn(repo, memctx, path) is a callable receiving the
700 repository, the current memctx object and the normalized path of
700 repository, the current memctx object and the normalized path of
701 requested file, relative to repository root. It is fired by the
701 requested file, relative to repository root. It is fired by the
702 commit function for every file in 'files', but calls order is
702 commit function for every file in 'files', but calls order is
703 undefined. If the file is available in the revision being
703 undefined. If the file is available in the revision being
704 committed (updated or added), filectxfn returns a memfilectx
704 committed (updated or added), filectxfn returns a memfilectx
705 object. If the file was removed, filectxfn raises an
705 object. If the file was removed, filectxfn raises an
706 IOError. Moved files are represented by marking the source file
706 IOError. Moved files are represented by marking the source file
707 removed and the new file added with copy information (see
707 removed and the new file added with copy information (see
708 memfilectx).
708 memfilectx).
709
709
710 user receives the committer name and defaults to current
710 user receives the committer name and defaults to current
711 repository username, date is the commit date in any format
711 repository username, date is the commit date in any format
712 supported by util.parsedate() and defaults to current date, extra
712 supported by util.parsedate() and defaults to current date, extra
713 is a dictionary of metadata or is left empty.
713 is a dictionary of metadata or is left empty.
714 """
714 """
715 def __init__(self, repo, parents, text, files, filectxfn, user=None,
715 def __init__(self, repo, parents, text, files, filectxfn, user=None,
716 date=None, extra=None):
716 date=None, extra=None):
717 self._repo = repo
717 self._repo = repo
718 self._rev = None
718 self._rev = None
719 self._node = None
719 self._node = None
720 self._text = text
720 self._text = text
721 self._date = date and util.parsedate(date) or util.makedate()
721 self._date = date and util.parsedate(date) or util.makedate()
722 self._user = user
722 self._user = user
723 parents = [(p or nullid) for p in parents]
723 parents = [(p or nullid) for p in parents]
724 p1, p2 = parents
724 p1, p2 = parents
725 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
725 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
726 files = util.sort(list(files))
726 files = util.sort(list(files))
727 self._status = [files, [], [], [], []]
727 self._status = [files, [], [], [], []]
728 self._filectxfn = filectxfn
728 self._filectxfn = filectxfn
729
729
730 self._extra = extra and extra.copy() or {}
730 self._extra = extra and extra.copy() or {}
731 if 'branch' not in self._extra:
731 if 'branch' not in self._extra:
732 self._extra['branch'] = 'default'
732 self._extra['branch'] = 'default'
733 elif self._extra.get('branch') == '':
733 elif self._extra.get('branch') == '':
734 self._extra['branch'] = 'default'
734 self._extra['branch'] = 'default'
735
735
736 def __str__(self):
736 def __str__(self):
737 return str(self._parents[0]) + "+"
737 return str(self._parents[0]) + "+"
738
738
739 def __int__(self):
739 def __int__(self):
740 return self._rev
740 return self._rev
741
741
742 def __nonzero__(self):
742 def __nonzero__(self):
743 return True
743 return True
744
744
745 def user(self): return self._user or self._repo.ui.username()
745 def user(self): return self._user or self._repo.ui.username()
746 def date(self): return self._date
746 def date(self): return self._date
747 def description(self): return self._text
747 def description(self): return self._text
748 def files(self): return self.modified()
748 def files(self): return self.modified()
749 def modified(self): return self._status[0]
749 def modified(self): return self._status[0]
750 def added(self): return self._status[1]
750 def added(self): return self._status[1]
751 def removed(self): return self._status[2]
751 def removed(self): return self._status[2]
752 def deleted(self): return self._status[3]
752 def deleted(self): return self._status[3]
753 def unknown(self): return self._status[4]
753 def unknown(self): return self._status[4]
754 def clean(self): return self._status[5]
754 def clean(self): return self._status[5]
755 def branch(self): return self._extra['branch']
755 def branch(self): return self._extra['branch']
756 def extra(self): return self._extra
756 def extra(self): return self._extra
757 def flags(self, f): return self[f].flags()
757 def flags(self, f): return self[f].flags()
758
758
759 def parents(self):
759 def parents(self):
760 """return contexts for each parent changeset"""
760 """return contexts for each parent changeset"""
761 return self._parents
761 return self._parents
762
762
763 def filectx(self, path, filelog=None):
763 def filectx(self, path, filelog=None):
764 """get a file context from the working directory"""
764 """get a file context from the working directory"""
765 return self._filectxfn(self._repo, self, path)
765 return self._filectxfn(self._repo, self, path)
766
766
767 class memfilectx(object):
767 class memfilectx(object):
768 """memfilectx represents an in-memory file to commit.
768 """memfilectx represents an in-memory file to commit.
769
769
770 See memctx for more details.
770 See memctx for more details.
771 """
771 """
772 def __init__(self, path, data, islink, isexec, copied):
772 def __init__(self, path, data, islink, isexec, copied):
773 """
773 """
774 path is the normalized file path relative to repository root.
774 path is the normalized file path relative to repository root.
775 data is the file content as a string.
775 data is the file content as a string.
776 islink is True if the file is a symbolic link.
776 islink is True if the file is a symbolic link.
777 isexec is True if the file is executable.
777 isexec is True if the file is executable.
778 copied is the source file path if current file was copied in the
778 copied is the source file path if current file was copied in the
779 revision being committed, or None."""
779 revision being committed, or None."""
780 self._path = path
780 self._path = path
781 self._data = data
781 self._data = data
782 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
782 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
783 self._copied = None
783 self._copied = None
784 if copied:
784 if copied:
785 self._copied = (copied, nullid)
785 self._copied = (copied, nullid)
786
786
787 def __nonzero__(self): return True
787 def __nonzero__(self): return True
788 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
788 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
789 def path(self): return self._path
789 def path(self): return self._path
790 def data(self): return self._data
790 def data(self): return self._data
791 def flags(self): return self._flags
791 def flags(self): return self._flags
792 def isexec(self): return 'x' in self._flags
792 def isexec(self): return 'x' in self._flags
793 def islink(self): return 'l' in self._flags
793 def islink(self): return 'l' in self._flags
794 def renamed(self): return self._copied
794 def renamed(self): return self._copied
795
795
@@ -1,657 +1,657 b''
1 #
1 #
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, mimetypes, re, cgi, copy
8 import os, mimetypes, re, cgi, copy
9 import webutil
9 import webutil
10 from mercurial import revlog, archival, templatefilters
10 from mercurial import revlog, archival, templatefilters
11 from mercurial.node import short, hex, nullid
11 from mercurial.node import short, hex, nullid
12 from mercurial.util import binary, datestr
12 from mercurial.util import binary, datestr
13 from mercurial.repo import RepoError
13 from mercurial.repo import RepoError
14 from common import paritygen, staticfile, get_contact, ErrorResponse
14 from common import paritygen, staticfile, get_contact, ErrorResponse
15 from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
15 from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
16 from mercurial import graphmod, util
16 from mercurial import graphmod, util
17
17
18 # __all__ is populated with the allowed commands. Be sure to add to it if
18 # __all__ is populated with the allowed commands. Be sure to add to it if
19 # you're adding a new command, or the new command won't work.
19 # you're adding a new command, or the new command won't work.
20
20
21 __all__ = [
21 __all__ = [
22 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
22 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
23 'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog',
23 'manifest', 'tags', 'summary', 'filediff', 'diff', 'annotate', 'filelog',
24 'archive', 'static', 'graph',
24 'archive', 'static', 'graph',
25 ]
25 ]
26
26
27 def log(web, req, tmpl):
27 def log(web, req, tmpl):
28 if 'file' in req.form and req.form['file'][0]:
28 if 'file' in req.form and req.form['file'][0]:
29 return filelog(web, req, tmpl)
29 return filelog(web, req, tmpl)
30 else:
30 else:
31 return changelog(web, req, tmpl)
31 return changelog(web, req, tmpl)
32
32
33 def rawfile(web, req, tmpl):
33 def rawfile(web, req, tmpl):
34 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
34 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
35 if not path:
35 if not path:
36 content = manifest(web, req, tmpl)
36 content = manifest(web, req, tmpl)
37 req.respond(HTTP_OK, web.ctype)
37 req.respond(HTTP_OK, web.ctype)
38 return content
38 return content
39
39
40 try:
40 try:
41 fctx = webutil.filectx(web.repo, req)
41 fctx = webutil.filectx(web.repo, req)
42 except revlog.LookupError, inst:
42 except revlog.LookupError, inst:
43 try:
43 try:
44 content = manifest(web, req, tmpl)
44 content = manifest(web, req, tmpl)
45 req.respond(HTTP_OK, web.ctype)
45 req.respond(HTTP_OK, web.ctype)
46 return content
46 return content
47 except ErrorResponse:
47 except ErrorResponse:
48 raise inst
48 raise inst
49
49
50 path = fctx.path()
50 path = fctx.path()
51 text = fctx.data()
51 text = fctx.data()
52 mt = mimetypes.guess_type(path)[0]
52 mt = mimetypes.guess_type(path)[0]
53 if mt is None:
53 if mt is None:
54 mt = binary(text) and 'application/octet-stream' or 'text/plain'
54 mt = binary(text) and 'application/octet-stream' or 'text/plain'
55
55
56 req.respond(HTTP_OK, mt, path, len(text))
56 req.respond(HTTP_OK, mt, path, len(text))
57 return [text]
57 return [text]
58
58
59 def _filerevision(web, tmpl, fctx):
59 def _filerevision(web, tmpl, fctx):
60 f = fctx.path()
60 f = fctx.path()
61 text = fctx.data()
61 text = fctx.data()
62 parity = paritygen(web.stripecount)
62 parity = paritygen(web.stripecount)
63
63
64 if binary(text):
64 if binary(text):
65 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
65 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
66 text = '(binary:%s)' % mt
66 text = '(binary:%s)' % mt
67
67
68 def lines():
68 def lines():
69 for lineno, t in enumerate(text.splitlines(1)):
69 for lineno, t in enumerate(text.splitlines(1)):
70 yield {"line": t,
70 yield {"line": t,
71 "lineid": "l%d" % (lineno + 1),
71 "lineid": "l%d" % (lineno + 1),
72 "linenumber": "% 6d" % (lineno + 1),
72 "linenumber": "% 6d" % (lineno + 1),
73 "parity": parity.next()}
73 "parity": parity.next()}
74
74
75 return tmpl("filerevision",
75 return tmpl("filerevision",
76 file=f,
76 file=f,
77 path=webutil.up(f),
77 path=webutil.up(f),
78 text=lines(),
78 text=lines(),
79 rev=fctx.rev(),
79 rev=fctx.rev(),
80 node=hex(fctx.node()),
80 node=hex(fctx.node()),
81 author=fctx.user(),
81 author=fctx.user(),
82 date=fctx.date(),
82 date=fctx.date(),
83 desc=fctx.description(),
83 desc=fctx.description(),
84 branch=webutil.nodebranchnodefault(fctx),
84 branch=webutil.nodebranchnodefault(fctx),
85 parent=webutil.siblings(fctx.parents()),
85 parent=webutil.siblings(fctx.parents()),
86 child=webutil.siblings(fctx.children()),
86 child=webutil.siblings(fctx.children()),
87 rename=webutil.renamelink(fctx),
87 rename=webutil.renamelink(fctx),
88 permissions=fctx.manifest().flags(f))
88 permissions=fctx.manifest().flags(f))
89
89
90 def file(web, req, tmpl):
90 def file(web, req, tmpl):
91 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
91 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
92 if not path:
92 if not path:
93 return manifest(web, req, tmpl)
93 return manifest(web, req, tmpl)
94 try:
94 try:
95 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
95 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
96 except revlog.LookupError, inst:
96 except revlog.LookupError, inst:
97 try:
97 try:
98 return manifest(web, req, tmpl)
98 return manifest(web, req, tmpl)
99 except ErrorResponse:
99 except ErrorResponse:
100 raise inst
100 raise inst
101
101
102 def _search(web, tmpl, query):
102 def _search(web, tmpl, query):
103
103
104 def changelist(**map):
104 def changelist(**map):
105 cl = web.repo.changelog
105 cl = web.repo.changelog
106 count = 0
106 count = 0
107 qw = query.lower().split()
107 qw = query.lower().split()
108
108
109 def revgen():
109 def revgen():
110 for i in xrange(len(cl) - 1, 0, -100):
110 for i in xrange(len(cl) - 1, 0, -100):
111 l = []
111 l = []
112 for j in xrange(max(0, i - 100), i + 1):
112 for j in xrange(max(0, i - 100), i + 1):
113 ctx = web.repo[j]
113 ctx = web.repo[j]
114 l.append(ctx)
114 l.append(ctx)
115 l.reverse()
115 l.reverse()
116 for e in l:
116 for e in l:
117 yield e
117 yield e
118
118
119 for ctx in revgen():
119 for ctx in revgen():
120 miss = 0
120 miss = 0
121 for q in qw:
121 for q in qw:
122 if not (q in ctx.user().lower() or
122 if not (q in ctx.user().lower() or
123 q in ctx.description().lower() or
123 q in ctx.description().lower() or
124 q in " ".join(ctx.files()).lower()):
124 q in " ".join(ctx.files()).lower()):
125 miss = 1
125 miss = 1
126 break
126 break
127 if miss:
127 if miss:
128 continue
128 continue
129
129
130 count += 1
130 count += 1
131 n = ctx.node()
131 n = ctx.node()
132 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
132 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
133 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
133 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
134
134
135 yield tmpl('searchentry',
135 yield tmpl('searchentry',
136 parity=parity.next(),
136 parity=parity.next(),
137 author=ctx.user(),
137 author=ctx.user(),
138 parent=webutil.siblings(ctx.parents()),
138 parent=webutil.siblings(ctx.parents()),
139 child=webutil.siblings(ctx.children()),
139 child=webutil.siblings(ctx.children()),
140 changelogtag=showtags,
140 changelogtag=showtags,
141 desc=ctx.description(),
141 desc=ctx.description(),
142 date=ctx.date(),
142 date=ctx.date(),
143 files=files,
143 files=files,
144 rev=ctx.rev(),
144 rev=ctx.rev(),
145 node=hex(n),
145 node=hex(n),
146 tags=webutil.nodetagsdict(web.repo, n),
146 tags=webutil.nodetagsdict(web.repo, n),
147 inbranch=webutil.nodeinbranch(web.repo, ctx),
147 inbranch=webutil.nodeinbranch(web.repo, ctx),
148 branches=webutil.nodebranchdict(web.repo, ctx))
148 branches=webutil.nodebranchdict(web.repo, ctx))
149
149
150 if count >= web.maxchanges:
150 if count >= web.maxchanges:
151 break
151 break
152
152
153 cl = web.repo.changelog
153 cl = web.repo.changelog
154 parity = paritygen(web.stripecount)
154 parity = paritygen(web.stripecount)
155
155
156 return tmpl('search',
156 return tmpl('search',
157 query=query,
157 query=query,
158 node=hex(cl.tip()),
158 node=hex(cl.tip()),
159 entries=changelist,
159 entries=changelist,
160 archives=web.archivelist("tip"))
160 archives=web.archivelist("tip"))
161
161
162 def changelog(web, req, tmpl, shortlog = False):
162 def changelog(web, req, tmpl, shortlog = False):
163 if 'node' in req.form:
163 if 'node' in req.form:
164 ctx = webutil.changectx(web.repo, req)
164 ctx = webutil.changectx(web.repo, req)
165 else:
165 else:
166 if 'rev' in req.form:
166 if 'rev' in req.form:
167 hi = req.form['rev'][0]
167 hi = req.form['rev'][0]
168 else:
168 else:
169 hi = len(web.repo) - 1
169 hi = len(web.repo) - 1
170 try:
170 try:
171 ctx = web.repo[hi]
171 ctx = web.repo[hi]
172 except RepoError:
172 except RepoError:
173 return _search(web, tmpl, hi) # XXX redirect to 404 page?
173 return _search(web, tmpl, hi) # XXX redirect to 404 page?
174
174
175 def changelist(limit=0, **map):
175 def changelist(limit=0, **map):
176 cl = web.repo.changelog
176 cl = web.repo.changelog
177 l = [] # build a list in forward order for efficiency
177 l = [] # build a list in forward order for efficiency
178 for i in xrange(start, end):
178 for i in xrange(start, end):
179 ctx = web.repo[i]
179 ctx = web.repo[i]
180 n = ctx.node()
180 n = ctx.node()
181 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
181 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
182 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
182 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
183
183
184 l.insert(0, {"parity": parity.next(),
184 l.insert(0, {"parity": parity.next(),
185 "author": ctx.user(),
185 "author": ctx.user(),
186 "parent": webutil.siblings(ctx.parents(), i - 1),
186 "parent": webutil.siblings(ctx.parents(), i - 1),
187 "child": webutil.siblings(ctx.children(), i + 1),
187 "child": webutil.siblings(ctx.children(), i + 1),
188 "changelogtag": showtags,
188 "changelogtag": showtags,
189 "desc": ctx.description(),
189 "desc": ctx.description(),
190 "date": ctx.date(),
190 "date": ctx.date(),
191 "files": files,
191 "files": files,
192 "rev": i,
192 "rev": i,
193 "node": hex(n),
193 "node": hex(n),
194 "tags": webutil.nodetagsdict(web.repo, n),
194 "tags": webutil.nodetagsdict(web.repo, n),
195 "inbranch": webutil.nodeinbranch(web.repo, ctx),
195 "inbranch": webutil.nodeinbranch(web.repo, ctx),
196 "branches": webutil.nodebranchdict(web.repo, ctx)
196 "branches": webutil.nodebranchdict(web.repo, ctx)
197 })
197 })
198
198
199 if limit > 0:
199 if limit > 0:
200 l = l[:limit]
200 l = l[:limit]
201
201
202 for e in l:
202 for e in l:
203 yield e
203 yield e
204
204
205 maxchanges = shortlog and web.maxshortchanges or web.maxchanges
205 maxchanges = shortlog and web.maxshortchanges or web.maxchanges
206 cl = web.repo.changelog
206 cl = web.repo.changelog
207 count = len(cl)
207 count = len(cl)
208 pos = ctx.rev()
208 pos = ctx.rev()
209 start = max(0, pos - maxchanges + 1)
209 start = max(0, pos - maxchanges + 1)
210 end = min(count, start + maxchanges)
210 end = min(count, start + maxchanges)
211 pos = end - 1
211 pos = end - 1
212 parity = paritygen(web.stripecount, offset=start-end)
212 parity = paritygen(web.stripecount, offset=start-end)
213
213
214 changenav = webutil.revnavgen(pos, maxchanges, count, web.repo.changectx)
214 changenav = webutil.revnavgen(pos, maxchanges, count, web.repo.changectx)
215
215
216 return tmpl(shortlog and 'shortlog' or 'changelog',
216 return tmpl(shortlog and 'shortlog' or 'changelog',
217 changenav=changenav,
217 changenav=changenav,
218 node=hex(ctx.node()),
218 node=hex(ctx.node()),
219 rev=pos, changesets=count,
219 rev=pos, changesets=count,
220 entries=lambda **x: changelist(limit=0,**x),
220 entries=lambda **x: changelist(limit=0,**x),
221 latestentry=lambda **x: changelist(limit=1,**x),
221 latestentry=lambda **x: changelist(limit=1,**x),
222 archives=web.archivelist("tip"))
222 archives=web.archivelist("tip"))
223
223
224 def shortlog(web, req, tmpl):
224 def shortlog(web, req, tmpl):
225 return changelog(web, req, tmpl, shortlog = True)
225 return changelog(web, req, tmpl, shortlog = True)
226
226
227 def changeset(web, req, tmpl):
227 def changeset(web, req, tmpl):
228 ctx = webutil.changectx(web.repo, req)
228 ctx = webutil.changectx(web.repo, req)
229 showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
229 showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
230 parents = ctx.parents()
230 parents = ctx.parents()
231
231
232 files = []
232 files = []
233 parity = paritygen(web.stripecount)
233 parity = paritygen(web.stripecount)
234 for f in ctx.files():
234 for f in ctx.files():
235 template = f in ctx and 'filenodelink' or 'filenolink'
235 template = f in ctx and 'filenodelink' or 'filenolink'
236 files.append(tmpl(template,
236 files.append(tmpl(template,
237 node=ctx.hex(), file=f,
237 node=ctx.hex(), file=f,
238 parity=parity.next()))
238 parity=parity.next()))
239
239
240 parity = paritygen(web.stripecount)
240 parity = paritygen(web.stripecount)
241 diffs = webutil.diffs(web.repo, tmpl, ctx, None, parity)
241 diffs = webutil.diffs(web.repo, tmpl, ctx, None, parity)
242 return tmpl('changeset',
242 return tmpl('changeset',
243 diff=diffs,
243 diff=diffs,
244 rev=ctx.rev(),
244 rev=ctx.rev(),
245 node=ctx.hex(),
245 node=ctx.hex(),
246 parent=webutil.siblings(parents),
246 parent=webutil.siblings(parents),
247 child=webutil.siblings(ctx.children()),
247 child=webutil.siblings(ctx.children()),
248 changesettag=showtags,
248 changesettag=showtags,
249 author=ctx.user(),
249 author=ctx.user(),
250 desc=ctx.description(),
250 desc=ctx.description(),
251 date=ctx.date(),
251 date=ctx.date(),
252 files=files,
252 files=files,
253 archives=web.archivelist(ctx.hex()),
253 archives=web.archivelist(ctx.hex()),
254 tags=webutil.nodetagsdict(web.repo, ctx.node()),
254 tags=webutil.nodetagsdict(web.repo, ctx.node()),
255 branch=webutil.nodebranchnodefault(ctx),
255 branch=webutil.nodebranchnodefault(ctx),
256 inbranch=webutil.nodeinbranch(web.repo, ctx),
256 inbranch=webutil.nodeinbranch(web.repo, ctx),
257 branches=webutil.nodebranchdict(web.repo, ctx))
257 branches=webutil.nodebranchdict(web.repo, ctx))
258
258
259 rev = changeset
259 rev = changeset
260
260
261 def manifest(web, req, tmpl):
261 def manifest(web, req, tmpl):
262 ctx = webutil.changectx(web.repo, req)
262 ctx = webutil.changectx(web.repo, req)
263 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
263 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
264 mf = ctx.manifest()
264 mf = ctx.manifest()
265 node = ctx.node()
265 node = ctx.node()
266
266
267 files = {}
267 files = {}
268 dirs = {}
268 dirs = {}
269 parity = paritygen(web.stripecount)
269 parity = paritygen(web.stripecount)
270
270
271 if path and path[-1] != "/":
271 if path and path[-1] != "/":
272 path += "/"
272 path += "/"
273 l = len(path)
273 l = len(path)
274 abspath = "/" + path
274 abspath = "/" + path
275
275
276 for f, n in mf.items():
276 for f, n in mf.items():
277 if f[:l] != path:
277 if f[:l] != path:
278 continue
278 continue
279 remain = f[l:]
279 remain = f[l:]
280 elements = remain.split('/')
280 elements = remain.split('/')
281 if len(elements) == 1:
281 if len(elements) == 1:
282 files[remain] = f
282 files[remain] = f
283 else:
283 else:
284 h = dirs # need to retain ref to dirs (root)
284 h = dirs # need to retain ref to dirs (root)
285 for elem in elements[0:-1]:
285 for elem in elements[0:-1]:
286 if elem not in h:
286 if elem not in h:
287 h[elem] = {}
287 h[elem] = {}
288 h = h[elem]
288 h = h[elem]
289 if len(h) > 1:
289 if len(h) > 1:
290 break
290 break
291 h[None] = None # denotes files present
291 h[None] = None # denotes files present
292
292
293 if not files and not dirs:
293 if not files and not dirs:
294 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
294 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
295
295
296 def filelist(**map):
296 def filelist(**map):
297 for f in util.sort(files):
297 for f in util.sort(files):
298 full = files[f]
298 full = files[f]
299
299
300 fctx = ctx.filectx(full)
300 fctx = ctx.filectx(full)
301 yield {"file": full,
301 yield {"file": full,
302 "parity": parity.next(),
302 "parity": parity.next(),
303 "basename": f,
303 "basename": f,
304 "date": fctx.date(),
304 "date": fctx.date(),
305 "size": fctx.size(),
305 "size": fctx.size(),
306 "permissions": mf.flags(full)}
306 "permissions": mf.flags(full)}
307
307
308 def dirlist(**map):
308 def dirlist(**map):
309 for d in util.sort(dirs):
309 for d in util.sort(dirs):
310
310
311 emptydirs = []
311 emptydirs = []
312 h = dirs[d]
312 h = dirs[d]
313 while isinstance(h, dict) and len(h) == 1:
313 while isinstance(h, dict) and len(h) == 1:
314 k,v = h.items()[0]
314 k,v = h.items()[0]
315 if v:
315 if v:
316 emptydirs.append(k)
316 emptydirs.append(k)
317 h = v
317 h = v
318
318
319 path = "%s%s" % (abspath, d)
319 path = "%s%s" % (abspath, d)
320 yield {"parity": parity.next(),
320 yield {"parity": parity.next(),
321 "path": path,
321 "path": path,
322 "emptydirs": "/".join(emptydirs),
322 "emptydirs": "/".join(emptydirs),
323 "basename": d}
323 "basename": d}
324
324
325 return tmpl("manifest",
325 return tmpl("manifest",
326 rev=ctx.rev(),
326 rev=ctx.rev(),
327 node=hex(node),
327 node=hex(node),
328 path=abspath,
328 path=abspath,
329 up=webutil.up(abspath),
329 up=webutil.up(abspath),
330 upparity=parity.next(),
330 upparity=parity.next(),
331 fentries=filelist,
331 fentries=filelist,
332 dentries=dirlist,
332 dentries=dirlist,
333 archives=web.archivelist(hex(node)),
333 archives=web.archivelist(hex(node)),
334 tags=webutil.nodetagsdict(web.repo, node),
334 tags=webutil.nodetagsdict(web.repo, node),
335 inbranch=webutil.nodeinbranch(web.repo, ctx),
335 inbranch=webutil.nodeinbranch(web.repo, ctx),
336 branches=webutil.nodebranchdict(web.repo, ctx))
336 branches=webutil.nodebranchdict(web.repo, ctx))
337
337
338 def tags(web, req, tmpl):
338 def tags(web, req, tmpl):
339 i = web.repo.tagslist()
339 i = web.repo.tagslist()
340 i.reverse()
340 i.reverse()
341 parity = paritygen(web.stripecount)
341 parity = paritygen(web.stripecount)
342
342
343 def entries(notip=False,limit=0, **map):
343 def entries(notip=False,limit=0, **map):
344 count = 0
344 count = 0
345 for k, n in i:
345 for k, n in i:
346 if notip and k == "tip":
346 if notip and k == "tip":
347 continue
347 continue
348 if limit > 0 and count >= limit:
348 if limit > 0 and count >= limit:
349 continue
349 continue
350 count = count + 1
350 count = count + 1
351 yield {"parity": parity.next(),
351 yield {"parity": parity.next(),
352 "tag": k,
352 "tag": k,
353 "date": web.repo[n].date(),
353 "date": web.repo[n].date(),
354 "node": hex(n)}
354 "node": hex(n)}
355
355
356 return tmpl("tags",
356 return tmpl("tags",
357 node=hex(web.repo.changelog.tip()),
357 node=hex(web.repo.changelog.tip()),
358 entries=lambda **x: entries(False,0, **x),
358 entries=lambda **x: entries(False,0, **x),
359 entriesnotip=lambda **x: entries(True,0, **x),
359 entriesnotip=lambda **x: entries(True,0, **x),
360 latestentry=lambda **x: entries(True,1, **x))
360 latestentry=lambda **x: entries(True,1, **x))
361
361
362 def summary(web, req, tmpl):
362 def summary(web, req, tmpl):
363 i = web.repo.tagslist()
363 i = web.repo.tagslist()
364 i.reverse()
364 i.reverse()
365
365
366 def tagentries(**map):
366 def tagentries(**map):
367 parity = paritygen(web.stripecount)
367 parity = paritygen(web.stripecount)
368 count = 0
368 count = 0
369 for k, n in i:
369 for k, n in i:
370 if k == "tip": # skip tip
370 if k == "tip": # skip tip
371 continue
371 continue
372
372
373 count += 1
373 count += 1
374 if count > 10: # limit to 10 tags
374 if count > 10: # limit to 10 tags
375 break
375 break
376
376
377 yield tmpl("tagentry",
377 yield tmpl("tagentry",
378 parity=parity.next(),
378 parity=parity.next(),
379 tag=k,
379 tag=k,
380 node=hex(n),
380 node=hex(n),
381 date=web.repo[n].date())
381 date=web.repo[n].date())
382
382
383 def branches(**map):
383 def branches(**map):
384 parity = paritygen(web.stripecount)
384 parity = paritygen(web.stripecount)
385
385
386 b = web.repo.branchtags()
386 b = web.repo.branchtags()
387 l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.items()]
387 l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.items()]
388 for r,n,t in util.sort(l):
388 for r,n,t in util.sort(l):
389 yield {'parity': parity.next(),
389 yield {'parity': parity.next(),
390 'branch': t,
390 'branch': t,
391 'node': hex(n),
391 'node': hex(n),
392 'date': web.repo[n].date()}
392 'date': web.repo[n].date()}
393
393
394 def changelist(**map):
394 def changelist(**map):
395 parity = paritygen(web.stripecount, offset=start-end)
395 parity = paritygen(web.stripecount, offset=start-end)
396 l = [] # build a list in forward order for efficiency
396 l = [] # build a list in forward order for efficiency
397 for i in xrange(start, end):
397 for i in xrange(start, end):
398 ctx = web.repo[i]
398 ctx = web.repo[i]
399 n = ctx.node()
399 n = ctx.node()
400 hn = hex(n)
400 hn = hex(n)
401
401
402 l.insert(0, tmpl(
402 l.insert(0, tmpl(
403 'shortlogentry',
403 'shortlogentry',
404 parity=parity.next(),
404 parity=parity.next(),
405 author=ctx.user(),
405 author=ctx.user(),
406 desc=ctx.description(),
406 desc=ctx.description(),
407 date=ctx.date(),
407 date=ctx.date(),
408 rev=i,
408 rev=i,
409 node=hn,
409 node=hn,
410 tags=webutil.nodetagsdict(web.repo, n),
410 tags=webutil.nodetagsdict(web.repo, n),
411 inbranch=webutil.nodeinbranch(web.repo, ctx),
411 inbranch=webutil.nodeinbranch(web.repo, ctx),
412 branches=webutil.nodebranchdict(web.repo, ctx)))
412 branches=webutil.nodebranchdict(web.repo, ctx)))
413
413
414 yield l
414 yield l
415
415
416 cl = web.repo.changelog
416 cl = web.repo.changelog
417 count = len(cl)
417 count = len(cl)
418 start = max(0, count - web.maxchanges)
418 start = max(0, count - web.maxchanges)
419 end = min(count, start + web.maxchanges)
419 end = min(count, start + web.maxchanges)
420
420
421 return tmpl("summary",
421 return tmpl("summary",
422 desc=web.config("web", "description", "unknown"),
422 desc=web.config("web", "description", "unknown"),
423 owner=get_contact(web.config) or "unknown",
423 owner=get_contact(web.config) or "unknown",
424 lastchange=cl.read(cl.tip())[2],
424 lastchange=cl.read(cl.tip())[2],
425 tags=tagentries,
425 tags=tagentries,
426 branches=branches,
426 branches=branches,
427 shortlog=changelist,
427 shortlog=changelist,
428 node=hex(cl.tip()),
428 node=hex(cl.tip()),
429 archives=web.archivelist("tip"))
429 archives=web.archivelist("tip"))
430
430
431 def filediff(web, req, tmpl):
431 def filediff(web, req, tmpl):
432 fctx, ctx = None, None
432 fctx, ctx = None, None
433 try:
433 try:
434 fctx = webutil.filectx(web.repo, req)
434 fctx = webutil.filectx(web.repo, req)
435 except LookupError:
435 except LookupError:
436 ctx = webutil.changectx(web.repo, req)
436 ctx = webutil.changectx(web.repo, req)
437 path = webutil.cleanpath(web.repo, req.form['file'][0])
437 path = webutil.cleanpath(web.repo, req.form['file'][0])
438 if path not in ctx.files():
438 if path not in ctx.files():
439 raise
439 raise
440
440
441 if fctx is not None:
441 if fctx is not None:
442 n = fctx.node()
442 n = fctx.node()
443 path = fctx.path()
443 path = fctx.path()
444 parents = fctx.parents()
444 parents = fctx.parents()
445 p1 = parents and parents[0].node() or nullid
445 p1 = parents and parents[0].node() or nullid
446 else:
446 else:
447 n = ctx.node()
447 n = ctx.node()
448 # path already defined in except clause
448 # path already defined in except clause
449 parents = ctx.parents()
449 parents = ctx.parents()
450
450
451 parity = paritygen(web.stripecount)
451 parity = paritygen(web.stripecount)
452 diffs = webutil.diffs(web.repo, tmpl, fctx or ctx, [path], parity)
452 diffs = webutil.diffs(web.repo, tmpl, fctx or ctx, [path], parity)
453 rename = fctx and webutil.renamelink(fctx) or []
453 rename = fctx and webutil.renamelink(fctx) or []
454 ctx = fctx and fctx or ctx
454 ctx = fctx and fctx or ctx
455 return tmpl("filediff",
455 return tmpl("filediff",
456 file=path,
456 file=path,
457 node=hex(n),
457 node=hex(n),
458 rev=ctx.rev(),
458 rev=ctx.rev(),
459 date=ctx.date(),
459 date=ctx.date(),
460 desc=ctx.description(),
460 desc=ctx.description(),
461 author=ctx.user(),
461 author=ctx.user(),
462 rename=rename,
462 rename=rename,
463 branch=webutil.nodebranchnodefault(ctx),
463 branch=webutil.nodebranchnodefault(ctx),
464 parent=webutil.siblings(parents),
464 parent=webutil.siblings(parents),
465 child=webutil.siblings(ctx.children()),
465 child=webutil.siblings(ctx.children()),
466 diff=diffs)
466 diff=diffs)
467
467
468 diff = filediff
468 diff = filediff
469
469
470 def annotate(web, req, tmpl):
470 def annotate(web, req, tmpl):
471 fctx = webutil.filectx(web.repo, req)
471 fctx = webutil.filectx(web.repo, req)
472 f = fctx.path()
472 f = fctx.path()
473 parity = paritygen(web.stripecount)
473 parity = paritygen(web.stripecount)
474
474
475 def annotate(**map):
475 def annotate(**map):
476 last = None
476 last = None
477 if binary(fctx.data()):
477 if binary(fctx.data()):
478 mt = (mimetypes.guess_type(fctx.path())[0]
478 mt = (mimetypes.guess_type(fctx.path())[0]
479 or 'application/octet-stream')
479 or 'application/octet-stream')
480 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
480 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
481 '(binary:%s)' % mt)])
481 '(binary:%s)' % mt)])
482 else:
482 else:
483 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
483 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
484 for lineno, ((f, targetline), l) in lines:
484 for lineno, ((f, targetline), l) in lines:
485 fnode = f.filenode()
485 fnode = f.filenode()
486
486
487 if last != fnode:
487 if last != fnode:
488 last = fnode
488 last = fnode
489
489
490 yield {"parity": parity.next(),
490 yield {"parity": parity.next(),
491 "node": hex(f.node()),
491 "node": hex(f.node()),
492 "rev": f.rev(),
492 "rev": f.rev(),
493 "author": f.user(),
493 "author": f.user(),
494 "desc": f.description(),
494 "desc": f.description(),
495 "file": f.path(),
495 "file": f.path(),
496 "targetline": targetline,
496 "targetline": targetline,
497 "line": l,
497 "line": l,
498 "lineid": "l%d" % (lineno + 1),
498 "lineid": "l%d" % (lineno + 1),
499 "linenumber": "% 6d" % (lineno + 1)}
499 "linenumber": "% 6d" % (lineno + 1)}
500
500
501 return tmpl("fileannotate",
501 return tmpl("fileannotate",
502 file=f,
502 file=f,
503 annotate=annotate,
503 annotate=annotate,
504 path=webutil.up(f),
504 path=webutil.up(f),
505 rev=fctx.rev(),
505 rev=fctx.rev(),
506 node=hex(fctx.node()),
506 node=hex(fctx.node()),
507 author=fctx.user(),
507 author=fctx.user(),
508 date=fctx.date(),
508 date=fctx.date(),
509 desc=fctx.description(),
509 desc=fctx.description(),
510 rename=webutil.renamelink(fctx),
510 rename=webutil.renamelink(fctx),
511 branch=webutil.nodebranchnodefault(fctx),
511 branch=webutil.nodebranchnodefault(fctx),
512 parent=webutil.siblings(fctx.parents()),
512 parent=webutil.siblings(fctx.parents()),
513 child=webutil.siblings(fctx.children()),
513 child=webutil.siblings(fctx.children()),
514 permissions=fctx.manifest().flags(f))
514 permissions=fctx.manifest().flags(f))
515
515
516 def filelog(web, req, tmpl):
516 def filelog(web, req, tmpl):
517
517
518 try:
518 try:
519 fctx = webutil.filectx(web.repo, req)
519 fctx = webutil.filectx(web.repo, req)
520 f = fctx.path()
520 f = fctx.path()
521 fl = fctx.filelog()
521 fl = fctx.filelog()
522 except revlog.LookupError:
522 except revlog.LookupError:
523 f = webutil.cleanpath(web.repo, req.form['file'][0])
523 f = webutil.cleanpath(web.repo, req.form['file'][0])
524 fl = web.repo.file(f)
524 fl = web.repo.file(f)
525 numrevs = len(fl)
525 numrevs = len(fl)
526 if not numrevs: # file doesn't exist at all
526 if not numrevs: # file doesn't exist at all
527 raise
527 raise
528 rev = webutil.changectx(web.repo, req).rev()
528 rev = webutil.changectx(web.repo, req).rev()
529 first = fl.linkrev(fl.node(0))
529 first = fl.linkrev(0)
530 if rev < first: # current rev is from before file existed
530 if rev < first: # current rev is from before file existed
531 raise
531 raise
532 frev = numrevs - 1
532 frev = numrevs - 1
533 while fl.linkrev(fl.node(frev)) > rev:
533 while fl.linkrev(frev) > rev:
534 frev -= 1
534 frev -= 1
535 fctx = web.repo.filectx(f, fl.linkrev(fl.node(frev)))
535 fctx = web.repo.filectx(f, fl.linkrev(frev))
536
536
537 count = fctx.filerev() + 1
537 count = fctx.filerev() + 1
538 pagelen = web.maxshortchanges
538 pagelen = web.maxshortchanges
539 start = max(0, fctx.filerev() - pagelen + 1) # first rev on this page
539 start = max(0, fctx.filerev() - pagelen + 1) # first rev on this page
540 end = min(count, start + pagelen) # last rev on this page
540 end = min(count, start + pagelen) # last rev on this page
541 parity = paritygen(web.stripecount, offset=start-end)
541 parity = paritygen(web.stripecount, offset=start-end)
542
542
543 def entries(limit=0, **map):
543 def entries(limit=0, **map):
544 l = []
544 l = []
545
545
546 for i in xrange(start, end):
546 for i in xrange(start, end):
547 ctx = fctx.filectx(i)
547 ctx = fctx.filectx(i)
548
548
549 l.insert(0, {"parity": parity.next(),
549 l.insert(0, {"parity": parity.next(),
550 "filerev": i,
550 "filerev": i,
551 "file": f,
551 "file": f,
552 "node": hex(ctx.node()),
552 "node": hex(ctx.node()),
553 "author": ctx.user(),
553 "author": ctx.user(),
554 "date": ctx.date(),
554 "date": ctx.date(),
555 "rename": webutil.renamelink(fctx),
555 "rename": webutil.renamelink(fctx),
556 "parent": webutil.siblings(fctx.parents()),
556 "parent": webutil.siblings(fctx.parents()),
557 "child": webutil.siblings(fctx.children()),
557 "child": webutil.siblings(fctx.children()),
558 "desc": ctx.description()})
558 "desc": ctx.description()})
559
559
560 if limit > 0:
560 if limit > 0:
561 l = l[:limit]
561 l = l[:limit]
562
562
563 for e in l:
563 for e in l:
564 yield e
564 yield e
565
565
566 nodefunc = lambda x: fctx.filectx(fileid=x)
566 nodefunc = lambda x: fctx.filectx(fileid=x)
567 nav = webutil.revnavgen(end - 1, pagelen, count, nodefunc)
567 nav = webutil.revnavgen(end - 1, pagelen, count, nodefunc)
568 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
568 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
569 entries=lambda **x: entries(limit=0, **x),
569 entries=lambda **x: entries(limit=0, **x),
570 latestentry=lambda **x: entries(limit=1, **x))
570 latestentry=lambda **x: entries(limit=1, **x))
571
571
572
572
573 def archive(web, req, tmpl):
573 def archive(web, req, tmpl):
574 type_ = req.form.get('type', [None])[0]
574 type_ = req.form.get('type', [None])[0]
575 allowed = web.configlist("web", "allow_archive")
575 allowed = web.configlist("web", "allow_archive")
576 key = req.form['node'][0]
576 key = req.form['node'][0]
577
577
578 if type_ not in web.archives:
578 if type_ not in web.archives:
579 msg = 'Unsupported archive type: %s' % type_
579 msg = 'Unsupported archive type: %s' % type_
580 raise ErrorResponse(HTTP_NOT_FOUND, msg)
580 raise ErrorResponse(HTTP_NOT_FOUND, msg)
581
581
582 if not ((type_ in allowed or
582 if not ((type_ in allowed or
583 web.configbool("web", "allow" + type_, False))):
583 web.configbool("web", "allow" + type_, False))):
584 msg = 'Archive type not allowed: %s' % type_
584 msg = 'Archive type not allowed: %s' % type_
585 raise ErrorResponse(HTTP_FORBIDDEN, msg)
585 raise ErrorResponse(HTTP_FORBIDDEN, msg)
586
586
587 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
587 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
588 cnode = web.repo.lookup(key)
588 cnode = web.repo.lookup(key)
589 arch_version = key
589 arch_version = key
590 if cnode == key or key == 'tip':
590 if cnode == key or key == 'tip':
591 arch_version = short(cnode)
591 arch_version = short(cnode)
592 name = "%s-%s" % (reponame, arch_version)
592 name = "%s-%s" % (reponame, arch_version)
593 mimetype, artype, extension, encoding = web.archive_specs[type_]
593 mimetype, artype, extension, encoding = web.archive_specs[type_]
594 headers = [
594 headers = [
595 ('Content-Type', mimetype),
595 ('Content-Type', mimetype),
596 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
596 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
597 ]
597 ]
598 if encoding:
598 if encoding:
599 headers.append(('Content-Encoding', encoding))
599 headers.append(('Content-Encoding', encoding))
600 req.header(headers)
600 req.header(headers)
601 req.respond(HTTP_OK)
601 req.respond(HTTP_OK)
602 archival.archive(web.repo, req, cnode, artype, prefix=name)
602 archival.archive(web.repo, req, cnode, artype, prefix=name)
603 return []
603 return []
604
604
605
605
606 def static(web, req, tmpl):
606 def static(web, req, tmpl):
607 fname = req.form['file'][0]
607 fname = req.form['file'][0]
608 # a repo owner may set web.static in .hg/hgrc to get any file
608 # a repo owner may set web.static in .hg/hgrc to get any file
609 # readable by the user running the CGI script
609 # readable by the user running the CGI script
610 static = web.config("web", "static", None, untrusted=False)
610 static = web.config("web", "static", None, untrusted=False)
611 if not static:
611 if not static:
612 tp = web.templatepath
612 tp = web.templatepath
613 if isinstance(tp, str):
613 if isinstance(tp, str):
614 tp = [tp]
614 tp = [tp]
615 static = [os.path.join(p, 'static') for p in tp]
615 static = [os.path.join(p, 'static') for p in tp]
616 return [staticfile(static, fname, req)]
616 return [staticfile(static, fname, req)]
617
617
618 def graph(web, req, tmpl):
618 def graph(web, req, tmpl):
619 rev = webutil.changectx(web.repo, req).rev()
619 rev = webutil.changectx(web.repo, req).rev()
620 bg_height = 39
620 bg_height = 39
621
621
622 revcount = 25
622 revcount = 25
623 if 'revcount' in req.form:
623 if 'revcount' in req.form:
624 revcount = int(req.form.get('revcount', [revcount])[0])
624 revcount = int(req.form.get('revcount', [revcount])[0])
625 tmpl.defaults['sessionvars']['revcount'] = revcount
625 tmpl.defaults['sessionvars']['revcount'] = revcount
626
626
627 lessvars = copy.copy(tmpl.defaults['sessionvars'])
627 lessvars = copy.copy(tmpl.defaults['sessionvars'])
628 lessvars['revcount'] = revcount / 2
628 lessvars['revcount'] = revcount / 2
629 morevars = copy.copy(tmpl.defaults['sessionvars'])
629 morevars = copy.copy(tmpl.defaults['sessionvars'])
630 morevars['revcount'] = revcount * 2
630 morevars['revcount'] = revcount * 2
631
631
632 max_rev = len(web.repo) - 1
632 max_rev = len(web.repo) - 1
633 revcount = min(max_rev, revcount)
633 revcount = min(max_rev, revcount)
634 revnode = web.repo.changelog.node(rev)
634 revnode = web.repo.changelog.node(rev)
635 revnode_hex = hex(revnode)
635 revnode_hex = hex(revnode)
636 uprev = min(max_rev, rev + revcount)
636 uprev = min(max_rev, rev + revcount)
637 downrev = max(0, rev - revcount)
637 downrev = max(0, rev - revcount)
638 count = len(web.repo)
638 count = len(web.repo)
639 changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx)
639 changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx)
640
640
641 tree = list(graphmod.graph(web.repo, rev, downrev))
641 tree = list(graphmod.graph(web.repo, rev, downrev))
642 canvasheight = (len(tree) + 1) * bg_height - 27;
642 canvasheight = (len(tree) + 1) * bg_height - 27;
643 data = []
643 data = []
644 for i, (ctx, vtx, edges) in enumerate(tree):
644 for i, (ctx, vtx, edges) in enumerate(tree):
645 node = short(ctx.node())
645 node = short(ctx.node())
646 age = templatefilters.age(ctx.date())
646 age = templatefilters.age(ctx.date())
647 desc = templatefilters.firstline(ctx.description())
647 desc = templatefilters.firstline(ctx.description())
648 desc = cgi.escape(desc)
648 desc = cgi.escape(desc)
649 user = cgi.escape(templatefilters.person(ctx.user()))
649 user = cgi.escape(templatefilters.person(ctx.user()))
650 branch = ctx.branch()
650 branch = ctx.branch()
651 branch = branch, web.repo.branchtags().get(branch) == ctx.node()
651 branch = branch, web.repo.branchtags().get(branch) == ctx.node()
652 data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))
652 data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))
653
653
654 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
654 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
655 lessvars=lessvars, morevars=morevars, downrev=downrev,
655 lessvars=lessvars, morevars=morevars, downrev=downrev,
656 canvasheight=canvasheight, jsdata=data, bg_height=bg_height,
656 canvasheight=canvasheight, jsdata=data, bg_height=bg_height,
657 node=revnode_hex, changenav=changenav)
657 node=revnode_hex, changenav=changenav)
@@ -1,214 +1,214 b''
1 # hgweb/webutil.py - utility library for the web interface.
1 # hgweb/webutil.py - utility library for the web interface.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, copy
9 import os, copy
10 from mercurial import match, patch
10 from mercurial import match, patch
11 from mercurial.node import hex, nullid
11 from mercurial.node import hex, nullid
12 from mercurial.repo import RepoError
12 from mercurial.repo import RepoError
13 from mercurial import util
13 from mercurial import util
14
14
15 def up(p):
15 def up(p):
16 if p[0] != "/":
16 if p[0] != "/":
17 p = "/" + p
17 p = "/" + p
18 if p[-1] == "/":
18 if p[-1] == "/":
19 p = p[:-1]
19 p = p[:-1]
20 up = os.path.dirname(p)
20 up = os.path.dirname(p)
21 if up == "/":
21 if up == "/":
22 return "/"
22 return "/"
23 return up + "/"
23 return up + "/"
24
24
25 def revnavgen(pos, pagelen, limit, nodefunc):
25 def revnavgen(pos, pagelen, limit, nodefunc):
26 def seq(factor, limit=None):
26 def seq(factor, limit=None):
27 if limit:
27 if limit:
28 yield limit
28 yield limit
29 if limit >= 20 and limit <= 40:
29 if limit >= 20 and limit <= 40:
30 yield 50
30 yield 50
31 else:
31 else:
32 yield 1 * factor
32 yield 1 * factor
33 yield 3 * factor
33 yield 3 * factor
34 for f in seq(factor * 10):
34 for f in seq(factor * 10):
35 yield f
35 yield f
36
36
37 def nav(**map):
37 def nav(**map):
38 l = []
38 l = []
39 last = 0
39 last = 0
40 for f in seq(1, pagelen):
40 for f in seq(1, pagelen):
41 if f < pagelen or f <= last:
41 if f < pagelen or f <= last:
42 continue
42 continue
43 if f > limit:
43 if f > limit:
44 break
44 break
45 last = f
45 last = f
46 if pos + f < limit:
46 if pos + f < limit:
47 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
47 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
48 if pos - f >= 0:
48 if pos - f >= 0:
49 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
49 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
50
50
51 try:
51 try:
52 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
52 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
53
53
54 for label, node in l:
54 for label, node in l:
55 yield {"label": label, "node": node}
55 yield {"label": label, "node": node}
56
56
57 yield {"label": "tip", "node": "tip"}
57 yield {"label": "tip", "node": "tip"}
58 except RepoError:
58 except RepoError:
59 pass
59 pass
60
60
61 return nav
61 return nav
62
62
63 def siblings(siblings=[], hiderev=None, **args):
63 def siblings(siblings=[], hiderev=None, **args):
64 siblings = [s for s in siblings if s.node() != nullid]
64 siblings = [s for s in siblings if s.node() != nullid]
65 if len(siblings) == 1 and siblings[0].rev() == hiderev:
65 if len(siblings) == 1 and siblings[0].rev() == hiderev:
66 return
66 return
67 for s in siblings:
67 for s in siblings:
68 d = {'node': hex(s.node()), 'rev': s.rev()}
68 d = {'node': hex(s.node()), 'rev': s.rev()}
69 d['user'] = s.user()
69 d['user'] = s.user()
70 d['date'] = s.date()
70 d['date'] = s.date()
71 d['description'] = s.description()
71 d['description'] = s.description()
72 if hasattr(s, 'path'):
72 if hasattr(s, 'path'):
73 d['file'] = s.path()
73 d['file'] = s.path()
74 d.update(args)
74 d.update(args)
75 yield d
75 yield d
76
76
77 def renamelink(fctx):
77 def renamelink(fctx):
78 r = fctx.renamed()
78 r = fctx.renamed()
79 if r:
79 if r:
80 return [dict(file=r[0], node=hex(r[1]))]
80 return [dict(file=r[0], node=hex(r[1]))]
81 return []
81 return []
82
82
83 def nodetagsdict(repo, node):
83 def nodetagsdict(repo, node):
84 return [{"name": i} for i in repo.nodetags(node)]
84 return [{"name": i} for i in repo.nodetags(node)]
85
85
86 def nodebranchdict(repo, ctx):
86 def nodebranchdict(repo, ctx):
87 branches = []
87 branches = []
88 branch = ctx.branch()
88 branch = ctx.branch()
89 # If this is an empty repo, ctx.node() == nullid,
89 # If this is an empty repo, ctx.node() == nullid,
90 # ctx.branch() == 'default', but branchtags() is
90 # ctx.branch() == 'default', but branchtags() is
91 # an empty dict. Using dict.get avoids a traceback.
91 # an empty dict. Using dict.get avoids a traceback.
92 if repo.branchtags().get(branch) == ctx.node():
92 if repo.branchtags().get(branch) == ctx.node():
93 branches.append({"name": branch})
93 branches.append({"name": branch})
94 return branches
94 return branches
95
95
96 def nodeinbranch(repo, ctx):
96 def nodeinbranch(repo, ctx):
97 branches = []
97 branches = []
98 branch = ctx.branch()
98 branch = ctx.branch()
99 if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
99 if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
100 branches.append({"name": branch})
100 branches.append({"name": branch})
101 return branches
101 return branches
102
102
103 def nodebranchnodefault(ctx):
103 def nodebranchnodefault(ctx):
104 branches = []
104 branches = []
105 branch = ctx.branch()
105 branch = ctx.branch()
106 if branch != 'default':
106 if branch != 'default':
107 branches.append({"name": branch})
107 branches.append({"name": branch})
108 return branches
108 return branches
109
109
110 def showtag(repo, tmpl, t1, node=nullid, **args):
110 def showtag(repo, tmpl, t1, node=nullid, **args):
111 for t in repo.nodetags(node):
111 for t in repo.nodetags(node):
112 yield tmpl(t1, tag=t, **args)
112 yield tmpl(t1, tag=t, **args)
113
113
114 def cleanpath(repo, path):
114 def cleanpath(repo, path):
115 path = path.lstrip('/')
115 path = path.lstrip('/')
116 return util.canonpath(repo.root, '', path)
116 return util.canonpath(repo.root, '', path)
117
117
118 def changectx(repo, req):
118 def changectx(repo, req):
119 changeid = "tip"
119 changeid = "tip"
120 if 'node' in req.form:
120 if 'node' in req.form:
121 changeid = req.form['node'][0]
121 changeid = req.form['node'][0]
122 elif 'manifest' in req.form:
122 elif 'manifest' in req.form:
123 changeid = req.form['manifest'][0]
123 changeid = req.form['manifest'][0]
124
124
125 try:
125 try:
126 ctx = repo[changeid]
126 ctx = repo[changeid]
127 except RepoError:
127 except RepoError:
128 man = repo.manifest
128 man = repo.manifest
129 ctx = repo[man.linkrev(man.lookup(changeid))]
129 ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))]
130
130
131 return ctx
131 return ctx
132
132
133 def filectx(repo, req):
133 def filectx(repo, req):
134 path = cleanpath(repo, req.form['file'][0])
134 path = cleanpath(repo, req.form['file'][0])
135 if 'node' in req.form:
135 if 'node' in req.form:
136 changeid = req.form['node'][0]
136 changeid = req.form['node'][0]
137 else:
137 else:
138 changeid = req.form['filenode'][0]
138 changeid = req.form['filenode'][0]
139 try:
139 try:
140 fctx = repo[changeid][path]
140 fctx = repo[changeid][path]
141 except RepoError:
141 except RepoError:
142 fctx = repo.filectx(path, fileid=changeid)
142 fctx = repo.filectx(path, fileid=changeid)
143
143
144 return fctx
144 return fctx
145
145
146 def listfilediffs(tmpl, files, node, max):
146 def listfilediffs(tmpl, files, node, max):
147 for f in files[:max]:
147 for f in files[:max]:
148 yield tmpl('filedifflink', node=hex(node), file=f)
148 yield tmpl('filedifflink', node=hex(node), file=f)
149 if len(files) > max:
149 if len(files) > max:
150 yield tmpl('fileellipses')
150 yield tmpl('fileellipses')
151
151
152 def diffs(repo, tmpl, ctx, files, parity):
152 def diffs(repo, tmpl, ctx, files, parity):
153
153
154 def countgen():
154 def countgen():
155 start = 1
155 start = 1
156 while True:
156 while True:
157 yield start
157 yield start
158 start += 1
158 start += 1
159
159
160 blockcount = countgen()
160 blockcount = countgen()
161 def prettyprintlines(diff):
161 def prettyprintlines(diff):
162 blockno = blockcount.next()
162 blockno = blockcount.next()
163 for lineno, l in enumerate(diff.splitlines(True)):
163 for lineno, l in enumerate(diff.splitlines(True)):
164 lineno = "%d.%d" % (blockno, lineno + 1)
164 lineno = "%d.%d" % (blockno, lineno + 1)
165 if l.startswith('+'):
165 if l.startswith('+'):
166 ltype = "difflineplus"
166 ltype = "difflineplus"
167 elif l.startswith('-'):
167 elif l.startswith('-'):
168 ltype = "difflineminus"
168 ltype = "difflineminus"
169 elif l.startswith('@'):
169 elif l.startswith('@'):
170 ltype = "difflineat"
170 ltype = "difflineat"
171 else:
171 else:
172 ltype = "diffline"
172 ltype = "diffline"
173 yield tmpl(ltype,
173 yield tmpl(ltype,
174 line=l,
174 line=l,
175 lineid="l%s" % lineno,
175 lineid="l%s" % lineno,
176 linenumber="% 8s" % lineno)
176 linenumber="% 8s" % lineno)
177
177
178 if files:
178 if files:
179 m = match.exact(repo.root, repo.getcwd(), files)
179 m = match.exact(repo.root, repo.getcwd(), files)
180 else:
180 else:
181 m = match.always(repo.root, repo.getcwd())
181 m = match.always(repo.root, repo.getcwd())
182
182
183 diffopts = patch.diffopts(repo.ui, untrusted=True)
183 diffopts = patch.diffopts(repo.ui, untrusted=True)
184 parents = ctx.parents()
184 parents = ctx.parents()
185 node1 = parents and parents[0].node() or nullid
185 node1 = parents and parents[0].node() or nullid
186 node2 = ctx.node()
186 node2 = ctx.node()
187
187
188 block = []
188 block = []
189 for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
189 for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
190 if chunk.startswith('diff') and block:
190 if chunk.startswith('diff') and block:
191 yield tmpl('diffblock', parity=parity.next(),
191 yield tmpl('diffblock', parity=parity.next(),
192 lines=prettyprintlines(''.join(block)))
192 lines=prettyprintlines(''.join(block)))
193 block = []
193 block = []
194 if chunk.startswith('diff'):
194 if chunk.startswith('diff'):
195 chunk = ''.join(chunk.splitlines(True)[1:])
195 chunk = ''.join(chunk.splitlines(True)[1:])
196 block.append(chunk)
196 block.append(chunk)
197 yield tmpl('diffblock', parity=parity.next(),
197 yield tmpl('diffblock', parity=parity.next(),
198 lines=prettyprintlines(''.join(block)))
198 lines=prettyprintlines(''.join(block)))
199
199
200 class sessionvars(object):
200 class sessionvars(object):
201 def __init__(self, vars, start='?'):
201 def __init__(self, vars, start='?'):
202 self.start = start
202 self.start = start
203 self.vars = vars
203 self.vars = vars
204 def __getitem__(self, key):
204 def __getitem__(self, key):
205 return self.vars[key]
205 return self.vars[key]
206 def __setitem__(self, key, value):
206 def __setitem__(self, key, value):
207 self.vars[key] = value
207 self.vars[key] = value
208 def __copy__(self):
208 def __copy__(self):
209 return sessionvars(copy.copy(self.vars), self.start)
209 return sessionvars(copy.copy(self.vars), self.start)
210 def __iter__(self):
210 def __iter__(self):
211 separator = self.start
211 separator = self.start
212 for key, value in self.vars.iteritems():
212 for key, value in self.vars.iteritems():
213 yield {'name': key, 'value': str(value), 'separator': separator}
213 yield {'name': key, 'value': str(value), 'separator': separator}
214 separator = '&'
214 separator = '&'
@@ -1,2126 +1,2125 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui, store
12 import lock, transaction, stat, errno, ui, store
13 import os, revlog, time, util, extensions, hook, inspect
13 import os, revlog, time, util, extensions, hook, inspect
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = util.set(('lookup', 'changegroupsubset'))
18 capabilities = util.set(('lookup', 'changegroupsubset'))
19 supported = ('revlogv1', 'store', 'fncache')
19 supported = ('revlogv1', 'store', 'fncache')
20
20
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 self.root = os.path.realpath(path)
23 self.root = os.path.realpath(path)
24 self.path = os.path.join(self.root, ".hg")
24 self.path = os.path.join(self.root, ".hg")
25 self.origroot = path
25 self.origroot = path
26 self.opener = util.opener(self.path)
26 self.opener = util.opener(self.path)
27 self.wopener = util.opener(self.root)
27 self.wopener = util.opener(self.root)
28
28
29 if not os.path.isdir(self.path):
29 if not os.path.isdir(self.path):
30 if create:
30 if create:
31 if not os.path.exists(path):
31 if not os.path.exists(path):
32 os.mkdir(path)
32 os.mkdir(path)
33 os.mkdir(self.path)
33 os.mkdir(self.path)
34 requirements = ["revlogv1"]
34 requirements = ["revlogv1"]
35 if parentui.configbool('format', 'usestore', True):
35 if parentui.configbool('format', 'usestore', True):
36 os.mkdir(os.path.join(self.path, "store"))
36 os.mkdir(os.path.join(self.path, "store"))
37 requirements.append("store")
37 requirements.append("store")
38 if parentui.configbool('format', 'usefncache', True):
38 if parentui.configbool('format', 'usefncache', True):
39 requirements.append("fncache")
39 requirements.append("fncache")
40 # create an invalid changelog
40 # create an invalid changelog
41 self.opener("00changelog.i", "a").write(
41 self.opener("00changelog.i", "a").write(
42 '\0\0\0\2' # represents revlogv2
42 '\0\0\0\2' # represents revlogv2
43 ' dummy changelog to prevent using the old repo layout'
43 ' dummy changelog to prevent using the old repo layout'
44 )
44 )
45 reqfile = self.opener("requires", "w")
45 reqfile = self.opener("requires", "w")
46 for r in requirements:
46 for r in requirements:
47 reqfile.write("%s\n" % r)
47 reqfile.write("%s\n" % r)
48 reqfile.close()
48 reqfile.close()
49 else:
49 else:
50 raise repo.RepoError(_("repository %s not found") % path)
50 raise repo.RepoError(_("repository %s not found") % path)
51 elif create:
51 elif create:
52 raise repo.RepoError(_("repository %s already exists") % path)
52 raise repo.RepoError(_("repository %s already exists") % path)
53 else:
53 else:
54 # find requirements
54 # find requirements
55 requirements = []
55 requirements = []
56 try:
56 try:
57 requirements = self.opener("requires").read().splitlines()
57 requirements = self.opener("requires").read().splitlines()
58 for r in requirements:
58 for r in requirements:
59 if r not in self.supported:
59 if r not in self.supported:
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
61 except IOError, inst:
61 except IOError, inst:
62 if inst.errno != errno.ENOENT:
62 if inst.errno != errno.ENOENT:
63 raise
63 raise
64
64
65 self.store = store.store(requirements, self.path, util.opener)
65 self.store = store.store(requirements, self.path, util.opener)
66 self.spath = self.store.path
66 self.spath = self.store.path
67 self.sopener = self.store.opener
67 self.sopener = self.store.opener
68 self.sjoin = self.store.join
68 self.sjoin = self.store.join
69 self.opener.createmode = self.store.createmode
69 self.opener.createmode = self.store.createmode
70
70
71 self.ui = ui.ui(parentui=parentui)
71 self.ui = ui.ui(parentui=parentui)
72 try:
72 try:
73 self.ui.readconfig(self.join("hgrc"), self.root)
73 self.ui.readconfig(self.join("hgrc"), self.root)
74 extensions.loadall(self.ui)
74 extensions.loadall(self.ui)
75 except IOError:
75 except IOError:
76 pass
76 pass
77
77
78 self.tagscache = None
78 self.tagscache = None
79 self._tagstypecache = None
79 self._tagstypecache = None
80 self.branchcache = None
80 self.branchcache = None
81 self._ubranchcache = None # UTF-8 version of branchcache
81 self._ubranchcache = None # UTF-8 version of branchcache
82 self._branchcachetip = None
82 self._branchcachetip = None
83 self.nodetagscache = None
83 self.nodetagscache = None
84 self.filterpats = {}
84 self.filterpats = {}
85 self._datafilters = {}
85 self._datafilters = {}
86 self._transref = self._lockref = self._wlockref = None
86 self._transref = self._lockref = self._wlockref = None
87
87
88 def __getattr__(self, name):
88 def __getattr__(self, name):
89 if name == 'changelog':
89 if name == 'changelog':
90 self.changelog = changelog.changelog(self.sopener)
90 self.changelog = changelog.changelog(self.sopener)
91 self.sopener.defversion = self.changelog.version
91 self.sopener.defversion = self.changelog.version
92 return self.changelog
92 return self.changelog
93 if name == 'manifest':
93 if name == 'manifest':
94 self.changelog
94 self.changelog
95 self.manifest = manifest.manifest(self.sopener)
95 self.manifest = manifest.manifest(self.sopener)
96 return self.manifest
96 return self.manifest
97 if name == 'dirstate':
97 if name == 'dirstate':
98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 return self.dirstate
99 return self.dirstate
100 else:
100 else:
101 raise AttributeError(name)
101 raise AttributeError(name)
102
102
103 def __getitem__(self, changeid):
103 def __getitem__(self, changeid):
104 if changeid == None:
104 if changeid == None:
105 return context.workingctx(self)
105 return context.workingctx(self)
106 return context.changectx(self, changeid)
106 return context.changectx(self, changeid)
107
107
108 def __nonzero__(self):
108 def __nonzero__(self):
109 return True
109 return True
110
110
111 def __len__(self):
111 def __len__(self):
112 return len(self.changelog)
112 return len(self.changelog)
113
113
114 def __iter__(self):
114 def __iter__(self):
115 for i in xrange(len(self)):
115 for i in xrange(len(self)):
116 yield i
116 yield i
117
117
118 def url(self):
118 def url(self):
119 return 'file:' + self.root
119 return 'file:' + self.root
120
120
121 def hook(self, name, throw=False, **args):
121 def hook(self, name, throw=False, **args):
122 return hook.hook(self.ui, self, name, throw, **args)
122 return hook.hook(self.ui, self, name, throw, **args)
123
123
124 tag_disallowed = ':\r\n'
124 tag_disallowed = ':\r\n'
125
125
126 def _tag(self, names, node, message, local, user, date, parent=None,
126 def _tag(self, names, node, message, local, user, date, parent=None,
127 extra={}):
127 extra={}):
128 use_dirstate = parent is None
128 use_dirstate = parent is None
129
129
130 if isinstance(names, str):
130 if isinstance(names, str):
131 allchars = names
131 allchars = names
132 names = (names,)
132 names = (names,)
133 else:
133 else:
134 allchars = ''.join(names)
134 allchars = ''.join(names)
135 for c in self.tag_disallowed:
135 for c in self.tag_disallowed:
136 if c in allchars:
136 if c in allchars:
137 raise util.Abort(_('%r cannot be used in a tag name') % c)
137 raise util.Abort(_('%r cannot be used in a tag name') % c)
138
138
139 for name in names:
139 for name in names:
140 self.hook('pretag', throw=True, node=hex(node), tag=name,
140 self.hook('pretag', throw=True, node=hex(node), tag=name,
141 local=local)
141 local=local)
142
142
143 def writetags(fp, names, munge, prevtags):
143 def writetags(fp, names, munge, prevtags):
144 fp.seek(0, 2)
144 fp.seek(0, 2)
145 if prevtags and prevtags[-1] != '\n':
145 if prevtags and prevtags[-1] != '\n':
146 fp.write('\n')
146 fp.write('\n')
147 for name in names:
147 for name in names:
148 m = munge and munge(name) or name
148 m = munge and munge(name) or name
149 if self._tagstypecache and name in self._tagstypecache:
149 if self._tagstypecache and name in self._tagstypecache:
150 old = self.tagscache.get(name, nullid)
150 old = self.tagscache.get(name, nullid)
151 fp.write('%s %s\n' % (hex(old), m))
151 fp.write('%s %s\n' % (hex(old), m))
152 fp.write('%s %s\n' % (hex(node), m))
152 fp.write('%s %s\n' % (hex(node), m))
153 fp.close()
153 fp.close()
154
154
155 prevtags = ''
155 prevtags = ''
156 if local:
156 if local:
157 try:
157 try:
158 fp = self.opener('localtags', 'r+')
158 fp = self.opener('localtags', 'r+')
159 except IOError, err:
159 except IOError, err:
160 fp = self.opener('localtags', 'a')
160 fp = self.opener('localtags', 'a')
161 else:
161 else:
162 prevtags = fp.read()
162 prevtags = fp.read()
163
163
164 # local tags are stored in the current charset
164 # local tags are stored in the current charset
165 writetags(fp, names, None, prevtags)
165 writetags(fp, names, None, prevtags)
166 for name in names:
166 for name in names:
167 self.hook('tag', node=hex(node), tag=name, local=local)
167 self.hook('tag', node=hex(node), tag=name, local=local)
168 return
168 return
169
169
170 if use_dirstate:
170 if use_dirstate:
171 try:
171 try:
172 fp = self.wfile('.hgtags', 'rb+')
172 fp = self.wfile('.hgtags', 'rb+')
173 except IOError, err:
173 except IOError, err:
174 fp = self.wfile('.hgtags', 'ab')
174 fp = self.wfile('.hgtags', 'ab')
175 else:
175 else:
176 prevtags = fp.read()
176 prevtags = fp.read()
177 else:
177 else:
178 try:
178 try:
179 prevtags = self.filectx('.hgtags', parent).data()
179 prevtags = self.filectx('.hgtags', parent).data()
180 except revlog.LookupError:
180 except revlog.LookupError:
181 pass
181 pass
182 fp = self.wfile('.hgtags', 'wb')
182 fp = self.wfile('.hgtags', 'wb')
183 if prevtags:
183 if prevtags:
184 fp.write(prevtags)
184 fp.write(prevtags)
185
185
186 # committed tags are stored in UTF-8
186 # committed tags are stored in UTF-8
187 writetags(fp, names, util.fromlocal, prevtags)
187 writetags(fp, names, util.fromlocal, prevtags)
188
188
189 if use_dirstate and '.hgtags' not in self.dirstate:
189 if use_dirstate and '.hgtags' not in self.dirstate:
190 self.add(['.hgtags'])
190 self.add(['.hgtags'])
191
191
192 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
192 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
193 extra=extra)
193 extra=extra)
194
194
195 for name in names:
195 for name in names:
196 self.hook('tag', node=hex(node), tag=name, local=local)
196 self.hook('tag', node=hex(node), tag=name, local=local)
197
197
198 return tagnode
198 return tagnode
199
199
200 def tag(self, names, node, message, local, user, date):
200 def tag(self, names, node, message, local, user, date):
201 '''tag a revision with one or more symbolic names.
201 '''tag a revision with one or more symbolic names.
202
202
203 names is a list of strings or, when adding a single tag, names may be a
203 names is a list of strings or, when adding a single tag, names may be a
204 string.
204 string.
205
205
206 if local is True, the tags are stored in a per-repository file.
206 if local is True, the tags are stored in a per-repository file.
207 otherwise, they are stored in the .hgtags file, and a new
207 otherwise, they are stored in the .hgtags file, and a new
208 changeset is committed with the change.
208 changeset is committed with the change.
209
209
210 keyword arguments:
210 keyword arguments:
211
211
212 local: whether to store tags in non-version-controlled file
212 local: whether to store tags in non-version-controlled file
213 (default False)
213 (default False)
214
214
215 message: commit message to use if committing
215 message: commit message to use if committing
216
216
217 user: name of user to use if committing
217 user: name of user to use if committing
218
218
219 date: date tuple to use if committing'''
219 date: date tuple to use if committing'''
220
220
221 for x in self.status()[:5]:
221 for x in self.status()[:5]:
222 if '.hgtags' in x:
222 if '.hgtags' in x:
223 raise util.Abort(_('working copy of .hgtags is changed '
223 raise util.Abort(_('working copy of .hgtags is changed '
224 '(please commit .hgtags manually)'))
224 '(please commit .hgtags manually)'))
225
225
226 self._tag(names, node, message, local, user, date)
226 self._tag(names, node, message, local, user, date)
227
227
228 def tags(self):
228 def tags(self):
229 '''return a mapping of tag to node'''
229 '''return a mapping of tag to node'''
230 if self.tagscache:
230 if self.tagscache:
231 return self.tagscache
231 return self.tagscache
232
232
233 globaltags = {}
233 globaltags = {}
234 tagtypes = {}
234 tagtypes = {}
235
235
236 def readtags(lines, fn, tagtype):
236 def readtags(lines, fn, tagtype):
237 filetags = {}
237 filetags = {}
238 count = 0
238 count = 0
239
239
240 def warn(msg):
240 def warn(msg):
241 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
241 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
242
242
243 for l in lines:
243 for l in lines:
244 count += 1
244 count += 1
245 if not l:
245 if not l:
246 continue
246 continue
247 s = l.split(" ", 1)
247 s = l.split(" ", 1)
248 if len(s) != 2:
248 if len(s) != 2:
249 warn(_("cannot parse entry"))
249 warn(_("cannot parse entry"))
250 continue
250 continue
251 node, key = s
251 node, key = s
252 key = util.tolocal(key.strip()) # stored in UTF-8
252 key = util.tolocal(key.strip()) # stored in UTF-8
253 try:
253 try:
254 bin_n = bin(node)
254 bin_n = bin(node)
255 except TypeError:
255 except TypeError:
256 warn(_("node '%s' is not well formed") % node)
256 warn(_("node '%s' is not well formed") % node)
257 continue
257 continue
258 if bin_n not in self.changelog.nodemap:
258 if bin_n not in self.changelog.nodemap:
259 warn(_("tag '%s' refers to unknown node") % key)
259 warn(_("tag '%s' refers to unknown node") % key)
260 continue
260 continue
261
261
262 h = []
262 h = []
263 if key in filetags:
263 if key in filetags:
264 n, h = filetags[key]
264 n, h = filetags[key]
265 h.append(n)
265 h.append(n)
266 filetags[key] = (bin_n, h)
266 filetags[key] = (bin_n, h)
267
267
268 for k, nh in filetags.items():
268 for k, nh in filetags.items():
269 if k not in globaltags:
269 if k not in globaltags:
270 globaltags[k] = nh
270 globaltags[k] = nh
271 tagtypes[k] = tagtype
271 tagtypes[k] = tagtype
272 continue
272 continue
273
273
274 # we prefer the global tag if:
274 # we prefer the global tag if:
275 # it supercedes us OR
275 # it supercedes us OR
276 # mutual supercedes and it has a higher rank
276 # mutual supercedes and it has a higher rank
277 # otherwise we win because we're tip-most
277 # otherwise we win because we're tip-most
278 an, ah = nh
278 an, ah = nh
279 bn, bh = globaltags[k]
279 bn, bh = globaltags[k]
280 if (bn != an and an in bh and
280 if (bn != an and an in bh and
281 (bn not in ah or len(bh) > len(ah))):
281 (bn not in ah or len(bh) > len(ah))):
282 an = bn
282 an = bn
283 ah.extend([n for n in bh if n not in ah])
283 ah.extend([n for n in bh if n not in ah])
284 globaltags[k] = an, ah
284 globaltags[k] = an, ah
285 tagtypes[k] = tagtype
285 tagtypes[k] = tagtype
286
286
287 # read the tags file from each head, ending with the tip
287 # read the tags file from each head, ending with the tip
288 f = None
288 f = None
289 for rev, node, fnode in self._hgtagsnodes():
289 for rev, node, fnode in self._hgtagsnodes():
290 f = (f and f.filectx(fnode) or
290 f = (f and f.filectx(fnode) or
291 self.filectx('.hgtags', fileid=fnode))
291 self.filectx('.hgtags', fileid=fnode))
292 readtags(f.data().splitlines(), f, "global")
292 readtags(f.data().splitlines(), f, "global")
293
293
294 try:
294 try:
295 data = util.fromlocal(self.opener("localtags").read())
295 data = util.fromlocal(self.opener("localtags").read())
296 # localtags are stored in the local character set
296 # localtags are stored in the local character set
297 # while the internal tag table is stored in UTF-8
297 # while the internal tag table is stored in UTF-8
298 readtags(data.splitlines(), "localtags", "local")
298 readtags(data.splitlines(), "localtags", "local")
299 except IOError:
299 except IOError:
300 pass
300 pass
301
301
302 self.tagscache = {}
302 self.tagscache = {}
303 self._tagstypecache = {}
303 self._tagstypecache = {}
304 for k,nh in globaltags.items():
304 for k,nh in globaltags.items():
305 n = nh[0]
305 n = nh[0]
306 if n != nullid:
306 if n != nullid:
307 self.tagscache[k] = n
307 self.tagscache[k] = n
308 self._tagstypecache[k] = tagtypes[k]
308 self._tagstypecache[k] = tagtypes[k]
309 self.tagscache['tip'] = self.changelog.tip()
309 self.tagscache['tip'] = self.changelog.tip()
310 return self.tagscache
310 return self.tagscache
311
311
312 def tagtype(self, tagname):
312 def tagtype(self, tagname):
313 '''
313 '''
314 return the type of the given tag. result can be:
314 return the type of the given tag. result can be:
315
315
316 'local' : a local tag
316 'local' : a local tag
317 'global' : a global tag
317 'global' : a global tag
318 None : tag does not exist
318 None : tag does not exist
319 '''
319 '''
320
320
321 self.tags()
321 self.tags()
322
322
323 return self._tagstypecache.get(tagname)
323 return self._tagstypecache.get(tagname)
324
324
325 def _hgtagsnodes(self):
325 def _hgtagsnodes(self):
326 heads = self.heads()
326 heads = self.heads()
327 heads.reverse()
327 heads.reverse()
328 last = {}
328 last = {}
329 ret = []
329 ret = []
330 for node in heads:
330 for node in heads:
331 c = self[node]
331 c = self[node]
332 rev = c.rev()
332 rev = c.rev()
333 try:
333 try:
334 fnode = c.filenode('.hgtags')
334 fnode = c.filenode('.hgtags')
335 except revlog.LookupError:
335 except revlog.LookupError:
336 continue
336 continue
337 ret.append((rev, node, fnode))
337 ret.append((rev, node, fnode))
338 if fnode in last:
338 if fnode in last:
339 ret[last[fnode]] = None
339 ret[last[fnode]] = None
340 last[fnode] = len(ret) - 1
340 last[fnode] = len(ret) - 1
341 return [item for item in ret if item]
341 return [item for item in ret if item]
342
342
343 def tagslist(self):
343 def tagslist(self):
344 '''return a list of tags ordered by revision'''
344 '''return a list of tags ordered by revision'''
345 l = []
345 l = []
346 for t, n in self.tags().items():
346 for t, n in self.tags().items():
347 try:
347 try:
348 r = self.changelog.rev(n)
348 r = self.changelog.rev(n)
349 except:
349 except:
350 r = -2 # sort to the beginning of the list if unknown
350 r = -2 # sort to the beginning of the list if unknown
351 l.append((r, t, n))
351 l.append((r, t, n))
352 return [(t, n) for r, t, n in util.sort(l)]
352 return [(t, n) for r, t, n in util.sort(l)]
353
353
354 def nodetags(self, node):
354 def nodetags(self, node):
355 '''return the tags associated with a node'''
355 '''return the tags associated with a node'''
356 if not self.nodetagscache:
356 if not self.nodetagscache:
357 self.nodetagscache = {}
357 self.nodetagscache = {}
358 for t, n in self.tags().items():
358 for t, n in self.tags().items():
359 self.nodetagscache.setdefault(n, []).append(t)
359 self.nodetagscache.setdefault(n, []).append(t)
360 return self.nodetagscache.get(node, [])
360 return self.nodetagscache.get(node, [])
361
361
362 def _branchtags(self, partial, lrev):
362 def _branchtags(self, partial, lrev):
363 tiprev = len(self) - 1
363 tiprev = len(self) - 1
364 if lrev != tiprev:
364 if lrev != tiprev:
365 self._updatebranchcache(partial, lrev+1, tiprev+1)
365 self._updatebranchcache(partial, lrev+1, tiprev+1)
366 self._writebranchcache(partial, self.changelog.tip(), tiprev)
366 self._writebranchcache(partial, self.changelog.tip(), tiprev)
367
367
368 return partial
368 return partial
369
369
370 def branchtags(self):
370 def branchtags(self):
371 tip = self.changelog.tip()
371 tip = self.changelog.tip()
372 if self.branchcache is not None and self._branchcachetip == tip:
372 if self.branchcache is not None and self._branchcachetip == tip:
373 return self.branchcache
373 return self.branchcache
374
374
375 oldtip = self._branchcachetip
375 oldtip = self._branchcachetip
376 self._branchcachetip = tip
376 self._branchcachetip = tip
377 if self.branchcache is None:
377 if self.branchcache is None:
378 self.branchcache = {} # avoid recursion in changectx
378 self.branchcache = {} # avoid recursion in changectx
379 else:
379 else:
380 self.branchcache.clear() # keep using the same dict
380 self.branchcache.clear() # keep using the same dict
381 if oldtip is None or oldtip not in self.changelog.nodemap:
381 if oldtip is None or oldtip not in self.changelog.nodemap:
382 partial, last, lrev = self._readbranchcache()
382 partial, last, lrev = self._readbranchcache()
383 else:
383 else:
384 lrev = self.changelog.rev(oldtip)
384 lrev = self.changelog.rev(oldtip)
385 partial = self._ubranchcache
385 partial = self._ubranchcache
386
386
387 self._branchtags(partial, lrev)
387 self._branchtags(partial, lrev)
388
388
389 # the branch cache is stored on disk as UTF-8, but in the local
389 # the branch cache is stored on disk as UTF-8, but in the local
390 # charset internally
390 # charset internally
391 for k, v in partial.items():
391 for k, v in partial.items():
392 self.branchcache[util.tolocal(k)] = v
392 self.branchcache[util.tolocal(k)] = v
393 self._ubranchcache = partial
393 self._ubranchcache = partial
394 return self.branchcache
394 return self.branchcache
395
395
396 def _readbranchcache(self):
396 def _readbranchcache(self):
397 partial = {}
397 partial = {}
398 try:
398 try:
399 f = self.opener("branch.cache")
399 f = self.opener("branch.cache")
400 lines = f.read().split('\n')
400 lines = f.read().split('\n')
401 f.close()
401 f.close()
402 except (IOError, OSError):
402 except (IOError, OSError):
403 return {}, nullid, nullrev
403 return {}, nullid, nullrev
404
404
405 try:
405 try:
406 last, lrev = lines.pop(0).split(" ", 1)
406 last, lrev = lines.pop(0).split(" ", 1)
407 last, lrev = bin(last), int(lrev)
407 last, lrev = bin(last), int(lrev)
408 if lrev >= len(self) or self[lrev].node() != last:
408 if lrev >= len(self) or self[lrev].node() != last:
409 # invalidate the cache
409 # invalidate the cache
410 raise ValueError('invalidating branch cache (tip differs)')
410 raise ValueError('invalidating branch cache (tip differs)')
411 for l in lines:
411 for l in lines:
412 if not l: continue
412 if not l: continue
413 node, label = l.split(" ", 1)
413 node, label = l.split(" ", 1)
414 partial[label.strip()] = bin(node)
414 partial[label.strip()] = bin(node)
415 except (KeyboardInterrupt, util.SignalInterrupt):
415 except (KeyboardInterrupt, util.SignalInterrupt):
416 raise
416 raise
417 except Exception, inst:
417 except Exception, inst:
418 if self.ui.debugflag:
418 if self.ui.debugflag:
419 self.ui.warn(str(inst), '\n')
419 self.ui.warn(str(inst), '\n')
420 partial, last, lrev = {}, nullid, nullrev
420 partial, last, lrev = {}, nullid, nullrev
421 return partial, last, lrev
421 return partial, last, lrev
422
422
423 def _writebranchcache(self, branches, tip, tiprev):
423 def _writebranchcache(self, branches, tip, tiprev):
424 try:
424 try:
425 f = self.opener("branch.cache", "w", atomictemp=True)
425 f = self.opener("branch.cache", "w", atomictemp=True)
426 f.write("%s %s\n" % (hex(tip), tiprev))
426 f.write("%s %s\n" % (hex(tip), tiprev))
427 for label, node in branches.iteritems():
427 for label, node in branches.iteritems():
428 f.write("%s %s\n" % (hex(node), label))
428 f.write("%s %s\n" % (hex(node), label))
429 f.rename()
429 f.rename()
430 except (IOError, OSError):
430 except (IOError, OSError):
431 pass
431 pass
432
432
433 def _updatebranchcache(self, partial, start, end):
433 def _updatebranchcache(self, partial, start, end):
434 for r in xrange(start, end):
434 for r in xrange(start, end):
435 c = self[r]
435 c = self[r]
436 b = c.branch()
436 b = c.branch()
437 partial[b] = c.node()
437 partial[b] = c.node()
438
438
439 def lookup(self, key):
439 def lookup(self, key):
440 if key == '.':
440 if key == '.':
441 return self.dirstate.parents()[0]
441 return self.dirstate.parents()[0]
442 elif key == 'null':
442 elif key == 'null':
443 return nullid
443 return nullid
444 n = self.changelog._match(key)
444 n = self.changelog._match(key)
445 if n:
445 if n:
446 return n
446 return n
447 if key in self.tags():
447 if key in self.tags():
448 return self.tags()[key]
448 return self.tags()[key]
449 if key in self.branchtags():
449 if key in self.branchtags():
450 return self.branchtags()[key]
450 return self.branchtags()[key]
451 n = self.changelog._partialmatch(key)
451 n = self.changelog._partialmatch(key)
452 if n:
452 if n:
453 return n
453 return n
454 try:
454 try:
455 if len(key) == 20:
455 if len(key) == 20:
456 key = hex(key)
456 key = hex(key)
457 except:
457 except:
458 pass
458 pass
459 raise repo.RepoError(_("unknown revision '%s'") % key)
459 raise repo.RepoError(_("unknown revision '%s'") % key)
460
460
461 def local(self):
461 def local(self):
462 return True
462 return True
463
463
464 def join(self, f):
464 def join(self, f):
465 return os.path.join(self.path, f)
465 return os.path.join(self.path, f)
466
466
467 def wjoin(self, f):
467 def wjoin(self, f):
468 return os.path.join(self.root, f)
468 return os.path.join(self.root, f)
469
469
470 def rjoin(self, f):
470 def rjoin(self, f):
471 return os.path.join(self.root, util.pconvert(f))
471 return os.path.join(self.root, util.pconvert(f))
472
472
473 def file(self, f):
473 def file(self, f):
474 if f[0] == '/':
474 if f[0] == '/':
475 f = f[1:]
475 f = f[1:]
476 return filelog.filelog(self.sopener, f)
476 return filelog.filelog(self.sopener, f)
477
477
478 def changectx(self, changeid):
478 def changectx(self, changeid):
479 return self[changeid]
479 return self[changeid]
480
480
481 def parents(self, changeid=None):
481 def parents(self, changeid=None):
482 '''get list of changectxs for parents of changeid'''
482 '''get list of changectxs for parents of changeid'''
483 return self[changeid].parents()
483 return self[changeid].parents()
484
484
485 def filectx(self, path, changeid=None, fileid=None):
485 def filectx(self, path, changeid=None, fileid=None):
486 """changeid can be a changeset revision, node, or tag.
486 """changeid can be a changeset revision, node, or tag.
487 fileid can be a file revision or node."""
487 fileid can be a file revision or node."""
488 return context.filectx(self, path, changeid, fileid)
488 return context.filectx(self, path, changeid, fileid)
489
489
490 def getcwd(self):
490 def getcwd(self):
491 return self.dirstate.getcwd()
491 return self.dirstate.getcwd()
492
492
493 def pathto(self, f, cwd=None):
493 def pathto(self, f, cwd=None):
494 return self.dirstate.pathto(f, cwd)
494 return self.dirstate.pathto(f, cwd)
495
495
496 def wfile(self, f, mode='r'):
496 def wfile(self, f, mode='r'):
497 return self.wopener(f, mode)
497 return self.wopener(f, mode)
498
498
499 def _link(self, f):
499 def _link(self, f):
500 return os.path.islink(self.wjoin(f))
500 return os.path.islink(self.wjoin(f))
501
501
502 def _filter(self, filter, filename, data):
502 def _filter(self, filter, filename, data):
503 if filter not in self.filterpats:
503 if filter not in self.filterpats:
504 l = []
504 l = []
505 for pat, cmd in self.ui.configitems(filter):
505 for pat, cmd in self.ui.configitems(filter):
506 if cmd == '!':
506 if cmd == '!':
507 continue
507 continue
508 mf = util.matcher(self.root, "", [pat], [], [])[1]
508 mf = util.matcher(self.root, "", [pat], [], [])[1]
509 fn = None
509 fn = None
510 params = cmd
510 params = cmd
511 for name, filterfn in self._datafilters.iteritems():
511 for name, filterfn in self._datafilters.iteritems():
512 if cmd.startswith(name):
512 if cmd.startswith(name):
513 fn = filterfn
513 fn = filterfn
514 params = cmd[len(name):].lstrip()
514 params = cmd[len(name):].lstrip()
515 break
515 break
516 if not fn:
516 if not fn:
517 fn = lambda s, c, **kwargs: util.filter(s, c)
517 fn = lambda s, c, **kwargs: util.filter(s, c)
518 # Wrap old filters not supporting keyword arguments
518 # Wrap old filters not supporting keyword arguments
519 if not inspect.getargspec(fn)[2]:
519 if not inspect.getargspec(fn)[2]:
520 oldfn = fn
520 oldfn = fn
521 fn = lambda s, c, **kwargs: oldfn(s, c)
521 fn = lambda s, c, **kwargs: oldfn(s, c)
522 l.append((mf, fn, params))
522 l.append((mf, fn, params))
523 self.filterpats[filter] = l
523 self.filterpats[filter] = l
524
524
525 for mf, fn, cmd in self.filterpats[filter]:
525 for mf, fn, cmd in self.filterpats[filter]:
526 if mf(filename):
526 if mf(filename):
527 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
527 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
528 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
528 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
529 break
529 break
530
530
531 return data
531 return data
532
532
533 def adddatafilter(self, name, filter):
533 def adddatafilter(self, name, filter):
534 self._datafilters[name] = filter
534 self._datafilters[name] = filter
535
535
536 def wread(self, filename):
536 def wread(self, filename):
537 if self._link(filename):
537 if self._link(filename):
538 data = os.readlink(self.wjoin(filename))
538 data = os.readlink(self.wjoin(filename))
539 else:
539 else:
540 data = self.wopener(filename, 'r').read()
540 data = self.wopener(filename, 'r').read()
541 return self._filter("encode", filename, data)
541 return self._filter("encode", filename, data)
542
542
543 def wwrite(self, filename, data, flags):
543 def wwrite(self, filename, data, flags):
544 data = self._filter("decode", filename, data)
544 data = self._filter("decode", filename, data)
545 try:
545 try:
546 os.unlink(self.wjoin(filename))
546 os.unlink(self.wjoin(filename))
547 except OSError:
547 except OSError:
548 pass
548 pass
549 if 'l' in flags:
549 if 'l' in flags:
550 self.wopener.symlink(data, filename)
550 self.wopener.symlink(data, filename)
551 else:
551 else:
552 self.wopener(filename, 'w').write(data)
552 self.wopener(filename, 'w').write(data)
553 if 'x' in flags:
553 if 'x' in flags:
554 util.set_flags(self.wjoin(filename), False, True)
554 util.set_flags(self.wjoin(filename), False, True)
555
555
556 def wwritedata(self, filename, data):
556 def wwritedata(self, filename, data):
557 return self._filter("decode", filename, data)
557 return self._filter("decode", filename, data)
558
558
559 def transaction(self):
559 def transaction(self):
560 if self._transref and self._transref():
560 if self._transref and self._transref():
561 return self._transref().nest()
561 return self._transref().nest()
562
562
563 # abort here if the journal already exists
563 # abort here if the journal already exists
564 if os.path.exists(self.sjoin("journal")):
564 if os.path.exists(self.sjoin("journal")):
565 raise repo.RepoError(_("journal already exists - run hg recover"))
565 raise repo.RepoError(_("journal already exists - run hg recover"))
566
566
567 # save dirstate for rollback
567 # save dirstate for rollback
568 try:
568 try:
569 ds = self.opener("dirstate").read()
569 ds = self.opener("dirstate").read()
570 except IOError:
570 except IOError:
571 ds = ""
571 ds = ""
572 self.opener("journal.dirstate", "w").write(ds)
572 self.opener("journal.dirstate", "w").write(ds)
573 self.opener("journal.branch", "w").write(self.dirstate.branch())
573 self.opener("journal.branch", "w").write(self.dirstate.branch())
574
574
575 renames = [(self.sjoin("journal"), self.sjoin("undo")),
575 renames = [(self.sjoin("journal"), self.sjoin("undo")),
576 (self.join("journal.dirstate"), self.join("undo.dirstate")),
576 (self.join("journal.dirstate"), self.join("undo.dirstate")),
577 (self.join("journal.branch"), self.join("undo.branch"))]
577 (self.join("journal.branch"), self.join("undo.branch"))]
578 tr = transaction.transaction(self.ui.warn, self.sopener,
578 tr = transaction.transaction(self.ui.warn, self.sopener,
579 self.sjoin("journal"),
579 self.sjoin("journal"),
580 aftertrans(renames),
580 aftertrans(renames),
581 self.store.createmode)
581 self.store.createmode)
582 self._transref = weakref.ref(tr)
582 self._transref = weakref.ref(tr)
583 return tr
583 return tr
584
584
585 def recover(self):
585 def recover(self):
586 l = self.lock()
586 l = self.lock()
587 try:
587 try:
588 if os.path.exists(self.sjoin("journal")):
588 if os.path.exists(self.sjoin("journal")):
589 self.ui.status(_("rolling back interrupted transaction\n"))
589 self.ui.status(_("rolling back interrupted transaction\n"))
590 transaction.rollback(self.sopener, self.sjoin("journal"))
590 transaction.rollback(self.sopener, self.sjoin("journal"))
591 self.invalidate()
591 self.invalidate()
592 return True
592 return True
593 else:
593 else:
594 self.ui.warn(_("no interrupted transaction available\n"))
594 self.ui.warn(_("no interrupted transaction available\n"))
595 return False
595 return False
596 finally:
596 finally:
597 del l
597 del l
598
598
599 def rollback(self):
599 def rollback(self):
600 wlock = lock = None
600 wlock = lock = None
601 try:
601 try:
602 wlock = self.wlock()
602 wlock = self.wlock()
603 lock = self.lock()
603 lock = self.lock()
604 if os.path.exists(self.sjoin("undo")):
604 if os.path.exists(self.sjoin("undo")):
605 self.ui.status(_("rolling back last transaction\n"))
605 self.ui.status(_("rolling back last transaction\n"))
606 transaction.rollback(self.sopener, self.sjoin("undo"))
606 transaction.rollback(self.sopener, self.sjoin("undo"))
607 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
607 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
608 try:
608 try:
609 branch = self.opener("undo.branch").read()
609 branch = self.opener("undo.branch").read()
610 self.dirstate.setbranch(branch)
610 self.dirstate.setbranch(branch)
611 except IOError:
611 except IOError:
612 self.ui.warn(_("Named branch could not be reset, "
612 self.ui.warn(_("Named branch could not be reset, "
613 "current branch still is: %s\n")
613 "current branch still is: %s\n")
614 % util.tolocal(self.dirstate.branch()))
614 % util.tolocal(self.dirstate.branch()))
615 self.invalidate()
615 self.invalidate()
616 self.dirstate.invalidate()
616 self.dirstate.invalidate()
617 else:
617 else:
618 self.ui.warn(_("no rollback information available\n"))
618 self.ui.warn(_("no rollback information available\n"))
619 finally:
619 finally:
620 del lock, wlock
620 del lock, wlock
621
621
622 def invalidate(self):
622 def invalidate(self):
623 for a in "changelog manifest".split():
623 for a in "changelog manifest".split():
624 if a in self.__dict__:
624 if a in self.__dict__:
625 delattr(self, a)
625 delattr(self, a)
626 self.tagscache = None
626 self.tagscache = None
627 self._tagstypecache = None
627 self._tagstypecache = None
628 self.nodetagscache = None
628 self.nodetagscache = None
629 self.branchcache = None
629 self.branchcache = None
630 self._ubranchcache = None
630 self._ubranchcache = None
631 self._branchcachetip = None
631 self._branchcachetip = None
632
632
633 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
633 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
634 try:
634 try:
635 l = lock.lock(lockname, 0, releasefn, desc=desc)
635 l = lock.lock(lockname, 0, releasefn, desc=desc)
636 except lock.LockHeld, inst:
636 except lock.LockHeld, inst:
637 if not wait:
637 if not wait:
638 raise
638 raise
639 self.ui.warn(_("waiting for lock on %s held by %r\n") %
639 self.ui.warn(_("waiting for lock on %s held by %r\n") %
640 (desc, inst.locker))
640 (desc, inst.locker))
641 # default to 600 seconds timeout
641 # default to 600 seconds timeout
642 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
642 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
643 releasefn, desc=desc)
643 releasefn, desc=desc)
644 if acquirefn:
644 if acquirefn:
645 acquirefn()
645 acquirefn()
646 return l
646 return l
647
647
648 def lock(self, wait=True):
648 def lock(self, wait=True):
649 if self._lockref and self._lockref():
649 if self._lockref and self._lockref():
650 return self._lockref()
650 return self._lockref()
651
651
652 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
652 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
653 _('repository %s') % self.origroot)
653 _('repository %s') % self.origroot)
654 self._lockref = weakref.ref(l)
654 self._lockref = weakref.ref(l)
655 return l
655 return l
656
656
657 def wlock(self, wait=True):
657 def wlock(self, wait=True):
658 if self._wlockref and self._wlockref():
658 if self._wlockref and self._wlockref():
659 return self._wlockref()
659 return self._wlockref()
660
660
661 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
661 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
662 self.dirstate.invalidate, _('working directory of %s') %
662 self.dirstate.invalidate, _('working directory of %s') %
663 self.origroot)
663 self.origroot)
664 self._wlockref = weakref.ref(l)
664 self._wlockref = weakref.ref(l)
665 return l
665 return l
666
666
667 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
667 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
668 """
668 """
669 commit an individual file as part of a larger transaction
669 commit an individual file as part of a larger transaction
670 """
670 """
671
671
672 fn = fctx.path()
672 fn = fctx.path()
673 t = fctx.data()
673 t = fctx.data()
674 fl = self.file(fn)
674 fl = self.file(fn)
675 fp1 = manifest1.get(fn, nullid)
675 fp1 = manifest1.get(fn, nullid)
676 fp2 = manifest2.get(fn, nullid)
676 fp2 = manifest2.get(fn, nullid)
677
677
678 meta = {}
678 meta = {}
679 cp = fctx.renamed()
679 cp = fctx.renamed()
680 if cp and cp[0] != fn:
680 if cp and cp[0] != fn:
681 # Mark the new revision of this file as a copy of another
681 # Mark the new revision of this file as a copy of another
682 # file. This copy data will effectively act as a parent
682 # file. This copy data will effectively act as a parent
683 # of this new revision. If this is a merge, the first
683 # of this new revision. If this is a merge, the first
684 # parent will be the nullid (meaning "look up the copy data")
684 # parent will be the nullid (meaning "look up the copy data")
685 # and the second one will be the other parent. For example:
685 # and the second one will be the other parent. For example:
686 #
686 #
687 # 0 --- 1 --- 3 rev1 changes file foo
687 # 0 --- 1 --- 3 rev1 changes file foo
688 # \ / rev2 renames foo to bar and changes it
688 # \ / rev2 renames foo to bar and changes it
689 # \- 2 -/ rev3 should have bar with all changes and
689 # \- 2 -/ rev3 should have bar with all changes and
690 # should record that bar descends from
690 # should record that bar descends from
691 # bar in rev2 and foo in rev1
691 # bar in rev2 and foo in rev1
692 #
692 #
693 # this allows this merge to succeed:
693 # this allows this merge to succeed:
694 #
694 #
695 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
695 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
696 # \ / merging rev3 and rev4 should use bar@rev2
696 # \ / merging rev3 and rev4 should use bar@rev2
697 # \- 2 --- 4 as the merge base
697 # \- 2 --- 4 as the merge base
698 #
698 #
699
699
700 cf = cp[0]
700 cf = cp[0]
701 cr = manifest1.get(cf)
701 cr = manifest1.get(cf)
702 nfp = fp2
702 nfp = fp2
703
703
704 if manifest2: # branch merge
704 if manifest2: # branch merge
705 if fp2 == nullid: # copied on remote side
705 if fp2 == nullid: # copied on remote side
706 if fp1 != nullid or cf in manifest2:
706 if fp1 != nullid or cf in manifest2:
707 cr = manifest2[cf]
707 cr = manifest2[cf]
708 nfp = fp1
708 nfp = fp1
709
709
710 # find source in nearest ancestor if we've lost track
710 # find source in nearest ancestor if we've lost track
711 if not cr:
711 if not cr:
712 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
712 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
713 (fn, cf))
713 (fn, cf))
714 for a in self['.'].ancestors():
714 for a in self['.'].ancestors():
715 if cf in a:
715 if cf in a:
716 cr = a[cf].filenode()
716 cr = a[cf].filenode()
717 break
717 break
718
718
719 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
719 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
720 meta["copy"] = cf
720 meta["copy"] = cf
721 meta["copyrev"] = hex(cr)
721 meta["copyrev"] = hex(cr)
722 fp1, fp2 = nullid, nfp
722 fp1, fp2 = nullid, nfp
723 elif fp2 != nullid:
723 elif fp2 != nullid:
724 # is one parent an ancestor of the other?
724 # is one parent an ancestor of the other?
725 fpa = fl.ancestor(fp1, fp2)
725 fpa = fl.ancestor(fp1, fp2)
726 if fpa == fp1:
726 if fpa == fp1:
727 fp1, fp2 = fp2, nullid
727 fp1, fp2 = fp2, nullid
728 elif fpa == fp2:
728 elif fpa == fp2:
729 fp2 = nullid
729 fp2 = nullid
730
730
731 # is the file unmodified from the parent? report existing entry
731 # is the file unmodified from the parent? report existing entry
732 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
732 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
733 return fp1
733 return fp1
734
734
735 changelist.append(fn)
735 changelist.append(fn)
736 return fl.add(t, meta, tr, linkrev, fp1, fp2)
736 return fl.add(t, meta, tr, linkrev, fp1, fp2)
737
737
738 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
738 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
739 if p1 is None:
739 if p1 is None:
740 p1, p2 = self.dirstate.parents()
740 p1, p2 = self.dirstate.parents()
741 return self.commit(files=files, text=text, user=user, date=date,
741 return self.commit(files=files, text=text, user=user, date=date,
742 p1=p1, p2=p2, extra=extra, empty_ok=True)
742 p1=p1, p2=p2, extra=extra, empty_ok=True)
743
743
744 def commit(self, files=None, text="", user=None, date=None,
744 def commit(self, files=None, text="", user=None, date=None,
745 match=None, force=False, force_editor=False,
745 match=None, force=False, force_editor=False,
746 p1=None, p2=None, extra={}, empty_ok=False):
746 p1=None, p2=None, extra={}, empty_ok=False):
747 wlock = lock = None
747 wlock = lock = None
748 if files:
748 if files:
749 files = util.unique(files)
749 files = util.unique(files)
750 try:
750 try:
751 wlock = self.wlock()
751 wlock = self.wlock()
752 lock = self.lock()
752 lock = self.lock()
753 use_dirstate = (p1 is None) # not rawcommit
753 use_dirstate = (p1 is None) # not rawcommit
754
754
755 if use_dirstate:
755 if use_dirstate:
756 p1, p2 = self.dirstate.parents()
756 p1, p2 = self.dirstate.parents()
757 update_dirstate = True
757 update_dirstate = True
758
758
759 if (not force and p2 != nullid and
759 if (not force and p2 != nullid and
760 (match and (match.files() or match.anypats()))):
760 (match and (match.files() or match.anypats()))):
761 raise util.Abort(_('cannot partially commit a merge '
761 raise util.Abort(_('cannot partially commit a merge '
762 '(do not specify files or patterns)'))
762 '(do not specify files or patterns)'))
763
763
764 if files:
764 if files:
765 modified, removed = [], []
765 modified, removed = [], []
766 for f in files:
766 for f in files:
767 s = self.dirstate[f]
767 s = self.dirstate[f]
768 if s in 'nma':
768 if s in 'nma':
769 modified.append(f)
769 modified.append(f)
770 elif s == 'r':
770 elif s == 'r':
771 removed.append(f)
771 removed.append(f)
772 else:
772 else:
773 self.ui.warn(_("%s not tracked!\n") % f)
773 self.ui.warn(_("%s not tracked!\n") % f)
774 changes = [modified, [], removed, [], []]
774 changes = [modified, [], removed, [], []]
775 else:
775 else:
776 changes = self.status(match=match)
776 changes = self.status(match=match)
777 else:
777 else:
778 p1, p2 = p1, p2 or nullid
778 p1, p2 = p1, p2 or nullid
779 update_dirstate = (self.dirstate.parents()[0] == p1)
779 update_dirstate = (self.dirstate.parents()[0] == p1)
780 changes = [files, [], [], [], []]
780 changes = [files, [], [], [], []]
781
781
782 ms = merge_.mergestate(self)
782 ms = merge_.mergestate(self)
783 for f in changes[0]:
783 for f in changes[0]:
784 if f in ms and ms[f] == 'u':
784 if f in ms and ms[f] == 'u':
785 raise util.Abort(_("unresolved merge conflicts "
785 raise util.Abort(_("unresolved merge conflicts "
786 "(see hg resolve)"))
786 "(see hg resolve)"))
787 wctx = context.workingctx(self, (p1, p2), text, user, date,
787 wctx = context.workingctx(self, (p1, p2), text, user, date,
788 extra, changes)
788 extra, changes)
789 return self._commitctx(wctx, force, force_editor, empty_ok,
789 return self._commitctx(wctx, force, force_editor, empty_ok,
790 use_dirstate, update_dirstate)
790 use_dirstate, update_dirstate)
791 finally:
791 finally:
792 del lock, wlock
792 del lock, wlock
793
793
794 def commitctx(self, ctx):
794 def commitctx(self, ctx):
795 """Add a new revision to current repository.
795 """Add a new revision to current repository.
796
796
797 Revision information is passed in the context.memctx argument.
797 Revision information is passed in the context.memctx argument.
798 commitctx() does not touch the working directory.
798 commitctx() does not touch the working directory.
799 """
799 """
800 wlock = lock = None
800 wlock = lock = None
801 try:
801 try:
802 wlock = self.wlock()
802 wlock = self.wlock()
803 lock = self.lock()
803 lock = self.lock()
804 return self._commitctx(ctx, force=True, force_editor=False,
804 return self._commitctx(ctx, force=True, force_editor=False,
805 empty_ok=True, use_dirstate=False,
805 empty_ok=True, use_dirstate=False,
806 update_dirstate=False)
806 update_dirstate=False)
807 finally:
807 finally:
808 del lock, wlock
808 del lock, wlock
809
809
810 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
810 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
811 use_dirstate=True, update_dirstate=True):
811 use_dirstate=True, update_dirstate=True):
812 tr = None
812 tr = None
813 valid = 0 # don't save the dirstate if this isn't set
813 valid = 0 # don't save the dirstate if this isn't set
814 try:
814 try:
815 commit = util.sort(wctx.modified() + wctx.added())
815 commit = util.sort(wctx.modified() + wctx.added())
816 remove = wctx.removed()
816 remove = wctx.removed()
817 extra = wctx.extra().copy()
817 extra = wctx.extra().copy()
818 branchname = extra['branch']
818 branchname = extra['branch']
819 user = wctx.user()
819 user = wctx.user()
820 text = wctx.description()
820 text = wctx.description()
821
821
822 p1, p2 = [p.node() for p in wctx.parents()]
822 p1, p2 = [p.node() for p in wctx.parents()]
823 c1 = self.changelog.read(p1)
823 c1 = self.changelog.read(p1)
824 c2 = self.changelog.read(p2)
824 c2 = self.changelog.read(p2)
825 m1 = self.manifest.read(c1[0]).copy()
825 m1 = self.manifest.read(c1[0]).copy()
826 m2 = self.manifest.read(c2[0])
826 m2 = self.manifest.read(c2[0])
827
827
828 if use_dirstate:
828 if use_dirstate:
829 oldname = c1[5].get("branch") # stored in UTF-8
829 oldname = c1[5].get("branch") # stored in UTF-8
830 if (not commit and not remove and not force and p2 == nullid
830 if (not commit and not remove and not force and p2 == nullid
831 and branchname == oldname):
831 and branchname == oldname):
832 self.ui.status(_("nothing changed\n"))
832 self.ui.status(_("nothing changed\n"))
833 return None
833 return None
834
834
835 xp1 = hex(p1)
835 xp1 = hex(p1)
836 if p2 == nullid: xp2 = ''
836 if p2 == nullid: xp2 = ''
837 else: xp2 = hex(p2)
837 else: xp2 = hex(p2)
838
838
839 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
839 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
840
840
841 tr = self.transaction()
841 tr = self.transaction()
842 trp = weakref.proxy(tr)
842 trp = weakref.proxy(tr)
843
843
844 # check in files
844 # check in files
845 new = {}
845 new = {}
846 changed = []
846 changed = []
847 linkrev = len(self)
847 linkrev = len(self)
848 for f in commit:
848 for f in commit:
849 self.ui.note(f + "\n")
849 self.ui.note(f + "\n")
850 try:
850 try:
851 fctx = wctx.filectx(f)
851 fctx = wctx.filectx(f)
852 newflags = fctx.flags()
852 newflags = fctx.flags()
853 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
853 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
854 if ((not changed or changed[-1] != f) and
854 if ((not changed or changed[-1] != f) and
855 m2.get(f) != new[f]):
855 m2.get(f) != new[f]):
856 # mention the file in the changelog if some
856 # mention the file in the changelog if some
857 # flag changed, even if there was no content
857 # flag changed, even if there was no content
858 # change.
858 # change.
859 if m1.flags(f) != newflags:
859 if m1.flags(f) != newflags:
860 changed.append(f)
860 changed.append(f)
861 m1.set(f, newflags)
861 m1.set(f, newflags)
862 if use_dirstate:
862 if use_dirstate:
863 self.dirstate.normal(f)
863 self.dirstate.normal(f)
864
864
865 except (OSError, IOError):
865 except (OSError, IOError):
866 if use_dirstate:
866 if use_dirstate:
867 self.ui.warn(_("trouble committing %s!\n") % f)
867 self.ui.warn(_("trouble committing %s!\n") % f)
868 raise
868 raise
869 else:
869 else:
870 remove.append(f)
870 remove.append(f)
871
871
872 updated, added = [], []
872 updated, added = [], []
873 for f in util.sort(changed):
873 for f in util.sort(changed):
874 if f in m1 or f in m2:
874 if f in m1 or f in m2:
875 updated.append(f)
875 updated.append(f)
876 else:
876 else:
877 added.append(f)
877 added.append(f)
878
878
879 # update manifest
879 # update manifest
880 m1.update(new)
880 m1.update(new)
881 removed = []
881 removed = []
882
882
883 for f in util.sort(remove):
883 for f in util.sort(remove):
884 if f in m1:
884 if f in m1:
885 del m1[f]
885 del m1[f]
886 removed.append(f)
886 removed.append(f)
887 elif f in m2:
887 elif f in m2:
888 removed.append(f)
888 removed.append(f)
889 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
889 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
890 (new, removed))
890 (new, removed))
891
891
892 # add changeset
892 # add changeset
893 if (not empty_ok and not text) or force_editor:
893 if (not empty_ok and not text) or force_editor:
894 edittext = []
894 edittext = []
895 if text:
895 if text:
896 edittext.append(text)
896 edittext.append(text)
897 edittext.append("")
897 edittext.append("")
898 edittext.append("") # Empty line between message and comments.
898 edittext.append("") # Empty line between message and comments.
899 edittext.append(_("HG: Enter commit message."
899 edittext.append(_("HG: Enter commit message."
900 " Lines beginning with 'HG:' are removed."))
900 " Lines beginning with 'HG:' are removed."))
901 edittext.append("HG: --")
901 edittext.append("HG: --")
902 edittext.append("HG: user: %s" % user)
902 edittext.append("HG: user: %s" % user)
903 if p2 != nullid:
903 if p2 != nullid:
904 edittext.append("HG: branch merge")
904 edittext.append("HG: branch merge")
905 if branchname:
905 if branchname:
906 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
906 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
907 edittext.extend(["HG: added %s" % f for f in added])
907 edittext.extend(["HG: added %s" % f for f in added])
908 edittext.extend(["HG: changed %s" % f for f in updated])
908 edittext.extend(["HG: changed %s" % f for f in updated])
909 edittext.extend(["HG: removed %s" % f for f in removed])
909 edittext.extend(["HG: removed %s" % f for f in removed])
910 if not added and not updated and not removed:
910 if not added and not updated and not removed:
911 edittext.append("HG: no files changed")
911 edittext.append("HG: no files changed")
912 edittext.append("")
912 edittext.append("")
913 # run editor in the repository root
913 # run editor in the repository root
914 olddir = os.getcwd()
914 olddir = os.getcwd()
915 os.chdir(self.root)
915 os.chdir(self.root)
916 text = self.ui.edit("\n".join(edittext), user)
916 text = self.ui.edit("\n".join(edittext), user)
917 os.chdir(olddir)
917 os.chdir(olddir)
918
918
919 lines = [line.rstrip() for line in text.rstrip().splitlines()]
919 lines = [line.rstrip() for line in text.rstrip().splitlines()]
920 while lines and not lines[0]:
920 while lines and not lines[0]:
921 del lines[0]
921 del lines[0]
922 if not lines and use_dirstate:
922 if not lines and use_dirstate:
923 raise util.Abort(_("empty commit message"))
923 raise util.Abort(_("empty commit message"))
924 text = '\n'.join(lines)
924 text = '\n'.join(lines)
925
925
926 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
926 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
927 user, wctx.date(), extra)
927 user, wctx.date(), extra)
928 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
928 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
929 parent2=xp2)
929 parent2=xp2)
930 tr.close()
930 tr.close()
931
931
932 if self.branchcache:
932 if self.branchcache:
933 self.branchtags()
933 self.branchtags()
934
934
935 if use_dirstate or update_dirstate:
935 if use_dirstate or update_dirstate:
936 self.dirstate.setparents(n)
936 self.dirstate.setparents(n)
937 if use_dirstate:
937 if use_dirstate:
938 for f in removed:
938 for f in removed:
939 self.dirstate.forget(f)
939 self.dirstate.forget(f)
940 valid = 1 # our dirstate updates are complete
940 valid = 1 # our dirstate updates are complete
941
941
942 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
942 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
943 return n
943 return n
944 finally:
944 finally:
945 if not valid: # don't save our updated dirstate
945 if not valid: # don't save our updated dirstate
946 self.dirstate.invalidate()
946 self.dirstate.invalidate()
947 del tr
947 del tr
948
948
949 def walk(self, match, node=None):
949 def walk(self, match, node=None):
950 '''
950 '''
951 walk recursively through the directory tree or a given
951 walk recursively through the directory tree or a given
952 changeset, finding all files matched by the match
952 changeset, finding all files matched by the match
953 function
953 function
954 '''
954 '''
955 return self[node].walk(match)
955 return self[node].walk(match)
956
956
957 def status(self, node1='.', node2=None, match=None,
957 def status(self, node1='.', node2=None, match=None,
958 ignored=False, clean=False, unknown=False):
958 ignored=False, clean=False, unknown=False):
959 """return status of files between two nodes or node and working directory
959 """return status of files between two nodes or node and working directory
960
960
961 If node1 is None, use the first dirstate parent instead.
961 If node1 is None, use the first dirstate parent instead.
962 If node2 is None, compare node1 with working directory.
962 If node2 is None, compare node1 with working directory.
963 """
963 """
964
964
965 def mfmatches(ctx):
965 def mfmatches(ctx):
966 mf = ctx.manifest().copy()
966 mf = ctx.manifest().copy()
967 for fn in mf.keys():
967 for fn in mf.keys():
968 if not match(fn):
968 if not match(fn):
969 del mf[fn]
969 del mf[fn]
970 return mf
970 return mf
971
971
972 if isinstance(node1, context.changectx):
972 if isinstance(node1, context.changectx):
973 ctx1 = node1
973 ctx1 = node1
974 else:
974 else:
975 ctx1 = self[node1]
975 ctx1 = self[node1]
976 if isinstance(node2, context.changectx):
976 if isinstance(node2, context.changectx):
977 ctx2 = node2
977 ctx2 = node2
978 else:
978 else:
979 ctx2 = self[node2]
979 ctx2 = self[node2]
980
980
981 working = ctx2 == self[None]
981 working = ctx2 == self[None]
982 parentworking = working and ctx1 == self['.']
982 parentworking = working and ctx1 == self['.']
983 match = match or match_.always(self.root, self.getcwd())
983 match = match or match_.always(self.root, self.getcwd())
984 listignored, listclean, listunknown = ignored, clean, unknown
984 listignored, listclean, listunknown = ignored, clean, unknown
985
985
986 # load earliest manifest first for caching reasons
986 # load earliest manifest first for caching reasons
987 if not working and ctx2.rev() < ctx1.rev():
987 if not working and ctx2.rev() < ctx1.rev():
988 ctx2.manifest()
988 ctx2.manifest()
989
989
990 if not parentworking:
990 if not parentworking:
991 def bad(f, msg):
991 def bad(f, msg):
992 if f not in ctx1:
992 if f not in ctx1:
993 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
993 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
994 return False
994 return False
995 match.bad = bad
995 match.bad = bad
996
996
997 if working: # we need to scan the working dir
997 if working: # we need to scan the working dir
998 s = self.dirstate.status(match, listignored, listclean, listunknown)
998 s = self.dirstate.status(match, listignored, listclean, listunknown)
999 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
999 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1000
1000
1001 # check for any possibly clean files
1001 # check for any possibly clean files
1002 if parentworking and cmp:
1002 if parentworking and cmp:
1003 fixup = []
1003 fixup = []
1004 # do a full compare of any files that might have changed
1004 # do a full compare of any files that might have changed
1005 for f in cmp:
1005 for f in cmp:
1006 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1006 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1007 or ctx1[f].cmp(ctx2[f].data())):
1007 or ctx1[f].cmp(ctx2[f].data())):
1008 modified.append(f)
1008 modified.append(f)
1009 else:
1009 else:
1010 fixup.append(f)
1010 fixup.append(f)
1011
1011
1012 if listclean:
1012 if listclean:
1013 clean += fixup
1013 clean += fixup
1014
1014
1015 # update dirstate for files that are actually clean
1015 # update dirstate for files that are actually clean
1016 if fixup:
1016 if fixup:
1017 wlock = None
1017 wlock = None
1018 try:
1018 try:
1019 try:
1019 try:
1020 wlock = self.wlock(False)
1020 wlock = self.wlock(False)
1021 for f in fixup:
1021 for f in fixup:
1022 self.dirstate.normal(f)
1022 self.dirstate.normal(f)
1023 except lock.LockException:
1023 except lock.LockException:
1024 pass
1024 pass
1025 finally:
1025 finally:
1026 del wlock
1026 del wlock
1027
1027
1028 if not parentworking:
1028 if not parentworking:
1029 mf1 = mfmatches(ctx1)
1029 mf1 = mfmatches(ctx1)
1030 if working:
1030 if working:
1031 # we are comparing working dir against non-parent
1031 # we are comparing working dir against non-parent
1032 # generate a pseudo-manifest for the working dir
1032 # generate a pseudo-manifest for the working dir
1033 mf2 = mfmatches(self['.'])
1033 mf2 = mfmatches(self['.'])
1034 for f in cmp + modified + added:
1034 for f in cmp + modified + added:
1035 mf2[f] = None
1035 mf2[f] = None
1036 mf2.set(f, ctx2.flags(f))
1036 mf2.set(f, ctx2.flags(f))
1037 for f in removed:
1037 for f in removed:
1038 if f in mf2:
1038 if f in mf2:
1039 del mf2[f]
1039 del mf2[f]
1040 else:
1040 else:
1041 # we are comparing two revisions
1041 # we are comparing two revisions
1042 deleted, unknown, ignored = [], [], []
1042 deleted, unknown, ignored = [], [], []
1043 mf2 = mfmatches(ctx2)
1043 mf2 = mfmatches(ctx2)
1044
1044
1045 modified, added, clean = [], [], []
1045 modified, added, clean = [], [], []
1046 for fn in mf2:
1046 for fn in mf2:
1047 if fn in mf1:
1047 if fn in mf1:
1048 if (mf1.flags(fn) != mf2.flags(fn) or
1048 if (mf1.flags(fn) != mf2.flags(fn) or
1049 (mf1[fn] != mf2[fn] and
1049 (mf1[fn] != mf2[fn] and
1050 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1050 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1051 modified.append(fn)
1051 modified.append(fn)
1052 elif listclean:
1052 elif listclean:
1053 clean.append(fn)
1053 clean.append(fn)
1054 del mf1[fn]
1054 del mf1[fn]
1055 else:
1055 else:
1056 added.append(fn)
1056 added.append(fn)
1057 removed = mf1.keys()
1057 removed = mf1.keys()
1058
1058
1059 r = modified, added, removed, deleted, unknown, ignored, clean
1059 r = modified, added, removed, deleted, unknown, ignored, clean
1060 [l.sort() for l in r]
1060 [l.sort() for l in r]
1061 return r
1061 return r
1062
1062
1063 def add(self, list):
1063 def add(self, list):
1064 wlock = self.wlock()
1064 wlock = self.wlock()
1065 try:
1065 try:
1066 rejected = []
1066 rejected = []
1067 for f in list:
1067 for f in list:
1068 p = self.wjoin(f)
1068 p = self.wjoin(f)
1069 try:
1069 try:
1070 st = os.lstat(p)
1070 st = os.lstat(p)
1071 except:
1071 except:
1072 self.ui.warn(_("%s does not exist!\n") % f)
1072 self.ui.warn(_("%s does not exist!\n") % f)
1073 rejected.append(f)
1073 rejected.append(f)
1074 continue
1074 continue
1075 if st.st_size > 10000000:
1075 if st.st_size > 10000000:
1076 self.ui.warn(_("%s: files over 10MB may cause memory and"
1076 self.ui.warn(_("%s: files over 10MB may cause memory and"
1077 " performance problems\n"
1077 " performance problems\n"
1078 "(use 'hg revert %s' to unadd the file)\n")
1078 "(use 'hg revert %s' to unadd the file)\n")
1079 % (f, f))
1079 % (f, f))
1080 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1080 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1081 self.ui.warn(_("%s not added: only files and symlinks "
1081 self.ui.warn(_("%s not added: only files and symlinks "
1082 "supported currently\n") % f)
1082 "supported currently\n") % f)
1083 rejected.append(p)
1083 rejected.append(p)
1084 elif self.dirstate[f] in 'amn':
1084 elif self.dirstate[f] in 'amn':
1085 self.ui.warn(_("%s already tracked!\n") % f)
1085 self.ui.warn(_("%s already tracked!\n") % f)
1086 elif self.dirstate[f] == 'r':
1086 elif self.dirstate[f] == 'r':
1087 self.dirstate.normallookup(f)
1087 self.dirstate.normallookup(f)
1088 else:
1088 else:
1089 self.dirstate.add(f)
1089 self.dirstate.add(f)
1090 return rejected
1090 return rejected
1091 finally:
1091 finally:
1092 del wlock
1092 del wlock
1093
1093
1094 def forget(self, list):
1094 def forget(self, list):
1095 wlock = self.wlock()
1095 wlock = self.wlock()
1096 try:
1096 try:
1097 for f in list:
1097 for f in list:
1098 if self.dirstate[f] != 'a':
1098 if self.dirstate[f] != 'a':
1099 self.ui.warn(_("%s not added!\n") % f)
1099 self.ui.warn(_("%s not added!\n") % f)
1100 else:
1100 else:
1101 self.dirstate.forget(f)
1101 self.dirstate.forget(f)
1102 finally:
1102 finally:
1103 del wlock
1103 del wlock
1104
1104
1105 def remove(self, list, unlink=False):
1105 def remove(self, list, unlink=False):
1106 wlock = None
1106 wlock = None
1107 try:
1107 try:
1108 if unlink:
1108 if unlink:
1109 for f in list:
1109 for f in list:
1110 try:
1110 try:
1111 util.unlink(self.wjoin(f))
1111 util.unlink(self.wjoin(f))
1112 except OSError, inst:
1112 except OSError, inst:
1113 if inst.errno != errno.ENOENT:
1113 if inst.errno != errno.ENOENT:
1114 raise
1114 raise
1115 wlock = self.wlock()
1115 wlock = self.wlock()
1116 for f in list:
1116 for f in list:
1117 if unlink and os.path.exists(self.wjoin(f)):
1117 if unlink and os.path.exists(self.wjoin(f)):
1118 self.ui.warn(_("%s still exists!\n") % f)
1118 self.ui.warn(_("%s still exists!\n") % f)
1119 elif self.dirstate[f] == 'a':
1119 elif self.dirstate[f] == 'a':
1120 self.dirstate.forget(f)
1120 self.dirstate.forget(f)
1121 elif f not in self.dirstate:
1121 elif f not in self.dirstate:
1122 self.ui.warn(_("%s not tracked!\n") % f)
1122 self.ui.warn(_("%s not tracked!\n") % f)
1123 else:
1123 else:
1124 self.dirstate.remove(f)
1124 self.dirstate.remove(f)
1125 finally:
1125 finally:
1126 del wlock
1126 del wlock
1127
1127
1128 def undelete(self, list):
1128 def undelete(self, list):
1129 wlock = None
1129 wlock = None
1130 try:
1130 try:
1131 manifests = [self.manifest.read(self.changelog.read(p)[0])
1131 manifests = [self.manifest.read(self.changelog.read(p)[0])
1132 for p in self.dirstate.parents() if p != nullid]
1132 for p in self.dirstate.parents() if p != nullid]
1133 wlock = self.wlock()
1133 wlock = self.wlock()
1134 for f in list:
1134 for f in list:
1135 if self.dirstate[f] != 'r':
1135 if self.dirstate[f] != 'r':
1136 self.ui.warn(_("%s not removed!\n") % f)
1136 self.ui.warn(_("%s not removed!\n") % f)
1137 else:
1137 else:
1138 m = f in manifests[0] and manifests[0] or manifests[1]
1138 m = f in manifests[0] and manifests[0] or manifests[1]
1139 t = self.file(f).read(m[f])
1139 t = self.file(f).read(m[f])
1140 self.wwrite(f, t, m.flags(f))
1140 self.wwrite(f, t, m.flags(f))
1141 self.dirstate.normal(f)
1141 self.dirstate.normal(f)
1142 finally:
1142 finally:
1143 del wlock
1143 del wlock
1144
1144
1145 def copy(self, source, dest):
1145 def copy(self, source, dest):
1146 wlock = None
1146 wlock = None
1147 try:
1147 try:
1148 p = self.wjoin(dest)
1148 p = self.wjoin(dest)
1149 if not (os.path.exists(p) or os.path.islink(p)):
1149 if not (os.path.exists(p) or os.path.islink(p)):
1150 self.ui.warn(_("%s does not exist!\n") % dest)
1150 self.ui.warn(_("%s does not exist!\n") % dest)
1151 elif not (os.path.isfile(p) or os.path.islink(p)):
1151 elif not (os.path.isfile(p) or os.path.islink(p)):
1152 self.ui.warn(_("copy failed: %s is not a file or a "
1152 self.ui.warn(_("copy failed: %s is not a file or a "
1153 "symbolic link\n") % dest)
1153 "symbolic link\n") % dest)
1154 else:
1154 else:
1155 wlock = self.wlock()
1155 wlock = self.wlock()
1156 if self.dirstate[dest] in '?r':
1156 if self.dirstate[dest] in '?r':
1157 self.dirstate.add(dest)
1157 self.dirstate.add(dest)
1158 self.dirstate.copy(source, dest)
1158 self.dirstate.copy(source, dest)
1159 finally:
1159 finally:
1160 del wlock
1160 del wlock
1161
1161
1162 def heads(self, start=None):
1162 def heads(self, start=None):
1163 heads = self.changelog.heads(start)
1163 heads = self.changelog.heads(start)
1164 # sort the output in rev descending order
1164 # sort the output in rev descending order
1165 heads = [(-self.changelog.rev(h), h) for h in heads]
1165 heads = [(-self.changelog.rev(h), h) for h in heads]
1166 return [n for (r, n) in util.sort(heads)]
1166 return [n for (r, n) in util.sort(heads)]
1167
1167
1168 def branchheads(self, branch=None, start=None):
1168 def branchheads(self, branch=None, start=None):
1169 if branch is None:
1169 if branch is None:
1170 branch = self[None].branch()
1170 branch = self[None].branch()
1171 branches = self.branchtags()
1171 branches = self.branchtags()
1172 if branch not in branches:
1172 if branch not in branches:
1173 return []
1173 return []
1174 # The basic algorithm is this:
1174 # The basic algorithm is this:
1175 #
1175 #
1176 # Start from the branch tip since there are no later revisions that can
1176 # Start from the branch tip since there are no later revisions that can
1177 # possibly be in this branch, and the tip is a guaranteed head.
1177 # possibly be in this branch, and the tip is a guaranteed head.
1178 #
1178 #
1179 # Remember the tip's parents as the first ancestors, since these by
1179 # Remember the tip's parents as the first ancestors, since these by
1180 # definition are not heads.
1180 # definition are not heads.
1181 #
1181 #
1182 # Step backwards from the brach tip through all the revisions. We are
1182 # Step backwards from the brach tip through all the revisions. We are
1183 # guaranteed by the rules of Mercurial that we will now be visiting the
1183 # guaranteed by the rules of Mercurial that we will now be visiting the
1184 # nodes in reverse topological order (children before parents).
1184 # nodes in reverse topological order (children before parents).
1185 #
1185 #
1186 # If a revision is one of the ancestors of a head then we can toss it
1186 # If a revision is one of the ancestors of a head then we can toss it
1187 # out of the ancestors set (we've already found it and won't be
1187 # out of the ancestors set (we've already found it and won't be
1188 # visiting it again) and put its parents in the ancestors set.
1188 # visiting it again) and put its parents in the ancestors set.
1189 #
1189 #
1190 # Otherwise, if a revision is in the branch it's another head, since it
1190 # Otherwise, if a revision is in the branch it's another head, since it
1191 # wasn't in the ancestor list of an existing head. So add it to the
1191 # wasn't in the ancestor list of an existing head. So add it to the
1192 # head list, and add its parents to the ancestor list.
1192 # head list, and add its parents to the ancestor list.
1193 #
1193 #
1194 # If it is not in the branch ignore it.
1194 # If it is not in the branch ignore it.
1195 #
1195 #
1196 # Once we have a list of heads, use nodesbetween to filter out all the
1196 # Once we have a list of heads, use nodesbetween to filter out all the
1197 # heads that cannot be reached from startrev. There may be a more
1197 # heads that cannot be reached from startrev. There may be a more
1198 # efficient way to do this as part of the previous algorithm.
1198 # efficient way to do this as part of the previous algorithm.
1199
1199
1200 set = util.set
1200 set = util.set
1201 heads = [self.changelog.rev(branches[branch])]
1201 heads = [self.changelog.rev(branches[branch])]
1202 # Don't care if ancestors contains nullrev or not.
1202 # Don't care if ancestors contains nullrev or not.
1203 ancestors = set(self.changelog.parentrevs(heads[0]))
1203 ancestors = set(self.changelog.parentrevs(heads[0]))
1204 for rev in xrange(heads[0] - 1, nullrev, -1):
1204 for rev in xrange(heads[0] - 1, nullrev, -1):
1205 if rev in ancestors:
1205 if rev in ancestors:
1206 ancestors.update(self.changelog.parentrevs(rev))
1206 ancestors.update(self.changelog.parentrevs(rev))
1207 ancestors.remove(rev)
1207 ancestors.remove(rev)
1208 elif self[rev].branch() == branch:
1208 elif self[rev].branch() == branch:
1209 heads.append(rev)
1209 heads.append(rev)
1210 ancestors.update(self.changelog.parentrevs(rev))
1210 ancestors.update(self.changelog.parentrevs(rev))
1211 heads = [self.changelog.node(rev) for rev in heads]
1211 heads = [self.changelog.node(rev) for rev in heads]
1212 if start is not None:
1212 if start is not None:
1213 heads = self.changelog.nodesbetween([start], heads)[2]
1213 heads = self.changelog.nodesbetween([start], heads)[2]
1214 return heads
1214 return heads
1215
1215
1216 def branches(self, nodes):
1216 def branches(self, nodes):
1217 if not nodes:
1217 if not nodes:
1218 nodes = [self.changelog.tip()]
1218 nodes = [self.changelog.tip()]
1219 b = []
1219 b = []
1220 for n in nodes:
1220 for n in nodes:
1221 t = n
1221 t = n
1222 while 1:
1222 while 1:
1223 p = self.changelog.parents(n)
1223 p = self.changelog.parents(n)
1224 if p[1] != nullid or p[0] == nullid:
1224 if p[1] != nullid or p[0] == nullid:
1225 b.append((t, n, p[0], p[1]))
1225 b.append((t, n, p[0], p[1]))
1226 break
1226 break
1227 n = p[0]
1227 n = p[0]
1228 return b
1228 return b
1229
1229
1230 def between(self, pairs):
1230 def between(self, pairs):
1231 r = []
1231 r = []
1232
1232
1233 for top, bottom in pairs:
1233 for top, bottom in pairs:
1234 n, l, i = top, [], 0
1234 n, l, i = top, [], 0
1235 f = 1
1235 f = 1
1236
1236
1237 while n != bottom:
1237 while n != bottom:
1238 p = self.changelog.parents(n)[0]
1238 p = self.changelog.parents(n)[0]
1239 if i == f:
1239 if i == f:
1240 l.append(n)
1240 l.append(n)
1241 f = f * 2
1241 f = f * 2
1242 n = p
1242 n = p
1243 i += 1
1243 i += 1
1244
1244
1245 r.append(l)
1245 r.append(l)
1246
1246
1247 return r
1247 return r
1248
1248
1249 def findincoming(self, remote, base=None, heads=None, force=False):
1249 def findincoming(self, remote, base=None, heads=None, force=False):
1250 """Return list of roots of the subsets of missing nodes from remote
1250 """Return list of roots of the subsets of missing nodes from remote
1251
1251
1252 If base dict is specified, assume that these nodes and their parents
1252 If base dict is specified, assume that these nodes and their parents
1253 exist on the remote side and that no child of a node of base exists
1253 exist on the remote side and that no child of a node of base exists
1254 in both remote and self.
1254 in both remote and self.
1255 Furthermore base will be updated to include the nodes that exists
1255 Furthermore base will be updated to include the nodes that exists
1256 in self and remote but no children exists in self and remote.
1256 in self and remote but no children exists in self and remote.
1257 If a list of heads is specified, return only nodes which are heads
1257 If a list of heads is specified, return only nodes which are heads
1258 or ancestors of these heads.
1258 or ancestors of these heads.
1259
1259
1260 All the ancestors of base are in self and in remote.
1260 All the ancestors of base are in self and in remote.
1261 All the descendants of the list returned are missing in self.
1261 All the descendants of the list returned are missing in self.
1262 (and so we know that the rest of the nodes are missing in remote, see
1262 (and so we know that the rest of the nodes are missing in remote, see
1263 outgoing)
1263 outgoing)
1264 """
1264 """
1265 m = self.changelog.nodemap
1265 m = self.changelog.nodemap
1266 search = []
1266 search = []
1267 fetch = {}
1267 fetch = {}
1268 seen = {}
1268 seen = {}
1269 seenbranch = {}
1269 seenbranch = {}
1270 if base == None:
1270 if base == None:
1271 base = {}
1271 base = {}
1272
1272
1273 if not heads:
1273 if not heads:
1274 heads = remote.heads()
1274 heads = remote.heads()
1275
1275
1276 if self.changelog.tip() == nullid:
1276 if self.changelog.tip() == nullid:
1277 base[nullid] = 1
1277 base[nullid] = 1
1278 if heads != [nullid]:
1278 if heads != [nullid]:
1279 return [nullid]
1279 return [nullid]
1280 return []
1280 return []
1281
1281
1282 # assume we're closer to the tip than the root
1282 # assume we're closer to the tip than the root
1283 # and start by examining the heads
1283 # and start by examining the heads
1284 self.ui.status(_("searching for changes\n"))
1284 self.ui.status(_("searching for changes\n"))
1285
1285
1286 unknown = []
1286 unknown = []
1287 for h in heads:
1287 for h in heads:
1288 if h not in m:
1288 if h not in m:
1289 unknown.append(h)
1289 unknown.append(h)
1290 else:
1290 else:
1291 base[h] = 1
1291 base[h] = 1
1292
1292
1293 if not unknown:
1293 if not unknown:
1294 return []
1294 return []
1295
1295
1296 req = dict.fromkeys(unknown)
1296 req = dict.fromkeys(unknown)
1297 reqcnt = 0
1297 reqcnt = 0
1298
1298
1299 # search through remote branches
1299 # search through remote branches
1300 # a 'branch' here is a linear segment of history, with four parts:
1300 # a 'branch' here is a linear segment of history, with four parts:
1301 # head, root, first parent, second parent
1301 # head, root, first parent, second parent
1302 # (a branch always has two parents (or none) by definition)
1302 # (a branch always has two parents (or none) by definition)
1303 unknown = remote.branches(unknown)
1303 unknown = remote.branches(unknown)
1304 while unknown:
1304 while unknown:
1305 r = []
1305 r = []
1306 while unknown:
1306 while unknown:
1307 n = unknown.pop(0)
1307 n = unknown.pop(0)
1308 if n[0] in seen:
1308 if n[0] in seen:
1309 continue
1309 continue
1310
1310
1311 self.ui.debug(_("examining %s:%s\n")
1311 self.ui.debug(_("examining %s:%s\n")
1312 % (short(n[0]), short(n[1])))
1312 % (short(n[0]), short(n[1])))
1313 if n[0] == nullid: # found the end of the branch
1313 if n[0] == nullid: # found the end of the branch
1314 pass
1314 pass
1315 elif n in seenbranch:
1315 elif n in seenbranch:
1316 self.ui.debug(_("branch already found\n"))
1316 self.ui.debug(_("branch already found\n"))
1317 continue
1317 continue
1318 elif n[1] and n[1] in m: # do we know the base?
1318 elif n[1] and n[1] in m: # do we know the base?
1319 self.ui.debug(_("found incomplete branch %s:%s\n")
1319 self.ui.debug(_("found incomplete branch %s:%s\n")
1320 % (short(n[0]), short(n[1])))
1320 % (short(n[0]), short(n[1])))
1321 search.append(n[0:2]) # schedule branch range for scanning
1321 search.append(n[0:2]) # schedule branch range for scanning
1322 seenbranch[n] = 1
1322 seenbranch[n] = 1
1323 else:
1323 else:
1324 if n[1] not in seen and n[1] not in fetch:
1324 if n[1] not in seen and n[1] not in fetch:
1325 if n[2] in m and n[3] in m:
1325 if n[2] in m and n[3] in m:
1326 self.ui.debug(_("found new changeset %s\n") %
1326 self.ui.debug(_("found new changeset %s\n") %
1327 short(n[1]))
1327 short(n[1]))
1328 fetch[n[1]] = 1 # earliest unknown
1328 fetch[n[1]] = 1 # earliest unknown
1329 for p in n[2:4]:
1329 for p in n[2:4]:
1330 if p in m:
1330 if p in m:
1331 base[p] = 1 # latest known
1331 base[p] = 1 # latest known
1332
1332
1333 for p in n[2:4]:
1333 for p in n[2:4]:
1334 if p not in req and p not in m:
1334 if p not in req and p not in m:
1335 r.append(p)
1335 r.append(p)
1336 req[p] = 1
1336 req[p] = 1
1337 seen[n[0]] = 1
1337 seen[n[0]] = 1
1338
1338
1339 if r:
1339 if r:
1340 reqcnt += 1
1340 reqcnt += 1
1341 self.ui.debug(_("request %d: %s\n") %
1341 self.ui.debug(_("request %d: %s\n") %
1342 (reqcnt, " ".join(map(short, r))))
1342 (reqcnt, " ".join(map(short, r))))
1343 for p in xrange(0, len(r), 10):
1343 for p in xrange(0, len(r), 10):
1344 for b in remote.branches(r[p:p+10]):
1344 for b in remote.branches(r[p:p+10]):
1345 self.ui.debug(_("received %s:%s\n") %
1345 self.ui.debug(_("received %s:%s\n") %
1346 (short(b[0]), short(b[1])))
1346 (short(b[0]), short(b[1])))
1347 unknown.append(b)
1347 unknown.append(b)
1348
1348
1349 # do binary search on the branches we found
1349 # do binary search on the branches we found
1350 while search:
1350 while search:
1351 newsearch = []
1351 newsearch = []
1352 reqcnt += 1
1352 reqcnt += 1
1353 for n, l in zip(search, remote.between(search)):
1353 for n, l in zip(search, remote.between(search)):
1354 l.append(n[1])
1354 l.append(n[1])
1355 p = n[0]
1355 p = n[0]
1356 f = 1
1356 f = 1
1357 for i in l:
1357 for i in l:
1358 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1358 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1359 if i in m:
1359 if i in m:
1360 if f <= 2:
1360 if f <= 2:
1361 self.ui.debug(_("found new branch changeset %s\n") %
1361 self.ui.debug(_("found new branch changeset %s\n") %
1362 short(p))
1362 short(p))
1363 fetch[p] = 1
1363 fetch[p] = 1
1364 base[i] = 1
1364 base[i] = 1
1365 else:
1365 else:
1366 self.ui.debug(_("narrowed branch search to %s:%s\n")
1366 self.ui.debug(_("narrowed branch search to %s:%s\n")
1367 % (short(p), short(i)))
1367 % (short(p), short(i)))
1368 newsearch.append((p, i))
1368 newsearch.append((p, i))
1369 break
1369 break
1370 p, f = i, f * 2
1370 p, f = i, f * 2
1371 search = newsearch
1371 search = newsearch
1372
1372
1373 # sanity check our fetch list
1373 # sanity check our fetch list
1374 for f in fetch.keys():
1374 for f in fetch.keys():
1375 if f in m:
1375 if f in m:
1376 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1376 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1377
1377
1378 if base.keys() == [nullid]:
1378 if base.keys() == [nullid]:
1379 if force:
1379 if force:
1380 self.ui.warn(_("warning: repository is unrelated\n"))
1380 self.ui.warn(_("warning: repository is unrelated\n"))
1381 else:
1381 else:
1382 raise util.Abort(_("repository is unrelated"))
1382 raise util.Abort(_("repository is unrelated"))
1383
1383
1384 self.ui.debug(_("found new changesets starting at ") +
1384 self.ui.debug(_("found new changesets starting at ") +
1385 " ".join([short(f) for f in fetch]) + "\n")
1385 " ".join([short(f) for f in fetch]) + "\n")
1386
1386
1387 self.ui.debug(_("%d total queries\n") % reqcnt)
1387 self.ui.debug(_("%d total queries\n") % reqcnt)
1388
1388
1389 return fetch.keys()
1389 return fetch.keys()
1390
1390
1391 def findoutgoing(self, remote, base=None, heads=None, force=False):
1391 def findoutgoing(self, remote, base=None, heads=None, force=False):
1392 """Return list of nodes that are roots of subsets not in remote
1392 """Return list of nodes that are roots of subsets not in remote
1393
1393
1394 If base dict is specified, assume that these nodes and their parents
1394 If base dict is specified, assume that these nodes and their parents
1395 exist on the remote side.
1395 exist on the remote side.
1396 If a list of heads is specified, return only nodes which are heads
1396 If a list of heads is specified, return only nodes which are heads
1397 or ancestors of these heads, and return a second element which
1397 or ancestors of these heads, and return a second element which
1398 contains all remote heads which get new children.
1398 contains all remote heads which get new children.
1399 """
1399 """
1400 if base == None:
1400 if base == None:
1401 base = {}
1401 base = {}
1402 self.findincoming(remote, base, heads, force=force)
1402 self.findincoming(remote, base, heads, force=force)
1403
1403
1404 self.ui.debug(_("common changesets up to ")
1404 self.ui.debug(_("common changesets up to ")
1405 + " ".join(map(short, base.keys())) + "\n")
1405 + " ".join(map(short, base.keys())) + "\n")
1406
1406
1407 remain = dict.fromkeys(self.changelog.nodemap)
1407 remain = dict.fromkeys(self.changelog.nodemap)
1408
1408
1409 # prune everything remote has from the tree
1409 # prune everything remote has from the tree
1410 del remain[nullid]
1410 del remain[nullid]
1411 remove = base.keys()
1411 remove = base.keys()
1412 while remove:
1412 while remove:
1413 n = remove.pop(0)
1413 n = remove.pop(0)
1414 if n in remain:
1414 if n in remain:
1415 del remain[n]
1415 del remain[n]
1416 for p in self.changelog.parents(n):
1416 for p in self.changelog.parents(n):
1417 remove.append(p)
1417 remove.append(p)
1418
1418
1419 # find every node whose parents have been pruned
1419 # find every node whose parents have been pruned
1420 subset = []
1420 subset = []
1421 # find every remote head that will get new children
1421 # find every remote head that will get new children
1422 updated_heads = {}
1422 updated_heads = {}
1423 for n in remain:
1423 for n in remain:
1424 p1, p2 = self.changelog.parents(n)
1424 p1, p2 = self.changelog.parents(n)
1425 if p1 not in remain and p2 not in remain:
1425 if p1 not in remain and p2 not in remain:
1426 subset.append(n)
1426 subset.append(n)
1427 if heads:
1427 if heads:
1428 if p1 in heads:
1428 if p1 in heads:
1429 updated_heads[p1] = True
1429 updated_heads[p1] = True
1430 if p2 in heads:
1430 if p2 in heads:
1431 updated_heads[p2] = True
1431 updated_heads[p2] = True
1432
1432
1433 # this is the set of all roots we have to push
1433 # this is the set of all roots we have to push
1434 if heads:
1434 if heads:
1435 return subset, updated_heads.keys()
1435 return subset, updated_heads.keys()
1436 else:
1436 else:
1437 return subset
1437 return subset
1438
1438
1439 def pull(self, remote, heads=None, force=False):
1439 def pull(self, remote, heads=None, force=False):
1440 lock = self.lock()
1440 lock = self.lock()
1441 try:
1441 try:
1442 fetch = self.findincoming(remote, heads=heads, force=force)
1442 fetch = self.findincoming(remote, heads=heads, force=force)
1443 if fetch == [nullid]:
1443 if fetch == [nullid]:
1444 self.ui.status(_("requesting all changes\n"))
1444 self.ui.status(_("requesting all changes\n"))
1445
1445
1446 if not fetch:
1446 if not fetch:
1447 self.ui.status(_("no changes found\n"))
1447 self.ui.status(_("no changes found\n"))
1448 return 0
1448 return 0
1449
1449
1450 if heads is None:
1450 if heads is None:
1451 cg = remote.changegroup(fetch, 'pull')
1451 cg = remote.changegroup(fetch, 'pull')
1452 else:
1452 else:
1453 if 'changegroupsubset' not in remote.capabilities:
1453 if 'changegroupsubset' not in remote.capabilities:
1454 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1454 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1455 cg = remote.changegroupsubset(fetch, heads, 'pull')
1455 cg = remote.changegroupsubset(fetch, heads, 'pull')
1456 return self.addchangegroup(cg, 'pull', remote.url())
1456 return self.addchangegroup(cg, 'pull', remote.url())
1457 finally:
1457 finally:
1458 del lock
1458 del lock
1459
1459
1460 def push(self, remote, force=False, revs=None):
1460 def push(self, remote, force=False, revs=None):
1461 # there are two ways to push to remote repo:
1461 # there are two ways to push to remote repo:
1462 #
1462 #
1463 # addchangegroup assumes local user can lock remote
1463 # addchangegroup assumes local user can lock remote
1464 # repo (local filesystem, old ssh servers).
1464 # repo (local filesystem, old ssh servers).
1465 #
1465 #
1466 # unbundle assumes local user cannot lock remote repo (new ssh
1466 # unbundle assumes local user cannot lock remote repo (new ssh
1467 # servers, http servers).
1467 # servers, http servers).
1468
1468
1469 if remote.capable('unbundle'):
1469 if remote.capable('unbundle'):
1470 return self.push_unbundle(remote, force, revs)
1470 return self.push_unbundle(remote, force, revs)
1471 return self.push_addchangegroup(remote, force, revs)
1471 return self.push_addchangegroup(remote, force, revs)
1472
1472
1473 def prepush(self, remote, force, revs):
1473 def prepush(self, remote, force, revs):
1474 base = {}
1474 base = {}
1475 remote_heads = remote.heads()
1475 remote_heads = remote.heads()
1476 inc = self.findincoming(remote, base, remote_heads, force=force)
1476 inc = self.findincoming(remote, base, remote_heads, force=force)
1477
1477
1478 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1478 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1479 if revs is not None:
1479 if revs is not None:
1480 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1480 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1481 else:
1481 else:
1482 bases, heads = update, self.changelog.heads()
1482 bases, heads = update, self.changelog.heads()
1483
1483
1484 if not bases:
1484 if not bases:
1485 self.ui.status(_("no changes found\n"))
1485 self.ui.status(_("no changes found\n"))
1486 return None, 1
1486 return None, 1
1487 elif not force:
1487 elif not force:
1488 # check if we're creating new remote heads
1488 # check if we're creating new remote heads
1489 # to be a remote head after push, node must be either
1489 # to be a remote head after push, node must be either
1490 # - unknown locally
1490 # - unknown locally
1491 # - a local outgoing head descended from update
1491 # - a local outgoing head descended from update
1492 # - a remote head that's known locally and not
1492 # - a remote head that's known locally and not
1493 # ancestral to an outgoing head
1493 # ancestral to an outgoing head
1494
1494
1495 warn = 0
1495 warn = 0
1496
1496
1497 if remote_heads == [nullid]:
1497 if remote_heads == [nullid]:
1498 warn = 0
1498 warn = 0
1499 elif not revs and len(heads) > len(remote_heads):
1499 elif not revs and len(heads) > len(remote_heads):
1500 warn = 1
1500 warn = 1
1501 else:
1501 else:
1502 newheads = list(heads)
1502 newheads = list(heads)
1503 for r in remote_heads:
1503 for r in remote_heads:
1504 if r in self.changelog.nodemap:
1504 if r in self.changelog.nodemap:
1505 desc = self.changelog.heads(r, heads)
1505 desc = self.changelog.heads(r, heads)
1506 l = [h for h in heads if h in desc]
1506 l = [h for h in heads if h in desc]
1507 if not l:
1507 if not l:
1508 newheads.append(r)
1508 newheads.append(r)
1509 else:
1509 else:
1510 newheads.append(r)
1510 newheads.append(r)
1511 if len(newheads) > len(remote_heads):
1511 if len(newheads) > len(remote_heads):
1512 warn = 1
1512 warn = 1
1513
1513
1514 if warn:
1514 if warn:
1515 self.ui.warn(_("abort: push creates new remote heads!\n"))
1515 self.ui.warn(_("abort: push creates new remote heads!\n"))
1516 self.ui.status(_("(did you forget to merge?"
1516 self.ui.status(_("(did you forget to merge?"
1517 " use push -f to force)\n"))
1517 " use push -f to force)\n"))
1518 return None, 0
1518 return None, 0
1519 elif inc:
1519 elif inc:
1520 self.ui.warn(_("note: unsynced remote changes!\n"))
1520 self.ui.warn(_("note: unsynced remote changes!\n"))
1521
1521
1522
1522
1523 if revs is None:
1523 if revs is None:
1524 cg = self.changegroup(update, 'push')
1524 cg = self.changegroup(update, 'push')
1525 else:
1525 else:
1526 cg = self.changegroupsubset(update, revs, 'push')
1526 cg = self.changegroupsubset(update, revs, 'push')
1527 return cg, remote_heads
1527 return cg, remote_heads
1528
1528
1529 def push_addchangegroup(self, remote, force, revs):
1529 def push_addchangegroup(self, remote, force, revs):
1530 lock = remote.lock()
1530 lock = remote.lock()
1531 try:
1531 try:
1532 ret = self.prepush(remote, force, revs)
1532 ret = self.prepush(remote, force, revs)
1533 if ret[0] is not None:
1533 if ret[0] is not None:
1534 cg, remote_heads = ret
1534 cg, remote_heads = ret
1535 return remote.addchangegroup(cg, 'push', self.url())
1535 return remote.addchangegroup(cg, 'push', self.url())
1536 return ret[1]
1536 return ret[1]
1537 finally:
1537 finally:
1538 del lock
1538 del lock
1539
1539
1540 def push_unbundle(self, remote, force, revs):
1540 def push_unbundle(self, remote, force, revs):
1541 # local repo finds heads on server, finds out what revs it
1541 # local repo finds heads on server, finds out what revs it
1542 # must push. once revs transferred, if server finds it has
1542 # must push. once revs transferred, if server finds it has
1543 # different heads (someone else won commit/push race), server
1543 # different heads (someone else won commit/push race), server
1544 # aborts.
1544 # aborts.
1545
1545
1546 ret = self.prepush(remote, force, revs)
1546 ret = self.prepush(remote, force, revs)
1547 if ret[0] is not None:
1547 if ret[0] is not None:
1548 cg, remote_heads = ret
1548 cg, remote_heads = ret
1549 if force: remote_heads = ['force']
1549 if force: remote_heads = ['force']
1550 return remote.unbundle(cg, remote_heads, 'push')
1550 return remote.unbundle(cg, remote_heads, 'push')
1551 return ret[1]
1551 return ret[1]
1552
1552
1553 def changegroupinfo(self, nodes, source):
1553 def changegroupinfo(self, nodes, source):
1554 if self.ui.verbose or source == 'bundle':
1554 if self.ui.verbose or source == 'bundle':
1555 self.ui.status(_("%d changesets found\n") % len(nodes))
1555 self.ui.status(_("%d changesets found\n") % len(nodes))
1556 if self.ui.debugflag:
1556 if self.ui.debugflag:
1557 self.ui.debug(_("List of changesets:\n"))
1557 self.ui.debug(_("List of changesets:\n"))
1558 for node in nodes:
1558 for node in nodes:
1559 self.ui.debug("%s\n" % hex(node))
1559 self.ui.debug("%s\n" % hex(node))
1560
1560
1561 def changegroupsubset(self, bases, heads, source, extranodes=None):
1561 def changegroupsubset(self, bases, heads, source, extranodes=None):
1562 """This function generates a changegroup consisting of all the nodes
1562 """This function generates a changegroup consisting of all the nodes
1563 that are descendents of any of the bases, and ancestors of any of
1563 that are descendents of any of the bases, and ancestors of any of
1564 the heads.
1564 the heads.
1565
1565
1566 It is fairly complex as determining which filenodes and which
1566 It is fairly complex as determining which filenodes and which
1567 manifest nodes need to be included for the changeset to be complete
1567 manifest nodes need to be included for the changeset to be complete
1568 is non-trivial.
1568 is non-trivial.
1569
1569
1570 Another wrinkle is doing the reverse, figuring out which changeset in
1570 Another wrinkle is doing the reverse, figuring out which changeset in
1571 the changegroup a particular filenode or manifestnode belongs to.
1571 the changegroup a particular filenode or manifestnode belongs to.
1572
1572
1573 The caller can specify some nodes that must be included in the
1573 The caller can specify some nodes that must be included in the
1574 changegroup using the extranodes argument. It should be a dict
1574 changegroup using the extranodes argument. It should be a dict
1575 where the keys are the filenames (or 1 for the manifest), and the
1575 where the keys are the filenames (or 1 for the manifest), and the
1576 values are lists of (node, linknode) tuples, where node is a wanted
1576 values are lists of (node, linknode) tuples, where node is a wanted
1577 node and linknode is the changelog node that should be transmitted as
1577 node and linknode is the changelog node that should be transmitted as
1578 the linkrev.
1578 the linkrev.
1579 """
1579 """
1580
1580
1581 if extranodes is None:
1581 if extranodes is None:
1582 # can we go through the fast path ?
1582 # can we go through the fast path ?
1583 heads.sort()
1583 heads.sort()
1584 allheads = self.heads()
1584 allheads = self.heads()
1585 allheads.sort()
1585 allheads.sort()
1586 if heads == allheads:
1586 if heads == allheads:
1587 common = []
1587 common = []
1588 # parents of bases are known from both sides
1588 # parents of bases are known from both sides
1589 for n in bases:
1589 for n in bases:
1590 for p in self.changelog.parents(n):
1590 for p in self.changelog.parents(n):
1591 if p != nullid:
1591 if p != nullid:
1592 common.append(p)
1592 common.append(p)
1593 return self._changegroup(common, source)
1593 return self._changegroup(common, source)
1594
1594
1595 self.hook('preoutgoing', throw=True, source=source)
1595 self.hook('preoutgoing', throw=True, source=source)
1596
1596
1597 # Set up some initial variables
1597 # Set up some initial variables
1598 # Make it easy to refer to self.changelog
1598 # Make it easy to refer to self.changelog
1599 cl = self.changelog
1599 cl = self.changelog
1600 # msng is short for missing - compute the list of changesets in this
1600 # msng is short for missing - compute the list of changesets in this
1601 # changegroup.
1601 # changegroup.
1602 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1602 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1603 self.changegroupinfo(msng_cl_lst, source)
1603 self.changegroupinfo(msng_cl_lst, source)
1604 # Some bases may turn out to be superfluous, and some heads may be
1604 # Some bases may turn out to be superfluous, and some heads may be
1605 # too. nodesbetween will return the minimal set of bases and heads
1605 # too. nodesbetween will return the minimal set of bases and heads
1606 # necessary to re-create the changegroup.
1606 # necessary to re-create the changegroup.
1607
1607
1608 # Known heads are the list of heads that it is assumed the recipient
1608 # Known heads are the list of heads that it is assumed the recipient
1609 # of this changegroup will know about.
1609 # of this changegroup will know about.
1610 knownheads = {}
1610 knownheads = {}
1611 # We assume that all parents of bases are known heads.
1611 # We assume that all parents of bases are known heads.
1612 for n in bases:
1612 for n in bases:
1613 for p in cl.parents(n):
1613 for p in cl.parents(n):
1614 if p != nullid:
1614 if p != nullid:
1615 knownheads[p] = 1
1615 knownheads[p] = 1
1616 knownheads = knownheads.keys()
1616 knownheads = knownheads.keys()
1617 if knownheads:
1617 if knownheads:
1618 # Now that we know what heads are known, we can compute which
1618 # Now that we know what heads are known, we can compute which
1619 # changesets are known. The recipient must know about all
1619 # changesets are known. The recipient must know about all
1620 # changesets required to reach the known heads from the null
1620 # changesets required to reach the known heads from the null
1621 # changeset.
1621 # changeset.
1622 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1622 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1623 junk = None
1623 junk = None
1624 # Transform the list into an ersatz set.
1624 # Transform the list into an ersatz set.
1625 has_cl_set = dict.fromkeys(has_cl_set)
1625 has_cl_set = dict.fromkeys(has_cl_set)
1626 else:
1626 else:
1627 # If there were no known heads, the recipient cannot be assumed to
1627 # If there were no known heads, the recipient cannot be assumed to
1628 # know about any changesets.
1628 # know about any changesets.
1629 has_cl_set = {}
1629 has_cl_set = {}
1630
1630
1631 # Make it easy to refer to self.manifest
1631 # Make it easy to refer to self.manifest
1632 mnfst = self.manifest
1632 mnfst = self.manifest
1633 # We don't know which manifests are missing yet
1633 # We don't know which manifests are missing yet
1634 msng_mnfst_set = {}
1634 msng_mnfst_set = {}
1635 # Nor do we know which filenodes are missing.
1635 # Nor do we know which filenodes are missing.
1636 msng_filenode_set = {}
1636 msng_filenode_set = {}
1637
1637
1638 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1638 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1639 junk = None
1639 junk = None
1640
1640
1641 # A changeset always belongs to itself, so the changenode lookup
1641 # A changeset always belongs to itself, so the changenode lookup
1642 # function for a changenode is identity.
1642 # function for a changenode is identity.
1643 def identity(x):
1643 def identity(x):
1644 return x
1644 return x
1645
1645
1646 # A function generating function. Sets up an environment for the
1646 # A function generating function. Sets up an environment for the
1647 # inner function.
1647 # inner function.
1648 def cmp_by_rev_func(revlog):
1648 def cmp_by_rev_func(revlog):
1649 # Compare two nodes by their revision number in the environment's
1649 # Compare two nodes by their revision number in the environment's
1650 # revision history. Since the revision number both represents the
1650 # revision history. Since the revision number both represents the
1651 # most efficient order to read the nodes in, and represents a
1651 # most efficient order to read the nodes in, and represents a
1652 # topological sorting of the nodes, this function is often useful.
1652 # topological sorting of the nodes, this function is often useful.
1653 def cmp_by_rev(a, b):
1653 def cmp_by_rev(a, b):
1654 return cmp(revlog.rev(a), revlog.rev(b))
1654 return cmp(revlog.rev(a), revlog.rev(b))
1655 return cmp_by_rev
1655 return cmp_by_rev
1656
1656
1657 # If we determine that a particular file or manifest node must be a
1657 # If we determine that a particular file or manifest node must be a
1658 # node that the recipient of the changegroup will already have, we can
1658 # node that the recipient of the changegroup will already have, we can
1659 # also assume the recipient will have all the parents. This function
1659 # also assume the recipient will have all the parents. This function
1660 # prunes them from the set of missing nodes.
1660 # prunes them from the set of missing nodes.
1661 def prune_parents(revlog, hasset, msngset):
1661 def prune_parents(revlog, hasset, msngset):
1662 haslst = hasset.keys()
1662 haslst = hasset.keys()
1663 haslst.sort(cmp_by_rev_func(revlog))
1663 haslst.sort(cmp_by_rev_func(revlog))
1664 for node in haslst:
1664 for node in haslst:
1665 parentlst = [p for p in revlog.parents(node) if p != nullid]
1665 parentlst = [p for p in revlog.parents(node) if p != nullid]
1666 while parentlst:
1666 while parentlst:
1667 n = parentlst.pop()
1667 n = parentlst.pop()
1668 if n not in hasset:
1668 if n not in hasset:
1669 hasset[n] = 1
1669 hasset[n] = 1
1670 p = [p for p in revlog.parents(n) if p != nullid]
1670 p = [p for p in revlog.parents(n) if p != nullid]
1671 parentlst.extend(p)
1671 parentlst.extend(p)
1672 for n in hasset:
1672 for n in hasset:
1673 msngset.pop(n, None)
1673 msngset.pop(n, None)
1674
1674
1675 # This is a function generating function used to set up an environment
1675 # This is a function generating function used to set up an environment
1676 # for the inner function to execute in.
1676 # for the inner function to execute in.
1677 def manifest_and_file_collector(changedfileset):
1677 def manifest_and_file_collector(changedfileset):
1678 # This is an information gathering function that gathers
1678 # This is an information gathering function that gathers
1679 # information from each changeset node that goes out as part of
1679 # information from each changeset node that goes out as part of
1680 # the changegroup. The information gathered is a list of which
1680 # the changegroup. The information gathered is a list of which
1681 # manifest nodes are potentially required (the recipient may
1681 # manifest nodes are potentially required (the recipient may
1682 # already have them) and total list of all files which were
1682 # already have them) and total list of all files which were
1683 # changed in any changeset in the changegroup.
1683 # changed in any changeset in the changegroup.
1684 #
1684 #
1685 # We also remember the first changenode we saw any manifest
1685 # We also remember the first changenode we saw any manifest
1686 # referenced by so we can later determine which changenode 'owns'
1686 # referenced by so we can later determine which changenode 'owns'
1687 # the manifest.
1687 # the manifest.
1688 def collect_manifests_and_files(clnode):
1688 def collect_manifests_and_files(clnode):
1689 c = cl.read(clnode)
1689 c = cl.read(clnode)
1690 for f in c[3]:
1690 for f in c[3]:
1691 # This is to make sure we only have one instance of each
1691 # This is to make sure we only have one instance of each
1692 # filename string for each filename.
1692 # filename string for each filename.
1693 changedfileset.setdefault(f, f)
1693 changedfileset.setdefault(f, f)
1694 msng_mnfst_set.setdefault(c[0], clnode)
1694 msng_mnfst_set.setdefault(c[0], clnode)
1695 return collect_manifests_and_files
1695 return collect_manifests_and_files
1696
1696
1697 # Figure out which manifest nodes (of the ones we think might be part
1697 # Figure out which manifest nodes (of the ones we think might be part
1698 # of the changegroup) the recipient must know about and remove them
1698 # of the changegroup) the recipient must know about and remove them
1699 # from the changegroup.
1699 # from the changegroup.
1700 def prune_manifests():
1700 def prune_manifests():
1701 has_mnfst_set = {}
1701 has_mnfst_set = {}
1702 for n in msng_mnfst_set:
1702 for n in msng_mnfst_set:
1703 # If a 'missing' manifest thinks it belongs to a changenode
1703 # If a 'missing' manifest thinks it belongs to a changenode
1704 # the recipient is assumed to have, obviously the recipient
1704 # the recipient is assumed to have, obviously the recipient
1705 # must have that manifest.
1705 # must have that manifest.
1706 linknode = cl.node(mnfst.linkrev(n))
1706 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1707 if linknode in has_cl_set:
1707 if linknode in has_cl_set:
1708 has_mnfst_set[n] = 1
1708 has_mnfst_set[n] = 1
1709 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1709 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1710
1710
1711 # Use the information collected in collect_manifests_and_files to say
1711 # Use the information collected in collect_manifests_and_files to say
1712 # which changenode any manifestnode belongs to.
1712 # which changenode any manifestnode belongs to.
1713 def lookup_manifest_link(mnfstnode):
1713 def lookup_manifest_link(mnfstnode):
1714 return msng_mnfst_set[mnfstnode]
1714 return msng_mnfst_set[mnfstnode]
1715
1715
1716 # A function generating function that sets up the initial environment
1716 # A function generating function that sets up the initial environment
1717 # the inner function.
1717 # the inner function.
1718 def filenode_collector(changedfiles):
1718 def filenode_collector(changedfiles):
1719 next_rev = [0]
1719 next_rev = [0]
1720 # This gathers information from each manifestnode included in the
1720 # This gathers information from each manifestnode included in the
1721 # changegroup about which filenodes the manifest node references
1721 # changegroup about which filenodes the manifest node references
1722 # so we can include those in the changegroup too.
1722 # so we can include those in the changegroup too.
1723 #
1723 #
1724 # It also remembers which changenode each filenode belongs to. It
1724 # It also remembers which changenode each filenode belongs to. It
1725 # does this by assuming the a filenode belongs to the changenode
1725 # does this by assuming the a filenode belongs to the changenode
1726 # the first manifest that references it belongs to.
1726 # the first manifest that references it belongs to.
1727 def collect_msng_filenodes(mnfstnode):
1727 def collect_msng_filenodes(mnfstnode):
1728 r = mnfst.rev(mnfstnode)
1728 r = mnfst.rev(mnfstnode)
1729 if r == next_rev[0]:
1729 if r == next_rev[0]:
1730 # If the last rev we looked at was the one just previous,
1730 # If the last rev we looked at was the one just previous,
1731 # we only need to see a diff.
1731 # we only need to see a diff.
1732 deltamf = mnfst.readdelta(mnfstnode)
1732 deltamf = mnfst.readdelta(mnfstnode)
1733 # For each line in the delta
1733 # For each line in the delta
1734 for f, fnode in deltamf.items():
1734 for f, fnode in deltamf.items():
1735 f = changedfiles.get(f, None)
1735 f = changedfiles.get(f, None)
1736 # And if the file is in the list of files we care
1736 # And if the file is in the list of files we care
1737 # about.
1737 # about.
1738 if f is not None:
1738 if f is not None:
1739 # Get the changenode this manifest belongs to
1739 # Get the changenode this manifest belongs to
1740 clnode = msng_mnfst_set[mnfstnode]
1740 clnode = msng_mnfst_set[mnfstnode]
1741 # Create the set of filenodes for the file if
1741 # Create the set of filenodes for the file if
1742 # there isn't one already.
1742 # there isn't one already.
1743 ndset = msng_filenode_set.setdefault(f, {})
1743 ndset = msng_filenode_set.setdefault(f, {})
1744 # And set the filenode's changelog node to the
1744 # And set the filenode's changelog node to the
1745 # manifest's if it hasn't been set already.
1745 # manifest's if it hasn't been set already.
1746 ndset.setdefault(fnode, clnode)
1746 ndset.setdefault(fnode, clnode)
1747 else:
1747 else:
1748 # Otherwise we need a full manifest.
1748 # Otherwise we need a full manifest.
1749 m = mnfst.read(mnfstnode)
1749 m = mnfst.read(mnfstnode)
1750 # For every file in we care about.
1750 # For every file in we care about.
1751 for f in changedfiles:
1751 for f in changedfiles:
1752 fnode = m.get(f, None)
1752 fnode = m.get(f, None)
1753 # If it's in the manifest
1753 # If it's in the manifest
1754 if fnode is not None:
1754 if fnode is not None:
1755 # See comments above.
1755 # See comments above.
1756 clnode = msng_mnfst_set[mnfstnode]
1756 clnode = msng_mnfst_set[mnfstnode]
1757 ndset = msng_filenode_set.setdefault(f, {})
1757 ndset = msng_filenode_set.setdefault(f, {})
1758 ndset.setdefault(fnode, clnode)
1758 ndset.setdefault(fnode, clnode)
1759 # Remember the revision we hope to see next.
1759 # Remember the revision we hope to see next.
1760 next_rev[0] = r + 1
1760 next_rev[0] = r + 1
1761 return collect_msng_filenodes
1761 return collect_msng_filenodes
1762
1762
1763 # We have a list of filenodes we think we need for a file, lets remove
1763 # We have a list of filenodes we think we need for a file, lets remove
1764 # all those we now the recipient must have.
1764 # all those we now the recipient must have.
1765 def prune_filenodes(f, filerevlog):
1765 def prune_filenodes(f, filerevlog):
1766 msngset = msng_filenode_set[f]
1766 msngset = msng_filenode_set[f]
1767 hasset = {}
1767 hasset = {}
1768 # If a 'missing' filenode thinks it belongs to a changenode we
1768 # If a 'missing' filenode thinks it belongs to a changenode we
1769 # assume the recipient must have, then the recipient must have
1769 # assume the recipient must have, then the recipient must have
1770 # that filenode.
1770 # that filenode.
1771 for n in msngset:
1771 for n in msngset:
1772 clnode = cl.node(filerevlog.linkrev(n))
1772 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1773 if clnode in has_cl_set:
1773 if clnode in has_cl_set:
1774 hasset[n] = 1
1774 hasset[n] = 1
1775 prune_parents(filerevlog, hasset, msngset)
1775 prune_parents(filerevlog, hasset, msngset)
1776
1776
1777 # A function generator function that sets up the a context for the
1777 # A function generator function that sets up the a context for the
1778 # inner function.
1778 # inner function.
1779 def lookup_filenode_link_func(fname):
1779 def lookup_filenode_link_func(fname):
1780 msngset = msng_filenode_set[fname]
1780 msngset = msng_filenode_set[fname]
1781 # Lookup the changenode the filenode belongs to.
1781 # Lookup the changenode the filenode belongs to.
1782 def lookup_filenode_link(fnode):
1782 def lookup_filenode_link(fnode):
1783 return msngset[fnode]
1783 return msngset[fnode]
1784 return lookup_filenode_link
1784 return lookup_filenode_link
1785
1785
1786 # Add the nodes that were explicitly requested.
1786 # Add the nodes that were explicitly requested.
1787 def add_extra_nodes(name, nodes):
1787 def add_extra_nodes(name, nodes):
1788 if not extranodes or name not in extranodes:
1788 if not extranodes or name not in extranodes:
1789 return
1789 return
1790
1790
1791 for node, linknode in extranodes[name]:
1791 for node, linknode in extranodes[name]:
1792 if node not in nodes:
1792 if node not in nodes:
1793 nodes[node] = linknode
1793 nodes[node] = linknode
1794
1794
1795 # Now that we have all theses utility functions to help out and
1795 # Now that we have all theses utility functions to help out and
1796 # logically divide up the task, generate the group.
1796 # logically divide up the task, generate the group.
1797 def gengroup():
1797 def gengroup():
1798 # The set of changed files starts empty.
1798 # The set of changed files starts empty.
1799 changedfiles = {}
1799 changedfiles = {}
1800 # Create a changenode group generator that will call our functions
1800 # Create a changenode group generator that will call our functions
1801 # back to lookup the owning changenode and collect information.
1801 # back to lookup the owning changenode and collect information.
1802 group = cl.group(msng_cl_lst, identity,
1802 group = cl.group(msng_cl_lst, identity,
1803 manifest_and_file_collector(changedfiles))
1803 manifest_and_file_collector(changedfiles))
1804 for chnk in group:
1804 for chnk in group:
1805 yield chnk
1805 yield chnk
1806
1806
1807 # The list of manifests has been collected by the generator
1807 # The list of manifests has been collected by the generator
1808 # calling our functions back.
1808 # calling our functions back.
1809 prune_manifests()
1809 prune_manifests()
1810 add_extra_nodes(1, msng_mnfst_set)
1810 add_extra_nodes(1, msng_mnfst_set)
1811 msng_mnfst_lst = msng_mnfst_set.keys()
1811 msng_mnfst_lst = msng_mnfst_set.keys()
1812 # Sort the manifestnodes by revision number.
1812 # Sort the manifestnodes by revision number.
1813 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1813 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1814 # Create a generator for the manifestnodes that calls our lookup
1814 # Create a generator for the manifestnodes that calls our lookup
1815 # and data collection functions back.
1815 # and data collection functions back.
1816 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1816 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1817 filenode_collector(changedfiles))
1817 filenode_collector(changedfiles))
1818 for chnk in group:
1818 for chnk in group:
1819 yield chnk
1819 yield chnk
1820
1820
1821 # These are no longer needed, dereference and toss the memory for
1821 # These are no longer needed, dereference and toss the memory for
1822 # them.
1822 # them.
1823 msng_mnfst_lst = None
1823 msng_mnfst_lst = None
1824 msng_mnfst_set.clear()
1824 msng_mnfst_set.clear()
1825
1825
1826 if extranodes:
1826 if extranodes:
1827 for fname in extranodes:
1827 for fname in extranodes:
1828 if isinstance(fname, int):
1828 if isinstance(fname, int):
1829 continue
1829 continue
1830 msng_filenode_set.setdefault(fname, {})
1830 msng_filenode_set.setdefault(fname, {})
1831 changedfiles[fname] = 1
1831 changedfiles[fname] = 1
1832 # Go through all our files in order sorted by name.
1832 # Go through all our files in order sorted by name.
1833 for fname in util.sort(changedfiles):
1833 for fname in util.sort(changedfiles):
1834 filerevlog = self.file(fname)
1834 filerevlog = self.file(fname)
1835 if not len(filerevlog):
1835 if not len(filerevlog):
1836 raise util.Abort(_("empty or missing revlog for %s") % fname)
1836 raise util.Abort(_("empty or missing revlog for %s") % fname)
1837 # Toss out the filenodes that the recipient isn't really
1837 # Toss out the filenodes that the recipient isn't really
1838 # missing.
1838 # missing.
1839 if fname in msng_filenode_set:
1839 if fname in msng_filenode_set:
1840 prune_filenodes(fname, filerevlog)
1840 prune_filenodes(fname, filerevlog)
1841 add_extra_nodes(fname, msng_filenode_set[fname])
1841 add_extra_nodes(fname, msng_filenode_set[fname])
1842 msng_filenode_lst = msng_filenode_set[fname].keys()
1842 msng_filenode_lst = msng_filenode_set[fname].keys()
1843 else:
1843 else:
1844 msng_filenode_lst = []
1844 msng_filenode_lst = []
1845 # If any filenodes are left, generate the group for them,
1845 # If any filenodes are left, generate the group for them,
1846 # otherwise don't bother.
1846 # otherwise don't bother.
1847 if len(msng_filenode_lst) > 0:
1847 if len(msng_filenode_lst) > 0:
1848 yield changegroup.chunkheader(len(fname))
1848 yield changegroup.chunkheader(len(fname))
1849 yield fname
1849 yield fname
1850 # Sort the filenodes by their revision #
1850 # Sort the filenodes by their revision #
1851 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1851 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1852 # Create a group generator and only pass in a changenode
1852 # Create a group generator and only pass in a changenode
1853 # lookup function as we need to collect no information
1853 # lookup function as we need to collect no information
1854 # from filenodes.
1854 # from filenodes.
1855 group = filerevlog.group(msng_filenode_lst,
1855 group = filerevlog.group(msng_filenode_lst,
1856 lookup_filenode_link_func(fname))
1856 lookup_filenode_link_func(fname))
1857 for chnk in group:
1857 for chnk in group:
1858 yield chnk
1858 yield chnk
1859 if fname in msng_filenode_set:
1859 if fname in msng_filenode_set:
1860 # Don't need this anymore, toss it to free memory.
1860 # Don't need this anymore, toss it to free memory.
1861 del msng_filenode_set[fname]
1861 del msng_filenode_set[fname]
1862 # Signal that no more groups are left.
1862 # Signal that no more groups are left.
1863 yield changegroup.closechunk()
1863 yield changegroup.closechunk()
1864
1864
1865 if msng_cl_lst:
1865 if msng_cl_lst:
1866 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1866 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1867
1867
1868 return util.chunkbuffer(gengroup())
1868 return util.chunkbuffer(gengroup())
1869
1869
1870 def changegroup(self, basenodes, source):
1870 def changegroup(self, basenodes, source):
1871 # to avoid a race we use changegroupsubset() (issue1320)
1871 # to avoid a race we use changegroupsubset() (issue1320)
1872 return self.changegroupsubset(basenodes, self.heads(), source)
1872 return self.changegroupsubset(basenodes, self.heads(), source)
1873
1873
1874 def _changegroup(self, common, source):
1874 def _changegroup(self, common, source):
1875 """Generate a changegroup of all nodes that we have that a recipient
1875 """Generate a changegroup of all nodes that we have that a recipient
1876 doesn't.
1876 doesn't.
1877
1877
1878 This is much easier than the previous function as we can assume that
1878 This is much easier than the previous function as we can assume that
1879 the recipient has any changenode we aren't sending them.
1879 the recipient has any changenode we aren't sending them.
1880
1880
1881 common is the set of common nodes between remote and self"""
1881 common is the set of common nodes between remote and self"""
1882
1882
1883 self.hook('preoutgoing', throw=True, source=source)
1883 self.hook('preoutgoing', throw=True, source=source)
1884
1884
1885 cl = self.changelog
1885 cl = self.changelog
1886 nodes = cl.findmissing(common)
1886 nodes = cl.findmissing(common)
1887 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1887 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1888 self.changegroupinfo(nodes, source)
1888 self.changegroupinfo(nodes, source)
1889
1889
1890 def identity(x):
1890 def identity(x):
1891 return x
1891 return x
1892
1892
1893 def gennodelst(log):
1893 def gennodelst(log):
1894 for r in log:
1894 for r in log:
1895 n = log.node(r)
1895 if log.linkrev(r) in revset:
1896 if log.linkrev(n) in revset:
1896 yield log.node(r)
1897 yield n
1898
1897
1899 def changed_file_collector(changedfileset):
1898 def changed_file_collector(changedfileset):
1900 def collect_changed_files(clnode):
1899 def collect_changed_files(clnode):
1901 c = cl.read(clnode)
1900 c = cl.read(clnode)
1902 for fname in c[3]:
1901 for fname in c[3]:
1903 changedfileset[fname] = 1
1902 changedfileset[fname] = 1
1904 return collect_changed_files
1903 return collect_changed_files
1905
1904
1906 def lookuprevlink_func(revlog):
1905 def lookuprevlink_func(revlog):
1907 def lookuprevlink(n):
1906 def lookuprevlink(n):
1908 return cl.node(revlog.linkrev(n))
1907 return cl.node(revlog.linkrev(revlog.rev(n)))
1909 return lookuprevlink
1908 return lookuprevlink
1910
1909
1911 def gengroup():
1910 def gengroup():
1912 # construct a list of all changed files
1911 # construct a list of all changed files
1913 changedfiles = {}
1912 changedfiles = {}
1914
1913
1915 for chnk in cl.group(nodes, identity,
1914 for chnk in cl.group(nodes, identity,
1916 changed_file_collector(changedfiles)):
1915 changed_file_collector(changedfiles)):
1917 yield chnk
1916 yield chnk
1918
1917
1919 mnfst = self.manifest
1918 mnfst = self.manifest
1920 nodeiter = gennodelst(mnfst)
1919 nodeiter = gennodelst(mnfst)
1921 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1920 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1922 yield chnk
1921 yield chnk
1923
1922
1924 for fname in util.sort(changedfiles):
1923 for fname in util.sort(changedfiles):
1925 filerevlog = self.file(fname)
1924 filerevlog = self.file(fname)
1926 if not len(filerevlog):
1925 if not len(filerevlog):
1927 raise util.Abort(_("empty or missing revlog for %s") % fname)
1926 raise util.Abort(_("empty or missing revlog for %s") % fname)
1928 nodeiter = gennodelst(filerevlog)
1927 nodeiter = gennodelst(filerevlog)
1929 nodeiter = list(nodeiter)
1928 nodeiter = list(nodeiter)
1930 if nodeiter:
1929 if nodeiter:
1931 yield changegroup.chunkheader(len(fname))
1930 yield changegroup.chunkheader(len(fname))
1932 yield fname
1931 yield fname
1933 lookup = lookuprevlink_func(filerevlog)
1932 lookup = lookuprevlink_func(filerevlog)
1934 for chnk in filerevlog.group(nodeiter, lookup):
1933 for chnk in filerevlog.group(nodeiter, lookup):
1935 yield chnk
1934 yield chnk
1936
1935
1937 yield changegroup.closechunk()
1936 yield changegroup.closechunk()
1938
1937
1939 if nodes:
1938 if nodes:
1940 self.hook('outgoing', node=hex(nodes[0]), source=source)
1939 self.hook('outgoing', node=hex(nodes[0]), source=source)
1941
1940
1942 return util.chunkbuffer(gengroup())
1941 return util.chunkbuffer(gengroup())
1943
1942
1944 def addchangegroup(self, source, srctype, url, emptyok=False):
1943 def addchangegroup(self, source, srctype, url, emptyok=False):
1945 """add changegroup to repo.
1944 """add changegroup to repo.
1946
1945
1947 return values:
1946 return values:
1948 - nothing changed or no source: 0
1947 - nothing changed or no source: 0
1949 - more heads than before: 1+added heads (2..n)
1948 - more heads than before: 1+added heads (2..n)
1950 - less heads than before: -1-removed heads (-2..-n)
1949 - less heads than before: -1-removed heads (-2..-n)
1951 - number of heads stays the same: 1
1950 - number of heads stays the same: 1
1952 """
1951 """
1953 def csmap(x):
1952 def csmap(x):
1954 self.ui.debug(_("add changeset %s\n") % short(x))
1953 self.ui.debug(_("add changeset %s\n") % short(x))
1955 return len(cl)
1954 return len(cl)
1956
1955
1957 def revmap(x):
1956 def revmap(x):
1958 return cl.rev(x)
1957 return cl.rev(x)
1959
1958
1960 if not source:
1959 if not source:
1961 return 0
1960 return 0
1962
1961
1963 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1962 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1964
1963
1965 changesets = files = revisions = 0
1964 changesets = files = revisions = 0
1966
1965
1967 # write changelog data to temp files so concurrent readers will not see
1966 # write changelog data to temp files so concurrent readers will not see
1968 # inconsistent view
1967 # inconsistent view
1969 cl = self.changelog
1968 cl = self.changelog
1970 cl.delayupdate()
1969 cl.delayupdate()
1971 oldheads = len(cl.heads())
1970 oldheads = len(cl.heads())
1972
1971
1973 tr = self.transaction()
1972 tr = self.transaction()
1974 try:
1973 try:
1975 trp = weakref.proxy(tr)
1974 trp = weakref.proxy(tr)
1976 # pull off the changeset group
1975 # pull off the changeset group
1977 self.ui.status(_("adding changesets\n"))
1976 self.ui.status(_("adding changesets\n"))
1978 cor = len(cl) - 1
1977 cor = len(cl) - 1
1979 chunkiter = changegroup.chunkiter(source)
1978 chunkiter = changegroup.chunkiter(source)
1980 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1979 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1981 raise util.Abort(_("received changelog group is empty"))
1980 raise util.Abort(_("received changelog group is empty"))
1982 cnr = len(cl) - 1
1981 cnr = len(cl) - 1
1983 changesets = cnr - cor
1982 changesets = cnr - cor
1984
1983
1985 # pull off the manifest group
1984 # pull off the manifest group
1986 self.ui.status(_("adding manifests\n"))
1985 self.ui.status(_("adding manifests\n"))
1987 chunkiter = changegroup.chunkiter(source)
1986 chunkiter = changegroup.chunkiter(source)
1988 # no need to check for empty manifest group here:
1987 # no need to check for empty manifest group here:
1989 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1988 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1990 # no new manifest will be created and the manifest group will
1989 # no new manifest will be created and the manifest group will
1991 # be empty during the pull
1990 # be empty during the pull
1992 self.manifest.addgroup(chunkiter, revmap, trp)
1991 self.manifest.addgroup(chunkiter, revmap, trp)
1993
1992
1994 # process the files
1993 # process the files
1995 self.ui.status(_("adding file changes\n"))
1994 self.ui.status(_("adding file changes\n"))
1996 while 1:
1995 while 1:
1997 f = changegroup.getchunk(source)
1996 f = changegroup.getchunk(source)
1998 if not f:
1997 if not f:
1999 break
1998 break
2000 self.ui.debug(_("adding %s revisions\n") % f)
1999 self.ui.debug(_("adding %s revisions\n") % f)
2001 fl = self.file(f)
2000 fl = self.file(f)
2002 o = len(fl)
2001 o = len(fl)
2003 chunkiter = changegroup.chunkiter(source)
2002 chunkiter = changegroup.chunkiter(source)
2004 if fl.addgroup(chunkiter, revmap, trp) is None:
2003 if fl.addgroup(chunkiter, revmap, trp) is None:
2005 raise util.Abort(_("received file revlog group is empty"))
2004 raise util.Abort(_("received file revlog group is empty"))
2006 revisions += len(fl) - o
2005 revisions += len(fl) - o
2007 files += 1
2006 files += 1
2008
2007
2009 # make changelog see real files again
2008 # make changelog see real files again
2010 cl.finalize(trp)
2009 cl.finalize(trp)
2011
2010
2012 newheads = len(self.changelog.heads())
2011 newheads = len(self.changelog.heads())
2013 heads = ""
2012 heads = ""
2014 if oldheads and newheads != oldheads:
2013 if oldheads and newheads != oldheads:
2015 heads = _(" (%+d heads)") % (newheads - oldheads)
2014 heads = _(" (%+d heads)") % (newheads - oldheads)
2016
2015
2017 self.ui.status(_("added %d changesets"
2016 self.ui.status(_("added %d changesets"
2018 " with %d changes to %d files%s\n")
2017 " with %d changes to %d files%s\n")
2019 % (changesets, revisions, files, heads))
2018 % (changesets, revisions, files, heads))
2020
2019
2021 if changesets > 0:
2020 if changesets > 0:
2022 self.hook('pretxnchangegroup', throw=True,
2021 self.hook('pretxnchangegroup', throw=True,
2023 node=hex(self.changelog.node(cor+1)), source=srctype,
2022 node=hex(self.changelog.node(cor+1)), source=srctype,
2024 url=url)
2023 url=url)
2025
2024
2026 tr.close()
2025 tr.close()
2027 finally:
2026 finally:
2028 del tr
2027 del tr
2029
2028
2030 if changesets > 0:
2029 if changesets > 0:
2031 # forcefully update the on-disk branch cache
2030 # forcefully update the on-disk branch cache
2032 self.ui.debug(_("updating the branch cache\n"))
2031 self.ui.debug(_("updating the branch cache\n"))
2033 self.branchtags()
2032 self.branchtags()
2034 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2033 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2035 source=srctype, url=url)
2034 source=srctype, url=url)
2036
2035
2037 for i in xrange(cor + 1, cnr + 1):
2036 for i in xrange(cor + 1, cnr + 1):
2038 self.hook("incoming", node=hex(self.changelog.node(i)),
2037 self.hook("incoming", node=hex(self.changelog.node(i)),
2039 source=srctype, url=url)
2038 source=srctype, url=url)
2040
2039
2041 # never return 0 here:
2040 # never return 0 here:
2042 if newheads < oldheads:
2041 if newheads < oldheads:
2043 return newheads - oldheads - 1
2042 return newheads - oldheads - 1
2044 else:
2043 else:
2045 return newheads - oldheads + 1
2044 return newheads - oldheads + 1
2046
2045
2047
2046
2048 def stream_in(self, remote):
2047 def stream_in(self, remote):
2049 fp = remote.stream_out()
2048 fp = remote.stream_out()
2050 l = fp.readline()
2049 l = fp.readline()
2051 try:
2050 try:
2052 resp = int(l)
2051 resp = int(l)
2053 except ValueError:
2052 except ValueError:
2054 raise util.UnexpectedOutput(
2053 raise util.UnexpectedOutput(
2055 _('Unexpected response from remote server:'), l)
2054 _('Unexpected response from remote server:'), l)
2056 if resp == 1:
2055 if resp == 1:
2057 raise util.Abort(_('operation forbidden by server'))
2056 raise util.Abort(_('operation forbidden by server'))
2058 elif resp == 2:
2057 elif resp == 2:
2059 raise util.Abort(_('locking the remote repository failed'))
2058 raise util.Abort(_('locking the remote repository failed'))
2060 elif resp != 0:
2059 elif resp != 0:
2061 raise util.Abort(_('the server sent an unknown error code'))
2060 raise util.Abort(_('the server sent an unknown error code'))
2062 self.ui.status(_('streaming all changes\n'))
2061 self.ui.status(_('streaming all changes\n'))
2063 l = fp.readline()
2062 l = fp.readline()
2064 try:
2063 try:
2065 total_files, total_bytes = map(int, l.split(' ', 1))
2064 total_files, total_bytes = map(int, l.split(' ', 1))
2066 except (ValueError, TypeError):
2065 except (ValueError, TypeError):
2067 raise util.UnexpectedOutput(
2066 raise util.UnexpectedOutput(
2068 _('Unexpected response from remote server:'), l)
2067 _('Unexpected response from remote server:'), l)
2069 self.ui.status(_('%d files to transfer, %s of data\n') %
2068 self.ui.status(_('%d files to transfer, %s of data\n') %
2070 (total_files, util.bytecount(total_bytes)))
2069 (total_files, util.bytecount(total_bytes)))
2071 start = time.time()
2070 start = time.time()
2072 for i in xrange(total_files):
2071 for i in xrange(total_files):
2073 # XXX doesn't support '\n' or '\r' in filenames
2072 # XXX doesn't support '\n' or '\r' in filenames
2074 l = fp.readline()
2073 l = fp.readline()
2075 try:
2074 try:
2076 name, size = l.split('\0', 1)
2075 name, size = l.split('\0', 1)
2077 size = int(size)
2076 size = int(size)
2078 except (ValueError, TypeError):
2077 except (ValueError, TypeError):
2079 raise util.UnexpectedOutput(
2078 raise util.UnexpectedOutput(
2080 _('Unexpected response from remote server:'), l)
2079 _('Unexpected response from remote server:'), l)
2081 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2080 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2082 ofp = self.sopener(name, 'w')
2081 ofp = self.sopener(name, 'w')
2083 for chunk in util.filechunkiter(fp, limit=size):
2082 for chunk in util.filechunkiter(fp, limit=size):
2084 ofp.write(chunk)
2083 ofp.write(chunk)
2085 ofp.close()
2084 ofp.close()
2086 elapsed = time.time() - start
2085 elapsed = time.time() - start
2087 if elapsed <= 0:
2086 if elapsed <= 0:
2088 elapsed = 0.001
2087 elapsed = 0.001
2089 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2088 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2090 (util.bytecount(total_bytes), elapsed,
2089 (util.bytecount(total_bytes), elapsed,
2091 util.bytecount(total_bytes / elapsed)))
2090 util.bytecount(total_bytes / elapsed)))
2092 self.invalidate()
2091 self.invalidate()
2093 return len(self.heads()) + 1
2092 return len(self.heads()) + 1
2094
2093
2095 def clone(self, remote, heads=[], stream=False):
2094 def clone(self, remote, heads=[], stream=False):
2096 '''clone remote repository.
2095 '''clone remote repository.
2097
2096
2098 keyword arguments:
2097 keyword arguments:
2099 heads: list of revs to clone (forces use of pull)
2098 heads: list of revs to clone (forces use of pull)
2100 stream: use streaming clone if possible'''
2099 stream: use streaming clone if possible'''
2101
2100
2102 # now, all clients that can request uncompressed clones can
2101 # now, all clients that can request uncompressed clones can
2103 # read repo formats supported by all servers that can serve
2102 # read repo formats supported by all servers that can serve
2104 # them.
2103 # them.
2105
2104
2106 # if revlog format changes, client will have to check version
2105 # if revlog format changes, client will have to check version
2107 # and format flags on "stream" capability, and use
2106 # and format flags on "stream" capability, and use
2108 # uncompressed only if compatible.
2107 # uncompressed only if compatible.
2109
2108
2110 if stream and not heads and remote.capable('stream'):
2109 if stream and not heads and remote.capable('stream'):
2111 return self.stream_in(remote)
2110 return self.stream_in(remote)
2112 return self.pull(remote, heads)
2111 return self.pull(remote, heads)
2113
2112
2114 # used to avoid circular references so destructors work
2113 # used to avoid circular references so destructors work
2115 def aftertrans(files):
2114 def aftertrans(files):
2116 renamefiles = [tuple(t) for t in files]
2115 renamefiles = [tuple(t) for t in files]
2117 def a():
2116 def a():
2118 for src, dest in renamefiles:
2117 for src, dest in renamefiles:
2119 util.rename(src, dest)
2118 util.rename(src, dest)
2120 return a
2119 return a
2121
2120
2122 def instance(ui, path, create):
2121 def instance(ui, path, create):
2123 return localrepository(ui, util.drop_scheme('file', path), create)
2122 return localrepository(ui, util.drop_scheme('file', path), create)
2124
2123
2125 def islocal(path):
2124 def islocal(path):
2126 return True
2125 return True
@@ -1,136 +1,135 b''
1 # repair.py - functions for repository repair for mercurial
1 # repair.py - functions for repository repair for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 # Copyright 2007 Matt Mackall
4 # Copyright 2007 Matt Mackall
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import changegroup, os
9 import changegroup, os
10 from node import nullrev, short
10 from node import nullrev, short
11 from i18n import _
11 from i18n import _
12
12
13 def _bundle(repo, bases, heads, node, suffix, extranodes=None):
13 def _bundle(repo, bases, heads, node, suffix, extranodes=None):
14 """create a bundle with the specified revisions as a backup"""
14 """create a bundle with the specified revisions as a backup"""
15 cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
15 cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
16 backupdir = repo.join("strip-backup")
16 backupdir = repo.join("strip-backup")
17 if not os.path.isdir(backupdir):
17 if not os.path.isdir(backupdir):
18 os.mkdir(backupdir)
18 os.mkdir(backupdir)
19 name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
19 name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
20 repo.ui.warn(_("saving bundle to %s\n") % name)
20 repo.ui.warn(_("saving bundle to %s\n") % name)
21 return changegroup.writebundle(cg, name, "HG10BZ")
21 return changegroup.writebundle(cg, name, "HG10BZ")
22
22
23 def _collectfiles(repo, striprev):
23 def _collectfiles(repo, striprev):
24 """find out the filelogs affected by the strip"""
24 """find out the filelogs affected by the strip"""
25 files = {}
25 files = {}
26
26
27 for x in xrange(striprev, len(repo)):
27 for x in xrange(striprev, len(repo)):
28 for name in repo[x].files():
28 for name in repo[x].files():
29 if name in files:
29 if name in files:
30 continue
30 continue
31 files[name] = 1
31 files[name] = 1
32
32
33 files = files.keys()
33 files = files.keys()
34 files.sort()
34 files.sort()
35 return files
35 return files
36
36
37 def _collectextranodes(repo, files, link):
37 def _collectextranodes(repo, files, link):
38 """return the nodes that have to be saved before the strip"""
38 """return the nodes that have to be saved before the strip"""
39 def collectone(revlog):
39 def collectone(revlog):
40 extra = []
40 extra = []
41 startrev = count = len(revlog)
41 startrev = count = len(revlog)
42 # find the truncation point of the revlog
42 # find the truncation point of the revlog
43 for i in xrange(0, count):
43 for i in xrange(0, count):
44 node = revlog.node(i)
44 lrev = revlog.linkrev(i)
45 lrev = revlog.linkrev(node)
46 if lrev >= link:
45 if lrev >= link:
47 startrev = i + 1
46 startrev = i + 1
48 break
47 break
49
48
50 # see if any revision after that point has a linkrev less than link
49 # see if any revision after that point has a linkrev less than link
51 # (we have to manually save these guys)
50 # (we have to manually save these guys)
52 for i in xrange(startrev, count):
51 for i in xrange(startrev, count):
53 node = revlog.node(i)
52 node = revlog.node(i)
54 lrev = revlog.linkrev(node)
53 lrev = revlog.linkrev(i)
55 if lrev < link:
54 if lrev < link:
56 extra.append((node, cl.node(lrev)))
55 extra.append((node, cl.node(lrev)))
57
56
58 return extra
57 return extra
59
58
60 extranodes = {}
59 extranodes = {}
61 cl = repo.changelog
60 cl = repo.changelog
62 extra = collectone(repo.manifest)
61 extra = collectone(repo.manifest)
63 if extra:
62 if extra:
64 extranodes[1] = extra
63 extranodes[1] = extra
65 for fname in files:
64 for fname in files:
66 f = repo.file(fname)
65 f = repo.file(fname)
67 extra = collectone(f)
66 extra = collectone(f)
68 if extra:
67 if extra:
69 extranodes[fname] = extra
68 extranodes[fname] = extra
70
69
71 return extranodes
70 return extranodes
72
71
73 def strip(ui, repo, node, backup="all"):
72 def strip(ui, repo, node, backup="all"):
74 cl = repo.changelog
73 cl = repo.changelog
75 # TODO delete the undo files, and handle undo of merge sets
74 # TODO delete the undo files, and handle undo of merge sets
76 striprev = cl.rev(node)
75 striprev = cl.rev(node)
77
76
78 # Some revisions with rev > striprev may not be descendants of striprev.
77 # Some revisions with rev > striprev may not be descendants of striprev.
79 # We have to find these revisions and put them in a bundle, so that
78 # We have to find these revisions and put them in a bundle, so that
80 # we can restore them after the truncations.
79 # we can restore them after the truncations.
81 # To create the bundle we use repo.changegroupsubset which requires
80 # To create the bundle we use repo.changegroupsubset which requires
82 # the list of heads and bases of the set of interesting revisions.
81 # the list of heads and bases of the set of interesting revisions.
83 # (head = revision in the set that has no descendant in the set;
82 # (head = revision in the set that has no descendant in the set;
84 # base = revision in the set that has no ancestor in the set)
83 # base = revision in the set that has no ancestor in the set)
85 tostrip = {striprev: 1}
84 tostrip = {striprev: 1}
86 saveheads = {}
85 saveheads = {}
87 savebases = []
86 savebases = []
88 for r in xrange(striprev + 1, len(cl)):
87 for r in xrange(striprev + 1, len(cl)):
89 parents = cl.parentrevs(r)
88 parents = cl.parentrevs(r)
90 if parents[0] in tostrip or parents[1] in tostrip:
89 if parents[0] in tostrip or parents[1] in tostrip:
91 # r is a descendant of striprev
90 # r is a descendant of striprev
92 tostrip[r] = 1
91 tostrip[r] = 1
93 # if this is a merge and one of the parents does not descend
92 # if this is a merge and one of the parents does not descend
94 # from striprev, mark that parent as a savehead.
93 # from striprev, mark that parent as a savehead.
95 if parents[1] != nullrev:
94 if parents[1] != nullrev:
96 for p in parents:
95 for p in parents:
97 if p not in tostrip and p > striprev:
96 if p not in tostrip and p > striprev:
98 saveheads[p] = 1
97 saveheads[p] = 1
99 else:
98 else:
100 # if no parents of this revision will be stripped, mark it as
99 # if no parents of this revision will be stripped, mark it as
101 # a savebase
100 # a savebase
102 if parents[0] < striprev and parents[1] < striprev:
101 if parents[0] < striprev and parents[1] < striprev:
103 savebases.append(cl.node(r))
102 savebases.append(cl.node(r))
104
103
105 for p in parents:
104 for p in parents:
106 if p in saveheads:
105 if p in saveheads:
107 del saveheads[p]
106 del saveheads[p]
108 saveheads[r] = 1
107 saveheads[r] = 1
109
108
110 saveheads = [cl.node(r) for r in saveheads]
109 saveheads = [cl.node(r) for r in saveheads]
111 files = _collectfiles(repo, striprev)
110 files = _collectfiles(repo, striprev)
112
111
113 extranodes = _collectextranodes(repo, files, striprev)
112 extranodes = _collectextranodes(repo, files, striprev)
114
113
115 # create a changegroup for all the branches we need to keep
114 # create a changegroup for all the branches we need to keep
116 if backup == "all":
115 if backup == "all":
117 _bundle(repo, [node], cl.heads(), node, 'backup')
116 _bundle(repo, [node], cl.heads(), node, 'backup')
118 if saveheads or extranodes:
117 if saveheads or extranodes:
119 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
118 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
120 extranodes)
119 extranodes)
121
120
122 cl.strip(striprev)
121 cl.strip(striprev)
123 repo.manifest.strip(striprev)
122 repo.manifest.strip(striprev)
124 for name in files:
123 for name in files:
125 f = repo.file(name)
124 f = repo.file(name)
126 f.strip(striprev)
125 f.strip(striprev)
127
126
128 if saveheads or extranodes:
127 if saveheads or extranodes:
129 ui.status(_("adding branch\n"))
128 ui.status(_("adding branch\n"))
130 f = open(chgrpfile, "rb")
129 f = open(chgrpfile, "rb")
131 gen = changegroup.readbundle(f, chgrpfile)
130 gen = changegroup.readbundle(f, chgrpfile)
132 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
131 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
133 f.close()
132 f.close()
134 if backup != "strip":
133 if backup != "strip":
135 os.unlink(chgrpfile)
134 os.unlink(chgrpfile)
136
135
@@ -1,1374 +1,1374 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
7 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import bin, hex, nullid, nullrev, short
13 from node import bin, hex, nullid, nullrev, short
14 from i18n import _
14 from i18n import _
15 import changegroup, errno, ancestor, mdiff, parsers
15 import changegroup, errno, ancestor, mdiff, parsers
16 import struct, util, zlib
16 import struct, util, zlib
17
17
18 _pack = struct.pack
18 _pack = struct.pack
19 _unpack = struct.unpack
19 _unpack = struct.unpack
20 _compress = zlib.compress
20 _compress = zlib.compress
21 _decompress = zlib.decompress
21 _decompress = zlib.decompress
22 _sha = util.sha1
22 _sha = util.sha1
23
23
24 # revlog flags
24 # revlog flags
25 REVLOGV0 = 0
25 REVLOGV0 = 0
26 REVLOGNG = 1
26 REVLOGNG = 1
27 REVLOGNGINLINEDATA = (1 << 16)
27 REVLOGNGINLINEDATA = (1 << 16)
28 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
28 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
29 REVLOG_DEFAULT_FORMAT = REVLOGNG
29 REVLOG_DEFAULT_FORMAT = REVLOGNG
30 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
30 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
31
31
32 class RevlogError(Exception):
32 class RevlogError(Exception):
33 pass
33 pass
34
34
35 class LookupError(RevlogError, KeyError):
35 class LookupError(RevlogError, KeyError):
36 def __init__(self, name, index, message):
36 def __init__(self, name, index, message):
37 self.name = name
37 self.name = name
38 if isinstance(name, str) and len(name) == 20:
38 if isinstance(name, str) and len(name) == 20:
39 name = short(name)
39 name = short(name)
40 RevlogError.__init__(self, _('%s@%s: %s') % (index, name, message))
40 RevlogError.__init__(self, _('%s@%s: %s') % (index, name, message))
41
41
42 def __str__(self):
42 def __str__(self):
43 return RevlogError.__str__(self)
43 return RevlogError.__str__(self)
44
44
45 def getoffset(q):
45 def getoffset(q):
46 return int(q >> 16)
46 return int(q >> 16)
47
47
48 def gettype(q):
48 def gettype(q):
49 return int(q & 0xFFFF)
49 return int(q & 0xFFFF)
50
50
51 def offset_type(offset, type):
51 def offset_type(offset, type):
52 return long(long(offset) << 16 | type)
52 return long(long(offset) << 16 | type)
53
53
54 def hash(text, p1, p2):
54 def hash(text, p1, p2):
55 """generate a hash from the given text and its parent hashes
55 """generate a hash from the given text and its parent hashes
56
56
57 This hash combines both the current file contents and its history
57 This hash combines both the current file contents and its history
58 in a manner that makes it easy to distinguish nodes with the same
58 in a manner that makes it easy to distinguish nodes with the same
59 content in the revision graph.
59 content in the revision graph.
60 """
60 """
61 l = [p1, p2]
61 l = [p1, p2]
62 l.sort()
62 l.sort()
63 s = _sha(l[0])
63 s = _sha(l[0])
64 s.update(l[1])
64 s.update(l[1])
65 s.update(text)
65 s.update(text)
66 return s.digest()
66 return s.digest()
67
67
68 def compress(text):
68 def compress(text):
69 """ generate a possibly-compressed representation of text """
69 """ generate a possibly-compressed representation of text """
70 if not text:
70 if not text:
71 return ("", text)
71 return ("", text)
72 l = len(text)
72 l = len(text)
73 bin = None
73 bin = None
74 if l < 44:
74 if l < 44:
75 pass
75 pass
76 elif l > 1000000:
76 elif l > 1000000:
77 # zlib makes an internal copy, thus doubling memory usage for
77 # zlib makes an internal copy, thus doubling memory usage for
78 # large files, so lets do this in pieces
78 # large files, so lets do this in pieces
79 z = zlib.compressobj()
79 z = zlib.compressobj()
80 p = []
80 p = []
81 pos = 0
81 pos = 0
82 while pos < l:
82 while pos < l:
83 pos2 = pos + 2**20
83 pos2 = pos + 2**20
84 p.append(z.compress(text[pos:pos2]))
84 p.append(z.compress(text[pos:pos2]))
85 pos = pos2
85 pos = pos2
86 p.append(z.flush())
86 p.append(z.flush())
87 if sum(map(len, p)) < l:
87 if sum(map(len, p)) < l:
88 bin = "".join(p)
88 bin = "".join(p)
89 else:
89 else:
90 bin = _compress(text)
90 bin = _compress(text)
91 if bin is None or len(bin) > l:
91 if bin is None or len(bin) > l:
92 if text[0] == '\0':
92 if text[0] == '\0':
93 return ("", text)
93 return ("", text)
94 return ('u', text)
94 return ('u', text)
95 return ("", bin)
95 return ("", bin)
96
96
97 def decompress(bin):
97 def decompress(bin):
98 """ decompress the given input """
98 """ decompress the given input """
99 if not bin:
99 if not bin:
100 return bin
100 return bin
101 t = bin[0]
101 t = bin[0]
102 if t == '\0':
102 if t == '\0':
103 return bin
103 return bin
104 if t == 'x':
104 if t == 'x':
105 return _decompress(bin)
105 return _decompress(bin)
106 if t == 'u':
106 if t == 'u':
107 return bin[1:]
107 return bin[1:]
108 raise RevlogError(_("unknown compression type %r") % t)
108 raise RevlogError(_("unknown compression type %r") % t)
109
109
110 class lazyparser(object):
110 class lazyparser(object):
111 """
111 """
112 this class avoids the need to parse the entirety of large indices
112 this class avoids the need to parse the entirety of large indices
113 """
113 """
114
114
115 # lazyparser is not safe to use on windows if win32 extensions not
115 # lazyparser is not safe to use on windows if win32 extensions not
116 # available. it keeps file handle open, which make it not possible
116 # available. it keeps file handle open, which make it not possible
117 # to break hardlinks on local cloned repos.
117 # to break hardlinks on local cloned repos.
118
118
119 def __init__(self, dataf, size):
119 def __init__(self, dataf, size):
120 self.dataf = dataf
120 self.dataf = dataf
121 self.s = struct.calcsize(indexformatng)
121 self.s = struct.calcsize(indexformatng)
122 self.datasize = size
122 self.datasize = size
123 self.l = size/self.s
123 self.l = size/self.s
124 self.index = [None] * self.l
124 self.index = [None] * self.l
125 self.map = {nullid: nullrev}
125 self.map = {nullid: nullrev}
126 self.allmap = 0
126 self.allmap = 0
127 self.all = 0
127 self.all = 0
128 self.mapfind_count = 0
128 self.mapfind_count = 0
129
129
130 def loadmap(self):
130 def loadmap(self):
131 """
131 """
132 during a commit, we need to make sure the rev being added is
132 during a commit, we need to make sure the rev being added is
133 not a duplicate. This requires loading the entire index,
133 not a duplicate. This requires loading the entire index,
134 which is fairly slow. loadmap can load up just the node map,
134 which is fairly slow. loadmap can load up just the node map,
135 which takes much less time.
135 which takes much less time.
136 """
136 """
137 if self.allmap:
137 if self.allmap:
138 return
138 return
139 end = self.datasize
139 end = self.datasize
140 self.allmap = 1
140 self.allmap = 1
141 cur = 0
141 cur = 0
142 count = 0
142 count = 0
143 blocksize = self.s * 256
143 blocksize = self.s * 256
144 self.dataf.seek(0)
144 self.dataf.seek(0)
145 while cur < end:
145 while cur < end:
146 data = self.dataf.read(blocksize)
146 data = self.dataf.read(blocksize)
147 off = 0
147 off = 0
148 for x in xrange(256):
148 for x in xrange(256):
149 n = data[off + ngshaoffset:off + ngshaoffset + 20]
149 n = data[off + ngshaoffset:off + ngshaoffset + 20]
150 self.map[n] = count
150 self.map[n] = count
151 count += 1
151 count += 1
152 if count >= self.l:
152 if count >= self.l:
153 break
153 break
154 off += self.s
154 off += self.s
155 cur += blocksize
155 cur += blocksize
156
156
157 def loadblock(self, blockstart, blocksize, data=None):
157 def loadblock(self, blockstart, blocksize, data=None):
158 if self.all:
158 if self.all:
159 return
159 return
160 if data is None:
160 if data is None:
161 self.dataf.seek(blockstart)
161 self.dataf.seek(blockstart)
162 if blockstart + blocksize > self.datasize:
162 if blockstart + blocksize > self.datasize:
163 # the revlog may have grown since we've started running,
163 # the revlog may have grown since we've started running,
164 # but we don't have space in self.index for more entries.
164 # but we don't have space in self.index for more entries.
165 # limit blocksize so that we don't get too much data.
165 # limit blocksize so that we don't get too much data.
166 blocksize = max(self.datasize - blockstart, 0)
166 blocksize = max(self.datasize - blockstart, 0)
167 data = self.dataf.read(blocksize)
167 data = self.dataf.read(blocksize)
168 lend = len(data) / self.s
168 lend = len(data) / self.s
169 i = blockstart / self.s
169 i = blockstart / self.s
170 off = 0
170 off = 0
171 # lazyindex supports __delitem__
171 # lazyindex supports __delitem__
172 if lend > len(self.index) - i:
172 if lend > len(self.index) - i:
173 lend = len(self.index) - i
173 lend = len(self.index) - i
174 for x in xrange(lend):
174 for x in xrange(lend):
175 if self.index[i + x] == None:
175 if self.index[i + x] == None:
176 b = data[off : off + self.s]
176 b = data[off : off + self.s]
177 self.index[i + x] = b
177 self.index[i + x] = b
178 n = b[ngshaoffset:ngshaoffset + 20]
178 n = b[ngshaoffset:ngshaoffset + 20]
179 self.map[n] = i + x
179 self.map[n] = i + x
180 off += self.s
180 off += self.s
181
181
182 def findnode(self, node):
182 def findnode(self, node):
183 """search backwards through the index file for a specific node"""
183 """search backwards through the index file for a specific node"""
184 if self.allmap:
184 if self.allmap:
185 return None
185 return None
186
186
187 # hg log will cause many many searches for the manifest
187 # hg log will cause many many searches for the manifest
188 # nodes. After we get called a few times, just load the whole
188 # nodes. After we get called a few times, just load the whole
189 # thing.
189 # thing.
190 if self.mapfind_count > 8:
190 if self.mapfind_count > 8:
191 self.loadmap()
191 self.loadmap()
192 if node in self.map:
192 if node in self.map:
193 return node
193 return node
194 return None
194 return None
195 self.mapfind_count += 1
195 self.mapfind_count += 1
196 last = self.l - 1
196 last = self.l - 1
197 while self.index[last] != None:
197 while self.index[last] != None:
198 if last == 0:
198 if last == 0:
199 self.all = 1
199 self.all = 1
200 self.allmap = 1
200 self.allmap = 1
201 return None
201 return None
202 last -= 1
202 last -= 1
203 end = (last + 1) * self.s
203 end = (last + 1) * self.s
204 blocksize = self.s * 256
204 blocksize = self.s * 256
205 while end >= 0:
205 while end >= 0:
206 start = max(end - blocksize, 0)
206 start = max(end - blocksize, 0)
207 self.dataf.seek(start)
207 self.dataf.seek(start)
208 data = self.dataf.read(end - start)
208 data = self.dataf.read(end - start)
209 findend = end - start
209 findend = end - start
210 while True:
210 while True:
211 # we're searching backwards, so we have to make sure
211 # we're searching backwards, so we have to make sure
212 # we don't find a changeset where this node is a parent
212 # we don't find a changeset where this node is a parent
213 off = data.find(node, 0, findend)
213 off = data.find(node, 0, findend)
214 findend = off
214 findend = off
215 if off >= 0:
215 if off >= 0:
216 i = off / self.s
216 i = off / self.s
217 off = i * self.s
217 off = i * self.s
218 n = data[off + ngshaoffset:off + ngshaoffset + 20]
218 n = data[off + ngshaoffset:off + ngshaoffset + 20]
219 if n == node:
219 if n == node:
220 self.map[n] = i + start / self.s
220 self.map[n] = i + start / self.s
221 return node
221 return node
222 else:
222 else:
223 break
223 break
224 end -= blocksize
224 end -= blocksize
225 return None
225 return None
226
226
227 def loadindex(self, i=None, end=None):
227 def loadindex(self, i=None, end=None):
228 if self.all:
228 if self.all:
229 return
229 return
230 all = False
230 all = False
231 if i == None:
231 if i == None:
232 blockstart = 0
232 blockstart = 0
233 blocksize = (65536 / self.s) * self.s
233 blocksize = (65536 / self.s) * self.s
234 end = self.datasize
234 end = self.datasize
235 all = True
235 all = True
236 else:
236 else:
237 if end:
237 if end:
238 blockstart = i * self.s
238 blockstart = i * self.s
239 end = end * self.s
239 end = end * self.s
240 blocksize = end - blockstart
240 blocksize = end - blockstart
241 else:
241 else:
242 blockstart = (i & ~1023) * self.s
242 blockstart = (i & ~1023) * self.s
243 blocksize = self.s * 1024
243 blocksize = self.s * 1024
244 end = blockstart + blocksize
244 end = blockstart + blocksize
245 while blockstart < end:
245 while blockstart < end:
246 self.loadblock(blockstart, blocksize)
246 self.loadblock(blockstart, blocksize)
247 blockstart += blocksize
247 blockstart += blocksize
248 if all:
248 if all:
249 self.all = True
249 self.all = True
250
250
251 class lazyindex(object):
251 class lazyindex(object):
252 """a lazy version of the index array"""
252 """a lazy version of the index array"""
253 def __init__(self, parser):
253 def __init__(self, parser):
254 self.p = parser
254 self.p = parser
255 def __len__(self):
255 def __len__(self):
256 return len(self.p.index)
256 return len(self.p.index)
257 def load(self, pos):
257 def load(self, pos):
258 if pos < 0:
258 if pos < 0:
259 pos += len(self.p.index)
259 pos += len(self.p.index)
260 self.p.loadindex(pos)
260 self.p.loadindex(pos)
261 return self.p.index[pos]
261 return self.p.index[pos]
262 def __getitem__(self, pos):
262 def __getitem__(self, pos):
263 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
263 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
264 def __setitem__(self, pos, item):
264 def __setitem__(self, pos, item):
265 self.p.index[pos] = _pack(indexformatng, *item)
265 self.p.index[pos] = _pack(indexformatng, *item)
266 def __delitem__(self, pos):
266 def __delitem__(self, pos):
267 del self.p.index[pos]
267 del self.p.index[pos]
268 def insert(self, pos, e):
268 def insert(self, pos, e):
269 self.p.index.insert(pos, _pack(indexformatng, *e))
269 self.p.index.insert(pos, _pack(indexformatng, *e))
270 def append(self, e):
270 def append(self, e):
271 self.p.index.append(_pack(indexformatng, *e))
271 self.p.index.append(_pack(indexformatng, *e))
272
272
273 class lazymap(object):
273 class lazymap(object):
274 """a lazy version of the node map"""
274 """a lazy version of the node map"""
275 def __init__(self, parser):
275 def __init__(self, parser):
276 self.p = parser
276 self.p = parser
277 def load(self, key):
277 def load(self, key):
278 n = self.p.findnode(key)
278 n = self.p.findnode(key)
279 if n == None:
279 if n == None:
280 raise KeyError(key)
280 raise KeyError(key)
281 def __contains__(self, key):
281 def __contains__(self, key):
282 if key in self.p.map:
282 if key in self.p.map:
283 return True
283 return True
284 self.p.loadmap()
284 self.p.loadmap()
285 return key in self.p.map
285 return key in self.p.map
286 def __iter__(self):
286 def __iter__(self):
287 yield nullid
287 yield nullid
288 for i in xrange(self.p.l):
288 for i in xrange(self.p.l):
289 ret = self.p.index[i]
289 ret = self.p.index[i]
290 if not ret:
290 if not ret:
291 self.p.loadindex(i)
291 self.p.loadindex(i)
292 ret = self.p.index[i]
292 ret = self.p.index[i]
293 if isinstance(ret, str):
293 if isinstance(ret, str):
294 ret = _unpack(indexformatng, ret)
294 ret = _unpack(indexformatng, ret)
295 yield ret[7]
295 yield ret[7]
296 def __getitem__(self, key):
296 def __getitem__(self, key):
297 try:
297 try:
298 return self.p.map[key]
298 return self.p.map[key]
299 except KeyError:
299 except KeyError:
300 try:
300 try:
301 self.load(key)
301 self.load(key)
302 return self.p.map[key]
302 return self.p.map[key]
303 except KeyError:
303 except KeyError:
304 raise KeyError("node " + hex(key))
304 raise KeyError("node " + hex(key))
305 def __setitem__(self, key, val):
305 def __setitem__(self, key, val):
306 self.p.map[key] = val
306 self.p.map[key] = val
307 def __delitem__(self, key):
307 def __delitem__(self, key):
308 del self.p.map[key]
308 del self.p.map[key]
309
309
310 indexformatv0 = ">4l20s20s20s"
310 indexformatv0 = ">4l20s20s20s"
311 v0shaoffset = 56
311 v0shaoffset = 56
312
312
313 class revlogoldio(object):
313 class revlogoldio(object):
314 def __init__(self):
314 def __init__(self):
315 self.size = struct.calcsize(indexformatv0)
315 self.size = struct.calcsize(indexformatv0)
316
316
317 def parseindex(self, fp, inline):
317 def parseindex(self, fp, inline):
318 s = self.size
318 s = self.size
319 index = []
319 index = []
320 nodemap = {nullid: nullrev}
320 nodemap = {nullid: nullrev}
321 n = off = 0
321 n = off = 0
322 data = fp.read()
322 data = fp.read()
323 l = len(data)
323 l = len(data)
324 while off + s <= l:
324 while off + s <= l:
325 cur = data[off:off + s]
325 cur = data[off:off + s]
326 off += s
326 off += s
327 e = _unpack(indexformatv0, cur)
327 e = _unpack(indexformatv0, cur)
328 # transform to revlogv1 format
328 # transform to revlogv1 format
329 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
329 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
330 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
330 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
331 index.append(e2)
331 index.append(e2)
332 nodemap[e[6]] = n
332 nodemap[e[6]] = n
333 n += 1
333 n += 1
334
334
335 return index, nodemap, None
335 return index, nodemap, None
336
336
337 def packentry(self, entry, node, version, rev):
337 def packentry(self, entry, node, version, rev):
338 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
338 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
339 node(entry[5]), node(entry[6]), entry[7])
339 node(entry[5]), node(entry[6]), entry[7])
340 return _pack(indexformatv0, *e2)
340 return _pack(indexformatv0, *e2)
341
341
342 # index ng:
342 # index ng:
343 # 6 bytes offset
343 # 6 bytes offset
344 # 2 bytes flags
344 # 2 bytes flags
345 # 4 bytes compressed length
345 # 4 bytes compressed length
346 # 4 bytes uncompressed length
346 # 4 bytes uncompressed length
347 # 4 bytes: base rev
347 # 4 bytes: base rev
348 # 4 bytes link rev
348 # 4 bytes link rev
349 # 4 bytes parent 1 rev
349 # 4 bytes parent 1 rev
350 # 4 bytes parent 2 rev
350 # 4 bytes parent 2 rev
351 # 32 bytes: nodeid
351 # 32 bytes: nodeid
352 indexformatng = ">Qiiiiii20s12x"
352 indexformatng = ">Qiiiiii20s12x"
353 ngshaoffset = 32
353 ngshaoffset = 32
354 versionformat = ">I"
354 versionformat = ">I"
355
355
356 class revlogio(object):
356 class revlogio(object):
357 def __init__(self):
357 def __init__(self):
358 self.size = struct.calcsize(indexformatng)
358 self.size = struct.calcsize(indexformatng)
359
359
360 def parseindex(self, fp, inline):
360 def parseindex(self, fp, inline):
361 try:
361 try:
362 size = util.fstat(fp).st_size
362 size = util.fstat(fp).st_size
363 except AttributeError:
363 except AttributeError:
364 size = 0
364 size = 0
365
365
366 if util.openhardlinks() and not inline and size > 1000000:
366 if util.openhardlinks() and not inline and size > 1000000:
367 # big index, let's parse it on demand
367 # big index, let's parse it on demand
368 parser = lazyparser(fp, size)
368 parser = lazyparser(fp, size)
369 index = lazyindex(parser)
369 index = lazyindex(parser)
370 nodemap = lazymap(parser)
370 nodemap = lazymap(parser)
371 e = list(index[0])
371 e = list(index[0])
372 type = gettype(e[0])
372 type = gettype(e[0])
373 e[0] = offset_type(0, type)
373 e[0] = offset_type(0, type)
374 index[0] = e
374 index[0] = e
375 return index, nodemap, None
375 return index, nodemap, None
376
376
377 data = fp.read()
377 data = fp.read()
378 # call the C implementation to parse the index data
378 # call the C implementation to parse the index data
379 index, nodemap, cache = parsers.parse_index(data, inline)
379 index, nodemap, cache = parsers.parse_index(data, inline)
380 return index, nodemap, cache
380 return index, nodemap, cache
381
381
382 def packentry(self, entry, node, version, rev):
382 def packentry(self, entry, node, version, rev):
383 p = _pack(indexformatng, *entry)
383 p = _pack(indexformatng, *entry)
384 if rev == 0:
384 if rev == 0:
385 p = _pack(versionformat, version) + p[4:]
385 p = _pack(versionformat, version) + p[4:]
386 return p
386 return p
387
387
388 class revlog(object):
388 class revlog(object):
389 """
389 """
390 the underlying revision storage object
390 the underlying revision storage object
391
391
392 A revlog consists of two parts, an index and the revision data.
392 A revlog consists of two parts, an index and the revision data.
393
393
394 The index is a file with a fixed record size containing
394 The index is a file with a fixed record size containing
395 information on each revision, including its nodeid (hash), the
395 information on each revision, including its nodeid (hash), the
396 nodeids of its parents, the position and offset of its data within
396 nodeids of its parents, the position and offset of its data within
397 the data file, and the revision it's based on. Finally, each entry
397 the data file, and the revision it's based on. Finally, each entry
398 contains a linkrev entry that can serve as a pointer to external
398 contains a linkrev entry that can serve as a pointer to external
399 data.
399 data.
400
400
401 The revision data itself is a linear collection of data chunks.
401 The revision data itself is a linear collection of data chunks.
402 Each chunk represents a revision and is usually represented as a
402 Each chunk represents a revision and is usually represented as a
403 delta against the previous chunk. To bound lookup time, runs of
403 delta against the previous chunk. To bound lookup time, runs of
404 deltas are limited to about 2 times the length of the original
404 deltas are limited to about 2 times the length of the original
405 version data. This makes retrieval of a version proportional to
405 version data. This makes retrieval of a version proportional to
406 its size, or O(1) relative to the number of revisions.
406 its size, or O(1) relative to the number of revisions.
407
407
408 Both pieces of the revlog are written to in an append-only
408 Both pieces of the revlog are written to in an append-only
409 fashion, which means we never need to rewrite a file to insert or
409 fashion, which means we never need to rewrite a file to insert or
410 remove data, and can use some simple techniques to avoid the need
410 remove data, and can use some simple techniques to avoid the need
411 for locking while reading.
411 for locking while reading.
412 """
412 """
413 def __init__(self, opener, indexfile):
413 def __init__(self, opener, indexfile):
414 """
414 """
415 create a revlog object
415 create a revlog object
416
416
417 opener is a function that abstracts the file opening operation
417 opener is a function that abstracts the file opening operation
418 and can be used to implement COW semantics or the like.
418 and can be used to implement COW semantics or the like.
419 """
419 """
420 self.indexfile = indexfile
420 self.indexfile = indexfile
421 self.datafile = indexfile[:-2] + ".d"
421 self.datafile = indexfile[:-2] + ".d"
422 self.opener = opener
422 self.opener = opener
423 self._cache = None
423 self._cache = None
424 self._chunkcache = None
424 self._chunkcache = None
425 self.nodemap = {nullid: nullrev}
425 self.nodemap = {nullid: nullrev}
426 self.index = []
426 self.index = []
427
427
428 v = REVLOG_DEFAULT_VERSION
428 v = REVLOG_DEFAULT_VERSION
429 if hasattr(opener, "defversion"):
429 if hasattr(opener, "defversion"):
430 v = opener.defversion
430 v = opener.defversion
431 if v & REVLOGNG:
431 if v & REVLOGNG:
432 v |= REVLOGNGINLINEDATA
432 v |= REVLOGNGINLINEDATA
433
433
434 i = ""
434 i = ""
435 try:
435 try:
436 f = self.opener(self.indexfile)
436 f = self.opener(self.indexfile)
437 i = f.read(4)
437 i = f.read(4)
438 f.seek(0)
438 f.seek(0)
439 if len(i) > 0:
439 if len(i) > 0:
440 v = struct.unpack(versionformat, i)[0]
440 v = struct.unpack(versionformat, i)[0]
441 except IOError, inst:
441 except IOError, inst:
442 if inst.errno != errno.ENOENT:
442 if inst.errno != errno.ENOENT:
443 raise
443 raise
444
444
445 self.version = v
445 self.version = v
446 self._inline = v & REVLOGNGINLINEDATA
446 self._inline = v & REVLOGNGINLINEDATA
447 flags = v & ~0xFFFF
447 flags = v & ~0xFFFF
448 fmt = v & 0xFFFF
448 fmt = v & 0xFFFF
449 if fmt == REVLOGV0 and flags:
449 if fmt == REVLOGV0 and flags:
450 raise RevlogError(_("index %s unknown flags %#04x for format v0")
450 raise RevlogError(_("index %s unknown flags %#04x for format v0")
451 % (self.indexfile, flags >> 16))
451 % (self.indexfile, flags >> 16))
452 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
452 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
453 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
453 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
454 % (self.indexfile, flags >> 16))
454 % (self.indexfile, flags >> 16))
455 elif fmt > REVLOGNG:
455 elif fmt > REVLOGNG:
456 raise RevlogError(_("index %s unknown format %d")
456 raise RevlogError(_("index %s unknown format %d")
457 % (self.indexfile, fmt))
457 % (self.indexfile, fmt))
458
458
459 self._io = revlogio()
459 self._io = revlogio()
460 if self.version == REVLOGV0:
460 if self.version == REVLOGV0:
461 self._io = revlogoldio()
461 self._io = revlogoldio()
462 if i:
462 if i:
463 d = self._io.parseindex(f, self._inline)
463 d = self._io.parseindex(f, self._inline)
464 self.index, self.nodemap, self._chunkcache = d
464 self.index, self.nodemap, self._chunkcache = d
465
465
466 # add the magic null revision at -1 (if it hasn't been done already)
466 # add the magic null revision at -1 (if it hasn't been done already)
467 if (self.index == [] or isinstance(self.index, lazyindex) or
467 if (self.index == [] or isinstance(self.index, lazyindex) or
468 self.index[-1][7] != nullid) :
468 self.index[-1][7] != nullid) :
469 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
469 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
470
470
471 def _loadindex(self, start, end):
471 def _loadindex(self, start, end):
472 """load a block of indexes all at once from the lazy parser"""
472 """load a block of indexes all at once from the lazy parser"""
473 if isinstance(self.index, lazyindex):
473 if isinstance(self.index, lazyindex):
474 self.index.p.loadindex(start, end)
474 self.index.p.loadindex(start, end)
475
475
476 def _loadindexmap(self):
476 def _loadindexmap(self):
477 """loads both the map and the index from the lazy parser"""
477 """loads both the map and the index from the lazy parser"""
478 if isinstance(self.index, lazyindex):
478 if isinstance(self.index, lazyindex):
479 p = self.index.p
479 p = self.index.p
480 p.loadindex()
480 p.loadindex()
481 self.nodemap = p.map
481 self.nodemap = p.map
482
482
483 def _loadmap(self):
483 def _loadmap(self):
484 """loads the map from the lazy parser"""
484 """loads the map from the lazy parser"""
485 if isinstance(self.nodemap, lazymap):
485 if isinstance(self.nodemap, lazymap):
486 self.nodemap.p.loadmap()
486 self.nodemap.p.loadmap()
487 self.nodemap = self.nodemap.p.map
487 self.nodemap = self.nodemap.p.map
488
488
489 def tip(self):
489 def tip(self):
490 return self.node(len(self.index) - 2)
490 return self.node(len(self.index) - 2)
491 def __len__(self):
491 def __len__(self):
492 return len(self.index) - 1
492 return len(self.index) - 1
493 def __iter__(self):
493 def __iter__(self):
494 for i in xrange(len(self)):
494 for i in xrange(len(self)):
495 yield i
495 yield i
496 def rev(self, node):
496 def rev(self, node):
497 try:
497 try:
498 return self.nodemap[node]
498 return self.nodemap[node]
499 except KeyError:
499 except KeyError:
500 raise LookupError(node, self.indexfile, _('no node'))
500 raise LookupError(node, self.indexfile, _('no node'))
501 def node(self, rev):
501 def node(self, rev):
502 return self.index[rev][7]
502 return self.index[rev][7]
503 def linkrev(self, node):
503 def linkrev(self, rev):
504 return self.index[self.rev(node)][4]
504 return self.index[rev][4]
505 def parents(self, node):
505 def parents(self, node):
506 d = self.index[self.rev(node)][5:7]
506 d = self.index[self.rev(node)][5:7]
507 return (self.node(d[0]), self.node(d[1]))
507 return (self.node(d[0]), self.node(d[1]))
508 def parentrevs(self, rev):
508 def parentrevs(self, rev):
509 return self.index[rev][5:7]
509 return self.index[rev][5:7]
510 def start(self, rev):
510 def start(self, rev):
511 return int(self.index[rev][0] >> 16)
511 return int(self.index[rev][0] >> 16)
512 def end(self, rev):
512 def end(self, rev):
513 return self.start(rev) + self.length(rev)
513 return self.start(rev) + self.length(rev)
514 def length(self, rev):
514 def length(self, rev):
515 return self.index[rev][1]
515 return self.index[rev][1]
516 def base(self, rev):
516 def base(self, rev):
517 return self.index[rev][3]
517 return self.index[rev][3]
518
518
519 def size(self, rev):
519 def size(self, rev):
520 """return the length of the uncompressed text for a given revision"""
520 """return the length of the uncompressed text for a given revision"""
521 l = self.index[rev][2]
521 l = self.index[rev][2]
522 if l >= 0:
522 if l >= 0:
523 return l
523 return l
524
524
525 t = self.revision(self.node(rev))
525 t = self.revision(self.node(rev))
526 return len(t)
526 return len(t)
527
527
528 # alternate implementation, The advantage to this code is it
528 # alternate implementation, The advantage to this code is it
529 # will be faster for a single revision. But, the results are not
529 # will be faster for a single revision. But, the results are not
530 # cached, so finding the size of every revision will be slower.
530 # cached, so finding the size of every revision will be slower.
531 """
531 """
532 if self.cache and self.cache[1] == rev:
532 if self.cache and self.cache[1] == rev:
533 return len(self.cache[2])
533 return len(self.cache[2])
534
534
535 base = self.base(rev)
535 base = self.base(rev)
536 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
536 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
537 base = self.cache[1]
537 base = self.cache[1]
538 text = self.cache[2]
538 text = self.cache[2]
539 else:
539 else:
540 text = self.revision(self.node(base))
540 text = self.revision(self.node(base))
541
541
542 l = len(text)
542 l = len(text)
543 for x in xrange(base + 1, rev + 1):
543 for x in xrange(base + 1, rev + 1):
544 l = mdiff.patchedsize(l, self.chunk(x))
544 l = mdiff.patchedsize(l, self.chunk(x))
545 return l
545 return l
546 """
546 """
547
547
548 def reachable(self, node, stop=None):
548 def reachable(self, node, stop=None):
549 """return a hash of all nodes ancestral to a given node, including
549 """return a hash of all nodes ancestral to a given node, including
550 the node itself, stopping when stop is matched"""
550 the node itself, stopping when stop is matched"""
551 reachable = {}
551 reachable = {}
552 visit = [node]
552 visit = [node]
553 reachable[node] = 1
553 reachable[node] = 1
554 if stop:
554 if stop:
555 stopn = self.rev(stop)
555 stopn = self.rev(stop)
556 else:
556 else:
557 stopn = 0
557 stopn = 0
558 while visit:
558 while visit:
559 n = visit.pop(0)
559 n = visit.pop(0)
560 if n == stop:
560 if n == stop:
561 continue
561 continue
562 if n == nullid:
562 if n == nullid:
563 continue
563 continue
564 for p in self.parents(n):
564 for p in self.parents(n):
565 if self.rev(p) < stopn:
565 if self.rev(p) < stopn:
566 continue
566 continue
567 if p not in reachable:
567 if p not in reachable:
568 reachable[p] = 1
568 reachable[p] = 1
569 visit.append(p)
569 visit.append(p)
570 return reachable
570 return reachable
571
571
572 def ancestors(self, *revs):
572 def ancestors(self, *revs):
573 'Generate the ancestors of revs using a breadth-first visit'
573 'Generate the ancestors of revs using a breadth-first visit'
574 visit = list(revs)
574 visit = list(revs)
575 seen = util.set([nullrev])
575 seen = util.set([nullrev])
576 while visit:
576 while visit:
577 for parent in self.parentrevs(visit.pop(0)):
577 for parent in self.parentrevs(visit.pop(0)):
578 if parent not in seen:
578 if parent not in seen:
579 visit.append(parent)
579 visit.append(parent)
580 seen.add(parent)
580 seen.add(parent)
581 yield parent
581 yield parent
582
582
583 def descendants(self, *revs):
583 def descendants(self, *revs):
584 'Generate the descendants of revs in topological order'
584 'Generate the descendants of revs in topological order'
585 seen = util.set(revs)
585 seen = util.set(revs)
586 for i in xrange(min(revs) + 1, len(self)):
586 for i in xrange(min(revs) + 1, len(self)):
587 for x in self.parentrevs(i):
587 for x in self.parentrevs(i):
588 if x != nullrev and x in seen:
588 if x != nullrev and x in seen:
589 seen.add(i)
589 seen.add(i)
590 yield i
590 yield i
591 break
591 break
592
592
593 def findmissing(self, common=None, heads=None):
593 def findmissing(self, common=None, heads=None):
594 '''
594 '''
595 returns the topologically sorted list of nodes from the set:
595 returns the topologically sorted list of nodes from the set:
596 missing = (ancestors(heads) \ ancestors(common))
596 missing = (ancestors(heads) \ ancestors(common))
597
597
598 where ancestors() is the set of ancestors from heads, heads included
598 where ancestors() is the set of ancestors from heads, heads included
599
599
600 if heads is None, the heads of the revlog are used
600 if heads is None, the heads of the revlog are used
601 if common is None, nullid is assumed to be a common node
601 if common is None, nullid is assumed to be a common node
602 '''
602 '''
603 if common is None:
603 if common is None:
604 common = [nullid]
604 common = [nullid]
605 if heads is None:
605 if heads is None:
606 heads = self.heads()
606 heads = self.heads()
607
607
608 common = [self.rev(n) for n in common]
608 common = [self.rev(n) for n in common]
609 heads = [self.rev(n) for n in heads]
609 heads = [self.rev(n) for n in heads]
610
610
611 # we want the ancestors, but inclusive
611 # we want the ancestors, but inclusive
612 has = dict.fromkeys(self.ancestors(*common))
612 has = dict.fromkeys(self.ancestors(*common))
613 has[nullrev] = None
613 has[nullrev] = None
614 for r in common:
614 for r in common:
615 has[r] = None
615 has[r] = None
616
616
617 # take all ancestors from heads that aren't in has
617 # take all ancestors from heads that aren't in has
618 missing = {}
618 missing = {}
619 visit = [r for r in heads if r not in has]
619 visit = [r for r in heads if r not in has]
620 while visit:
620 while visit:
621 r = visit.pop(0)
621 r = visit.pop(0)
622 if r in missing:
622 if r in missing:
623 continue
623 continue
624 else:
624 else:
625 missing[r] = None
625 missing[r] = None
626 for p in self.parentrevs(r):
626 for p in self.parentrevs(r):
627 if p not in has:
627 if p not in has:
628 visit.append(p)
628 visit.append(p)
629 missing = missing.keys()
629 missing = missing.keys()
630 missing.sort()
630 missing.sort()
631 return [self.node(r) for r in missing]
631 return [self.node(r) for r in missing]
632
632
    def nodesbetween(self, roots=None, heads=None):
        """Return a tuple containing three elements. Elements 1 and 2 contain
        a final list bases and heads after all the unreachable ones have been
        pruned.  Element 0 contains a topologically sorted list of all
        nodes that satisfy these constraints:

        1. All nodes must be descended from a node in roots (the nodes on
        roots are considered descended from themselves).
        2. All nodes must also be ancestors of a node in heads (the nodes in
        heads are considered to be their own ancestors).

        If roots is unspecified, nullid is assumed as the only root.
        If heads is unspecified, it is taken to be the output of the
        heads method (i.e. a list of all nodes in the repository that
        have no children)."""
        nonodes = ([], [], [])
        if roots is not None:
            roots = list(roots)
            if not roots:
                return nonodes
            lowestrev = min([self.rev(n) for n in roots])
        else:
            roots = [nullid] # Everybody's a descendent of nullid
            lowestrev = nullrev
        if (lowestrev == nullrev) and (heads is None):
            # We want _all_ the nodes!
            return ([self.node(r) for r in self], [nullid], list(self.heads()))
        if heads is None:
            # All nodes are ancestors, so the latest ancestor is the last
            # node.
            highestrev = len(self) - 1
            # Set ancestors to None to signal that every node is an ancestor.
            ancestors = None
            # Set heads to an empty dictionary for later discovery of heads
            heads = {}
        else:
            heads = list(heads)
            if not heads:
                return nonodes
            ancestors = {}
            # Turn heads into a dictionary so we can remove 'fake' heads.
            # Also, later we will be using it to filter out the heads we can't
            # find from roots.
            heads = dict.fromkeys(heads, 0)
            # Start at the top and keep marking parents until we're done.
            nodestotag = heads.keys()
            # Remember where the top was so we can use it as a limit later.
            highestrev = max([self.rev(n) for n in nodestotag])
            while nodestotag:
                # grab a node to tag
                n = nodestotag.pop()
                # Never tag nullid
                if n == nullid:
                    continue
                # A node's revision number represents its place in a
                # topologically sorted list of nodes.
                r = self.rev(n)
                if r >= lowestrev:
                    if n not in ancestors:
                        # If we are possibly a descendent of one of the roots
                        # and we haven't already been marked as an ancestor
                        ancestors[n] = 1 # Mark as ancestor
                        # Add non-nullid parents to list of nodes to tag.
                        nodestotag.extend([p for p in self.parents(n) if
                                           p != nullid])
                    elif n in heads: # We've seen it before, is it a fake head?
                        # So it is, real heads should not be the ancestors of
                        # any other heads.
                        heads.pop(n)
            if not ancestors:
                return nonodes
        # Now that we have our set of ancestors, we want to remove any
        # roots that are not ancestors.

        # If one of the roots was nullid, everything is included anyway.
        if lowestrev > nullrev:
            # But, since we weren't, let's recompute the lowest rev to not
            # include roots that aren't ancestors.

            # Filter out roots that aren't ancestors of heads
            roots = [n for n in roots if n in ancestors]
            # Recompute the lowest revision
            if roots:
                lowestrev = min([self.rev(n) for n in roots])
            else:
                # No more roots?  Return empty list
                return nonodes
        else:
            # We are descending from nullid, and don't need to care about
            # any other roots.
            lowestrev = nullrev
            roots = [nullid]
        # Transform our roots list into a 'set' (i.e. a dictionary where the
        # values don't matter).
        descendents = dict.fromkeys(roots, 1)
        # Also, keep the original roots so we can filter out roots that aren't
        # 'real' roots (i.e. are descended from other roots).
        roots = descendents.copy()
        # Our topologically sorted list of output nodes.
        orderedout = []
        # Don't start at nullid since we don't want nullid in our output list,
        # and if nullid shows up in descendents, empty parents will look like
        # they're descendents.
        for r in xrange(max(lowestrev, 0), highestrev + 1):
            n = self.node(r)
            isdescendent = False
            if lowestrev == nullrev:  # Everybody is a descendent of nullid
                isdescendent = True
            elif n in descendents:
                # n is already a descendent
                isdescendent = True
                # This check only needs to be done here because all the roots
                # will start being marked as descendents before the loop.
                if n in roots:
                    # If n was a root, check if it's a 'real' root.
                    p = tuple(self.parents(n))
                    # If any of its parents are descendents, it's not a root.
                    if (p[0] in descendents) or (p[1] in descendents):
                        roots.pop(n)
            else:
                p = tuple(self.parents(n))
                # A node is a descendent if either of its parents are
                # descendents.  (We seeded the dependents list with the roots
                # up there, remember?)
                if (p[0] in descendents) or (p[1] in descendents):
                    descendents[n] = 1
                    isdescendent = True
            if isdescendent and ((ancestors is None) or (n in ancestors)):
                # Only include nodes that are both descendents and ancestors.
                orderedout.append(n)
                if (ancestors is not None) and (n in heads):
                    # We're trying to figure out which heads are reachable
                    # from roots.
                    # Mark this head as having been reached
                    heads[n] = 1
                elif ancestors is None:
                    # Otherwise, we're trying to discover the heads.
                    # Assume this is a head because if it isn't, the next step
                    # will eventually remove it.
                    heads[n] = 1
                    # But, obviously its parents aren't.
                    for p in self.parents(n):
                        heads.pop(p, None)
        heads = [n for n in heads.iterkeys() if heads[n] != 0]
        roots = roots.keys()
        assert orderedout
        assert roots
        assert heads
        return (orderedout, roots, heads)
782
782
    def heads(self, start=None, stop=None):
        """return the list of all nodes that have no children

        if start is specified, only heads that are descendants of
        start will be returned
        if stop is specified, it will consider all the revs from stop
        as if they had no children
        """
        if start is None and stop is None:
            count = len(self)
            if not count:
                return [nullid]
            # fast path: a rev is a head unless some later index entry
            # names it as a parent; index fields 5 and 6 hold the two
            # parent revs (see the entry tuple built in _addrevision)
            ishead = [1] * (count + 1)
            index = self.index
            for r in xrange(count):
                e = index[r]
                # nullrev parents index -1, landing in the extra slot
                ishead[e[5]] = ishead[e[6]] = 0
            return [self.node(r) for r in xrange(count) if ishead[r]]

        if start is None:
            start = nullid
        if stop is None:
            stop = []
        stoprevs = dict.fromkeys([self.rev(n) for n in stop])
        startrev = self.rev(start)
        reachable = {startrev: 1}
        heads = {startrev: 1}

        # single forward pass: a rev reachable from start becomes a head
        # and unseats any parent that was one, unless it is a stop rev
        parentrevs = self.parentrevs
        for r in xrange(startrev + 1, len(self)):
            for p in parentrevs(r):
                if p in reachable:
                    if r not in stoprevs:
                        reachable[r] = 1
                        heads[r] = 1
                    if p in heads and p not in stoprevs:
                        del heads[p]

        return [self.node(r) for r in heads]
822
822
823 def children(self, node):
823 def children(self, node):
824 """find the children of a given node"""
824 """find the children of a given node"""
825 c = []
825 c = []
826 p = self.rev(node)
826 p = self.rev(node)
827 for r in range(p + 1, len(self)):
827 for r in range(p + 1, len(self)):
828 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
828 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
829 if prevs:
829 if prevs:
830 for pr in prevs:
830 for pr in prevs:
831 if pr == p:
831 if pr == p:
832 c.append(self.node(r))
832 c.append(self.node(r))
833 elif p == nullrev:
833 elif p == nullrev:
834 c.append(self.node(r))
834 c.append(self.node(r))
835 return c
835 return c
836
836
    def _match(self, id):
        """try to unambiguously resolve id to a node, or return None

        id may be an integer rev, a 20-byte binary node, the decimal
        string form of a (possibly negative) rev, or a 40-digit hex
        nodeid.  Candidates are tried in that order.
        """
        if isinstance(id, (long, int)):
            # rev
            return self.node(id)
        if len(id) == 20:
            # possibly a binary node
            # odds of a binary node being all hex in ASCII are 1 in 10**25
            try:
                node = id
                r = self.rev(node) # quick search the index
                return node
            except LookupError:
                pass # may be partial hex id
        try:
            # str(rev)
            rev = int(id)
            if str(rev) != id:
                # reject things like '0x1' or '1 ' that int() accepts
                raise ValueError
            if rev < 0:
                # negative revs count back from the end
                rev = len(self) + rev
            if rev < 0 or rev >= len(self):
                raise ValueError
            return self.node(rev)
        except (ValueError, OverflowError):
            pass
        if len(id) == 40:
            try:
                # a full hex nodeid?
                node = bin(id)
                r = self.rev(node)
                return node
            except (TypeError, LookupError):
                pass
870
870
871 def _partialmatch(self, id):
871 def _partialmatch(self, id):
872 if len(id) < 40:
872 if len(id) < 40:
873 try:
873 try:
874 # hex(node)[:...]
874 # hex(node)[:...]
875 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
875 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
876 node = None
876 node = None
877 for n in self.nodemap:
877 for n in self.nodemap:
878 if n.startswith(bin_id) and hex(n).startswith(id):
878 if n.startswith(bin_id) and hex(n).startswith(id):
879 if node is not None:
879 if node is not None:
880 raise LookupError(id, self.indexfile,
880 raise LookupError(id, self.indexfile,
881 _('ambiguous identifier'))
881 _('ambiguous identifier'))
882 node = n
882 node = n
883 if node is not None:
883 if node is not None:
884 return node
884 return node
885 except TypeError:
885 except TypeError:
886 pass
886 pass
887
887
888 def lookup(self, id):
888 def lookup(self, id):
889 """locate a node based on:
889 """locate a node based on:
890 - revision number or str(revision number)
890 - revision number or str(revision number)
891 - nodeid or subset of hex nodeid
891 - nodeid or subset of hex nodeid
892 """
892 """
893 n = self._match(id)
893 n = self._match(id)
894 if n is not None:
894 if n is not None:
895 return n
895 return n
896 n = self._partialmatch(id)
896 n = self._partialmatch(id)
897 if n:
897 if n:
898 return n
898 return n
899
899
900 raise LookupError(id, self.indexfile, _('no match found'))
900 raise LookupError(id, self.indexfile, _('no match found'))
901
901
    def cmp(self, node, text):
        """compare text with a given file revision

        returns True if text differs from the stored revision.  A node
        is the hash of its text plus parents (see the integrity check in
        revision()), so rehashing avoids reconstructing the stored text.
        """
        p1, p2 = self.parents(node)
        return hash(text, p1, p2) != node
906
906
    def chunk(self, rev, df=None):
        """return the decompressed raw chunk stored for rev

        df is an optional already-open handle on the data file (or, for
        inline revlogs, the index file).  Reads go through
        self._chunkcache, filled in slabs of at least 64k.
        """
        def loadcache(df):
            # refill self._chunkcache with cache_length bytes at 'start'
            if not df:
                if self._inline:
                    df = self.opener(self.indexfile)
                else:
                    df = self.opener(self.datafile)
            df.seek(start)
            self._chunkcache = (start, df.read(cache_length))

        start, length = self.start(rev), self.length(rev)
        if self._inline:
            # inline revlogs interleave an index entry before each chunk,
            # so skip over the rev+1 index records preceding this one
            start += (rev + 1) * self._io.size
        end = start + length

        offset = 0
        if not self._chunkcache:
            cache_length = max(65536, length)
            loadcache(df)
        else:
            cache_start = self._chunkcache[0]
            cache_length = len(self._chunkcache[1])
            cache_end = cache_start + cache_length
            if start >= cache_start and end <= cache_end:
                # it is cached
                offset = start - cache_start
            else:
                cache_length = max(65536, length)
                loadcache(df)

        # avoid copying large chunks
        c = self._chunkcache[1]
        if cache_length != length:
            c = c[offset:offset + length]

        return decompress(c)
943
943
    def delta(self, node):
        """return or calculate a delta between a node and its predecessor"""
        r = self.rev(node)
        # predecessor means the previous rev in revlog order, which is
        # not necessarily a parent of node
        return self.revdiff(r - 1, r)
948
948
949 def revdiff(self, rev1, rev2):
949 def revdiff(self, rev1, rev2):
950 """return or calculate a delta between two revisions"""
950 """return or calculate a delta between two revisions"""
951 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
951 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
952 return self.chunk(rev2)
952 return self.chunk(rev2)
953
953
954 return mdiff.textdiff(self.revision(self.node(rev1)),
954 return mdiff.textdiff(self.revision(self.node(rev1)),
955 self.revision(self.node(rev2)))
955 self.revision(self.node(rev2)))
956
956
    def revision(self, node):
        """return an uncompressed revision of a given node"""
        if node == nullid:
            return ""
        if self._cache and self._cache[0] == node:
            return str(self._cache[2])

        # look up what we need to read
        text = None
        rev = self.rev(node)
        base = self.base(rev)

        # check rev flags
        if self.index[rev][0] & 0xFFFF:
            raise RevlogError(_('incompatible revision flag %x') %
                              (self.index[rev][0] & 0xFFFF))

        df = None

        # do we have useful data cached?  a cached text between base and
        # rev can serve as an intermediate starting point for patching
        if self._cache and self._cache[1] >= base and self._cache[1] < rev:
            base = self._cache[1]
            text = str(self._cache[2])
            self._loadindex(base, rev + 1)
            if not self._inline and rev > base + 1:
                df = self.opener(self.datafile)
        else:
            self._loadindex(base, rev + 1)
            if not self._inline and rev > base:
                df = self.opener(self.datafile)
            text = self.chunk(base, df=df)

        # apply the remaining deltas of the chain on top of the base text
        bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
        text = mdiff.patches(text, bins)
        # verify the reconstruction against the node hash
        p1, p2 = self.parents(node)
        if node != hash(text, p1, p2):
            raise RevlogError(_("integrity check failed on %s:%d")
                              % (self.datafile, rev))

        self._cache = (node, rev, text)
        return text
998
998
999 def checkinlinesize(self, tr, fp=None):
999 def checkinlinesize(self, tr, fp=None):
1000 if not self._inline:
1000 if not self._inline:
1001 return
1001 return
1002 if not fp:
1002 if not fp:
1003 fp = self.opener(self.indexfile, 'r')
1003 fp = self.opener(self.indexfile, 'r')
1004 fp.seek(0, 2)
1004 fp.seek(0, 2)
1005 size = fp.tell()
1005 size = fp.tell()
1006 if size < 131072:
1006 if size < 131072:
1007 return
1007 return
1008 trinfo = tr.find(self.indexfile)
1008 trinfo = tr.find(self.indexfile)
1009 if trinfo == None:
1009 if trinfo == None:
1010 raise RevlogError(_("%s not found in the transaction")
1010 raise RevlogError(_("%s not found in the transaction")
1011 % self.indexfile)
1011 % self.indexfile)
1012
1012
1013 trindex = trinfo[2]
1013 trindex = trinfo[2]
1014 dataoff = self.start(trindex)
1014 dataoff = self.start(trindex)
1015
1015
1016 tr.add(self.datafile, dataoff)
1016 tr.add(self.datafile, dataoff)
1017 df = self.opener(self.datafile, 'w')
1017 df = self.opener(self.datafile, 'w')
1018 try:
1018 try:
1019 calc = self._io.size
1019 calc = self._io.size
1020 for r in self:
1020 for r in self:
1021 start = self.start(r) + (r + 1) * calc
1021 start = self.start(r) + (r + 1) * calc
1022 length = self.length(r)
1022 length = self.length(r)
1023 fp.seek(start)
1023 fp.seek(start)
1024 d = fp.read(length)
1024 d = fp.read(length)
1025 df.write(d)
1025 df.write(d)
1026 finally:
1026 finally:
1027 df.close()
1027 df.close()
1028
1028
1029 fp.close()
1029 fp.close()
1030 fp = self.opener(self.indexfile, 'w', atomictemp=True)
1030 fp = self.opener(self.indexfile, 'w', atomictemp=True)
1031 self.version &= ~(REVLOGNGINLINEDATA)
1031 self.version &= ~(REVLOGNGINLINEDATA)
1032 self._inline = False
1032 self._inline = False
1033 for i in self:
1033 for i in self:
1034 e = self._io.packentry(self.index[i], self.node, self.version, i)
1034 e = self._io.packentry(self.index[i], self.node, self.version, i)
1035 fp.write(e)
1035 fp.write(e)
1036
1036
1037 # if we don't call rename, the temp file will never replace the
1037 # if we don't call rename, the temp file will never replace the
1038 # real index
1038 # real index
1039 fp.rename()
1039 fp.rename()
1040
1040
1041 tr.replace(self.indexfile, trindex * calc)
1041 tr.replace(self.indexfile, trindex * calc)
1042 self._chunkcache = None
1042 self._chunkcache = None
1043
1043
    def addrevision(self, text, transaction, link, p1, p2, d=None):
        """add a revision to the log

        text - the revision data to add
        transaction - the transaction object used for rollback
        link - the linkrev data to add
        p1, p2 - the parent nodeids of the revision
        d - an optional precomputed delta

        returns the node of the added revision
        """
        dfh = None
        if not self._inline:
            # inline revlogs store data inside the index file, so a
            # separate data handle is only needed otherwise
            dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a+")
        try:
            return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
        finally:
            # always release the file handles, even if the write failed
            if dfh:
                dfh.close()
            ifh.close()
1063
1063
    def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
        # worker for addrevision: compute the delta (or full text),
        # append it to the data stream via the supplied file handles and
        # record the new index entry; returns the new node
        node = hash(text, p1, p2)
        if node in self.nodemap:
            # revision already stored: adding is idempotent
            return node

        curr = len(self)
        prev = curr - 1
        base = self.base(prev)
        offset = self.end(prev)

        if curr:
            # delta against the previous revision in revlog order
            if not d:
                ptext = self.revision(self.node(prev))
                d = mdiff.textdiff(ptext, text)
            data = compress(d)
            l = len(data[1]) + len(data[0])
            # dist: total chain size if this rev extends the current chain
            dist = l + offset - self.start(base)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not curr or dist > len(text) * 2:
            data = compress(text)
            l = len(data[1]) + len(data[0])
            base = curr

        # index entry: (offset/flags, packed len, text len, delta base,
        # linkrev, parent1 rev, parent2 rev, node)
        e = (offset_type(offset, 0), l, len(text),
             base, link, self.rev(p1), self.rev(p2), node)
        # insert before the final index slot — presumably a sentinel
        # entry kept at the end; confirm against the index class
        self.index.insert(-1, e)
        self.nodemap[node] = curr

        entry = self._io.packentry(e, self.node, self.version, curr)
        if not self._inline:
            transaction.add(self.datafile, offset)
            transaction.add(self.indexfile, curr * len(entry))
            if data[0]:
                dfh.write(data[0])
            dfh.write(data[1])
            dfh.flush()
            ifh.write(entry)
        else:
            # inline: entry and data interleaved in the index file
            offset += curr * self._io.size
            transaction.add(self.indexfile, offset, curr)
            ifh.write(entry)
            ifh.write(data[0])
            ifh.write(data[1])
            # may split the revlog into index + data files if too big
            self.checkinlinesize(transaction, ifh)

        self._cache = (node, curr, text)
        return node
1113
1113
1114 def ancestor(self, a, b):
1114 def ancestor(self, a, b):
1115 """calculate the least common ancestor of nodes a and b"""
1115 """calculate the least common ancestor of nodes a and b"""
1116
1116
1117 def parents(rev):
1117 def parents(rev):
1118 return [p for p in self.parentrevs(rev) if p != nullrev]
1118 return [p for p in self.parentrevs(rev) if p != nullrev]
1119
1119
1120 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1120 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1121 if c is None:
1121 if c is None:
1122 return nullid
1122 return nullid
1123
1123
1124 return self.node(c)
1124 return self.node(c)
1125
1125
1126 def group(self, nodelist, lookup, infocollect=None):
1126 def group(self, nodelist, lookup, infocollect=None):
1127 """calculate a delta group
1127 """calculate a delta group
1128
1128
1129 Given a list of changeset revs, return a set of deltas and
1129 Given a list of changeset revs, return a set of deltas and
1130 metadata corresponding to nodes. the first delta is
1130 metadata corresponding to nodes. the first delta is
1131 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1131 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1132 have this parent as it has all history before these
1132 have this parent as it has all history before these
1133 changesets. parent is parent[0]
1133 changesets. parent is parent[0]
1134 """
1134 """
1135 revs = [self.rev(n) for n in nodelist]
1135 revs = [self.rev(n) for n in nodelist]
1136
1136
1137 # if we don't have any revisions touched by these changesets, bail
1137 # if we don't have any revisions touched by these changesets, bail
1138 if not revs:
1138 if not revs:
1139 yield changegroup.closechunk()
1139 yield changegroup.closechunk()
1140 return
1140 return
1141
1141
1142 # add the parent of the first rev
1142 # add the parent of the first rev
1143 p = self.parents(self.node(revs[0]))[0]
1143 p = self.parents(self.node(revs[0]))[0]
1144 revs.insert(0, self.rev(p))
1144 revs.insert(0, self.rev(p))
1145
1145
1146 # build deltas
1146 # build deltas
1147 for d in xrange(0, len(revs) - 1):
1147 for d in xrange(0, len(revs) - 1):
1148 a, b = revs[d], revs[d + 1]
1148 a, b = revs[d], revs[d + 1]
1149 nb = self.node(b)
1149 nb = self.node(b)
1150
1150
1151 if infocollect is not None:
1151 if infocollect is not None:
1152 infocollect(nb)
1152 infocollect(nb)
1153
1153
1154 p = self.parents(nb)
1154 p = self.parents(nb)
1155 meta = nb + p[0] + p[1] + lookup(nb)
1155 meta = nb + p[0] + p[1] + lookup(nb)
1156 if a == -1:
1156 if a == -1:
1157 d = self.revision(nb)
1157 d = self.revision(nb)
1158 meta += mdiff.trivialdiffheader(len(d))
1158 meta += mdiff.trivialdiffheader(len(d))
1159 else:
1159 else:
1160 d = self.revdiff(a, b)
1160 d = self.revdiff(a, b)
1161 yield changegroup.chunkheader(len(meta) + len(d))
1161 yield changegroup.chunkheader(len(meta) + len(d))
1162 yield meta
1162 yield meta
1163 if len(d) > 2**20:
1163 if len(d) > 2**20:
1164 pos = 0
1164 pos = 0
1165 while pos < len(d):
1165 while pos < len(d):
1166 pos2 = pos + 2 ** 18
1166 pos2 = pos + 2 ** 18
1167 yield d[pos:pos2]
1167 yield d[pos:pos2]
1168 pos = pos2
1168 pos = pos2
1169 else:
1169 else:
1170 yield d
1170 yield d
1171
1171
1172 yield changegroup.closechunk()
1172 yield changegroup.closechunk()
1173
1173
1174 def addgroup(self, revs, linkmapper, transaction):
1174 def addgroup(self, revs, linkmapper, transaction):
1175 """
1175 """
1176 add a delta group
1176 add a delta group
1177
1177
1178 given a set of deltas, add them to the revision log. the
1178 given a set of deltas, add them to the revision log. the
1179 first delta is against its parent, which should be in our
1179 first delta is against its parent, which should be in our
1180 log, the rest are against the previous delta.
1180 log, the rest are against the previous delta.
1181 """
1181 """
1182
1182
1183 #track the base of the current delta log
1183 #track the base of the current delta log
1184 r = len(self)
1184 r = len(self)
1185 t = r - 1
1185 t = r - 1
1186 node = None
1186 node = None
1187
1187
1188 base = prev = nullrev
1188 base = prev = nullrev
1189 start = end = textlen = 0
1189 start = end = textlen = 0
1190 if r:
1190 if r:
1191 end = self.end(t)
1191 end = self.end(t)
1192
1192
1193 ifh = self.opener(self.indexfile, "a+")
1193 ifh = self.opener(self.indexfile, "a+")
1194 isize = r * self._io.size
1194 isize = r * self._io.size
1195 if self._inline:
1195 if self._inline:
1196 transaction.add(self.indexfile, end + isize, r)
1196 transaction.add(self.indexfile, end + isize, r)
1197 dfh = None
1197 dfh = None
1198 else:
1198 else:
1199 transaction.add(self.indexfile, isize, r)
1199 transaction.add(self.indexfile, isize, r)
1200 transaction.add(self.datafile, end)
1200 transaction.add(self.datafile, end)
1201 dfh = self.opener(self.datafile, "a")
1201 dfh = self.opener(self.datafile, "a")
1202
1202
1203 try:
1203 try:
1204 # loop through our set of deltas
1204 # loop through our set of deltas
1205 chain = None
1205 chain = None
1206 for chunk in revs:
1206 for chunk in revs:
1207 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1207 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1208 link = linkmapper(cs)
1208 link = linkmapper(cs)
1209 if node in self.nodemap:
1209 if node in self.nodemap:
1210 # this can happen if two branches make the same change
1210 # this can happen if two branches make the same change
1211 chain = node
1211 chain = node
1212 continue
1212 continue
1213 delta = buffer(chunk, 80)
1213 delta = buffer(chunk, 80)
1214 del chunk
1214 del chunk
1215
1215
1216 for p in (p1, p2):
1216 for p in (p1, p2):
1217 if not p in self.nodemap:
1217 if not p in self.nodemap:
1218 raise LookupError(p, self.indexfile, _('unknown parent'))
1218 raise LookupError(p, self.indexfile, _('unknown parent'))
1219
1219
1220 if not chain:
1220 if not chain:
1221 # retrieve the parent revision of the delta chain
1221 # retrieve the parent revision of the delta chain
1222 chain = p1
1222 chain = p1
1223 if not chain in self.nodemap:
1223 if not chain in self.nodemap:
1224 raise LookupError(chain, self.indexfile, _('unknown base'))
1224 raise LookupError(chain, self.indexfile, _('unknown base'))
1225
1225
1226 # full versions are inserted when the needed deltas become
1226 # full versions are inserted when the needed deltas become
1227 # comparable to the uncompressed text or when the previous
1227 # comparable to the uncompressed text or when the previous
1228 # version is not the one we have a delta against. We use
1228 # version is not the one we have a delta against. We use
1229 # the size of the previous full rev as a proxy for the
1229 # the size of the previous full rev as a proxy for the
1230 # current size.
1230 # current size.
1231
1231
1232 if chain == prev:
1232 if chain == prev:
1233 cdelta = compress(delta)
1233 cdelta = compress(delta)
1234 cdeltalen = len(cdelta[0]) + len(cdelta[1])
1234 cdeltalen = len(cdelta[0]) + len(cdelta[1])
1235 textlen = mdiff.patchedsize(textlen, delta)
1235 textlen = mdiff.patchedsize(textlen, delta)
1236
1236
1237 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1237 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1238 # flush our writes here so we can read it in revision
1238 # flush our writes here so we can read it in revision
1239 if dfh:
1239 if dfh:
1240 dfh.flush()
1240 dfh.flush()
1241 ifh.flush()
1241 ifh.flush()
1242 text = self.revision(chain)
1242 text = self.revision(chain)
1243 if len(text) == 0:
1243 if len(text) == 0:
1244 # skip over trivial delta header
1244 # skip over trivial delta header
1245 text = buffer(delta, 12)
1245 text = buffer(delta, 12)
1246 else:
1246 else:
1247 text = mdiff.patches(text, [delta])
1247 text = mdiff.patches(text, [delta])
1248 del delta
1248 del delta
1249 chk = self._addrevision(text, transaction, link, p1, p2, None,
1249 chk = self._addrevision(text, transaction, link, p1, p2, None,
1250 ifh, dfh)
1250 ifh, dfh)
1251 if not dfh and not self._inline:
1251 if not dfh and not self._inline:
1252 # addrevision switched from inline to conventional
1252 # addrevision switched from inline to conventional
1253 # reopen the index
1253 # reopen the index
1254 dfh = self.opener(self.datafile, "a")
1254 dfh = self.opener(self.datafile, "a")
1255 ifh = self.opener(self.indexfile, "a")
1255 ifh = self.opener(self.indexfile, "a")
1256 if chk != node:
1256 if chk != node:
1257 raise RevlogError(_("consistency error adding group"))
1257 raise RevlogError(_("consistency error adding group"))
1258 textlen = len(text)
1258 textlen = len(text)
1259 else:
1259 else:
1260 e = (offset_type(end, 0), cdeltalen, textlen, base,
1260 e = (offset_type(end, 0), cdeltalen, textlen, base,
1261 link, self.rev(p1), self.rev(p2), node)
1261 link, self.rev(p1), self.rev(p2), node)
1262 self.index.insert(-1, e)
1262 self.index.insert(-1, e)
1263 self.nodemap[node] = r
1263 self.nodemap[node] = r
1264 entry = self._io.packentry(e, self.node, self.version, r)
1264 entry = self._io.packentry(e, self.node, self.version, r)
1265 if self._inline:
1265 if self._inline:
1266 ifh.write(entry)
1266 ifh.write(entry)
1267 ifh.write(cdelta[0])
1267 ifh.write(cdelta[0])
1268 ifh.write(cdelta[1])
1268 ifh.write(cdelta[1])
1269 self.checkinlinesize(transaction, ifh)
1269 self.checkinlinesize(transaction, ifh)
1270 if not self._inline:
1270 if not self._inline:
1271 dfh = self.opener(self.datafile, "a")
1271 dfh = self.opener(self.datafile, "a")
1272 ifh = self.opener(self.indexfile, "a")
1272 ifh = self.opener(self.indexfile, "a")
1273 else:
1273 else:
1274 dfh.write(cdelta[0])
1274 dfh.write(cdelta[0])
1275 dfh.write(cdelta[1])
1275 dfh.write(cdelta[1])
1276 ifh.write(entry)
1276 ifh.write(entry)
1277
1277
1278 t, r, chain, prev = r, r + 1, node, node
1278 t, r, chain, prev = r, r + 1, node, node
1279 base = self.base(t)
1279 base = self.base(t)
1280 start = self.start(base)
1280 start = self.start(base)
1281 end = self.end(t)
1281 end = self.end(t)
1282 finally:
1282 finally:
1283 if dfh:
1283 if dfh:
1284 dfh.close()
1284 dfh.close()
1285 ifh.close()
1285 ifh.close()
1286
1286
1287 return node
1287 return node
1288
1288
1289 def strip(self, minlink):
1289 def strip(self, minlink):
1290 """truncate the revlog on the first revision with a linkrev >= minlink
1290 """truncate the revlog on the first revision with a linkrev >= minlink
1291
1291
1292 This function is called when we're stripping revision minlink and
1292 This function is called when we're stripping revision minlink and
1293 its descendants from the repository.
1293 its descendants from the repository.
1294
1294
1295 We have to remove all revisions with linkrev >= minlink, because
1295 We have to remove all revisions with linkrev >= minlink, because
1296 the equivalent changelog revisions will be renumbered after the
1296 the equivalent changelog revisions will be renumbered after the
1297 strip.
1297 strip.
1298
1298
1299 So we truncate the revlog on the first of these revisions, and
1299 So we truncate the revlog on the first of these revisions, and
1300 trust that the caller has saved the revisions that shouldn't be
1300 trust that the caller has saved the revisions that shouldn't be
1301 removed and that it'll readd them after this truncation.
1301 removed and that it'll readd them after this truncation.
1302 """
1302 """
1303 if len(self) == 0:
1303 if len(self) == 0:
1304 return
1304 return
1305
1305
1306 if isinstance(self.index, lazyindex):
1306 if isinstance(self.index, lazyindex):
1307 self._loadindexmap()
1307 self._loadindexmap()
1308
1308
1309 for rev in self:
1309 for rev in self:
1310 if self.index[rev][4] >= minlink:
1310 if self.index[rev][4] >= minlink:
1311 break
1311 break
1312 else:
1312 else:
1313 return
1313 return
1314
1314
1315 # first truncate the files on disk
1315 # first truncate the files on disk
1316 end = self.start(rev)
1316 end = self.start(rev)
1317 if not self._inline:
1317 if not self._inline:
1318 df = self.opener(self.datafile, "a")
1318 df = self.opener(self.datafile, "a")
1319 df.truncate(end)
1319 df.truncate(end)
1320 end = rev * self._io.size
1320 end = rev * self._io.size
1321 else:
1321 else:
1322 end += rev * self._io.size
1322 end += rev * self._io.size
1323
1323
1324 indexf = self.opener(self.indexfile, "a")
1324 indexf = self.opener(self.indexfile, "a")
1325 indexf.truncate(end)
1325 indexf.truncate(end)
1326
1326
1327 # then reset internal state in memory to forget those revisions
1327 # then reset internal state in memory to forget those revisions
1328 self._cache = None
1328 self._cache = None
1329 self._chunkcache = None
1329 self._chunkcache = None
1330 for x in xrange(rev, len(self)):
1330 for x in xrange(rev, len(self)):
1331 del self.nodemap[self.node(x)]
1331 del self.nodemap[self.node(x)]
1332
1332
1333 del self.index[rev:-1]
1333 del self.index[rev:-1]
1334
1334
1335 def checksize(self):
1335 def checksize(self):
1336 expected = 0
1336 expected = 0
1337 if len(self):
1337 if len(self):
1338 expected = max(0, self.end(len(self) - 1))
1338 expected = max(0, self.end(len(self) - 1))
1339
1339
1340 try:
1340 try:
1341 f = self.opener(self.datafile)
1341 f = self.opener(self.datafile)
1342 f.seek(0, 2)
1342 f.seek(0, 2)
1343 actual = f.tell()
1343 actual = f.tell()
1344 dd = actual - expected
1344 dd = actual - expected
1345 except IOError, inst:
1345 except IOError, inst:
1346 if inst.errno != errno.ENOENT:
1346 if inst.errno != errno.ENOENT:
1347 raise
1347 raise
1348 dd = 0
1348 dd = 0
1349
1349
1350 try:
1350 try:
1351 f = self.opener(self.indexfile)
1351 f = self.opener(self.indexfile)
1352 f.seek(0, 2)
1352 f.seek(0, 2)
1353 actual = f.tell()
1353 actual = f.tell()
1354 s = self._io.size
1354 s = self._io.size
1355 i = max(0, actual / s)
1355 i = max(0, actual / s)
1356 di = actual - (i * s)
1356 di = actual - (i * s)
1357 if self._inline:
1357 if self._inline:
1358 databytes = 0
1358 databytes = 0
1359 for r in self:
1359 for r in self:
1360 databytes += max(0, self.length(r))
1360 databytes += max(0, self.length(r))
1361 dd = 0
1361 dd = 0
1362 di = actual - len(self) * s - databytes
1362 di = actual - len(self) * s - databytes
1363 except IOError, inst:
1363 except IOError, inst:
1364 if inst.errno != errno.ENOENT:
1364 if inst.errno != errno.ENOENT:
1365 raise
1365 raise
1366 di = 0
1366 di = 0
1367
1367
1368 return (dd, di)
1368 return (dd, di)
1369
1369
1370 def files(self):
1370 def files(self):
1371 res = [ self.indexfile ]
1371 res = [ self.indexfile ]
1372 if not self._inline:
1372 if not self._inline:
1373 res.append(self.datafile)
1373 res.append(self.datafile)
1374 return res
1374 return res
@@ -1,238 +1,239 b''
1 # verify.py - repository integrity checking for Mercurial
1 # verify.py - repository integrity checking for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import revlog, util
10 import revlog, util
11
11
12 def verify(repo):
12 def verify(repo):
13 lock = repo.lock()
13 lock = repo.lock()
14 try:
14 try:
15 return _verify(repo)
15 return _verify(repo)
16 finally:
16 finally:
17 del lock
17 del lock
18
18
19 def _verify(repo):
19 def _verify(repo):
20 mflinkrevs = {}
20 mflinkrevs = {}
21 filelinkrevs = {}
21 filelinkrevs = {}
22 filenodes = {}
22 filenodes = {}
23 revisions = 0
23 revisions = 0
24 badrevs = {}
24 badrevs = {}
25 errors = [0]
25 errors = [0]
26 warnings = [0]
26 warnings = [0]
27 ui = repo.ui
27 ui = repo.ui
28 cl = repo.changelog
28 cl = repo.changelog
29 mf = repo.manifest
29 mf = repo.manifest
30
30
31 if not repo.cancopy():
31 if not repo.cancopy():
32 raise util.Abort(_("cannot verify bundle or remote repos"))
32 raise util.Abort(_("cannot verify bundle or remote repos"))
33
33
34 def err(linkrev, msg, filename=None):
34 def err(linkrev, msg, filename=None):
35 if linkrev != None:
35 if linkrev != None:
36 badrevs[linkrev] = True
36 badrevs[linkrev] = True
37 else:
37 else:
38 linkrev = '?'
38 linkrev = '?'
39 msg = "%s: %s" % (linkrev, msg)
39 msg = "%s: %s" % (linkrev, msg)
40 if filename:
40 if filename:
41 msg = "%s@%s" % (filename, msg)
41 msg = "%s@%s" % (filename, msg)
42 ui.warn(" " + msg + "\n")
42 ui.warn(" " + msg + "\n")
43 errors[0] += 1
43 errors[0] += 1
44
44
45 def exc(linkrev, msg, inst, filename=None):
45 def exc(linkrev, msg, inst, filename=None):
46 if isinstance(inst, KeyboardInterrupt):
46 if isinstance(inst, KeyboardInterrupt):
47 ui.warn(_("interrupted"))
47 ui.warn(_("interrupted"))
48 raise
48 raise
49 err(linkrev, "%s: %s" % (msg, inst), filename)
49 err(linkrev, "%s: %s" % (msg, inst), filename)
50
50
51 def warn(msg):
51 def warn(msg):
52 ui.warn(msg + "\n")
52 ui.warn(msg + "\n")
53 warnings[0] += 1
53 warnings[0] += 1
54
54
55 def checklog(obj, name):
55 def checklog(obj, name):
56 if not len(obj) and (havecl or havemf):
56 if not len(obj) and (havecl or havemf):
57 err(0, _("empty or missing %s") % name)
57 err(0, _("empty or missing %s") % name)
58 return
58 return
59
59
60 d = obj.checksize()
60 d = obj.checksize()
61 if d[0]:
61 if d[0]:
62 err(None, _("data length off by %d bytes") % d[0], name)
62 err(None, _("data length off by %d bytes") % d[0], name)
63 if d[1]:
63 if d[1]:
64 err(None, _("index contains %d extra bytes") % d[1], name)
64 err(None, _("index contains %d extra bytes") % d[1], name)
65
65
66 if obj.version != revlog.REVLOGV0:
66 if obj.version != revlog.REVLOGV0:
67 if not revlogv1:
67 if not revlogv1:
68 warn(_("warning: `%s' uses revlog format 1") % name)
68 warn(_("warning: `%s' uses revlog format 1") % name)
69 elif revlogv1:
69 elif revlogv1:
70 warn(_("warning: `%s' uses revlog format 0") % name)
70 warn(_("warning: `%s' uses revlog format 0") % name)
71
71
72 def checkentry(obj, i, node, seen, linkrevs, f):
72 def checkentry(obj, i, node, seen, linkrevs, f):
73 lr = obj.linkrev(node)
73 lr = obj.linkrev(obj.rev(node))
74 if lr < 0 or (havecl and lr not in linkrevs):
74 if lr < 0 or (havecl and lr not in linkrevs):
75 t = "unexpected"
75 t = "unexpected"
76 if lr < 0 or lr >= len(cl):
76 if lr < 0 or lr >= len(cl):
77 t = "nonexistent"
77 t = "nonexistent"
78 err(None, _("rev %d point to %s changeset %d") % (i, t, lr), f)
78 err(None, _("rev %d point to %s changeset %d") % (i, t, lr), f)
79 if linkrevs:
79 if linkrevs:
80 warn(_(" (expected %s)") % " ".join(map(str,linkrevs)))
80 warn(_(" (expected %s)") % " ".join(map(str,linkrevs)))
81 lr = None # can't be trusted
81 lr = None # can't be trusted
82
82
83 try:
83 try:
84 p1, p2 = obj.parents(node)
84 p1, p2 = obj.parents(node)
85 if p1 not in seen and p1 != nullid:
85 if p1 not in seen and p1 != nullid:
86 err(lr, _("unknown parent 1 %s of %s") %
86 err(lr, _("unknown parent 1 %s of %s") %
87 (short(p1), short(n)), f)
87 (short(p1), short(n)), f)
88 if p2 not in seen and p2 != nullid:
88 if p2 not in seen and p2 != nullid:
89 err(lr, _("unknown parent 2 %s of %s") %
89 err(lr, _("unknown parent 2 %s of %s") %
90 (short(p2), short(p1)), f)
90 (short(p2), short(p1)), f)
91 except Exception, inst:
91 except Exception, inst:
92 exc(lr, _("checking parents of %s") % short(node), inst, f)
92 exc(lr, _("checking parents of %s") % short(node), inst, f)
93
93
94 if node in seen:
94 if node in seen:
95 err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f)
95 err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f)
96 seen[n] = i
96 seen[n] = i
97 return lr
97 return lr
98
98
99 revlogv1 = cl.version != revlog.REVLOGV0
99 revlogv1 = cl.version != revlog.REVLOGV0
100 if ui.verbose or not revlogv1:
100 if ui.verbose or not revlogv1:
101 ui.status(_("repository uses revlog format %d\n") %
101 ui.status(_("repository uses revlog format %d\n") %
102 (revlogv1 and 1 or 0))
102 (revlogv1 and 1 or 0))
103
103
104 havecl = len(cl) > 0
104 havecl = len(cl) > 0
105 havemf = len(mf) > 0
105 havemf = len(mf) > 0
106
106
107 ui.status(_("checking changesets\n"))
107 ui.status(_("checking changesets\n"))
108 seen = {}
108 seen = {}
109 checklog(cl, "changelog")
109 checklog(cl, "changelog")
110 for i in repo:
110 for i in repo:
111 n = cl.node(i)
111 n = cl.node(i)
112 checkentry(cl, i, n, seen, [i], "changelog")
112 checkentry(cl, i, n, seen, [i], "changelog")
113
113
114 try:
114 try:
115 changes = cl.read(n)
115 changes = cl.read(n)
116 mflinkrevs.setdefault(changes[0], []).append(i)
116 mflinkrevs.setdefault(changes[0], []).append(i)
117 for f in changes[3]:
117 for f in changes[3]:
118 filelinkrevs.setdefault(f, []).append(i)
118 filelinkrevs.setdefault(f, []).append(i)
119 except Exception, inst:
119 except Exception, inst:
120 exc(i, _("unpacking changeset %s") % short(n), inst)
120 exc(i, _("unpacking changeset %s") % short(n), inst)
121
121
122 ui.status(_("checking manifests\n"))
122 ui.status(_("checking manifests\n"))
123 seen = {}
123 seen = {}
124 checklog(mf, "manifest")
124 checklog(mf, "manifest")
125 for i in mf:
125 for i in mf:
126 n = mf.node(i)
126 n = mf.node(i)
127 lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest")
127 lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest")
128 if n in mflinkrevs:
128 if n in mflinkrevs:
129 del mflinkrevs[n]
129 del mflinkrevs[n]
130
130
131 try:
131 try:
132 for f, fn in mf.readdelta(n).iteritems():
132 for f, fn in mf.readdelta(n).iteritems():
133 if not f:
133 if not f:
134 err(lr, _("file without name in manifest"))
134 err(lr, _("file without name in manifest"))
135 elif f != "/dev/null":
135 elif f != "/dev/null":
136 fns = filenodes.setdefault(f, {})
136 fns = filenodes.setdefault(f, {})
137 if fn not in fns:
137 if fn not in fns:
138 fns[fn] = n
138 fns[fn] = i
139 except Exception, inst:
139 except Exception, inst:
140 exc(lr, _("reading manifest delta %s") % short(n), inst)
140 exc(lr, _("reading manifest delta %s") % short(n), inst)
141
141
142 ui.status(_("crosschecking files in changesets and manifests\n"))
142 ui.status(_("crosschecking files in changesets and manifests\n"))
143
143
144 if havemf:
144 if havemf:
145 for c, m in util.sort([(c, m) for m in mflinkrevs for c in mflinkrevs[m]]):
145 for c, m in util.sort([(c, m) for m in mflinkrevs for c in mflinkrevs[m]]):
146 err(c, _("changeset refers to unknown manifest %s") % short(m))
146 err(c, _("changeset refers to unknown manifest %s") % short(m))
147 del mflinkrevs
147 del mflinkrevs
148
148
149 for f in util.sort(filelinkrevs):
149 for f in util.sort(filelinkrevs):
150 if f not in filenodes:
150 if f not in filenodes:
151 lr = filelinkrevs[f][0]
151 lr = filelinkrevs[f][0]
152 err(lr, _("in changeset but not in manifest"), f)
152 err(lr, _("in changeset but not in manifest"), f)
153
153
154 if havecl:
154 if havecl:
155 for f in util.sort(filenodes):
155 for f in util.sort(filenodes):
156 if f not in filelinkrevs:
156 if f not in filelinkrevs:
157 try:
157 try:
158 lr = min([repo.file(f).linkrev(n) for n in filenodes[f]])
158 fl = repo.file(f)
159 lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
159 except:
160 except:
160 lr = None
161 lr = None
161 err(lr, _("in manifest but not in changeset"), f)
162 err(lr, _("in manifest but not in changeset"), f)
162
163
163 ui.status(_("checking files\n"))
164 ui.status(_("checking files\n"))
164
165
165 storefiles = {}
166 storefiles = {}
166 for f, f2, size in repo.store.datafiles():
167 for f, f2, size in repo.store.datafiles():
167 if not f:
168 if not f:
168 err(None, _("cannot decode filename '%s'") % f2)
169 err(None, _("cannot decode filename '%s'") % f2)
169 elif size > 0:
170 elif size > 0:
170 storefiles[f] = True
171 storefiles[f] = True
171
172
172 files = util.sort(util.unique(filenodes.keys() + filelinkrevs.keys()))
173 files = util.sort(util.unique(filenodes.keys() + filelinkrevs.keys()))
173 for f in files:
174 for f in files:
174 fl = repo.file(f)
175 fl = repo.file(f)
175
176
176 for ff in fl.files():
177 for ff in fl.files():
177 try:
178 try:
178 del storefiles[ff]
179 del storefiles[ff]
179 except KeyError:
180 except KeyError:
180 err(0, _("missing revlog!"), ff)
181 err(0, _("missing revlog!"), ff)
181
182
182 checklog(fl, f)
183 checklog(fl, f)
183 seen = {}
184 seen = {}
184 for i in fl:
185 for i in fl:
185 revisions += 1
186 revisions += 1
186 n = fl.node(i)
187 n = fl.node(i)
187 lr = checkentry(fl, i, n, seen, filelinkrevs.get(f, []), f)
188 lr = checkentry(fl, i, n, seen, filelinkrevs.get(f, []), f)
188 if f in filenodes:
189 if f in filenodes:
189 if havemf and n not in filenodes[f]:
190 if havemf and n not in filenodes[f]:
190 err(lr, _("%s not in manifests") % (short(n)), f)
191 err(lr, _("%s not in manifests") % (short(n)), f)
191 else:
192 else:
192 del filenodes[f][n]
193 del filenodes[f][n]
193
194
194 # verify contents
195 # verify contents
195 try:
196 try:
196 t = fl.read(n)
197 t = fl.read(n)
197 rp = fl.renamed(n)
198 rp = fl.renamed(n)
198 if len(t) != fl.size(i):
199 if len(t) != fl.size(i):
199 if not fl._readmeta(n): # ancient copy?
200 if not fl._readmeta(n): # ancient copy?
200 err(lr, _("unpacked size is %s, %s expected") %
201 err(lr, _("unpacked size is %s, %s expected") %
201 (len(t), fl.size(i)), f)
202 (len(t), fl.size(i)), f)
202 except Exception, inst:
203 except Exception, inst:
203 exc(lr, _("unpacking %s") % short(n), inst, f)
204 exc(lr, _("unpacking %s") % short(n), inst, f)
204
205
205 # check renames
206 # check renames
206 try:
207 try:
207 if rp:
208 if rp:
208 fl2 = repo.file(rp[0])
209 fl2 = repo.file(rp[0])
209 if not len(fl2):
210 if not len(fl2):
210 err(lr, _("empty or missing copy source revlog %s:%s")
211 err(lr, _("empty or missing copy source revlog %s:%s")
211 % (rp[0], short(rp[1])), f)
212 % (rp[0], short(rp[1])), f)
212 elif rp[1] == nullid:
213 elif rp[1] == nullid:
213 warn(_("warning: %s@%s: copy source revision is nullid %s:%s")
214 warn(_("warning: %s@%s: copy source revision is nullid %s:%s")
214 % (f, lr, rp[0], short(rp[1])))
215 % (f, lr, rp[0], short(rp[1])))
215 else:
216 else:
216 rev = fl2.rev(rp[1])
217 rev = fl2.rev(rp[1])
217 except Exception, inst:
218 except Exception, inst:
218 exc(lr, _("checking rename of %s") % short(n), inst, f)
219 exc(lr, _("checking rename of %s") % short(n), inst, f)
219
220
220 # cross-check
221 # cross-check
221 if f in filenodes:
222 if f in filenodes:
222 fns = [(mf.linkrev(l), n) for n,l in filenodes[f].items()]
223 fns = [(mf.linkrev(l), n) for n,l in filenodes[f].items()]
223 for lr, node in util.sort(fns):
224 for lr, node in util.sort(fns):
224 err(lr, _("%s in manifests not found") % short(node), f)
225 err(lr, _("%s in manifests not found") % short(node), f)
225
226
226 for f in storefiles:
227 for f in storefiles:
227 warn(_("warning: orphan revlog '%s'") % f)
228 warn(_("warning: orphan revlog '%s'") % f)
228
229
229 ui.status(_("%d files, %d changesets, %d total revisions\n") %
230 ui.status(_("%d files, %d changesets, %d total revisions\n") %
230 (len(files), len(cl), revisions))
231 (len(files), len(cl), revisions))
231 if warnings[0]:
232 if warnings[0]:
232 ui.warn(_("%d warnings encountered!\n") % warnings[0])
233 ui.warn(_("%d warnings encountered!\n") % warnings[0])
233 if errors[0]:
234 if errors[0]:
234 ui.warn(_("%d integrity errors encountered!\n") % errors[0])
235 ui.warn(_("%d integrity errors encountered!\n") % errors[0])
235 if badrevs:
236 if badrevs:
236 ui.warn(_("(first damaged changeset appears to be %d)\n")
237 ui.warn(_("(first damaged changeset appears to be %d)\n")
237 % min(badrevs))
238 % min(badrevs))
238 return 1
239 return 1
General Comments 0
You need to be logged in to leave comments. Login now