##// END OF EJS Templates
Merge with crew
Bryan O'Sullivan -
r6151:8bc4fe42 merge default
parent child Browse files
Show More
@@ -1,1163 +1,1163 b''
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from node import *
from i18n import _
import os, sys, bisect, stat
import mdiff, bdiff, util, templater, templatefilters, patch, errno

# Separator used in revision range specifications, e.g. "1:5".
revrangesep = ':'

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def findpossible(ui, cmd, table):
    """Return cmd -> (aliases, command table entry) for each matching command.

    Return debug commands (or their aliases) only if no normal command
    matches.
    """
    choice = {}
    debugchoice = {}
    # Iterate the table directly; nothing here mutates it, so there is no
    # need to materialize the key list first.
    for e in table:
        aliases = e.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        elif not ui.config("ui", "strict"):
            # Unless strict matching is configured, accept any prefix of
            # an alias as a match.
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            # Keep debug commands apart so they never shadow normal ones.
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice

def findcmd(ui, cmd, table):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand if several commands match the (prefix) string,
    UnknownCommand if none does.
    """
    choice = findpossible(ui, cmd, table)

    # An exact match always wins over prefix matches.
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        # Report the candidates in a stable, sorted order.
        raise AmbiguousCommand(cmd, sorted(choice))

    if choice:
        # Exactly one prefix match left.
        return list(choice.values())[0]

    raise UnknownCommand(cmd)

def bail_if_changed(repo):
    """Abort if the working directory has an uncommitted merge or changes."""
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    # status()[:4] is (modified, added, removed, deleted); any non-empty
    # list means there is local work that would be lost.
    if any(repo.status()[:4]):
        raise util.Abort(_("outstanding uncommitted changes"))

def logmessage(opts):
    """Get the commit message from the -m/--message or -l/--logfile option.

    Returns the message string (possibly empty if neither option was set).
    Raises util.Abort if both options are given or the log file cannot be
    read.
    """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                # By convention '-' means "read the message from stdin".
                message = sys.stdin.read()
            else:
                # Close the file explicitly instead of leaking the handle
                # until garbage collection.
                fp = open(logfile)
                try:
                    message = fp.read()
                finally:
                    fp.close()
        except IOError as inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def setremoteconfig(ui, opts):
    """Copy the --ssh/--remotecmd command line options into the ui config."""
    # Both options map 1:1 onto keys in the [ui] config section.
    for opt in ('ssh', 'remotecmd'):
        val = opts.get(opt)
        if val:
            ui.setconfig("ui", opt, val)

def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        # Substitute defval for an empty spec (an explicit "0" is kept).
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        # No revisions given: first parent of the working directory.
        return repo.dirstate.parents()[0], None

    end = None
    nrevs = len(revs)
    if nrevs == 1:
        spec = revs[0]
        if revrangesep in spec:
            # "a:b" form; empty ends default to 0 / tip.
            first, last = spec.split(revrangesep, 1)
            start = revfix(repo, first, 0)
            end = revfix(repo, last, repo.changelog.count() - 1)
        else:
            start = revfix(repo, spec, None)
    elif nrevs == 2:
        # Two separate revisions; range syntax is not allowed here.
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end

def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # Substitute defval for an empty spec (an explicit "0" is kept).
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen = {}
    result = []

    def add(rev):
        # Skip duplicates while preserving first-seen order.
        if rev not in seen:
            seen[rev] = 1
            result.append(rev)

    for spec in revs:
        if revrangesep in spec:
            # "a:b" range; may run backwards, hence the signed step.
            first, last = spec.split(revrangesep, 1)
            first = revfix(repo, first, 0)
            last = revfix(repo, last, repo.changelog.count() - 1)
            step = first > last and -1 or 1
            for rev in xrange(first, last + step, step):
                add(rev)
        else:
            add(revfix(repo, spec, None))

    return result

def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand format characters in an output file name pattern.

    Supported specs: %% literal percent, %b basename of repo root, and
    (when the corresponding argument is given) %H hex node, %h short node,
    %R/%r changelog revision (%r zero-padded to revwidth), %N total,
    %n seqno (zero-padded when total is also known), %s/%d/%p basename/
    dirname/full pathname.

    Raises util.Abort on an unknown format character.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        # The original code tested "if node:" twice in a row; merged here.
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # With both known, pad the sequence number to the total's width.
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # NOTE(review): a trailing lone '%' raises IndexError here
                # rather than Abort — pre-existing behavior, left unchanged.
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name") %
                         inst.args[0])

def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return a file-like object for the expanded output pattern pat.

    An empty pattern or '-' maps to stdout/stdin depending on mode; an
    object that already supports the requested direction is passed
    through; otherwise the pattern is expanded via make_filename() and
    opened with the given mode.
    """
    writing = 'w' in mode
    if not pat or pat == '-':
        return writing and sys.stdout or sys.stdin
    if writing and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    fname = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(fname, mode)

def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
    """Build a cmdmatcher for pats, relative to the repo's current dir."""
    return util.cmdmatcher(repo.root, repo.getcwd(), pats or [],
                           opts.get('include'), opts.get('exclude'),
                           globbed=globbed, default=default)

def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
         default=None):
    """Yield (src, abspath, relpath, exact) for files matching pats."""
    files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
                                        default=default)
    # Files listed explicitly on the command line count as exact matches.
    exact = dict.fromkeys(files)
    cwd = repo.getcwd()
    walker = repo.walk(node=node, files=files, match=matchfn,
                       badmatch=badmatch)
    for src, fn in walker:
        yield src, fn, repo.pathto(fn, cwd), fn in exact

def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples

    For each added file, find the removed file it most resembles (byte
    similarity >= threshold) and yield the pair with its score.
    '''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo.changectx()
    for a in added:
        aa = repo.wread(a)
        # Hoisted out of the inner loop: the line split of the added file
        # does not depend on which removed file it is compared against.
        alines = mdiff.splitnewlines(aa)
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            matches = bdiff.blocks(aa, rr)
            for x1, x2, y1, y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            lengths = len(aa) + len(rr)
            if lengths:
                myscore = equal * 2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore

def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal.

    With similarity > 0, also record removals whose content closely
    matches an added file as renames.
    """
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    mapping = {}

    def record(lst, msg, abs, rel, exact):
        # Track the file and report it unless it was named explicitly
        # (and we are not in verbose mode).
        lst.append(abs)
        mapping[abs] = rel, exact
        if repo.ui.verbose or not exact:
            repo.ui.status(msg % ((pats and rel) or abs))

    for src, abs, rel, exact in walk(repo, pats, opts):
        target = repo.wjoin(abs)
        if src == 'f' and abs not in repo.dirstate:
            record(add, _('adding %s\n'), abs, rel, exact)
        if repo.dirstate[abs] != 'r' and (not util.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            record(remove, _('removing %s\n'), abs, rel, exact)
    if not dry_run:
        repo.remove(remove)
        repo.add(add)
    if similarity > 0:
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)

def copy(ui, repo, pats, opts, rename=False):
    '''Copy (or, with rename=True, move) files matching pats.

    The last element of pats is the destination; the rest are sources.
    Returns the number of files that could not be copied (0 on success).
    '''
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")

    def walkpat(pat):
        # Collect copyable sources for one pattern; unmanaged or
        # removed files are skipped (with a warning for exact matches).
        srcs = []
        for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
            state = repo.dirstate[abs]
            if state in '?r':
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # Copy one file and update the dirstate; returns True on failure.
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.exists(target)
        if (not after and exists or after and state in 'mn'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            # --after only records copies that already happened on disk.
            if not exists:
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                return True # report a failure

        if ui.verbose or not exact:
            action = rename and "moving" or "copying"
            ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        origsrc = repo.dirstate.copied(abssrc) or abssrc
        if abstarget == origsrc: # copying back a copy?
            if state not in 'mn' and not dryrun:
                repo.dirstate.normallookup(abstarget)
        else:
            if repo.dirstate[origsrc] == 'a':
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if abstarget not in repo.dirstate and not dryrun:
                    repo.add([abstarget])
            elif not dryrun:
                repo.copy(origsrc, abstarget)

        if rename and not dryrun:
            repo.remove([abssrc], True)

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # Count how many stripped source paths already exist
                    # under dest; the better strip length wins.
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if not destdirexists:
        if len(pats) > 1 or util.patkind(pats[0], None)[0]:
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors

def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.'''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # First pass (parent): re-spawn ourselves with --daemon-pipefds
        # and wait on the pipe until the child signals it is ready.
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        # Don't pass --cwd to the child process, because we've already
        # changed directory.
        for i in xrange(1,len(args)):
            if args[i].startswith('--cwd='):
                del args[i]
                break
            elif args[i].startswith('--cwd'):
                # Separate-argument form: drop the flag and its value.
                del args[i:i+2]
                break
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        # Blocks until the child writes its ready byte (or exits).
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # Second pass (child): detach and tell the parent we are ready.
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            # os.setsid is not available on all platforms; presumably
            # detaching is simply skipped there.
            pass
        # 'y' is the ready signal the parent's os.read() is waiting for.
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        # Redirect stdin/stdout/stderr to the null device.
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        # ui: output/config object (supplies write/pushbuffer/popbuffer,
        #     quiet/verbose/debugflag)
        # repo: local repository; its changelog is read in _show()
        # patch: a file-match function to diff against, or a false value
        #     when no patch output was requested
        # buffered: when true, per-revision output is accumulated in
        #     self.hunk/self.header and emitted later via flush() -- used
        #     when revisions are visited out of display order
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.patch = patch
        self.header = {}    # rev -> rendered header text (buffered mode)
        self.hunk = {}      # rev -> rendered changeset text (buffered mode)
        self.lastheader = None  # last header written, to suppress repeats

    def flush(self, rev):
        """Emit buffered output for rev; return 1 if a hunk was written."""
        if rev in self.header:
            h = self.header[rev]
            # only write a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        """Render one changeset, either directly or into the buffer."""
        if self.buffered:
            # capture _show()'s output so flush() can emit it in order
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # accept either a rev number or a changelog node; derive the other
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            # quiet mode: just "rev:shortnode"
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")
        # NOTE(review): extra.get("branch") can be None for changesets
        # recorded without a branch entry; confirm util.tolocal() below
        # tolerates that before relying on it.

        # full 40-char hashes in debug mode, short hashes otherwise
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            # debug: break the file list into modified/added/removed
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            # verbose (non-debug): one combined "files:" line
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            # deterministic order for the extra dict
            extraitems = extra.items()
            extraitems.sort()
            for key, value in extraitems:
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                # first line of the description only
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        """Write the diff against node's first parent, if --patch was given."""
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                # linear history: the parent is implied, hide it
                parents = []
            else:
                parents = [parents[0]]
        return parents
669
669
670
670
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        # mapfile: path to a template map file (style); may be None when a
        # template string will be installed later via use_template()
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        filters = templatefilters.filters.copy()
        # full node in debug mode, first 12 hex chars otherwise
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        # pre-seed the template cache with defaults used by the built-in
        # keywords below
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # accept either a rev number or a changelog node; derive the other
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                # no per-item template: emit the raw values
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                # merge v into the template arguments: dict-like update,
                # else a sequence of (key, value) pairs, else a plain value
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                # NOTE(review): pop() mutates the caller's list -- confirm
                # no caller reuses `values` after showlist().
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

        def showbranches(**args):
            # the default branch name is suppressed (returns None)
            branch = changes[5].get("branch")
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            # sort for deterministic output
            extras = changes[5].items()
            extras.sort()
            for key, value in extras:
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        # lazily computed (modified, added, removed) lists, fetched at most
        # once per changeset via repo.status()
        files = []
        def getfiles():
            if not files:
                files[:] = self.repo.status(
                    log.parents(changenode)[0], changenode)[:3]
            return files
        def showfiles(**args):
            return showlist('file', changes[3], **args)
        def showmods(**args):
            return showlist('file_mod', getfiles()[0], **args)
        def showadds(**args):
            return showlist('file_add', getfiles()[1], **args)
        def showdels(**args):
            return showlist('file_del', getfiles()[2], **args)
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                             node=hex(changes[0])))
            return self.t('manifest', **args)

        # keywords available to templates; callables are expanded lazily
        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'file_mods': showmods,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        # caller-supplied props take effect but defaults win on conflict
        props = props.copy()
        props.update(defprops)

        try:
            # pick the most specific header template present in the map
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    # emitted later by changeset_printer.flush()
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            # pick the most specific changeset template; 'changeset' is the
            # required fallback
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            # template referenced an unknown keyword
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
858
858
859 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
859 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
860 """show one changeset using template or regular display.
860 """show one changeset using template or regular display.
861
861
862 Display format will be the first non-empty hit of:
862 Display format will be the first non-empty hit of:
863 1. option 'template'
863 1. option 'template'
864 2. option 'style'
864 2. option 'style'
865 3. [ui] setting 'logtemplate'
865 3. [ui] setting 'logtemplate'
866 4. [ui] setting 'style'
866 4. [ui] setting 'style'
867 If all of these values are either the unset or the empty string,
867 If all of these values are either the unset or the empty string,
868 regular display via changeset_printer() is done.
868 regular display via changeset_printer() is done.
869 """
869 """
870 # options
870 # options
871 patch = False
871 patch = False
872 if opts.get('patch'):
872 if opts.get('patch'):
873 patch = matchfn or util.always
873 patch = matchfn or util.always
874
874
875 tmpl = opts.get('template')
875 tmpl = opts.get('template')
876 mapfile = None
876 mapfile = None
877 if tmpl:
877 if tmpl:
878 tmpl = templater.parsestring(tmpl, quoted=False)
878 tmpl = templater.parsestring(tmpl, quoted=False)
879 else:
879 else:
880 mapfile = opts.get('style')
880 mapfile = opts.get('style')
881 # ui settings
881 # ui settings
882 if not mapfile:
882 if not mapfile:
883 tmpl = ui.config('ui', 'logtemplate')
883 tmpl = ui.config('ui', 'logtemplate')
884 if tmpl:
884 if tmpl:
885 tmpl = templater.parsestring(tmpl)
885 tmpl = templater.parsestring(tmpl)
886 else:
886 else:
887 mapfile = ui.config('ui', 'style')
887 mapfile = ui.config('ui', 'style')
888
888
889 if tmpl or mapfile:
889 if tmpl or mapfile:
890 if mapfile:
890 if mapfile:
891 if not os.path.split(mapfile)[0]:
891 if not os.path.split(mapfile)[0]:
892 mapname = (templater.templatepath('map-cmdline.' + mapfile)
892 mapname = (templater.templatepath('map-cmdline.' + mapfile)
893 or templater.templatepath(mapfile))
893 or templater.templatepath(mapfile))
894 if mapname: mapfile = mapname
894 if mapname: mapfile = mapname
895 try:
895 try:
896 t = changeset_templater(ui, repo, patch, mapfile, buffered)
896 t = changeset_templater(ui, repo, patch, mapfile, buffered)
897 except SyntaxError, inst:
897 except SyntaxError, inst:
898 raise util.Abort(inst.args[0])
898 raise util.Abort(inst.args[0])
899 if tmpl: t.use_template(tmpl)
899 if tmpl: t.use_template(tmpl)
900 return t
900 return t
901 return changeset_printer(ui, repo, patch, buffered)
901 return changeset_printer(ui, repo, patch, buffered)
902
902
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    dateok = util.matchdate(date)
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
    # revs whose commit date satisfies the spec, gathered during the
    # out-of-order "add" pass; the in-order "iter" pass then reports the
    # first (tipmost) hit
    matched = {}
    for state, rev, fns in changeiter:
        if state == 'add':
            when = get(rev)[2]
            if dateok(when[0]):
                matched[rev] = when
        elif state == 'iter' and rev in matched:
            ui.status("Found revision %s from %s\n" %
                      (rev, util.datestr(matched[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))
921
921
922 def walkchangerevs(ui, repo, pats, change, opts):
922 def walkchangerevs(ui, repo, pats, change, opts):
923 '''Iterate over files and the revs they changed in.
923 '''Iterate over files and the revs they changed in.
924
924
925 Callers most commonly need to iterate backwards over the history
925 Callers most commonly need to iterate backwards over the history
926 it is interested in. Doing so has awful (quadratic-looking)
926 it is interested in. Doing so has awful (quadratic-looking)
927 performance, so we use iterators in a "windowed" way.
927 performance, so we use iterators in a "windowed" way.
928
928
929 We walk a window of revisions in the desired order. Within the
929 We walk a window of revisions in the desired order. Within the
930 window, we first walk forwards to gather data, then in the desired
930 window, we first walk forwards to gather data, then in the desired
931 order (usually backwards) to display it.
931 order (usually backwards) to display it.
932
932
933 This function returns an (iterator, matchfn) tuple. The iterator
933 This function returns an (iterator, matchfn) tuple. The iterator
934 yields 3-tuples. They will be of one of the following forms:
934 yields 3-tuples. They will be of one of the following forms:
935
935
936 "window", incrementing, lastrev: stepping through a window,
936 "window", incrementing, lastrev: stepping through a window,
937 positive if walking forwards through revs, last rev in the
937 positive if walking forwards through revs, last rev in the
938 sequence iterated over - use to reset state for the current window
938 sequence iterated over - use to reset state for the current window
939
939
940 "add", rev, fns: out-of-order traversal of the given file names
940 "add", rev, fns: out-of-order traversal of the given file names
941 fns, which changed during revision rev - use to gather data for
941 fns, which changed during revision rev - use to gather data for
942 possible display
942 possible display
943
943
944 "iter", rev, None: in-order traversal of the revs earlier iterated
944 "iter", rev, None: in-order traversal of the revs earlier iterated
945 over with "add" - use to display data'''
945 over with "add" - use to display data'''
946
946
947 def increasing_windows(start, end, windowsize=8, sizelimit=512):
947 def increasing_windows(start, end, windowsize=8, sizelimit=512):
948 if start < end:
948 if start < end:
949 while start < end:
949 while start < end:
950 yield start, min(windowsize, end-start)
950 yield start, min(windowsize, end-start)
951 start += windowsize
951 start += windowsize
952 if windowsize < sizelimit:
952 if windowsize < sizelimit:
953 windowsize *= 2
953 windowsize *= 2
954 else:
954 else:
955 while start > end:
955 while start > end:
956 yield start, min(windowsize, start-end-1)
956 yield start, min(windowsize, start-end-1)
957 start -= windowsize
957 start -= windowsize
958 if windowsize < sizelimit:
958 if windowsize < sizelimit:
959 windowsize *= 2
959 windowsize *= 2
960
960
961 files, matchfn, anypats = matchpats(repo, pats, opts)
961 files, matchfn, anypats = matchpats(repo, pats, opts)
962 follow = opts.get('follow') or opts.get('follow_first')
962 follow = opts.get('follow') or opts.get('follow_first')
963
963
964 if repo.changelog.count() == 0:
964 if repo.changelog.count() == 0:
965 return [], matchfn
965 return [], matchfn
966
966
967 if follow:
967 if follow:
968 defrange = '%s:0' % repo.changectx().rev()
968 defrange = '%s:0' % repo.changectx().rev()
969 else:
969 else:
970 defrange = 'tip:0'
970 defrange = '-1:0'
971 revs = revrange(repo, opts['rev'] or [defrange])
971 revs = revrange(repo, opts['rev'] or [defrange])
972 wanted = {}
972 wanted = {}
973 slowpath = anypats or opts.get('removed')
973 slowpath = anypats or opts.get('removed')
974 fncache = {}
974 fncache = {}
975
975
976 if not slowpath and not files:
976 if not slowpath and not files:
977 # No files, no patterns. Display all revs.
977 # No files, no patterns. Display all revs.
978 wanted = dict.fromkeys(revs)
978 wanted = dict.fromkeys(revs)
979 copies = []
979 copies = []
980 if not slowpath:
980 if not slowpath:
981 # Only files, no patterns. Check the history of each file.
981 # Only files, no patterns. Check the history of each file.
982 def filerevgen(filelog, node):
982 def filerevgen(filelog, node):
983 cl_count = repo.changelog.count()
983 cl_count = repo.changelog.count()
984 if node is None:
984 if node is None:
985 last = filelog.count() - 1
985 last = filelog.count() - 1
986 else:
986 else:
987 last = filelog.rev(node)
987 last = filelog.rev(node)
988 for i, window in increasing_windows(last, nullrev):
988 for i, window in increasing_windows(last, nullrev):
989 revs = []
989 revs = []
990 for j in xrange(i - window, i + 1):
990 for j in xrange(i - window, i + 1):
991 n = filelog.node(j)
991 n = filelog.node(j)
992 revs.append((filelog.linkrev(n),
992 revs.append((filelog.linkrev(n),
993 follow and filelog.renamed(n)))
993 follow and filelog.renamed(n)))
994 revs.reverse()
994 revs.reverse()
995 for rev in revs:
995 for rev in revs:
996 # only yield rev for which we have the changelog, it can
996 # only yield rev for which we have the changelog, it can
997 # happen while doing "hg log" during a pull or commit
997 # happen while doing "hg log" during a pull or commit
998 if rev[0] < cl_count:
998 if rev[0] < cl_count:
999 yield rev
999 yield rev
1000 def iterfiles():
1000 def iterfiles():
1001 for filename in files:
1001 for filename in files:
1002 yield filename, None
1002 yield filename, None
1003 for filename_node in copies:
1003 for filename_node in copies:
1004 yield filename_node
1004 yield filename_node
1005 minrev, maxrev = min(revs), max(revs)
1005 minrev, maxrev = min(revs), max(revs)
1006 for file_, node in iterfiles():
1006 for file_, node in iterfiles():
1007 filelog = repo.file(file_)
1007 filelog = repo.file(file_)
1008 # A zero count may be a directory or deleted file, so
1008 # A zero count may be a directory or deleted file, so
1009 # try to find matching entries on the slow path.
1009 # try to find matching entries on the slow path.
1010 if filelog.count() == 0:
1010 if filelog.count() == 0:
1011 slowpath = True
1011 slowpath = True
1012 break
1012 break
1013 for rev, copied in filerevgen(filelog, node):
1013 for rev, copied in filerevgen(filelog, node):
1014 if rev <= maxrev:
1014 if rev <= maxrev:
1015 if rev < minrev:
1015 if rev < minrev:
1016 break
1016 break
1017 fncache.setdefault(rev, [])
1017 fncache.setdefault(rev, [])
1018 fncache[rev].append(file_)
1018 fncache[rev].append(file_)
1019 wanted[rev] = 1
1019 wanted[rev] = 1
1020 if follow and copied:
1020 if follow and copied:
1021 copies.append(copied)
1021 copies.append(copied)
1022 if slowpath:
1022 if slowpath:
1023 if follow:
1023 if follow:
1024 raise util.Abort(_('can only follow copies/renames for explicit '
1024 raise util.Abort(_('can only follow copies/renames for explicit '
1025 'file names'))
1025 'file names'))
1026
1026
1027 # The slow path checks files modified in every changeset.
1027 # The slow path checks files modified in every changeset.
1028 def changerevgen():
1028 def changerevgen():
1029 for i, window in increasing_windows(repo.changelog.count()-1,
1029 for i, window in increasing_windows(repo.changelog.count()-1,
1030 nullrev):
1030 nullrev):
1031 for j in xrange(i - window, i + 1):
1031 for j in xrange(i - window, i + 1):
1032 yield j, change(j)[3]
1032 yield j, change(j)[3]
1033
1033
1034 for rev, changefiles in changerevgen():
1034 for rev, changefiles in changerevgen():
1035 matches = filter(matchfn, changefiles)
1035 matches = filter(matchfn, changefiles)
1036 if matches:
1036 if matches:
1037 fncache[rev] = matches
1037 fncache[rev] = matches
1038 wanted[rev] = 1
1038 wanted[rev] = 1
1039
1039
1040 class followfilter:
1040 class followfilter:
1041 def __init__(self, onlyfirst=False):
1041 def __init__(self, onlyfirst=False):
1042 self.startrev = nullrev
1042 self.startrev = nullrev
1043 self.roots = []
1043 self.roots = []
1044 self.onlyfirst = onlyfirst
1044 self.onlyfirst = onlyfirst
1045
1045
1046 def match(self, rev):
1046 def match(self, rev):
1047 def realparents(rev):
1047 def realparents(rev):
1048 if self.onlyfirst:
1048 if self.onlyfirst:
1049 return repo.changelog.parentrevs(rev)[0:1]
1049 return repo.changelog.parentrevs(rev)[0:1]
1050 else:
1050 else:
1051 return filter(lambda x: x != nullrev,
1051 return filter(lambda x: x != nullrev,
1052 repo.changelog.parentrevs(rev))
1052 repo.changelog.parentrevs(rev))
1053
1053
1054 if self.startrev == nullrev:
1054 if self.startrev == nullrev:
1055 self.startrev = rev
1055 self.startrev = rev
1056 return True
1056 return True
1057
1057
1058 if rev > self.startrev:
1058 if rev > self.startrev:
1059 # forward: all descendants
1059 # forward: all descendants
1060 if not self.roots:
1060 if not self.roots:
1061 self.roots.append(self.startrev)
1061 self.roots.append(self.startrev)
1062 for parent in realparents(rev):
1062 for parent in realparents(rev):
1063 if parent in self.roots:
1063 if parent in self.roots:
1064 self.roots.append(rev)
1064 self.roots.append(rev)
1065 return True
1065 return True
1066 else:
1066 else:
1067 # backwards: all parents
1067 # backwards: all parents
1068 if not self.roots:
1068 if not self.roots:
1069 self.roots.extend(realparents(self.startrev))
1069 self.roots.extend(realparents(self.startrev))
1070 if rev in self.roots:
1070 if rev in self.roots:
1071 self.roots.remove(rev)
1071 self.roots.remove(rev)
1072 self.roots.extend(realparents(rev))
1072 self.roots.extend(realparents(rev))
1073 return True
1073 return True
1074
1074
1075 return False
1075 return False
1076
1076
1077 # it might be worthwhile to do this in the iterator if the rev range
1077 # it might be worthwhile to do this in the iterator if the rev range
1078 # is descending and the prune args are all within that range
1078 # is descending and the prune args are all within that range
1079 for rev in opts.get('prune', ()):
1079 for rev in opts.get('prune', ()):
1080 rev = repo.changelog.rev(repo.lookup(rev))
1080 rev = repo.changelog.rev(repo.lookup(rev))
1081 ff = followfilter()
1081 ff = followfilter()
1082 stop = min(revs[0], revs[-1])
1082 stop = min(revs[0], revs[-1])
1083 for x in xrange(rev, stop-1, -1):
1083 for x in xrange(rev, stop-1, -1):
1084 if ff.match(x) and x in wanted:
1084 if ff.match(x) and x in wanted:
1085 del wanted[x]
1085 del wanted[x]
1086
1086
1087 def iterate():
1087 def iterate():
1088 if follow and not files:
1088 if follow and not files:
1089 ff = followfilter(onlyfirst=opts.get('follow_first'))
1089 ff = followfilter(onlyfirst=opts.get('follow_first'))
1090 def want(rev):
1090 def want(rev):
1091 if ff.match(rev) and rev in wanted:
1091 if ff.match(rev) and rev in wanted:
1092 return True
1092 return True
1093 return False
1093 return False
1094 else:
1094 else:
1095 def want(rev):
1095 def want(rev):
1096 return rev in wanted
1096 return rev in wanted
1097
1097
1098 for i, window in increasing_windows(0, len(revs)):
1098 for i, window in increasing_windows(0, len(revs)):
1099 yield 'window', revs[0] < revs[-1], revs[-1]
1099 yield 'window', revs[0] < revs[-1], revs[-1]
1100 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1100 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1101 srevs = list(nrevs)
1101 srevs = list(nrevs)
1102 srevs.sort()
1102 srevs.sort()
1103 for rev in srevs:
1103 for rev in srevs:
1104 fns = fncache.get(rev)
1104 fns = fncache.get(rev)
1105 if not fns:
1105 if not fns:
1106 def fns_generator():
1106 def fns_generator():
1107 for f in change(rev)[3]:
1107 for f in change(rev)[3]:
1108 if matchfn(f):
1108 if matchfn(f):
1109 yield f
1109 yield f
1110 fns = fns_generator()
1110 fns = fns_generator()
1111 yield 'add', rev, fns
1111 yield 'add', rev, fns
1112 for rev in nrevs:
1112 for rev in nrevs:
1113 yield 'iter', rev, None
1113 yield 'iter', rev, None
1114 return iterate(), matchfn
1114 return iterate(), matchfn
1115
1115
1116 def commit(ui, repo, commitfunc, pats, opts):
1116 def commit(ui, repo, commitfunc, pats, opts):
1117 '''commit the specified files or all outstanding changes'''
1117 '''commit the specified files or all outstanding changes'''
1118 date = opts.get('date')
1118 date = opts.get('date')
1119 if date:
1119 if date:
1120 opts['date'] = util.parsedate(date)
1120 opts['date'] = util.parsedate(date)
1121 message = logmessage(opts)
1121 message = logmessage(opts)
1122
1122
1123 # extract addremove carefully -- this function can be called from a command
1123 # extract addremove carefully -- this function can be called from a command
1124 # that doesn't support addremove
1124 # that doesn't support addremove
1125 if opts.get('addremove'):
1125 if opts.get('addremove'):
1126 addremove(repo, pats, opts)
1126 addremove(repo, pats, opts)
1127
1127
1128 fns, match, anypats = matchpats(repo, pats, opts)
1128 fns, match, anypats = matchpats(repo, pats, opts)
1129 if pats:
1129 if pats:
1130 status = repo.status(files=fns, match=match)
1130 status = repo.status(files=fns, match=match)
1131 modified, added, removed, deleted, unknown = status[:5]
1131 modified, added, removed, deleted, unknown = status[:5]
1132 files = modified + added + removed
1132 files = modified + added + removed
1133 slist = None
1133 slist = None
1134 for f in fns:
1134 for f in fns:
1135 if f == '.':
1135 if f == '.':
1136 continue
1136 continue
1137 if f not in files:
1137 if f not in files:
1138 rf = repo.wjoin(f)
1138 rf = repo.wjoin(f)
1139 rel = repo.pathto(f)
1139 rel = repo.pathto(f)
1140 try:
1140 try:
1141 mode = os.lstat(rf)[stat.ST_MODE]
1141 mode = os.lstat(rf)[stat.ST_MODE]
1142 except OSError:
1142 except OSError:
1143 raise util.Abort(_("file %s not found!") % rel)
1143 raise util.Abort(_("file %s not found!") % rel)
1144 if stat.S_ISDIR(mode):
1144 if stat.S_ISDIR(mode):
1145 name = f + '/'
1145 name = f + '/'
1146 if slist is None:
1146 if slist is None:
1147 slist = list(files)
1147 slist = list(files)
1148 slist.sort()
1148 slist.sort()
1149 i = bisect.bisect(slist, name)
1149 i = bisect.bisect(slist, name)
1150 if i >= len(slist) or not slist[i].startswith(name):
1150 if i >= len(slist) or not slist[i].startswith(name):
1151 raise util.Abort(_("no match under directory %s!")
1151 raise util.Abort(_("no match under directory %s!")
1152 % rel)
1152 % rel)
1153 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1153 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1154 raise util.Abort(_("can't commit %s: "
1154 raise util.Abort(_("can't commit %s: "
1155 "unsupported file type!") % rel)
1155 "unsupported file type!") % rel)
1156 elif f not in repo.dirstate:
1156 elif f not in repo.dirstate:
1157 raise util.Abort(_("file %s not tracked!") % rel)
1157 raise util.Abort(_("file %s not tracked!") % rel)
1158 else:
1158 else:
1159 files = []
1159 files = []
1160 try:
1160 try:
1161 return commitfunc(ui, repo, files, message, match, opts)
1161 return commitfunc(ui, repo, files, message, match, opts)
1162 except ValueError, inst:
1162 except ValueError, inst:
1163 raise util.Abort(str(inst))
1163 raise util.Abort(str(inst))
@@ -1,3136 +1,3136 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, re, sys, urllib
10 import os, re, sys, urllib
11 import hg, util, revlog, bundlerepo, extensions
11 import hg, util, revlog, bundlerepo, extensions
12 import difflib, patch, time, help, mdiff, tempfile
12 import difflib, patch, time, help, mdiff, tempfile
13 import errno, version, socket
13 import errno, version, socket
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
14 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15
15
16 # Commands start here, listed alphabetically
16 # Commands start here, listed alphabetically
17
17
18 def add(ui, repo, *pats, **opts):
18 def add(ui, repo, *pats, **opts):
19 """add the specified files on the next commit
19 """add the specified files on the next commit
20
20
21 Schedule files to be version controlled and added to the repository.
21 Schedule files to be version controlled and added to the repository.
22
22
23 The files will be added to the repository at the next commit. To
23 The files will be added to the repository at the next commit. To
24 undo an add before that, see hg revert.
24 undo an add before that, see hg revert.
25
25
26 If no names are given, add all files in the repository.
26 If no names are given, add all files in the repository.
27 """
27 """
28
28
29 rejected = None
29 rejected = None
30 exacts = {}
30 exacts = {}
31 names = []
31 names = []
32 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
32 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
33 badmatch=util.always):
33 badmatch=util.always):
34 if exact:
34 if exact:
35 if ui.verbose:
35 if ui.verbose:
36 ui.status(_('adding %s\n') % rel)
36 ui.status(_('adding %s\n') % rel)
37 names.append(abs)
37 names.append(abs)
38 exacts[abs] = 1
38 exacts[abs] = 1
39 elif abs not in repo.dirstate:
39 elif abs not in repo.dirstate:
40 ui.status(_('adding %s\n') % rel)
40 ui.status(_('adding %s\n') % rel)
41 names.append(abs)
41 names.append(abs)
42 if not opts.get('dry_run'):
42 if not opts.get('dry_run'):
43 rejected = repo.add(names)
43 rejected = repo.add(names)
44 rejected = [p for p in rejected if p in exacts]
44 rejected = [p for p in rejected if p in exacts]
45 return rejected and 1 or 0
45 return rejected and 1 or 0
46
46
47 def addremove(ui, repo, *pats, **opts):
47 def addremove(ui, repo, *pats, **opts):
48 """add all new files, delete all missing files
48 """add all new files, delete all missing files
49
49
50 Add all new files and remove all missing files from the repository.
50 Add all new files and remove all missing files from the repository.
51
51
52 New files are ignored if they match any of the patterns in .hgignore. As
52 New files are ignored if they match any of the patterns in .hgignore. As
53 with add, these changes take effect at the next commit.
53 with add, these changes take effect at the next commit.
54
54
55 Use the -s option to detect renamed files. With a parameter > 0,
55 Use the -s option to detect renamed files. With a parameter > 0,
56 this compares every removed file with every added file and records
56 this compares every removed file with every added file and records
57 those similar enough as renames. This option takes a percentage
57 those similar enough as renames. This option takes a percentage
58 between 0 (disabled) and 100 (files must be identical) as its
58 between 0 (disabled) and 100 (files must be identical) as its
59 parameter. Detecting renamed files this way can be expensive.
59 parameter. Detecting renamed files this way can be expensive.
60 """
60 """
61 try:
61 try:
62 sim = float(opts.get('similarity') or 0)
62 sim = float(opts.get('similarity') or 0)
63 except ValueError:
63 except ValueError:
64 raise util.Abort(_('similarity must be a number'))
64 raise util.Abort(_('similarity must be a number'))
65 if sim < 0 or sim > 100:
65 if sim < 0 or sim > 100:
66 raise util.Abort(_('similarity must be between 0 and 100'))
66 raise util.Abort(_('similarity must be between 0 and 100'))
67 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
67 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
68
68
69 def annotate(ui, repo, *pats, **opts):
69 def annotate(ui, repo, *pats, **opts):
70 """show changeset information per file line
70 """show changeset information per file line
71
71
72 List changes in files, showing the revision id responsible for each line
72 List changes in files, showing the revision id responsible for each line
73
73
74 This command is useful to discover who did a change or when a change took
74 This command is useful to discover who did a change or when a change took
75 place.
75 place.
76
76
77 Without the -a option, annotate will avoid processing files it
77 Without the -a option, annotate will avoid processing files it
78 detects as binary. With -a, annotate will generate an annotation
78 detects as binary. With -a, annotate will generate an annotation
79 anyway, probably with undesirable results.
79 anyway, probably with undesirable results.
80 """
80 """
81 datefunc = ui.quiet and util.shortdate or util.datestr
81 datefunc = ui.quiet and util.shortdate or util.datestr
82 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
82 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
83
83
84 if not pats:
84 if not pats:
85 raise util.Abort(_('at least one file name or pattern required'))
85 raise util.Abort(_('at least one file name or pattern required'))
86
86
87 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
87 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
88 ('number', lambda x: str(x[0].rev())),
88 ('number', lambda x: str(x[0].rev())),
89 ('changeset', lambda x: short(x[0].node())),
89 ('changeset', lambda x: short(x[0].node())),
90 ('date', getdate),
90 ('date', getdate),
91 ('follow', lambda x: x[0].path()),
91 ('follow', lambda x: x[0].path()),
92 ]
92 ]
93
93
94 if (not opts['user'] and not opts['changeset'] and not opts['date']
94 if (not opts['user'] and not opts['changeset'] and not opts['date']
95 and not opts['follow']):
95 and not opts['follow']):
96 opts['number'] = 1
96 opts['number'] = 1
97
97
98 linenumber = opts.get('line_number') is not None
98 linenumber = opts.get('line_number') is not None
99 if (linenumber and (not opts['changeset']) and (not opts['number'])):
99 if (linenumber and (not opts['changeset']) and (not opts['number'])):
100 raise util.Abort(_('at least one of -n/-c is required for -l'))
100 raise util.Abort(_('at least one of -n/-c is required for -l'))
101
101
102 funcmap = [func for op, func in opmap if opts.get(op)]
102 funcmap = [func for op, func in opmap if opts.get(op)]
103 if linenumber:
103 if linenumber:
104 lastfunc = funcmap[-1]
104 lastfunc = funcmap[-1]
105 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
105 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
106
106
107 ctx = repo.changectx(opts['rev'])
107 ctx = repo.changectx(opts['rev'])
108
108
109 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
109 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
110 node=ctx.node()):
110 node=ctx.node()):
111 fctx = ctx.filectx(abs)
111 fctx = ctx.filectx(abs)
112 if not opts['text'] and util.binary(fctx.data()):
112 if not opts['text'] and util.binary(fctx.data()):
113 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
113 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
114 continue
114 continue
115
115
116 lines = fctx.annotate(follow=opts.get('follow'),
116 lines = fctx.annotate(follow=opts.get('follow'),
117 linenumber=linenumber)
117 linenumber=linenumber)
118 pieces = []
118 pieces = []
119
119
120 for f in funcmap:
120 for f in funcmap:
121 l = [f(n) for n, dummy in lines]
121 l = [f(n) for n, dummy in lines]
122 if l:
122 if l:
123 m = max(map(len, l))
123 m = max(map(len, l))
124 pieces.append(["%*s" % (m, x) for x in l])
124 pieces.append(["%*s" % (m, x) for x in l])
125
125
126 if pieces:
126 if pieces:
127 for p, l in zip(zip(*pieces), lines):
127 for p, l in zip(zip(*pieces), lines):
128 ui.write("%s: %s" % (" ".join(p), l[1]))
128 ui.write("%s: %s" % (" ".join(p), l[1]))
129
129
130 def archive(ui, repo, dest, **opts):
130 def archive(ui, repo, dest, **opts):
131 '''create unversioned archive of a repository revision
131 '''create unversioned archive of a repository revision
132
132
133 By default, the revision used is the parent of the working
133 By default, the revision used is the parent of the working
134 directory; use "-r" to specify a different revision.
134 directory; use "-r" to specify a different revision.
135
135
136 To specify the type of archive to create, use "-t". Valid
136 To specify the type of archive to create, use "-t". Valid
137 types are:
137 types are:
138
138
139 "files" (default): a directory full of files
139 "files" (default): a directory full of files
140 "tar": tar archive, uncompressed
140 "tar": tar archive, uncompressed
141 "tbz2": tar archive, compressed using bzip2
141 "tbz2": tar archive, compressed using bzip2
142 "tgz": tar archive, compressed using gzip
142 "tgz": tar archive, compressed using gzip
143 "uzip": zip archive, uncompressed
143 "uzip": zip archive, uncompressed
144 "zip": zip archive, compressed using deflate
144 "zip": zip archive, compressed using deflate
145
145
146 The exact name of the destination archive or directory is given
146 The exact name of the destination archive or directory is given
147 using a format string; see "hg help export" for details.
147 using a format string; see "hg help export" for details.
148
148
149 Each member added to an archive file has a directory prefix
149 Each member added to an archive file has a directory prefix
150 prepended. Use "-p" to specify a format string for the prefix.
150 prepended. Use "-p" to specify a format string for the prefix.
151 The default is the basename of the archive, with suffixes removed.
151 The default is the basename of the archive, with suffixes removed.
152 '''
152 '''
153
153
154 ctx = repo.changectx(opts['rev'])
154 ctx = repo.changectx(opts['rev'])
155 if not ctx:
155 if not ctx:
156 raise util.Abort(_('repository has no revisions'))
156 raise util.Abort(_('repository has no revisions'))
157 node = ctx.node()
157 node = ctx.node()
158 dest = cmdutil.make_filename(repo, dest, node)
158 dest = cmdutil.make_filename(repo, dest, node)
159 if os.path.realpath(dest) == repo.root:
159 if os.path.realpath(dest) == repo.root:
160 raise util.Abort(_('repository root cannot be destination'))
160 raise util.Abort(_('repository root cannot be destination'))
161 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
161 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
162 kind = opts.get('type') or 'files'
162 kind = opts.get('type') or 'files'
163 prefix = opts['prefix']
163 prefix = opts['prefix']
164 if dest == '-':
164 if dest == '-':
165 if kind == 'files':
165 if kind == 'files':
166 raise util.Abort(_('cannot archive plain files to stdout'))
166 raise util.Abort(_('cannot archive plain files to stdout'))
167 dest = sys.stdout
167 dest = sys.stdout
168 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
168 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
169 prefix = cmdutil.make_filename(repo, prefix, node)
169 prefix = cmdutil.make_filename(repo, prefix, node)
170 archival.archive(repo, dest, node, kind, not opts['no_decode'],
170 archival.archive(repo, dest, node, kind, not opts['no_decode'],
171 matchfn, prefix)
171 matchfn, prefix)
172
172
173 def backout(ui, repo, node=None, rev=None, **opts):
173 def backout(ui, repo, node=None, rev=None, **opts):
174 '''reverse effect of earlier changeset
174 '''reverse effect of earlier changeset
175
175
176 Commit the backed out changes as a new changeset. The new
176 Commit the backed out changes as a new changeset. The new
177 changeset is a child of the backed out changeset.
177 changeset is a child of the backed out changeset.
178
178
179 If you back out a changeset other than the tip, a new head is
179 If you back out a changeset other than the tip, a new head is
180 created. This head is the parent of the working directory. If
180 created. This head is the parent of the working directory. If
181 you back out an old changeset, your working directory will appear
181 you back out an old changeset, your working directory will appear
182 old after the backout. You should merge the backout changeset
182 old after the backout. You should merge the backout changeset
183 with another head.
183 with another head.
184
184
185 The --merge option remembers the parent of the working directory
185 The --merge option remembers the parent of the working directory
186 before starting the backout, then merges the new head with that
186 before starting the backout, then merges the new head with that
187 changeset afterwards. This saves you from doing the merge by
187 changeset afterwards. This saves you from doing the merge by
188 hand. The result of this merge is not committed, as for a normal
188 hand. The result of this merge is not committed, as for a normal
189 merge.'''
189 merge.'''
190 if rev and node:
190 if rev and node:
191 raise util.Abort(_("please specify just one revision"))
191 raise util.Abort(_("please specify just one revision"))
192
192
193 if not rev:
193 if not rev:
194 rev = node
194 rev = node
195
195
196 if not rev:
196 if not rev:
197 raise util.Abort(_("please specify a revision to backout"))
197 raise util.Abort(_("please specify a revision to backout"))
198
198
199 date = opts.get('date')
199 date = opts.get('date')
200 if date:
200 if date:
201 opts['date'] = util.parsedate(date)
201 opts['date'] = util.parsedate(date)
202
202
203 cmdutil.bail_if_changed(repo)
203 cmdutil.bail_if_changed(repo)
204 node = repo.lookup(rev)
204 node = repo.lookup(rev)
205
205
206 op1, op2 = repo.dirstate.parents()
206 op1, op2 = repo.dirstate.parents()
207 a = repo.changelog.ancestor(op1, node)
207 a = repo.changelog.ancestor(op1, node)
208 if a != node:
208 if a != node:
209 raise util.Abort(_('cannot back out change on a different branch'))
209 raise util.Abort(_('cannot back out change on a different branch'))
210
210
211 p1, p2 = repo.changelog.parents(node)
211 p1, p2 = repo.changelog.parents(node)
212 if p1 == nullid:
212 if p1 == nullid:
213 raise util.Abort(_('cannot back out a change with no parents'))
213 raise util.Abort(_('cannot back out a change with no parents'))
214 if p2 != nullid:
214 if p2 != nullid:
215 if not opts['parent']:
215 if not opts['parent']:
216 raise util.Abort(_('cannot back out a merge changeset without '
216 raise util.Abort(_('cannot back out a merge changeset without '
217 '--parent'))
217 '--parent'))
218 p = repo.lookup(opts['parent'])
218 p = repo.lookup(opts['parent'])
219 if p not in (p1, p2):
219 if p not in (p1, p2):
220 raise util.Abort(_('%s is not a parent of %s') %
220 raise util.Abort(_('%s is not a parent of %s') %
221 (short(p), short(node)))
221 (short(p), short(node)))
222 parent = p
222 parent = p
223 else:
223 else:
224 if opts['parent']:
224 if opts['parent']:
225 raise util.Abort(_('cannot use --parent on non-merge changeset'))
225 raise util.Abort(_('cannot use --parent on non-merge changeset'))
226 parent = p1
226 parent = p1
227
227
228 hg.clean(repo, node, show_stats=False)
228 hg.clean(repo, node, show_stats=False)
229 revert_opts = opts.copy()
229 revert_opts = opts.copy()
230 revert_opts['date'] = None
230 revert_opts['date'] = None
231 revert_opts['all'] = True
231 revert_opts['all'] = True
232 revert_opts['rev'] = hex(parent)
232 revert_opts['rev'] = hex(parent)
233 revert_opts['no_backup'] = None
233 revert_opts['no_backup'] = None
234 revert(ui, repo, **revert_opts)
234 revert(ui, repo, **revert_opts)
235 commit_opts = opts.copy()
235 commit_opts = opts.copy()
236 commit_opts['addremove'] = False
236 commit_opts['addremove'] = False
237 if not commit_opts['message'] and not commit_opts['logfile']:
237 if not commit_opts['message'] and not commit_opts['logfile']:
238 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
238 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
239 commit_opts['force_editor'] = True
239 commit_opts['force_editor'] = True
240 commit(ui, repo, **commit_opts)
240 commit(ui, repo, **commit_opts)
241 def nice(node):
241 def nice(node):
242 return '%d:%s' % (repo.changelog.rev(node), short(node))
242 return '%d:%s' % (repo.changelog.rev(node), short(node))
243 ui.status(_('changeset %s backs out changeset %s\n') %
243 ui.status(_('changeset %s backs out changeset %s\n') %
244 (nice(repo.changelog.tip()), nice(node)))
244 (nice(repo.changelog.tip()), nice(node)))
245 if op1 != node:
245 if op1 != node:
246 if opts['merge']:
246 if opts['merge']:
247 ui.status(_('merging with changeset %s\n') % nice(op1))
247 ui.status(_('merging with changeset %s\n') % nice(op1))
248 hg.merge(repo, hex(op1))
248 hg.merge(repo, hex(op1))
249 else:
249 else:
250 ui.status(_('the backout changeset is a new head - '
250 ui.status(_('the backout changeset is a new head - '
251 'do not forget to merge\n'))
251 'do not forget to merge\n'))
252 ui.status(_('(use "backout --merge" '
252 ui.status(_('(use "backout --merge" '
253 'if you want to auto-merge)\n'))
253 'if you want to auto-merge)\n'))
254
254
255 def bisect(ui, repo, rev=None, extra=None,
255 def bisect(ui, repo, rev=None, extra=None,
256 reset=None, good=None, bad=None, skip=None, noupdate=None):
256 reset=None, good=None, bad=None, skip=None, noupdate=None):
257 """subdivision search of changesets
257 """subdivision search of changesets
258
258
259 This command helps to find changesets which introduce problems.
259 This command helps to find changesets which introduce problems.
260 To use, mark the earliest changeset you know exhibits the problem
260 To use, mark the earliest changeset you know exhibits the problem
261 as bad, then mark the latest changeset which is free from the
261 as bad, then mark the latest changeset which is free from the
262 problem as good. Bisect will update your working directory to a
262 problem as good. Bisect will update your working directory to a
263 revision for testing. Once you have performed tests, mark the
263 revision for testing. Once you have performed tests, mark the
264 working directory as bad or good and bisect will either update to
264 working directory as bad or good and bisect will either update to
265 another candidate changeset or announce that it has found the bad
265 another candidate changeset or announce that it has found the bad
266 revision.
266 revision.
267 """
267 """
268 # backward compatibility
268 # backward compatibility
269 if rev in "good bad reset init".split():
269 if rev in "good bad reset init".split():
270 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
270 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
271 cmd, rev, extra = rev, extra, None
271 cmd, rev, extra = rev, extra, None
272 if cmd == "good":
272 if cmd == "good":
273 good = True
273 good = True
274 elif cmd == "bad":
274 elif cmd == "bad":
275 bad = True
275 bad = True
276 else:
276 else:
277 reset = True
277 reset = True
278 elif extra or good + bad + skip + reset > 1:
278 elif extra or good + bad + skip + reset > 1:
279 raise util.Abort("Incompatible arguments")
279 raise util.Abort("Incompatible arguments")
280
280
281 if reset:
281 if reset:
282 p = repo.join("bisect.state")
282 p = repo.join("bisect.state")
283 if os.path.exists(p):
283 if os.path.exists(p):
284 os.unlink(p)
284 os.unlink(p)
285 return
285 return
286
286
287 # load state
287 # load state
288 state = {'good': [], 'bad': [], 'skip': []}
288 state = {'good': [], 'bad': [], 'skip': []}
289 if os.path.exists(repo.join("bisect.state")):
289 if os.path.exists(repo.join("bisect.state")):
290 for l in repo.opener("bisect.state"):
290 for l in repo.opener("bisect.state"):
291 kind, node = l[:-1].split()
291 kind, node = l[:-1].split()
292 node = repo.lookup(node)
292 node = repo.lookup(node)
293 if kind not in state:
293 if kind not in state:
294 raise util.Abort(_("unknown bisect kind %s") % kind)
294 raise util.Abort(_("unknown bisect kind %s") % kind)
295 state[kind].append(node)
295 state[kind].append(node)
296
296
297 # update state
297 # update state
298 node = repo.lookup(rev or '.')
298 node = repo.lookup(rev or '.')
299 if good:
299 if good:
300 state['good'].append(node)
300 state['good'].append(node)
301 elif bad:
301 elif bad:
302 state['bad'].append(node)
302 state['bad'].append(node)
303 elif skip:
303 elif skip:
304 state['skip'].append(node)
304 state['skip'].append(node)
305
305
306 # save state
306 # save state
307 f = repo.opener("bisect.state", "w", atomictemp=True)
307 f = repo.opener("bisect.state", "w", atomictemp=True)
308 wlock = repo.wlock()
308 wlock = repo.wlock()
309 try:
309 try:
310 for kind in state:
310 for kind in state:
311 for node in state[kind]:
311 for node in state[kind]:
312 f.write("%s %s\n" % (kind, hg.hex(node)))
312 f.write("%s %s\n" % (kind, hg.hex(node)))
313 f.rename()
313 f.rename()
314 finally:
314 finally:
315 del wlock
315 del wlock
316
316
317 if not state['good'] or not state['bad']:
317 if not state['good'] or not state['bad']:
318 return
318 return
319
319
320 # actually bisect
320 # actually bisect
321 node, changesets, good = hbisect.bisect(repo.changelog, state)
321 node, changesets, good = hbisect.bisect(repo.changelog, state)
322 if changesets == 0:
322 if changesets == 0:
323 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
323 ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
324 displayer = cmdutil.show_changeset(ui, repo, {})
324 displayer = cmdutil.show_changeset(ui, repo, {})
325 displayer.show(changenode=node)
325 displayer.show(changenode=node)
326 elif node is not None:
326 elif node is not None:
327 # compute the approximate number of remaining tests
327 # compute the approximate number of remaining tests
328 tests, size = 0, 2
328 tests, size = 0, 2
329 while size <= changesets:
329 while size <= changesets:
330 tests, size = tests + 1, size * 2
330 tests, size = tests + 1, size * 2
331 rev = repo.changelog.rev(node)
331 rev = repo.changelog.rev(node)
332 ui.write(_("Testing changeset %s:%s "
332 ui.write(_("Testing changeset %s:%s "
333 "(%s changesets remaining, ~%s tests)\n")
333 "(%s changesets remaining, ~%s tests)\n")
334 % (rev, hg.short(node), changesets, tests))
334 % (rev, hg.short(node), changesets, tests))
335 if not noupdate:
335 if not noupdate:
336 cmdutil.bail_if_changed(repo)
336 cmdutil.bail_if_changed(repo)
337 return hg.clean(repo, node)
337 return hg.clean(repo, node)
338
338
339 def branch(ui, repo, label=None, **opts):
339 def branch(ui, repo, label=None, **opts):
340 """set or show the current branch name
340 """set or show the current branch name
341
341
342 With no argument, show the current branch name. With one argument,
342 With no argument, show the current branch name. With one argument,
343 set the working directory branch name (the branch does not exist in
343 set the working directory branch name (the branch does not exist in
344 the repository until the next commit).
344 the repository until the next commit).
345
345
346 Unless --force is specified, branch will not let you set a
346 Unless --force is specified, branch will not let you set a
347 branch name that shadows an existing branch.
347 branch name that shadows an existing branch.
348
348
349 Use the command 'hg update' to switch to an existing branch.
349 Use the command 'hg update' to switch to an existing branch.
350 """
350 """
351
351
352 if label:
352 if label:
353 if not opts.get('force') and label in repo.branchtags():
353 if not opts.get('force') and label in repo.branchtags():
354 if label not in [p.branch() for p in repo.workingctx().parents()]:
354 if label not in [p.branch() for p in repo.workingctx().parents()]:
355 raise util.Abort(_('a branch of the same name already exists'
355 raise util.Abort(_('a branch of the same name already exists'
356 ' (use --force to override)'))
356 ' (use --force to override)'))
357 repo.dirstate.setbranch(util.fromlocal(label))
357 repo.dirstate.setbranch(util.fromlocal(label))
358 ui.status(_('marked working directory as branch %s\n') % label)
358 ui.status(_('marked working directory as branch %s\n') % label)
359 else:
359 else:
360 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
360 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
361
361
362 def branches(ui, repo, active=False):
362 def branches(ui, repo, active=False):
363 """list repository named branches
363 """list repository named branches
364
364
365 List the repository's named branches, indicating which ones are
365 List the repository's named branches, indicating which ones are
366 inactive. If active is specified, only show active branches.
366 inactive. If active is specified, only show active branches.
367
367
368 A branch is considered active if it contains unmerged heads.
368 A branch is considered active if it contains unmerged heads.
369
369
370 Use the command 'hg update' to switch to an existing branch.
370 Use the command 'hg update' to switch to an existing branch.
371 """
371 """
372 b = repo.branchtags()
372 b = repo.branchtags()
373 heads = dict.fromkeys(repo.heads(), 1)
373 heads = dict.fromkeys(repo.heads(), 1)
374 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
374 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
375 l.sort()
375 l.sort()
376 l.reverse()
376 l.reverse()
377 for ishead, r, n, t in l:
377 for ishead, r, n, t in l:
378 if active and not ishead:
378 if active and not ishead:
379 # If we're only displaying active branches, abort the loop on
379 # If we're only displaying active branches, abort the loop on
380 # encountering the first inactive head
380 # encountering the first inactive head
381 break
381 break
382 else:
382 else:
383 hexfunc = ui.debugflag and hex or short
383 hexfunc = ui.debugflag and hex or short
384 if ui.quiet:
384 if ui.quiet:
385 ui.write("%s\n" % t)
385 ui.write("%s\n" % t)
386 else:
386 else:
387 spaces = " " * (30 - util.locallen(t))
387 spaces = " " * (30 - util.locallen(t))
388 # The code only gets here if inactive branches are being
388 # The code only gets here if inactive branches are being
389 # displayed or the branch is active.
389 # displayed or the branch is active.
390 isinactive = ((not ishead) and " (inactive)") or ''
390 isinactive = ((not ishead) and " (inactive)") or ''
391 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
391 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
392
392
393 def bundle(ui, repo, fname, dest=None, **opts):
393 def bundle(ui, repo, fname, dest=None, **opts):
394 """create a changegroup file
394 """create a changegroup file
395
395
396 Generate a compressed changegroup file collecting changesets not
396 Generate a compressed changegroup file collecting changesets not
397 found in the other repository.
397 found in the other repository.
398
398
399 If no destination repository is specified the destination is assumed
399 If no destination repository is specified the destination is assumed
400 to have all the nodes specified by one or more --base parameters.
400 to have all the nodes specified by one or more --base parameters.
401 To create a bundle containing all changesets, use --base null.
401 To create a bundle containing all changesets, use --base null.
402
402
403 The bundle file can then be transferred using conventional means and
403 The bundle file can then be transferred using conventional means and
404 applied to another repository with the unbundle or pull command.
404 applied to another repository with the unbundle or pull command.
405 This is useful when direct push and pull are not available or when
405 This is useful when direct push and pull are not available or when
406 exporting an entire repository is undesirable.
406 exporting an entire repository is undesirable.
407
407
408 Applying bundles preserves all changeset contents including
408 Applying bundles preserves all changeset contents including
409 permissions, copy/rename information, and revision history.
409 permissions, copy/rename information, and revision history.
410 """
410 """
411 revs = opts.get('rev') or None
411 revs = opts.get('rev') or None
412 if revs:
412 if revs:
413 revs = [repo.lookup(rev) for rev in revs]
413 revs = [repo.lookup(rev) for rev in revs]
414 base = opts.get('base')
414 base = opts.get('base')
415 if base:
415 if base:
416 if dest:
416 if dest:
417 raise util.Abort(_("--base is incompatible with specifiying "
417 raise util.Abort(_("--base is incompatible with specifiying "
418 "a destination"))
418 "a destination"))
419 base = [repo.lookup(rev) for rev in base]
419 base = [repo.lookup(rev) for rev in base]
420 # create the right base
420 # create the right base
421 # XXX: nodesbetween / changegroup* should be "fixed" instead
421 # XXX: nodesbetween / changegroup* should be "fixed" instead
422 o = []
422 o = []
423 has = {nullid: None}
423 has = {nullid: None}
424 for n in base:
424 for n in base:
425 has.update(repo.changelog.reachable(n))
425 has.update(repo.changelog.reachable(n))
426 if revs:
426 if revs:
427 visit = list(revs)
427 visit = list(revs)
428 else:
428 else:
429 visit = repo.changelog.heads()
429 visit = repo.changelog.heads()
430 seen = {}
430 seen = {}
431 while visit:
431 while visit:
432 n = visit.pop(0)
432 n = visit.pop(0)
433 parents = [p for p in repo.changelog.parents(n) if p not in has]
433 parents = [p for p in repo.changelog.parents(n) if p not in has]
434 if len(parents) == 0:
434 if len(parents) == 0:
435 o.insert(0, n)
435 o.insert(0, n)
436 else:
436 else:
437 for p in parents:
437 for p in parents:
438 if p not in seen:
438 if p not in seen:
439 seen[p] = 1
439 seen[p] = 1
440 visit.append(p)
440 visit.append(p)
441 else:
441 else:
442 cmdutil.setremoteconfig(ui, opts)
442 cmdutil.setremoteconfig(ui, opts)
443 dest, revs, checkout = hg.parseurl(
443 dest, revs, checkout = hg.parseurl(
444 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
444 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
445 other = hg.repository(ui, dest)
445 other = hg.repository(ui, dest)
446 o = repo.findoutgoing(other, force=opts['force'])
446 o = repo.findoutgoing(other, force=opts['force'])
447
447
448 if revs:
448 if revs:
449 cg = repo.changegroupsubset(o, revs, 'bundle')
449 cg = repo.changegroupsubset(o, revs, 'bundle')
450 else:
450 else:
451 cg = repo.changegroup(o, 'bundle')
451 cg = repo.changegroup(o, 'bundle')
452 changegroup.writebundle(cg, fname, "HG10BZ")
452 changegroup.writebundle(cg, fname, "HG10BZ")
453
453
454 def cat(ui, repo, file1, *pats, **opts):
454 def cat(ui, repo, file1, *pats, **opts):
455 """output the current or given revision of files
455 """output the current or given revision of files
456
456
457 Print the specified files as they were at the given revision.
457 Print the specified files as they were at the given revision.
458 If no revision is given, the parent of the working directory is used,
458 If no revision is given, the parent of the working directory is used,
459 or tip if no revision is checked out.
459 or tip if no revision is checked out.
460
460
461 Output may be to a file, in which case the name of the file is
461 Output may be to a file, in which case the name of the file is
462 given using a format string. The formatting rules are the same as
462 given using a format string. The formatting rules are the same as
463 for the export command, with the following additions:
463 for the export command, with the following additions:
464
464
465 %s basename of file being printed
465 %s basename of file being printed
466 %d dirname of file being printed, or '.' if in repo root
466 %d dirname of file being printed, or '.' if in repo root
467 %p root-relative path name of file being printed
467 %p root-relative path name of file being printed
468 """
468 """
469 ctx = repo.changectx(opts['rev'])
469 ctx = repo.changectx(opts['rev'])
470 err = 1
470 err = 1
471 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
471 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
472 ctx.node()):
472 ctx.node()):
473 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
473 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
474 data = ctx.filectx(abs).data()
474 data = ctx.filectx(abs).data()
475 if opts.get('decode'):
475 if opts.get('decode'):
476 data = repo.wwritedata(abs, data)
476 data = repo.wwritedata(abs, data)
477 fp.write(data)
477 fp.write(data)
478 err = 0
478 err = 0
479 return err
479 return err
480
480
481 def clone(ui, source, dest=None, **opts):
481 def clone(ui, source, dest=None, **opts):
482 """make a copy of an existing repository
482 """make a copy of an existing repository
483
483
484 Create a copy of an existing repository in a new directory.
484 Create a copy of an existing repository in a new directory.
485
485
486 If no destination directory name is specified, it defaults to the
486 If no destination directory name is specified, it defaults to the
487 basename of the source.
487 basename of the source.
488
488
489 The location of the source is added to the new repository's
489 The location of the source is added to the new repository's
490 .hg/hgrc file, as the default to be used for future pulls.
490 .hg/hgrc file, as the default to be used for future pulls.
491
491
492 For efficiency, hardlinks are used for cloning whenever the source
492 For efficiency, hardlinks are used for cloning whenever the source
493 and destination are on the same filesystem (note this applies only
493 and destination are on the same filesystem (note this applies only
494 to the repository data, not to the checked out files). Some
494 to the repository data, not to the checked out files). Some
495 filesystems, such as AFS, implement hardlinking incorrectly, but
495 filesystems, such as AFS, implement hardlinking incorrectly, but
496 do not report errors. In these cases, use the --pull option to
496 do not report errors. In these cases, use the --pull option to
497 avoid hardlinking.
497 avoid hardlinking.
498
498
499 You can safely clone repositories and checked out files using full
499 You can safely clone repositories and checked out files using full
500 hardlinks with
500 hardlinks with
501
501
502 $ cp -al REPO REPOCLONE
502 $ cp -al REPO REPOCLONE
503
503
504 which is the fastest way to clone. However, the operation is not
504 which is the fastest way to clone. However, the operation is not
505 atomic (making sure REPO is not modified during the operation is
505 atomic (making sure REPO is not modified during the operation is
506 up to you) and you have to make sure your editor breaks hardlinks
506 up to you) and you have to make sure your editor breaks hardlinks
507 (Emacs and most Linux Kernel tools do so).
507 (Emacs and most Linux Kernel tools do so).
508
508
509 If you use the -r option to clone up to a specific revision, no
509 If you use the -r option to clone up to a specific revision, no
510 subsequent revisions will be present in the cloned repository.
510 subsequent revisions will be present in the cloned repository.
511 This option implies --pull, even on local repositories.
511 This option implies --pull, even on local repositories.
512
512
513 See pull for valid source format details.
513 See pull for valid source format details.
514
514
515 It is possible to specify an ssh:// URL as the destination, but no
515 It is possible to specify an ssh:// URL as the destination, but no
516 .hg/hgrc and working directory will be created on the remote side.
516 .hg/hgrc and working directory will be created on the remote side.
517 Look at the help text for the pull command for important details
517 Look at the help text for the pull command for important details
518 about ssh:// URLs.
518 about ssh:// URLs.
519 """
519 """
520 cmdutil.setremoteconfig(ui, opts)
520 cmdutil.setremoteconfig(ui, opts)
521 hg.clone(ui, source, dest,
521 hg.clone(ui, source, dest,
522 pull=opts['pull'],
522 pull=opts['pull'],
523 stream=opts['uncompressed'],
523 stream=opts['uncompressed'],
524 rev=opts['rev'],
524 rev=opts['rev'],
525 update=not opts['noupdate'])
525 update=not opts['noupdate'])
526
526
527 def commit(ui, repo, *pats, **opts):
527 def commit(ui, repo, *pats, **opts):
528 """commit the specified files or all outstanding changes
528 """commit the specified files or all outstanding changes
529
529
530 Commit changes to the given files into the repository.
530 Commit changes to the given files into the repository.
531
531
532 If a list of files is omitted, all changes reported by "hg status"
532 If a list of files is omitted, all changes reported by "hg status"
533 will be committed.
533 will be committed.
534
534
535 If no commit message is specified, the configured editor is started to
535 If no commit message is specified, the configured editor is started to
536 enter a message.
536 enter a message.
537 """
537 """
538 def commitfunc(ui, repo, files, message, match, opts):
538 def commitfunc(ui, repo, files, message, match, opts):
539 return repo.commit(files, message, opts['user'], opts['date'], match,
539 return repo.commit(files, message, opts['user'], opts['date'], match,
540 force_editor=opts.get('force_editor'))
540 force_editor=opts.get('force_editor'))
541 cmdutil.commit(ui, repo, commitfunc, pats, opts)
541 cmdutil.commit(ui, repo, commitfunc, pats, opts)
542
542
543 def copy(ui, repo, *pats, **opts):
543 def copy(ui, repo, *pats, **opts):
544 """mark files as copied for the next commit
544 """mark files as copied for the next commit
545
545
546 Mark dest as having copies of source files. If dest is a
546 Mark dest as having copies of source files. If dest is a
547 directory, copies are put in that directory. If dest is a file,
547 directory, copies are put in that directory. If dest is a file,
548 there can only be one source.
548 there can only be one source.
549
549
550 By default, this command copies the contents of files as they
550 By default, this command copies the contents of files as they
551 stand in the working directory. If invoked with --after, the
551 stand in the working directory. If invoked with --after, the
552 operation is recorded, but no copying is performed.
552 operation is recorded, but no copying is performed.
553
553
554 This command takes effect in the next commit. To undo a copy
554 This command takes effect in the next commit. To undo a copy
555 before that, see hg revert.
555 before that, see hg revert.
556 """
556 """
557 wlock = repo.wlock(False)
557 wlock = repo.wlock(False)
558 try:
558 try:
559 return cmdutil.copy(ui, repo, pats, opts)
559 return cmdutil.copy(ui, repo, pats, opts)
560 finally:
560 finally:
561 del wlock
561 del wlock
562
562
563 def debugancestor(ui, index, rev1, rev2):
563 def debugancestor(ui, index, rev1, rev2):
564 """find the ancestor revision of two revisions in a given index"""
564 """find the ancestor revision of two revisions in a given index"""
565 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
565 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
566 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
566 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
567 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
567 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
568
568
569 def debugcomplete(ui, cmd='', **opts):
569 def debugcomplete(ui, cmd='', **opts):
570 """returns the completion list associated with the given command"""
570 """returns the completion list associated with the given command"""
571
571
572 if opts['options']:
572 if opts['options']:
573 options = []
573 options = []
574 otables = [globalopts]
574 otables = [globalopts]
575 if cmd:
575 if cmd:
576 aliases, entry = cmdutil.findcmd(ui, cmd, table)
576 aliases, entry = cmdutil.findcmd(ui, cmd, table)
577 otables.append(entry[1])
577 otables.append(entry[1])
578 for t in otables:
578 for t in otables:
579 for o in t:
579 for o in t:
580 if o[0]:
580 if o[0]:
581 options.append('-%s' % o[0])
581 options.append('-%s' % o[0])
582 options.append('--%s' % o[1])
582 options.append('--%s' % o[1])
583 ui.write("%s\n" % "\n".join(options))
583 ui.write("%s\n" % "\n".join(options))
584 return
584 return
585
585
586 clist = cmdutil.findpossible(ui, cmd, table).keys()
586 clist = cmdutil.findpossible(ui, cmd, table).keys()
587 clist.sort()
587 clist.sort()
588 ui.write("%s\n" % "\n".join(clist))
588 ui.write("%s\n" % "\n".join(clist))
589
589
590 def debugfsinfo(ui, path = "."):
590 def debugfsinfo(ui, path = "."):
591 file('.debugfsinfo', 'w').write('')
591 file('.debugfsinfo', 'w').write('')
592 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
592 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
593 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
593 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
594 ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
594 ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
595 and 'yes' or 'no'))
595 and 'yes' or 'no'))
596 os.unlink('.debugfsinfo')
596 os.unlink('.debugfsinfo')
597
597
598 def debugrebuildstate(ui, repo, rev=""):
598 def debugrebuildstate(ui, repo, rev=""):
599 """rebuild the dirstate as it would look like for the given revision"""
599 """rebuild the dirstate as it would look like for the given revision"""
600 if rev == "":
600 if rev == "":
601 rev = repo.changelog.tip()
601 rev = repo.changelog.tip()
602 ctx = repo.changectx(rev)
602 ctx = repo.changectx(rev)
603 files = ctx.manifest()
603 files = ctx.manifest()
604 wlock = repo.wlock()
604 wlock = repo.wlock()
605 try:
605 try:
606 repo.dirstate.rebuild(rev, files)
606 repo.dirstate.rebuild(rev, files)
607 finally:
607 finally:
608 del wlock
608 del wlock
609
609
610 def debugcheckstate(ui, repo):
610 def debugcheckstate(ui, repo):
611 """validate the correctness of the current dirstate"""
611 """validate the correctness of the current dirstate"""
612 parent1, parent2 = repo.dirstate.parents()
612 parent1, parent2 = repo.dirstate.parents()
613 m1 = repo.changectx(parent1).manifest()
613 m1 = repo.changectx(parent1).manifest()
614 m2 = repo.changectx(parent2).manifest()
614 m2 = repo.changectx(parent2).manifest()
615 errors = 0
615 errors = 0
616 for f in repo.dirstate:
616 for f in repo.dirstate:
617 state = repo.dirstate[f]
617 state = repo.dirstate[f]
618 if state in "nr" and f not in m1:
618 if state in "nr" and f not in m1:
619 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
619 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
620 errors += 1
620 errors += 1
621 if state in "a" and f in m1:
621 if state in "a" and f in m1:
622 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
622 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
623 errors += 1
623 errors += 1
624 if state in "m" and f not in m1 and f not in m2:
624 if state in "m" and f not in m1 and f not in m2:
625 ui.warn(_("%s in state %s, but not in either manifest\n") %
625 ui.warn(_("%s in state %s, but not in either manifest\n") %
626 (f, state))
626 (f, state))
627 errors += 1
627 errors += 1
628 for f in m1:
628 for f in m1:
629 state = repo.dirstate[f]
629 state = repo.dirstate[f]
630 if state not in "nrm":
630 if state not in "nrm":
631 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
631 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
632 errors += 1
632 errors += 1
633 if errors:
633 if errors:
634 error = _(".hg/dirstate inconsistent with current parent's manifest")
634 error = _(".hg/dirstate inconsistent with current parent's manifest")
635 raise util.Abort(error)
635 raise util.Abort(error)
636
636
637 def showconfig(ui, repo, *values, **opts):
637 def showconfig(ui, repo, *values, **opts):
638 """show combined config settings from all hgrc files
638 """show combined config settings from all hgrc files
639
639
640 With no args, print names and values of all config items.
640 With no args, print names and values of all config items.
641
641
642 With one arg of the form section.name, print just the value of
642 With one arg of the form section.name, print just the value of
643 that config item.
643 that config item.
644
644
645 With multiple args, print names and values of all config items
645 With multiple args, print names and values of all config items
646 with matching section names."""
646 with matching section names."""
647
647
648 untrusted = bool(opts.get('untrusted'))
648 untrusted = bool(opts.get('untrusted'))
649 if values:
649 if values:
650 if len([v for v in values if '.' in v]) > 1:
650 if len([v for v in values if '.' in v]) > 1:
651 raise util.Abort(_('only one config item permitted'))
651 raise util.Abort(_('only one config item permitted'))
652 for section, name, value in ui.walkconfig(untrusted=untrusted):
652 for section, name, value in ui.walkconfig(untrusted=untrusted):
653 sectname = section + '.' + name
653 sectname = section + '.' + name
654 if values:
654 if values:
655 for v in values:
655 for v in values:
656 if v == section:
656 if v == section:
657 ui.write('%s=%s\n' % (sectname, value))
657 ui.write('%s=%s\n' % (sectname, value))
658 elif v == sectname:
658 elif v == sectname:
659 ui.write(value, '\n')
659 ui.write(value, '\n')
660 else:
660 else:
661 ui.write('%s=%s\n' % (sectname, value))
661 ui.write('%s=%s\n' % (sectname, value))
662
662
663 def debugsetparents(ui, repo, rev1, rev2=None):
663 def debugsetparents(ui, repo, rev1, rev2=None):
664 """manually set the parents of the current working directory
664 """manually set the parents of the current working directory
665
665
666 This is useful for writing repository conversion tools, but should
666 This is useful for writing repository conversion tools, but should
667 be used with care.
667 be used with care.
668 """
668 """
669
669
670 if not rev2:
670 if not rev2:
671 rev2 = hex(nullid)
671 rev2 = hex(nullid)
672
672
673 wlock = repo.wlock()
673 wlock = repo.wlock()
674 try:
674 try:
675 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
675 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
676 finally:
676 finally:
677 del wlock
677 del wlock
678
678
679 def debugstate(ui, repo):
679 def debugstate(ui, repo):
680 """show the contents of the current dirstate"""
680 """show the contents of the current dirstate"""
681 k = repo.dirstate._map.items()
681 k = repo.dirstate._map.items()
682 k.sort()
682 k.sort()
683 for file_, ent in k:
683 for file_, ent in k:
684 if ent[3] == -1:
684 if ent[3] == -1:
685 # Pad or slice to locale representation
685 # Pad or slice to locale representation
686 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
686 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(0)))
687 timestr = 'unset'
687 timestr = 'unset'
688 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
688 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
689 else:
689 else:
690 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
690 timestr = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(ent[3]))
691 if ent[1] & 020000:
691 if ent[1] & 020000:
692 mode = 'lnk'
692 mode = 'lnk'
693 else:
693 else:
694 mode = '%3o' % (ent[1] & 0777)
694 mode = '%3o' % (ent[1] & 0777)
695 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
695 ui.write("%c %s %10d %s %s\n" % (ent[0], mode, ent[2], timestr, file_))
696 for f in repo.dirstate.copies():
696 for f in repo.dirstate.copies():
697 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
697 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
698
698
699 def debugdata(ui, file_, rev):
699 def debugdata(ui, file_, rev):
700 """dump the contents of a data file revision"""
700 """dump the contents of a data file revision"""
701 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
701 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
702 try:
702 try:
703 ui.write(r.revision(r.lookup(rev)))
703 ui.write(r.revision(r.lookup(rev)))
704 except KeyError:
704 except KeyError:
705 raise util.Abort(_('invalid revision identifier %s') % rev)
705 raise util.Abort(_('invalid revision identifier %s') % rev)
706
706
707 def debugdate(ui, date, range=None, **opts):
707 def debugdate(ui, date, range=None, **opts):
708 """parse and display a date"""
708 """parse and display a date"""
709 if opts["extended"]:
709 if opts["extended"]:
710 d = util.parsedate(date, util.extendeddateformats)
710 d = util.parsedate(date, util.extendeddateformats)
711 else:
711 else:
712 d = util.parsedate(date)
712 d = util.parsedate(date)
713 ui.write("internal: %s %s\n" % d)
713 ui.write("internal: %s %s\n" % d)
714 ui.write("standard: %s\n" % util.datestr(d))
714 ui.write("standard: %s\n" % util.datestr(d))
715 if range:
715 if range:
716 m = util.matchdate(range)
716 m = util.matchdate(range)
717 ui.write("match: %s\n" % m(d[0]))
717 ui.write("match: %s\n" % m(d[0]))
718
718
719 def debugindex(ui, file_):
719 def debugindex(ui, file_):
720 """dump the contents of an index file"""
720 """dump the contents of an index file"""
721 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
721 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
722 ui.write(" rev offset length base linkrev" +
722 ui.write(" rev offset length base linkrev" +
723 " nodeid p1 p2\n")
723 " nodeid p1 p2\n")
724 for i in xrange(r.count()):
724 for i in xrange(r.count()):
725 node = r.node(i)
725 node = r.node(i)
726 try:
726 try:
727 pp = r.parents(node)
727 pp = r.parents(node)
728 except:
728 except:
729 pp = [nullid, nullid]
729 pp = [nullid, nullid]
730 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
730 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
731 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
731 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
732 short(node), short(pp[0]), short(pp[1])))
732 short(node), short(pp[0]), short(pp[1])))
733
733
734 def debugindexdot(ui, file_):
734 def debugindexdot(ui, file_):
735 """dump an index DAG as a .dot file"""
735 """dump an index DAG as a .dot file"""
736 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
736 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
737 ui.write("digraph G {\n")
737 ui.write("digraph G {\n")
738 for i in xrange(r.count()):
738 for i in xrange(r.count()):
739 node = r.node(i)
739 node = r.node(i)
740 pp = r.parents(node)
740 pp = r.parents(node)
741 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
741 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
742 if pp[1] != nullid:
742 if pp[1] != nullid:
743 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
743 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
744 ui.write("}\n")
744 ui.write("}\n")
745
745
746 def debuginstall(ui):
746 def debuginstall(ui):
747 '''test Mercurial installation'''
747 '''test Mercurial installation'''
748
748
749 def writetemp(contents):
749 def writetemp(contents):
750 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
750 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
751 f = os.fdopen(fd, "wb")
751 f = os.fdopen(fd, "wb")
752 f.write(contents)
752 f.write(contents)
753 f.close()
753 f.close()
754 return name
754 return name
755
755
756 problems = 0
756 problems = 0
757
757
758 # encoding
758 # encoding
759 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
759 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
760 try:
760 try:
761 util.fromlocal("test")
761 util.fromlocal("test")
762 except util.Abort, inst:
762 except util.Abort, inst:
763 ui.write(" %s\n" % inst)
763 ui.write(" %s\n" % inst)
764 ui.write(_(" (check that your locale is properly set)\n"))
764 ui.write(_(" (check that your locale is properly set)\n"))
765 problems += 1
765 problems += 1
766
766
767 # compiled modules
767 # compiled modules
768 ui.status(_("Checking extensions...\n"))
768 ui.status(_("Checking extensions...\n"))
769 try:
769 try:
770 import bdiff, mpatch, base85
770 import bdiff, mpatch, base85
771 except Exception, inst:
771 except Exception, inst:
772 ui.write(" %s\n" % inst)
772 ui.write(" %s\n" % inst)
773 ui.write(_(" One or more extensions could not be found"))
773 ui.write(_(" One or more extensions could not be found"))
774 ui.write(_(" (check that you compiled the extensions)\n"))
774 ui.write(_(" (check that you compiled the extensions)\n"))
775 problems += 1
775 problems += 1
776
776
777 # templates
777 # templates
778 ui.status(_("Checking templates...\n"))
778 ui.status(_("Checking templates...\n"))
779 try:
779 try:
780 import templater
780 import templater
781 t = templater.templater(templater.templatepath("map-cmdline.default"))
781 t = templater.templater(templater.templatepath("map-cmdline.default"))
782 except Exception, inst:
782 except Exception, inst:
783 ui.write(" %s\n" % inst)
783 ui.write(" %s\n" % inst)
784 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
784 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
785 problems += 1
785 problems += 1
786
786
787 # patch
787 # patch
788 ui.status(_("Checking patch...\n"))
788 ui.status(_("Checking patch...\n"))
789 patchproblems = 0
789 patchproblems = 0
790 a = "1\n2\n3\n4\n"
790 a = "1\n2\n3\n4\n"
791 b = "1\n2\n3\ninsert\n4\n"
791 b = "1\n2\n3\ninsert\n4\n"
792 fa = writetemp(a)
792 fa = writetemp(a)
793 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
793 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
794 os.path.basename(fa))
794 os.path.basename(fa))
795 fd = writetemp(d)
795 fd = writetemp(d)
796
796
797 files = {}
797 files = {}
798 try:
798 try:
799 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
799 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
800 except util.Abort, e:
800 except util.Abort, e:
801 ui.write(_(" patch call failed:\n"))
801 ui.write(_(" patch call failed:\n"))
802 ui.write(" " + str(e) + "\n")
802 ui.write(" " + str(e) + "\n")
803 patchproblems += 1
803 patchproblems += 1
804 else:
804 else:
805 if list(files) != [os.path.basename(fa)]:
805 if list(files) != [os.path.basename(fa)]:
806 ui.write(_(" unexpected patch output!\n"))
806 ui.write(_(" unexpected patch output!\n"))
807 patchproblems += 1
807 patchproblems += 1
808 a = file(fa).read()
808 a = file(fa).read()
809 if a != b:
809 if a != b:
810 ui.write(_(" patch test failed!\n"))
810 ui.write(_(" patch test failed!\n"))
811 patchproblems += 1
811 patchproblems += 1
812
812
813 if patchproblems:
813 if patchproblems:
814 if ui.config('ui', 'patch'):
814 if ui.config('ui', 'patch'):
815 ui.write(_(" (Current patch tool may be incompatible with patch,"
815 ui.write(_(" (Current patch tool may be incompatible with patch,"
816 " or misconfigured. Please check your .hgrc file)\n"))
816 " or misconfigured. Please check your .hgrc file)\n"))
817 else:
817 else:
818 ui.write(_(" Internal patcher failure, please report this error"
818 ui.write(_(" Internal patcher failure, please report this error"
819 " to http://www.selenic.com/mercurial/bts\n"))
819 " to http://www.selenic.com/mercurial/bts\n"))
820 problems += patchproblems
820 problems += patchproblems
821
821
822 os.unlink(fa)
822 os.unlink(fa)
823 os.unlink(fd)
823 os.unlink(fd)
824
824
825 # editor
825 # editor
826 ui.status(_("Checking commit editor...\n"))
826 ui.status(_("Checking commit editor...\n"))
827 editor = ui.geteditor()
827 editor = ui.geteditor()
828 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
828 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
829 if not cmdpath:
829 if not cmdpath:
830 if editor == 'vi':
830 if editor == 'vi':
831 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
831 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
832 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
832 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
833 else:
833 else:
834 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
834 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
835 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
835 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
836 problems += 1
836 problems += 1
837
837
838 # check username
838 # check username
839 ui.status(_("Checking username...\n"))
839 ui.status(_("Checking username...\n"))
840 user = os.environ.get("HGUSER")
840 user = os.environ.get("HGUSER")
841 if user is None:
841 if user is None:
842 user = ui.config("ui", "username")
842 user = ui.config("ui", "username")
843 if user is None:
843 if user is None:
844 user = os.environ.get("EMAIL")
844 user = os.environ.get("EMAIL")
845 if not user:
845 if not user:
846 ui.warn(" ")
846 ui.warn(" ")
847 ui.username()
847 ui.username()
848 ui.write(_(" (specify a username in your .hgrc file)\n"))
848 ui.write(_(" (specify a username in your .hgrc file)\n"))
849
849
850 if not problems:
850 if not problems:
851 ui.status(_("No problems detected\n"))
851 ui.status(_("No problems detected\n"))
852 else:
852 else:
853 ui.write(_("%s problems detected,"
853 ui.write(_("%s problems detected,"
854 " please check your install!\n") % problems)
854 " please check your install!\n") % problems)
855
855
856 return problems
856 return problems
857
857
858 def debugrename(ui, repo, file1, *pats, **opts):
858 def debugrename(ui, repo, file1, *pats, **opts):
859 """dump rename information"""
859 """dump rename information"""
860
860
861 ctx = repo.changectx(opts.get('rev', 'tip'))
861 ctx = repo.changectx(opts.get('rev', 'tip'))
862 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
862 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
863 ctx.node()):
863 ctx.node()):
864 fctx = ctx.filectx(abs)
864 fctx = ctx.filectx(abs)
865 m = fctx.filelog().renamed(fctx.filenode())
865 m = fctx.filelog().renamed(fctx.filenode())
866 if m:
866 if m:
867 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
867 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
868 else:
868 else:
869 ui.write(_("%s not renamed\n") % rel)
869 ui.write(_("%s not renamed\n") % rel)
870
870
871 def debugwalk(ui, repo, *pats, **opts):
871 def debugwalk(ui, repo, *pats, **opts):
872 """show how files match on given patterns"""
872 """show how files match on given patterns"""
873 items = list(cmdutil.walk(repo, pats, opts))
873 items = list(cmdutil.walk(repo, pats, opts))
874 if not items:
874 if not items:
875 return
875 return
876 fmt = '%%s %%-%ds %%-%ds %%s' % (
876 fmt = '%%s %%-%ds %%-%ds %%s' % (
877 max([len(abs) for (src, abs, rel, exact) in items]),
877 max([len(abs) for (src, abs, rel, exact) in items]),
878 max([len(rel) for (src, abs, rel, exact) in items]))
878 max([len(rel) for (src, abs, rel, exact) in items]))
879 for src, abs, rel, exact in items:
879 for src, abs, rel, exact in items:
880 line = fmt % (src, abs, rel, exact and 'exact' or '')
880 line = fmt % (src, abs, rel, exact and 'exact' or '')
881 ui.write("%s\n" % line.rstrip())
881 ui.write("%s\n" % line.rstrip())
882
882
883 def diff(ui, repo, *pats, **opts):
883 def diff(ui, repo, *pats, **opts):
884 """diff repository (or selected files)
884 """diff repository (or selected files)
885
885
886 Show differences between revisions for the specified files.
886 Show differences between revisions for the specified files.
887
887
888 Differences between files are shown using the unified diff format.
888 Differences between files are shown using the unified diff format.
889
889
890 NOTE: diff may generate unexpected results for merges, as it will
890 NOTE: diff may generate unexpected results for merges, as it will
891 default to comparing against the working directory's first parent
891 default to comparing against the working directory's first parent
892 changeset if no revisions are specified.
892 changeset if no revisions are specified.
893
893
894 When two revision arguments are given, then changes are shown
894 When two revision arguments are given, then changes are shown
895 between those revisions. If only one revision is specified then
895 between those revisions. If only one revision is specified then
896 that revision is compared to the working directory, and, when no
896 that revision is compared to the working directory, and, when no
897 revisions are specified, the working directory files are compared
897 revisions are specified, the working directory files are compared
898 to its parent.
898 to its parent.
899
899
900 Without the -a option, diff will avoid generating diffs of files
900 Without the -a option, diff will avoid generating diffs of files
901 it detects as binary. With -a, diff will generate a diff anyway,
901 it detects as binary. With -a, diff will generate a diff anyway,
902 probably with undesirable results.
902 probably with undesirable results.
903 """
903 """
904 node1, node2 = cmdutil.revpair(repo, opts['rev'])
904 node1, node2 = cmdutil.revpair(repo, opts['rev'])
905
905
906 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
906 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
907
907
908 patch.diff(repo, node1, node2, fns, match=matchfn,
908 patch.diff(repo, node1, node2, fns, match=matchfn,
909 opts=patch.diffopts(ui, opts))
909 opts=patch.diffopts(ui, opts))
910
910
911 def export(ui, repo, *changesets, **opts):
911 def export(ui, repo, *changesets, **opts):
912 """dump the header and diffs for one or more changesets
912 """dump the header and diffs for one or more changesets
913
913
914 Print the changeset header and diffs for one or more revisions.
914 Print the changeset header and diffs for one or more revisions.
915
915
916 The information shown in the changeset header is: author,
916 The information shown in the changeset header is: author,
917 changeset hash, parent(s) and commit comment.
917 changeset hash, parent(s) and commit comment.
918
918
919 NOTE: export may generate unexpected diff output for merge changesets,
919 NOTE: export may generate unexpected diff output for merge changesets,
920 as it will compare the merge changeset against its first parent only.
920 as it will compare the merge changeset against its first parent only.
921
921
922 Output may be to a file, in which case the name of the file is
922 Output may be to a file, in which case the name of the file is
923 given using a format string. The formatting rules are as follows:
923 given using a format string. The formatting rules are as follows:
924
924
925 %% literal "%" character
925 %% literal "%" character
926 %H changeset hash (40 bytes of hexadecimal)
926 %H changeset hash (40 bytes of hexadecimal)
927 %N number of patches being generated
927 %N number of patches being generated
928 %R changeset revision number
928 %R changeset revision number
929 %b basename of the exporting repository
929 %b basename of the exporting repository
930 %h short-form changeset hash (12 bytes of hexadecimal)
930 %h short-form changeset hash (12 bytes of hexadecimal)
931 %n zero-padded sequence number, starting at 1
931 %n zero-padded sequence number, starting at 1
932 %r zero-padded changeset revision number
932 %r zero-padded changeset revision number
933
933
934 Without the -a option, export will avoid generating diffs of files
934 Without the -a option, export will avoid generating diffs of files
935 it detects as binary. With -a, export will generate a diff anyway,
935 it detects as binary. With -a, export will generate a diff anyway,
936 probably with undesirable results.
936 probably with undesirable results.
937
937
938 With the --switch-parent option, the diff will be against the second
938 With the --switch-parent option, the diff will be against the second
939 parent. It can be useful to review a merge.
939 parent. It can be useful to review a merge.
940 """
940 """
941 if not changesets:
941 if not changesets:
942 raise util.Abort(_("export requires at least one changeset"))
942 raise util.Abort(_("export requires at least one changeset"))
943 revs = cmdutil.revrange(repo, changesets)
943 revs = cmdutil.revrange(repo, changesets)
944 if len(revs) > 1:
944 if len(revs) > 1:
945 ui.note(_('exporting patches:\n'))
945 ui.note(_('exporting patches:\n'))
946 else:
946 else:
947 ui.note(_('exporting patch:\n'))
947 ui.note(_('exporting patch:\n'))
948 patch.export(repo, revs, template=opts['output'],
948 patch.export(repo, revs, template=opts['output'],
949 switch_parent=opts['switch_parent'],
949 switch_parent=opts['switch_parent'],
950 opts=patch.diffopts(ui, opts))
950 opts=patch.diffopts(ui, opts))
951
951
952 def grep(ui, repo, pattern, *pats, **opts):
952 def grep(ui, repo, pattern, *pats, **opts):
953 """search for a pattern in specified files and revisions
953 """search for a pattern in specified files and revisions
954
954
955 Search revisions of files for a regular expression.
955 Search revisions of files for a regular expression.
956
956
957 This command behaves differently than Unix grep. It only accepts
957 This command behaves differently than Unix grep. It only accepts
958 Python/Perl regexps. It searches repository history, not the
958 Python/Perl regexps. It searches repository history, not the
959 working directory. It always prints the revision number in which
959 working directory. It always prints the revision number in which
960 a match appears.
960 a match appears.
961
961
962 By default, grep only prints output for the first revision of a
962 By default, grep only prints output for the first revision of a
963 file in which it finds a match. To get it to print every revision
963 file in which it finds a match. To get it to print every revision
964 that contains a change in match status ("-" for a match that
964 that contains a change in match status ("-" for a match that
965 becomes a non-match, or "+" for a non-match that becomes a match),
965 becomes a non-match, or "+" for a non-match that becomes a match),
966 use the --all flag.
966 use the --all flag.
967 """
967 """
968 reflags = 0
968 reflags = 0
969 if opts['ignore_case']:
969 if opts['ignore_case']:
970 reflags |= re.I
970 reflags |= re.I
971 try:
971 try:
972 regexp = re.compile(pattern, reflags)
972 regexp = re.compile(pattern, reflags)
973 except Exception, inst:
973 except Exception, inst:
974 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
974 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
975 return None
975 return None
976 sep, eol = ':', '\n'
976 sep, eol = ':', '\n'
977 if opts['print0']:
977 if opts['print0']:
978 sep = eol = '\0'
978 sep = eol = '\0'
979
979
980 fcache = {}
980 fcache = {}
981 def getfile(fn):
981 def getfile(fn):
982 if fn not in fcache:
982 if fn not in fcache:
983 fcache[fn] = repo.file(fn)
983 fcache[fn] = repo.file(fn)
984 return fcache[fn]
984 return fcache[fn]
985
985
986 def matchlines(body):
986 def matchlines(body):
987 begin = 0
987 begin = 0
988 linenum = 0
988 linenum = 0
989 while True:
989 while True:
990 match = regexp.search(body, begin)
990 match = regexp.search(body, begin)
991 if not match:
991 if not match:
992 break
992 break
993 mstart, mend = match.span()
993 mstart, mend = match.span()
994 linenum += body.count('\n', begin, mstart) + 1
994 linenum += body.count('\n', begin, mstart) + 1
995 lstart = body.rfind('\n', begin, mstart) + 1 or begin
995 lstart = body.rfind('\n', begin, mstart) + 1 or begin
996 lend = body.find('\n', mend)
996 lend = body.find('\n', mend)
997 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
997 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
998 begin = lend + 1
998 begin = lend + 1
999
999
1000 class linestate(object):
1000 class linestate(object):
1001 def __init__(self, line, linenum, colstart, colend):
1001 def __init__(self, line, linenum, colstart, colend):
1002 self.line = line
1002 self.line = line
1003 self.linenum = linenum
1003 self.linenum = linenum
1004 self.colstart = colstart
1004 self.colstart = colstart
1005 self.colend = colend
1005 self.colend = colend
1006
1006
1007 def __eq__(self, other):
1007 def __eq__(self, other):
1008 return self.line == other.line
1008 return self.line == other.line
1009
1009
1010 matches = {}
1010 matches = {}
1011 copies = {}
1011 copies = {}
1012 def grepbody(fn, rev, body):
1012 def grepbody(fn, rev, body):
1013 matches[rev].setdefault(fn, [])
1013 matches[rev].setdefault(fn, [])
1014 m = matches[rev][fn]
1014 m = matches[rev][fn]
1015 for lnum, cstart, cend, line in matchlines(body):
1015 for lnum, cstart, cend, line in matchlines(body):
1016 s = linestate(line, lnum, cstart, cend)
1016 s = linestate(line, lnum, cstart, cend)
1017 m.append(s)
1017 m.append(s)
1018
1018
1019 def difflinestates(a, b):
1019 def difflinestates(a, b):
1020 sm = difflib.SequenceMatcher(None, a, b)
1020 sm = difflib.SequenceMatcher(None, a, b)
1021 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1021 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1022 if tag == 'insert':
1022 if tag == 'insert':
1023 for i in xrange(blo, bhi):
1023 for i in xrange(blo, bhi):
1024 yield ('+', b[i])
1024 yield ('+', b[i])
1025 elif tag == 'delete':
1025 elif tag == 'delete':
1026 for i in xrange(alo, ahi):
1026 for i in xrange(alo, ahi):
1027 yield ('-', a[i])
1027 yield ('-', a[i])
1028 elif tag == 'replace':
1028 elif tag == 'replace':
1029 for i in xrange(alo, ahi):
1029 for i in xrange(alo, ahi):
1030 yield ('-', a[i])
1030 yield ('-', a[i])
1031 for i in xrange(blo, bhi):
1031 for i in xrange(blo, bhi):
1032 yield ('+', b[i])
1032 yield ('+', b[i])
1033
1033
1034 prev = {}
1034 prev = {}
1035 def display(fn, rev, states, prevstates):
1035 def display(fn, rev, states, prevstates):
1036 datefunc = ui.quiet and util.shortdate or util.datestr
1036 datefunc = ui.quiet and util.shortdate or util.datestr
1037 found = False
1037 found = False
1038 filerevmatches = {}
1038 filerevmatches = {}
1039 r = prev.get(fn, -1)
1039 r = prev.get(fn, -1)
1040 if opts['all']:
1040 if opts['all']:
1041 iter = difflinestates(states, prevstates)
1041 iter = difflinestates(states, prevstates)
1042 else:
1042 else:
1043 iter = [('', l) for l in prevstates]
1043 iter = [('', l) for l in prevstates]
1044 for change, l in iter:
1044 for change, l in iter:
1045 cols = [fn, str(r)]
1045 cols = [fn, str(r)]
1046 if opts['line_number']:
1046 if opts['line_number']:
1047 cols.append(str(l.linenum))
1047 cols.append(str(l.linenum))
1048 if opts['all']:
1048 if opts['all']:
1049 cols.append(change)
1049 cols.append(change)
1050 if opts['user']:
1050 if opts['user']:
1051 cols.append(ui.shortuser(get(r)[1]))
1051 cols.append(ui.shortuser(get(r)[1]))
1052 if opts.get('date'):
1052 if opts.get('date'):
1053 cols.append(datefunc(get(r)[2]))
1053 cols.append(datefunc(get(r)[2]))
1054 if opts['files_with_matches']:
1054 if opts['files_with_matches']:
1055 c = (fn, r)
1055 c = (fn, r)
1056 if c in filerevmatches:
1056 if c in filerevmatches:
1057 continue
1057 continue
1058 filerevmatches[c] = 1
1058 filerevmatches[c] = 1
1059 else:
1059 else:
1060 cols.append(l.line)
1060 cols.append(l.line)
1061 ui.write(sep.join(cols), eol)
1061 ui.write(sep.join(cols), eol)
1062 found = True
1062 found = True
1063 return found
1063 return found
1064
1064
1065 fstate = {}
1065 fstate = {}
1066 skip = {}
1066 skip = {}
1067 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1067 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1068 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1068 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1069 found = False
1069 found = False
1070 follow = opts.get('follow')
1070 follow = opts.get('follow')
1071 for st, rev, fns in changeiter:
1071 for st, rev, fns in changeiter:
1072 if st == 'window':
1072 if st == 'window':
1073 matches.clear()
1073 matches.clear()
1074 elif st == 'add':
1074 elif st == 'add':
1075 mf = repo.changectx(rev).manifest()
1075 ctx = repo.changectx(rev)
1076 matches[rev] = {}
1076 matches[rev] = {}
1077 for fn in fns:
1077 for fn in fns:
1078 if fn in skip:
1078 if fn in skip:
1079 continue
1079 continue
1080 try:
1080 try:
1081 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1081 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1082 fstate.setdefault(fn, [])
1082 fstate.setdefault(fn, [])
1083 if follow:
1083 if follow:
1084 copied = getfile(fn).renamed(mf[fn])
1084 copied = getfile(fn).renamed(ctx.filenode(fn))
1085 if copied:
1085 if copied:
1086 copies.setdefault(rev, {})[fn] = copied[0]
1086 copies.setdefault(rev, {})[fn] = copied[0]
1087 except KeyError:
1087 except revlog.LookupError:
1088 pass
1088 pass
1089 elif st == 'iter':
1089 elif st == 'iter':
1090 states = matches[rev].items()
1090 states = matches[rev].items()
1091 states.sort()
1091 states.sort()
1092 for fn, m in states:
1092 for fn, m in states:
1093 copy = copies.get(rev, {}).get(fn)
1093 copy = copies.get(rev, {}).get(fn)
1094 if fn in skip:
1094 if fn in skip:
1095 if copy:
1095 if copy:
1096 skip[copy] = True
1096 skip[copy] = True
1097 continue
1097 continue
1098 if fn in prev or fstate[fn]:
1098 if fn in prev or fstate[fn]:
1099 r = display(fn, rev, m, fstate[fn])
1099 r = display(fn, rev, m, fstate[fn])
1100 found = found or r
1100 found = found or r
1101 if r and not opts['all']:
1101 if r and not opts['all']:
1102 skip[fn] = True
1102 skip[fn] = True
1103 if copy:
1103 if copy:
1104 skip[copy] = True
1104 skip[copy] = True
1105 fstate[fn] = m
1105 fstate[fn] = m
1106 if copy:
1106 if copy:
1107 fstate[copy] = m
1107 fstate[copy] = m
1108 prev[fn] = rev
1108 prev[fn] = rev
1109
1109
1110 fstate = fstate.items()
1110 fstate = fstate.items()
1111 fstate.sort()
1111 fstate.sort()
1112 for fn, state in fstate:
1112 for fn, state in fstate:
1113 if fn in skip:
1113 if fn in skip:
1114 continue
1114 continue
1115 if fn not in copies.get(prev[fn], {}):
1115 if fn not in copies.get(prev[fn], {}):
1116 found = display(fn, rev, {}, state) or found
1116 found = display(fn, rev, {}, state) or found
1117 return (not found and 1) or 0
1117 return (not found and 1) or 0
1118
1118
1119 def heads(ui, repo, *branchrevs, **opts):
1119 def heads(ui, repo, *branchrevs, **opts):
1120 """show current repository heads or show branch heads
1120 """show current repository heads or show branch heads
1121
1121
1122 With no arguments, show all repository head changesets.
1122 With no arguments, show all repository head changesets.
1123
1123
1124 If branch or revisions names are given this will show the heads of
1124 If branch or revisions names are given this will show the heads of
1125 the specified branches or the branches those revisions are tagged
1125 the specified branches or the branches those revisions are tagged
1126 with.
1126 with.
1127
1127
1128 Repository "heads" are changesets that don't have child
1128 Repository "heads" are changesets that don't have child
1129 changesets. They are where development generally takes place and
1129 changesets. They are where development generally takes place and
1130 are the usual targets for update and merge operations.
1130 are the usual targets for update and merge operations.
1131
1131
1132 Branch heads are changesets that have a given branch tag, but have
1132 Branch heads are changesets that have a given branch tag, but have
1133 no child changesets with that tag. They are usually where
1133 no child changesets with that tag. They are usually where
1134 development on the given branch takes place.
1134 development on the given branch takes place.
1135 """
1135 """
1136 if opts['rev']:
1136 if opts['rev']:
1137 start = repo.lookup(opts['rev'])
1137 start = repo.lookup(opts['rev'])
1138 else:
1138 else:
1139 start = None
1139 start = None
1140 if not branchrevs:
1140 if not branchrevs:
1141 # Assume we're looking repo-wide heads if no revs were specified.
1141 # Assume we're looking repo-wide heads if no revs were specified.
1142 heads = repo.heads(start)
1142 heads = repo.heads(start)
1143 else:
1143 else:
1144 heads = []
1144 heads = []
1145 visitedset = util.set()
1145 visitedset = util.set()
1146 for branchrev in branchrevs:
1146 for branchrev in branchrevs:
1147 branch = repo.changectx(branchrev).branch()
1147 branch = repo.changectx(branchrev).branch()
1148 if branch in visitedset:
1148 if branch in visitedset:
1149 continue
1149 continue
1150 visitedset.add(branch)
1150 visitedset.add(branch)
1151 bheads = repo.branchheads(branch, start)
1151 bheads = repo.branchheads(branch, start)
1152 if not bheads:
1152 if not bheads:
1153 if branch != branchrev:
1153 if branch != branchrev:
1154 ui.warn(_("no changes on branch %s containing %s are "
1154 ui.warn(_("no changes on branch %s containing %s are "
1155 "reachable from %s\n")
1155 "reachable from %s\n")
1156 % (branch, branchrev, opts['rev']))
1156 % (branch, branchrev, opts['rev']))
1157 else:
1157 else:
1158 ui.warn(_("no changes on branch %s are reachable from %s\n")
1158 ui.warn(_("no changes on branch %s are reachable from %s\n")
1159 % (branch, opts['rev']))
1159 % (branch, opts['rev']))
1160 heads.extend(bheads)
1160 heads.extend(bheads)
1161 if not heads:
1161 if not heads:
1162 return 1
1162 return 1
1163 displayer = cmdutil.show_changeset(ui, repo, opts)
1163 displayer = cmdutil.show_changeset(ui, repo, opts)
1164 for n in heads:
1164 for n in heads:
1165 displayer.show(changenode=n)
1165 displayer.show(changenode=n)
1166
1166
1167 def help_(ui, name=None, with_version=False):
1167 def help_(ui, name=None, with_version=False):
1168 """show help for a command, extension, or list of commands
1168 """show help for a command, extension, or list of commands
1169
1169
1170 With no arguments, print a list of commands and short help.
1170 With no arguments, print a list of commands and short help.
1171
1171
1172 Given a command name, print help for that command.
1172 Given a command name, print help for that command.
1173
1173
1174 Given an extension name, print help for that extension, and the
1174 Given an extension name, print help for that extension, and the
1175 commands it provides."""
1175 commands it provides."""
1176 option_lists = []
1176 option_lists = []
1177
1177
1178 def addglobalopts(aliases):
1178 def addglobalopts(aliases):
1179 if ui.verbose:
1179 if ui.verbose:
1180 option_lists.append((_("global options:"), globalopts))
1180 option_lists.append((_("global options:"), globalopts))
1181 if name == 'shortlist':
1181 if name == 'shortlist':
1182 option_lists.append((_('use "hg help" for the full list '
1182 option_lists.append((_('use "hg help" for the full list '
1183 'of commands'), ()))
1183 'of commands'), ()))
1184 else:
1184 else:
1185 if name == 'shortlist':
1185 if name == 'shortlist':
1186 msg = _('use "hg help" for the full list of commands '
1186 msg = _('use "hg help" for the full list of commands '
1187 'or "hg -v" for details')
1187 'or "hg -v" for details')
1188 elif aliases:
1188 elif aliases:
1189 msg = _('use "hg -v help%s" to show aliases and '
1189 msg = _('use "hg -v help%s" to show aliases and '
1190 'global options') % (name and " " + name or "")
1190 'global options') % (name and " " + name or "")
1191 else:
1191 else:
1192 msg = _('use "hg -v help %s" to show global options') % name
1192 msg = _('use "hg -v help %s" to show global options') % name
1193 option_lists.append((msg, ()))
1193 option_lists.append((msg, ()))
1194
1194
1195 def helpcmd(name):
1195 def helpcmd(name):
1196 if with_version:
1196 if with_version:
1197 version_(ui)
1197 version_(ui)
1198 ui.write('\n')
1198 ui.write('\n')
1199 aliases, i = cmdutil.findcmd(ui, name, table)
1199 aliases, i = cmdutil.findcmd(ui, name, table)
1200 # synopsis
1200 # synopsis
1201 ui.write("%s\n" % i[2])
1201 ui.write("%s\n" % i[2])
1202
1202
1203 # aliases
1203 # aliases
1204 if not ui.quiet and len(aliases) > 1:
1204 if not ui.quiet and len(aliases) > 1:
1205 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1205 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1206
1206
1207 # description
1207 # description
1208 doc = i[0].__doc__
1208 doc = i[0].__doc__
1209 if not doc:
1209 if not doc:
1210 doc = _("(No help text available)")
1210 doc = _("(No help text available)")
1211 if ui.quiet:
1211 if ui.quiet:
1212 doc = doc.splitlines(0)[0]
1212 doc = doc.splitlines(0)[0]
1213 ui.write("\n%s\n" % doc.rstrip())
1213 ui.write("\n%s\n" % doc.rstrip())
1214
1214
1215 if not ui.quiet:
1215 if not ui.quiet:
1216 # options
1216 # options
1217 if i[1]:
1217 if i[1]:
1218 option_lists.append((_("options:\n"), i[1]))
1218 option_lists.append((_("options:\n"), i[1]))
1219
1219
1220 addglobalopts(False)
1220 addglobalopts(False)
1221
1221
1222 def helplist(header, select=None):
1222 def helplist(header, select=None):
1223 h = {}
1223 h = {}
1224 cmds = {}
1224 cmds = {}
1225 for c, e in table.items():
1225 for c, e in table.items():
1226 f = c.split("|", 1)[0]
1226 f = c.split("|", 1)[0]
1227 if select and not select(f):
1227 if select and not select(f):
1228 continue
1228 continue
1229 if name == "shortlist" and not f.startswith("^"):
1229 if name == "shortlist" and not f.startswith("^"):
1230 continue
1230 continue
1231 f = f.lstrip("^")
1231 f = f.lstrip("^")
1232 if not ui.debugflag and f.startswith("debug"):
1232 if not ui.debugflag and f.startswith("debug"):
1233 continue
1233 continue
1234 doc = e[0].__doc__
1234 doc = e[0].__doc__
1235 if not doc:
1235 if not doc:
1236 doc = _("(No help text available)")
1236 doc = _("(No help text available)")
1237 h[f] = doc.splitlines(0)[0].rstrip()
1237 h[f] = doc.splitlines(0)[0].rstrip()
1238 cmds[f] = c.lstrip("^")
1238 cmds[f] = c.lstrip("^")
1239
1239
1240 if not h:
1240 if not h:
1241 ui.status(_('no commands defined\n'))
1241 ui.status(_('no commands defined\n'))
1242 return
1242 return
1243
1243
1244 ui.status(header)
1244 ui.status(header)
1245 fns = h.keys()
1245 fns = h.keys()
1246 fns.sort()
1246 fns.sort()
1247 m = max(map(len, fns))
1247 m = max(map(len, fns))
1248 for f in fns:
1248 for f in fns:
1249 if ui.verbose:
1249 if ui.verbose:
1250 commands = cmds[f].replace("|",", ")
1250 commands = cmds[f].replace("|",", ")
1251 ui.write(" %s:\n %s\n"%(commands, h[f]))
1251 ui.write(" %s:\n %s\n"%(commands, h[f]))
1252 else:
1252 else:
1253 ui.write(' %-*s %s\n' % (m, f, h[f]))
1253 ui.write(' %-*s %s\n' % (m, f, h[f]))
1254
1254
1255 if not ui.quiet:
1255 if not ui.quiet:
1256 addglobalopts(True)
1256 addglobalopts(True)
1257
1257
1258 def helptopic(name):
1258 def helptopic(name):
1259 v = None
1259 v = None
1260 for i in help.helptable:
1260 for i in help.helptable:
1261 l = i.split('|')
1261 l = i.split('|')
1262 if name in l:
1262 if name in l:
1263 v = i
1263 v = i
1264 header = l[-1]
1264 header = l[-1]
1265 if not v:
1265 if not v:
1266 raise cmdutil.UnknownCommand(name)
1266 raise cmdutil.UnknownCommand(name)
1267
1267
1268 # description
1268 # description
1269 doc = help.helptable[v]
1269 doc = help.helptable[v]
1270 if not doc:
1270 if not doc:
1271 doc = _("(No help text available)")
1271 doc = _("(No help text available)")
1272 if callable(doc):
1272 if callable(doc):
1273 doc = doc()
1273 doc = doc()
1274
1274
1275 ui.write("%s\n" % header)
1275 ui.write("%s\n" % header)
1276 ui.write("%s\n" % doc.rstrip())
1276 ui.write("%s\n" % doc.rstrip())
1277
1277
1278 def helpext(name):
1278 def helpext(name):
1279 try:
1279 try:
1280 mod = extensions.find(name)
1280 mod = extensions.find(name)
1281 except KeyError:
1281 except KeyError:
1282 raise cmdutil.UnknownCommand(name)
1282 raise cmdutil.UnknownCommand(name)
1283
1283
1284 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1284 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1285 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1285 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1286 for d in doc[1:]:
1286 for d in doc[1:]:
1287 ui.write(d, '\n')
1287 ui.write(d, '\n')
1288
1288
1289 ui.status('\n')
1289 ui.status('\n')
1290
1290
1291 try:
1291 try:
1292 ct = mod.cmdtable
1292 ct = mod.cmdtable
1293 except AttributeError:
1293 except AttributeError:
1294 ct = {}
1294 ct = {}
1295
1295
1296 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1296 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1297 helplist(_('list of commands:\n\n'), modcmds.has_key)
1297 helplist(_('list of commands:\n\n'), modcmds.has_key)
1298
1298
1299 if name and name != 'shortlist':
1299 if name and name != 'shortlist':
1300 i = None
1300 i = None
1301 for f in (helpcmd, helptopic, helpext):
1301 for f in (helpcmd, helptopic, helpext):
1302 try:
1302 try:
1303 f(name)
1303 f(name)
1304 i = None
1304 i = None
1305 break
1305 break
1306 except cmdutil.UnknownCommand, inst:
1306 except cmdutil.UnknownCommand, inst:
1307 i = inst
1307 i = inst
1308 if i:
1308 if i:
1309 raise i
1309 raise i
1310
1310
1311 else:
1311 else:
1312 # program name
1312 # program name
1313 if ui.verbose or with_version:
1313 if ui.verbose or with_version:
1314 version_(ui)
1314 version_(ui)
1315 else:
1315 else:
1316 ui.status(_("Mercurial Distributed SCM\n"))
1316 ui.status(_("Mercurial Distributed SCM\n"))
1317 ui.status('\n')
1317 ui.status('\n')
1318
1318
1319 # list of commands
1319 # list of commands
1320 if name == "shortlist":
1320 if name == "shortlist":
1321 header = _('basic commands:\n\n')
1321 header = _('basic commands:\n\n')
1322 else:
1322 else:
1323 header = _('list of commands:\n\n')
1323 header = _('list of commands:\n\n')
1324
1324
1325 helplist(header)
1325 helplist(header)
1326
1326
1327 # list all option lists
1327 # list all option lists
1328 opt_output = []
1328 opt_output = []
1329 for title, options in option_lists:
1329 for title, options in option_lists:
1330 opt_output.append(("\n%s" % title, None))
1330 opt_output.append(("\n%s" % title, None))
1331 for shortopt, longopt, default, desc in options:
1331 for shortopt, longopt, default, desc in options:
1332 if "DEPRECATED" in desc and not ui.verbose: continue
1332 if "DEPRECATED" in desc and not ui.verbose: continue
1333 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1333 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1334 longopt and " --%s" % longopt),
1334 longopt and " --%s" % longopt),
1335 "%s%s" % (desc,
1335 "%s%s" % (desc,
1336 default
1336 default
1337 and _(" (default: %s)") % default
1337 and _(" (default: %s)") % default
1338 or "")))
1338 or "")))
1339
1339
1340 if opt_output:
1340 if opt_output:
1341 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1341 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1342 for first, second in opt_output:
1342 for first, second in opt_output:
1343 if second:
1343 if second:
1344 ui.write(" %-*s %s\n" % (opts_len, first, second))
1344 ui.write(" %-*s %s\n" % (opts_len, first, second))
1345 else:
1345 else:
1346 ui.write("%s\n" % first)
1346 ui.write("%s\n" % first)
1347
1347
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    # without a local repo a remote source is the only thing we can identify
    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    # "default" output = hash (+ branch/tags) when no selector flag is given
    default = not (num or id or branch or tags)
    output = []

    if source:
        # remote lookup: only the node hash can be queried over the wire
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # working directory: one or two parents, "+" marks local changes
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        # explicit local revision
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # only advertise non-default branch names
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1417
1417
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message.  Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used).  From and Subject headers of email
    message are used as default committer and commit message.  All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body.  Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch.  This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # refuse to clobber local modifications unless forced (--exact always
    # needs a clean working directory since it repositions it per patch)
    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    base = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for patchname in patches:
            patchfile = os.path.join(base, patchname)

            if patchfile == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % patchname)
                if os.path.exists(patchfile):
                    data = patch.extract(ui, file(patchfile, 'rb'))
                else:
                    # not a local file: treat the name as a URL
                    data = patch.extract(ui, urllib.urlopen(patchfile))
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # commit message precedence: command line > patch > editor
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.workingctx().parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    # move the working dir onto the patch's recorded parent
                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # best effort: record the second parent of a merge patch
                    # when both parents are known locally
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except hg.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # always record what the patch touched, even on failure
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        if hex(n) != nodeid:
                            # the rebuilt changeset does not match the patch
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-to-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1532
1532
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # nothing to report: drop any stale bundle file from a previous
        # run.  Only OSError (e.g. file never existed) is expected here;
        # the previous bare "except:" also hid KeyboardInterrupt and
        # real failures such as permission errors.
        if opts["bundle"]:
            try:
                os.unlink(opts["bundle"])
            except OSError:
                pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        # display the incoming changesets, optionally filtered
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            # remove the temporary bundle the user did not ask to keep
            os.unlink(cleanup)
1593
1593
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory.  If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd settings before touching a possibly remote URL
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1608
1608
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default.  To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # NUL-terminate entries for xargs -0 when --print0 is given
    terminator = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    exitcode = 1  # 1 = nothing matched
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=util.always,
                                             default='relglob'):
        if src == 'b':
            # skip files only matched via badmatch
            continue
        if not node and abs not in repo.dirstate:
            # without a revision, only report tracked files
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), terminator)
        else:
            ui.write(((pats and rel) or abs), terminator)
        exitcode = 0

    return exitcode
1649
1649
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files.  Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    # upper bound for the rename cache scan below
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}
    ncache = {}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo.changectx(rev).filectx(fn).renamed()
        except revlog.LookupError:
            pass
        return None

    datefilter = False
    if opts["date"]:
        datefilter = util.matchdate(opts["date"])

    only_branches = opts['only_branch']

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if datefilter:
                changes = get(rev)
                if not datefilter(changes[2][0]):
                    continue

            if opts['keyword']:
                # every keyword must appear in the description, the user
                # or the (joined) file list -- each field checked separately
                changes = get(rev)
                fields = [changes[1].lower(), changes[4].lower(),
                          " ".join(changes[3]).lower()]
                kws = [kw.lower() for kw in opts['keyword']]
                if not all(any(k in f for f in fields) for k in kws):
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit:
                break
            if displayer.flush(rev):
                count += 1
1777
1777
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # --rev is just an alternate spelling of the positional node
    if not node:
        node = rev

    m = repo.changectx(node).manifest()
    for f in sorted(m.keys()):
        if ui.debugflag:
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            # '*' executable, '@' symlink, ' ' plain file
            flag = m.execf(f) and "*" or m.linkf(f) and "@" or " "
            mode = m.execf(f) and "755" or "644"
            ui.write("%3s %1s " % (mode, flag))
        ui.write("%s\n" % f)
1810
1810
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default.  Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # no explicit target: pick the single other head, if unambiguous
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        parent = repo.dirstate.parents()[0]
        if len(heads) == 1:
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo.workingctx().branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = heads[-1] if parent == heads[0] else heads[0]
    return hg.merge(repo, node, force=force)
1848
1848
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    outnodes = repo.findoutgoing(other, force=opts['force'])
    if not outnodes:
        ui.status(_("no changes found\n"))
        return 1

    # restrict to the requested revisions and display each changeset
    outnodes = repo.changelog.nodesbetween(outnodes, revs)[0]
    if opts['newest_first']:
        outnodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in outnodes:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)
1879
1879
1880 def parents(ui, repo, file_=None, **opts):
1880 def parents(ui, repo, file_=None, **opts):
1881 """show the parents of the working dir or revision
1881 """show the parents of the working dir or revision
1882
1882
1883 Print the working directory's parent revisions. If a
1883 Print the working directory's parent revisions. If a
1884 revision is given via --rev, the parent of that revision
1884 revision is given via --rev, the parent of that revision
1885 will be printed. If a file argument is given, revision in
1885 will be printed. If a file argument is given, revision in
1886 which the file was last changed (before the working directory
1886 which the file was last changed (before the working directory
1887 revision or the argument to --rev if given) is printed.
1887 revision or the argument to --rev if given) is printed.
1888 """
1888 """
1889 rev = opts.get('rev')
1889 rev = opts.get('rev')
1890 if rev:
1890 if rev:
1891 ctx = repo.changectx(rev)
1891 ctx = repo.changectx(rev)
1892 else:
1892 else:
1893 ctx = repo.workingctx()
1893 ctx = repo.workingctx()
1894
1894
1895 if file_:
1895 if file_:
1896 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1896 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1897 if anypats or len(files) != 1:
1897 if anypats or len(files) != 1:
1898 raise util.Abort(_('can only specify an explicit file name'))
1898 raise util.Abort(_('can only specify an explicit file name'))
1899 file_ = files[0]
1899 file_ = files[0]
1900 filenodes = []
1900 filenodes = []
1901 for cp in ctx.parents():
1901 for cp in ctx.parents():
1902 if not cp:
1902 if not cp:
1903 continue
1903 continue
1904 try:
1904 try:
1905 filenodes.append(cp.filenode(file_))
1905 filenodes.append(cp.filenode(file_))
1906 except revlog.LookupError:
1906 except revlog.LookupError:
1907 pass
1907 pass
1908 if not filenodes:
1908 if not filenodes:
1909 raise util.Abort(_("'%s' not found in manifest!") % file_)
1909 raise util.Abort(_("'%s' not found in manifest!") % file_)
1910 fl = repo.file(file_)
1910 fl = repo.file(file_)
1911 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1911 p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
1912 else:
1912 else:
1913 p = [cp.node() for cp in ctx.parents()]
1913 p = [cp.node() for cp in ctx.parents()]
1914
1914
1915 displayer = cmdutil.show_changeset(ui, repo, opts)
1915 displayer = cmdutil.show_changeset(ui, repo, opts)
1916 for n in p:
1916 for n in p:
1917 if n != nullid:
1917 if n != nullid:
1918 displayer.show(changenode=n)
1918 displayer.show(changenode=n)
1919
1919
1920 def paths(ui, repo, search=None):
1920 def paths(ui, repo, search=None):
1921 """show definition of symbolic path names
1921 """show definition of symbolic path names
1922
1922
1923 Show definition of symbolic path name NAME. If no name is given, show
1923 Show definition of symbolic path name NAME. If no name is given, show
1924 definition of available names.
1924 definition of available names.
1925
1925
1926 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1926 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1927 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1927 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1928 """
1928 """
1929 if search:
1929 if search:
1930 for name, path in ui.configitems("paths"):
1930 for name, path in ui.configitems("paths"):
1931 if name == search:
1931 if name == search:
1932 ui.write("%s\n" % path)
1932 ui.write("%s\n" % path)
1933 return
1933 return
1934 ui.warn(_("not found!\n"))
1934 ui.warn(_("not found!\n"))
1935 return 1
1935 return 1
1936 else:
1936 else:
1937 for name, path in ui.configitems("paths"):
1937 for name, path in ui.configitems("paths"):
1938 ui.write("%s = %s\n" % (name, path))
1938 ui.write("%s = %s\n" % (name, path))
1939
1939
1940 def postincoming(ui, repo, modheads, optupdate, checkout):
1940 def postincoming(ui, repo, modheads, optupdate, checkout):
1941 if modheads == 0:
1941 if modheads == 0:
1942 return
1942 return
1943 if optupdate:
1943 if optupdate:
1944 if modheads <= 1 or checkout:
1944 if modheads <= 1 or checkout:
1945 return hg.update(repo, checkout)
1945 return hg.update(repo, checkout)
1946 else:
1946 else:
1947 ui.status(_("not updating, since new heads added\n"))
1947 ui.status(_("not updating, since new heads added\n"))
1948 if modheads > 1:
1948 if modheads > 1:
1949 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1949 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1950 else:
1950 else:
1951 ui.status(_("(run 'hg update' to get a working copy)\n"))
1951 ui.status(_("(run 'hg update' to get a working copy)\n"))
1952
1952
1953 def pull(ui, repo, source="default", **opts):
1953 def pull(ui, repo, source="default", **opts):
1954 """pull changes from the specified source
1954 """pull changes from the specified source
1955
1955
1956 Pull changes from a remote repository to a local one.
1956 Pull changes from a remote repository to a local one.
1957
1957
1958 This finds all changes from the repository at the specified path
1958 This finds all changes from the repository at the specified path
1959 or URL and adds them to the local repository. By default, this
1959 or URL and adds them to the local repository. By default, this
1960 does not update the copy of the project in the working directory.
1960 does not update the copy of the project in the working directory.
1961
1961
1962 Valid URLs are of the form:
1962 Valid URLs are of the form:
1963
1963
1964 local/filesystem/path (or file://local/filesystem/path)
1964 local/filesystem/path (or file://local/filesystem/path)
1965 http://[user@]host[:port]/[path]
1965 http://[user@]host[:port]/[path]
1966 https://[user@]host[:port]/[path]
1966 https://[user@]host[:port]/[path]
1967 ssh://[user@]host[:port]/[path]
1967 ssh://[user@]host[:port]/[path]
1968 static-http://host[:port]/[path]
1968 static-http://host[:port]/[path]
1969
1969
1970 Paths in the local filesystem can either point to Mercurial
1970 Paths in the local filesystem can either point to Mercurial
1971 repositories or to bundle files (as created by 'hg bundle' or
1971 repositories or to bundle files (as created by 'hg bundle' or
1972 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1972 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1973 allows access to a Mercurial repository where you simply use a web
1973 allows access to a Mercurial repository where you simply use a web
1974 server to publish the .hg directory as static content.
1974 server to publish the .hg directory as static content.
1975
1975
1976 An optional identifier after # indicates a particular branch, tag,
1976 An optional identifier after # indicates a particular branch, tag,
1977 or changeset to pull.
1977 or changeset to pull.
1978
1978
1979 Some notes about using SSH with Mercurial:
1979 Some notes about using SSH with Mercurial:
1980 - SSH requires an accessible shell account on the destination machine
1980 - SSH requires an accessible shell account on the destination machine
1981 and a copy of hg in the remote path or specified with as remotecmd.
1981 and a copy of hg in the remote path or specified with as remotecmd.
1982 - path is relative to the remote user's home directory by default.
1982 - path is relative to the remote user's home directory by default.
1983 Use an extra slash at the start of a path to specify an absolute path:
1983 Use an extra slash at the start of a path to specify an absolute path:
1984 ssh://example.com//tmp/repository
1984 ssh://example.com//tmp/repository
1985 - Mercurial doesn't use its own compression via SSH; the right thing
1985 - Mercurial doesn't use its own compression via SSH; the right thing
1986 to do is to configure it in your ~/.ssh/config, e.g.:
1986 to do is to configure it in your ~/.ssh/config, e.g.:
1987 Host *.mylocalnetwork.example.com
1987 Host *.mylocalnetwork.example.com
1988 Compression no
1988 Compression no
1989 Host *
1989 Host *
1990 Compression yes
1990 Compression yes
1991 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1991 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1992 with the --ssh command line option.
1992 with the --ssh command line option.
1993 """
1993 """
1994 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1994 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
1995 cmdutil.setremoteconfig(ui, opts)
1995 cmdutil.setremoteconfig(ui, opts)
1996
1996
1997 other = hg.repository(ui, source)
1997 other = hg.repository(ui, source)
1998 ui.status(_('pulling from %s\n') % util.hidepassword(source))
1998 ui.status(_('pulling from %s\n') % util.hidepassword(source))
1999 if revs:
1999 if revs:
2000 try:
2000 try:
2001 revs = [other.lookup(rev) for rev in revs]
2001 revs = [other.lookup(rev) for rev in revs]
2002 except repo.NoCapability:
2002 except repo.NoCapability:
2003 error = _("Other repository doesn't support revision lookup, "
2003 error = _("Other repository doesn't support revision lookup, "
2004 "so a rev cannot be specified.")
2004 "so a rev cannot be specified.")
2005 raise util.Abort(error)
2005 raise util.Abort(error)
2006
2006
2007 modheads = repo.pull(other, heads=revs, force=opts['force'])
2007 modheads = repo.pull(other, heads=revs, force=opts['force'])
2008 return postincoming(ui, repo, modheads, opts['update'], checkout)
2008 return postincoming(ui, repo, modheads, opts['update'], checkout)
2009
2009
2010 def push(ui, repo, dest=None, **opts):
2010 def push(ui, repo, dest=None, **opts):
2011 """push changes to the specified destination
2011 """push changes to the specified destination
2012
2012
2013 Push changes from the local repository to the given destination.
2013 Push changes from the local repository to the given destination.
2014
2014
2015 This is the symmetrical operation for pull. It helps to move
2015 This is the symmetrical operation for pull. It helps to move
2016 changes from the current repository to a different one. If the
2016 changes from the current repository to a different one. If the
2017 destination is local this is identical to a pull in that directory
2017 destination is local this is identical to a pull in that directory
2018 from the current one.
2018 from the current one.
2019
2019
2020 By default, push will refuse to run if it detects the result would
2020 By default, push will refuse to run if it detects the result would
2021 increase the number of remote heads. This generally indicates the
2021 increase the number of remote heads. This generally indicates the
2022 the client has forgotten to sync and merge before pushing.
2022 the client has forgotten to sync and merge before pushing.
2023
2023
2024 Valid URLs are of the form:
2024 Valid URLs are of the form:
2025
2025
2026 local/filesystem/path (or file://local/filesystem/path)
2026 local/filesystem/path (or file://local/filesystem/path)
2027 ssh://[user@]host[:port]/[path]
2027 ssh://[user@]host[:port]/[path]
2028 http://[user@]host[:port]/[path]
2028 http://[user@]host[:port]/[path]
2029 https://[user@]host[:port]/[path]
2029 https://[user@]host[:port]/[path]
2030
2030
2031 An optional identifier after # indicates a particular branch, tag,
2031 An optional identifier after # indicates a particular branch, tag,
2032 or changeset to push.
2032 or changeset to push.
2033
2033
2034 Look at the help text for the pull command for important details
2034 Look at the help text for the pull command for important details
2035 about ssh:// URLs.
2035 about ssh:// URLs.
2036
2036
2037 Pushing to http:// and https:// URLs is only possible, if this
2037 Pushing to http:// and https:// URLs is only possible, if this
2038 feature is explicitly enabled on the remote Mercurial server.
2038 feature is explicitly enabled on the remote Mercurial server.
2039 """
2039 """
2040 dest, revs, checkout = hg.parseurl(
2040 dest, revs, checkout = hg.parseurl(
2041 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2041 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2042 cmdutil.setremoteconfig(ui, opts)
2042 cmdutil.setremoteconfig(ui, opts)
2043
2043
2044 other = hg.repository(ui, dest)
2044 other = hg.repository(ui, dest)
2045 ui.status('pushing to %s\n' % util.hidepassword(dest))
2045 ui.status('pushing to %s\n' % util.hidepassword(dest))
2046 if revs:
2046 if revs:
2047 revs = [repo.lookup(rev) for rev in revs]
2047 revs = [repo.lookup(rev) for rev in revs]
2048 r = repo.push(other, opts['force'], revs=revs)
2048 r = repo.push(other, opts['force'], revs=revs)
2049 return r == 0
2049 return r == 0
2050
2050
2051 def rawcommit(ui, repo, *pats, **opts):
2051 def rawcommit(ui, repo, *pats, **opts):
2052 """raw commit interface (DEPRECATED)
2052 """raw commit interface (DEPRECATED)
2053
2053
2054 (DEPRECATED)
2054 (DEPRECATED)
2055 Lowlevel commit, for use in helper scripts.
2055 Lowlevel commit, for use in helper scripts.
2056
2056
2057 This command is not intended to be used by normal users, as it is
2057 This command is not intended to be used by normal users, as it is
2058 primarily useful for importing from other SCMs.
2058 primarily useful for importing from other SCMs.
2059
2059
2060 This command is now deprecated and will be removed in a future
2060 This command is now deprecated and will be removed in a future
2061 release, please use debugsetparents and commit instead.
2061 release, please use debugsetparents and commit instead.
2062 """
2062 """
2063
2063
2064 ui.warn(_("(the rawcommit command is deprecated)\n"))
2064 ui.warn(_("(the rawcommit command is deprecated)\n"))
2065
2065
2066 message = cmdutil.logmessage(opts)
2066 message = cmdutil.logmessage(opts)
2067
2067
2068 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2068 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2069 if opts['files']:
2069 if opts['files']:
2070 files += open(opts['files']).read().splitlines()
2070 files += open(opts['files']).read().splitlines()
2071
2071
2072 parents = [repo.lookup(p) for p in opts['parent']]
2072 parents = [repo.lookup(p) for p in opts['parent']]
2073
2073
2074 try:
2074 try:
2075 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2075 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2076 except ValueError, inst:
2076 except ValueError, inst:
2077 raise util.Abort(str(inst))
2077 raise util.Abort(str(inst))
2078
2078
2079 def recover(ui, repo):
2079 def recover(ui, repo):
2080 """roll back an interrupted transaction
2080 """roll back an interrupted transaction
2081
2081
2082 Recover from an interrupted commit or pull.
2082 Recover from an interrupted commit or pull.
2083
2083
2084 This command tries to fix the repository status after an interrupted
2084 This command tries to fix the repository status after an interrupted
2085 operation. It should only be necessary when Mercurial suggests it.
2085 operation. It should only be necessary when Mercurial suggests it.
2086 """
2086 """
2087 if repo.recover():
2087 if repo.recover():
2088 return hg.verify(repo)
2088 return hg.verify(repo)
2089 return 1
2089 return 1
2090
2090
2091 def remove(ui, repo, *pats, **opts):
2091 def remove(ui, repo, *pats, **opts):
2092 """remove the specified files on the next commit
2092 """remove the specified files on the next commit
2093
2093
2094 Schedule the indicated files for removal from the repository.
2094 Schedule the indicated files for removal from the repository.
2095
2095
2096 This only removes files from the current branch, not from the
2096 This only removes files from the current branch, not from the
2097 entire project history. If the files still exist in the working
2097 entire project history. If the files still exist in the working
2098 directory, they will be deleted from it. If invoked with --after,
2098 directory, they will be deleted from it. If invoked with --after,
2099 files are marked as removed, but not actually unlinked unless --force
2099 files are marked as removed, but not actually unlinked unless --force
2100 is also given. Without exact file names, --after will only mark
2100 is also given. Without exact file names, --after will only mark
2101 files as removed if they are no longer in the working directory.
2101 files as removed if they are no longer in the working directory.
2102
2102
2103 This command schedules the files to be removed at the next commit.
2103 This command schedules the files to be removed at the next commit.
2104 To undo a remove before that, see hg revert.
2104 To undo a remove before that, see hg revert.
2105
2105
2106 Modified files and added files are not removed by default. To
2106 Modified files and added files are not removed by default. To
2107 remove them, use the -f/--force option.
2107 remove them, use the -f/--force option.
2108 """
2108 """
2109 if not opts['after'] and not pats:
2109 if not opts['after'] and not pats:
2110 raise util.Abort(_('no files specified'))
2110 raise util.Abort(_('no files specified'))
2111 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2111 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2112 exact = dict.fromkeys(files)
2112 exact = dict.fromkeys(files)
2113 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2113 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2114 modified, added, removed, deleted, unknown = mardu
2114 modified, added, removed, deleted, unknown = mardu
2115 remove, forget = [], []
2115 remove, forget = [], []
2116 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2116 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2117 reason = None
2117 reason = None
2118 if abs in modified and not opts['force']:
2118 if abs in modified and not opts['force']:
2119 reason = _('is modified (use -f to force removal)')
2119 reason = _('is modified (use -f to force removal)')
2120 elif abs in added:
2120 elif abs in added:
2121 if opts['force']:
2121 if opts['force']:
2122 forget.append(abs)
2122 forget.append(abs)
2123 continue
2123 continue
2124 reason = _('has been marked for add (use -f to force removal)')
2124 reason = _('has been marked for add (use -f to force removal)')
2125 exact = 1 # force the message
2125 exact = 1 # force the message
2126 elif abs not in repo.dirstate:
2126 elif abs not in repo.dirstate:
2127 reason = _('is not managed')
2127 reason = _('is not managed')
2128 elif opts['after'] and not exact and abs not in deleted:
2128 elif opts['after'] and not exact and abs not in deleted:
2129 continue
2129 continue
2130 elif abs in removed:
2130 elif abs in removed:
2131 continue
2131 continue
2132 if reason:
2132 if reason:
2133 if exact:
2133 if exact:
2134 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2134 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2135 else:
2135 else:
2136 if ui.verbose or not exact:
2136 if ui.verbose or not exact:
2137 ui.status(_('removing %s\n') % rel)
2137 ui.status(_('removing %s\n') % rel)
2138 remove.append(abs)
2138 remove.append(abs)
2139 repo.forget(forget)
2139 repo.forget(forget)
2140 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2140 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2141
2141
2142 def rename(ui, repo, *pats, **opts):
2142 def rename(ui, repo, *pats, **opts):
2143 """rename files; equivalent of copy + remove
2143 """rename files; equivalent of copy + remove
2144
2144
2145 Mark dest as copies of sources; mark sources for deletion. If
2145 Mark dest as copies of sources; mark sources for deletion. If
2146 dest is a directory, copies are put in that directory. If dest is
2146 dest is a directory, copies are put in that directory. If dest is
2147 a file, there can only be one source.
2147 a file, there can only be one source.
2148
2148
2149 By default, this command copies the contents of files as they
2149 By default, this command copies the contents of files as they
2150 stand in the working directory. If invoked with --after, the
2150 stand in the working directory. If invoked with --after, the
2151 operation is recorded, but no copying is performed.
2151 operation is recorded, but no copying is performed.
2152
2152
2153 This command takes effect in the next commit. To undo a rename
2153 This command takes effect in the next commit. To undo a rename
2154 before that, see hg revert.
2154 before that, see hg revert.
2155 """
2155 """
2156 wlock = repo.wlock(False)
2156 wlock = repo.wlock(False)
2157 try:
2157 try:
2158 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2158 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2159 finally:
2159 finally:
2160 del wlock
2160 del wlock
2161
2161
2162 def revert(ui, repo, *pats, **opts):
2162 def revert(ui, repo, *pats, **opts):
2163 """restore individual files or dirs to an earlier state
2163 """restore individual files or dirs to an earlier state
2164
2164
2165 (use update -r to check out earlier revisions, revert does not
2165 (use update -r to check out earlier revisions, revert does not
2166 change the working dir parents)
2166 change the working dir parents)
2167
2167
2168 With no revision specified, revert the named files or directories
2168 With no revision specified, revert the named files or directories
2169 to the contents they had in the parent of the working directory.
2169 to the contents they had in the parent of the working directory.
2170 This restores the contents of the affected files to an unmodified
2170 This restores the contents of the affected files to an unmodified
2171 state and unschedules adds, removes, copies, and renames. If the
2171 state and unschedules adds, removes, copies, and renames. If the
2172 working directory has two parents, you must explicitly specify the
2172 working directory has two parents, you must explicitly specify the
2173 revision to revert to.
2173 revision to revert to.
2174
2174
2175 Using the -r option, revert the given files or directories to their
2175 Using the -r option, revert the given files or directories to their
2176 contents as of a specific revision. This can be helpful to "roll
2176 contents as of a specific revision. This can be helpful to "roll
2177 back" some or all of an earlier change.
2177 back" some or all of an earlier change.
2178
2178
2179 Revert modifies the working directory. It does not commit any
2179 Revert modifies the working directory. It does not commit any
2180 changes, or change the parent of the working directory. If you
2180 changes, or change the parent of the working directory. If you
2181 revert to a revision other than the parent of the working
2181 revert to a revision other than the parent of the working
2182 directory, the reverted files will thus appear modified
2182 directory, the reverted files will thus appear modified
2183 afterwards.
2183 afterwards.
2184
2184
2185 If a file has been deleted, it is restored. If the executable
2185 If a file has been deleted, it is restored. If the executable
2186 mode of a file was changed, it is reset.
2186 mode of a file was changed, it is reset.
2187
2187
2188 If names are given, all files matching the names are reverted.
2188 If names are given, all files matching the names are reverted.
2189
2189
2190 If no arguments are given, no files are reverted.
2190 If no arguments are given, no files are reverted.
2191
2191
2192 Modified files are saved with a .orig suffix before reverting.
2192 Modified files are saved with a .orig suffix before reverting.
2193 To disable these backups, use --no-backup.
2193 To disable these backups, use --no-backup.
2194 """
2194 """
2195
2195
2196 if opts["date"]:
2196 if opts["date"]:
2197 if opts["rev"]:
2197 if opts["rev"]:
2198 raise util.Abort(_("you can't specify a revision and a date"))
2198 raise util.Abort(_("you can't specify a revision and a date"))
2199 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2199 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2200
2200
2201 if not pats and not opts['all']:
2201 if not pats and not opts['all']:
2202 raise util.Abort(_('no files or directories specified; '
2202 raise util.Abort(_('no files or directories specified; '
2203 'use --all to revert the whole repo'))
2203 'use --all to revert the whole repo'))
2204
2204
2205 parent, p2 = repo.dirstate.parents()
2205 parent, p2 = repo.dirstate.parents()
2206 if not opts['rev'] and p2 != nullid:
2206 if not opts['rev'] and p2 != nullid:
2207 raise util.Abort(_('uncommitted merge - please provide a '
2207 raise util.Abort(_('uncommitted merge - please provide a '
2208 'specific revision'))
2208 'specific revision'))
2209 ctx = repo.changectx(opts['rev'])
2209 ctx = repo.changectx(opts['rev'])
2210 node = ctx.node()
2210 node = ctx.node()
2211 mf = ctx.manifest()
2211 mf = ctx.manifest()
2212 if node == parent:
2212 if node == parent:
2213 pmf = mf
2213 pmf = mf
2214 else:
2214 else:
2215 pmf = None
2215 pmf = None
2216
2216
2217 # need all matching names in dirstate and manifest of target rev,
2217 # need all matching names in dirstate and manifest of target rev,
2218 # so have to walk both. do not print errors if files exist in one
2218 # so have to walk both. do not print errors if files exist in one
2219 # but not other.
2219 # but not other.
2220
2220
2221 names = {}
2221 names = {}
2222
2222
2223 wlock = repo.wlock()
2223 wlock = repo.wlock()
2224 try:
2224 try:
2225 # walk dirstate.
2225 # walk dirstate.
2226 files = []
2226 files = []
2227 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2227 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2228 badmatch=mf.has_key):
2228 badmatch=mf.has_key):
2229 names[abs] = (rel, exact)
2229 names[abs] = (rel, exact)
2230 if src != 'b':
2230 if src != 'b':
2231 files.append(abs)
2231 files.append(abs)
2232
2232
2233 # walk target manifest.
2233 # walk target manifest.
2234
2234
2235 def badmatch(path):
2235 def badmatch(path):
2236 if path in names:
2236 if path in names:
2237 return True
2237 return True
2238 path_ = path + '/'
2238 path_ = path + '/'
2239 for f in names:
2239 for f in names:
2240 if f.startswith(path_):
2240 if f.startswith(path_):
2241 return True
2241 return True
2242 return False
2242 return False
2243
2243
2244 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2244 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2245 badmatch=badmatch):
2245 badmatch=badmatch):
2246 if abs in names or src == 'b':
2246 if abs in names or src == 'b':
2247 continue
2247 continue
2248 names[abs] = (rel, exact)
2248 names[abs] = (rel, exact)
2249
2249
2250 changes = repo.status(files=files, match=names.has_key)[:4]
2250 changes = repo.status(files=files, match=names.has_key)[:4]
2251 modified, added, removed, deleted = map(dict.fromkeys, changes)
2251 modified, added, removed, deleted = map(dict.fromkeys, changes)
2252
2252
2253 # if f is a rename, also revert the source
2253 # if f is a rename, also revert the source
2254 cwd = repo.getcwd()
2254 cwd = repo.getcwd()
2255 for f in added:
2255 for f in added:
2256 src = repo.dirstate.copied(f)
2256 src = repo.dirstate.copied(f)
2257 if src and src not in names and repo.dirstate[src] == 'r':
2257 if src and src not in names and repo.dirstate[src] == 'r':
2258 removed[src] = None
2258 removed[src] = None
2259 names[src] = (repo.pathto(src, cwd), True)
2259 names[src] = (repo.pathto(src, cwd), True)
2260
2260
2261 def removeforget(abs):
2261 def removeforget(abs):
2262 if repo.dirstate[abs] == 'a':
2262 if repo.dirstate[abs] == 'a':
2263 return _('forgetting %s\n')
2263 return _('forgetting %s\n')
2264 return _('removing %s\n')
2264 return _('removing %s\n')
2265
2265
2266 revert = ([], _('reverting %s\n'))
2266 revert = ([], _('reverting %s\n'))
2267 add = ([], _('adding %s\n'))
2267 add = ([], _('adding %s\n'))
2268 remove = ([], removeforget)
2268 remove = ([], removeforget)
2269 undelete = ([], _('undeleting %s\n'))
2269 undelete = ([], _('undeleting %s\n'))
2270
2270
2271 disptable = (
2271 disptable = (
2272 # dispatch table:
2272 # dispatch table:
2273 # file state
2273 # file state
2274 # action if in target manifest
2274 # action if in target manifest
2275 # action if not in target manifest
2275 # action if not in target manifest
2276 # make backup if in target manifest
2276 # make backup if in target manifest
2277 # make backup if not in target manifest
2277 # make backup if not in target manifest
2278 (modified, revert, remove, True, True),
2278 (modified, revert, remove, True, True),
2279 (added, revert, remove, True, False),
2279 (added, revert, remove, True, False),
2280 (removed, undelete, None, False, False),
2280 (removed, undelete, None, False, False),
2281 (deleted, revert, remove, False, False),
2281 (deleted, revert, remove, False, False),
2282 )
2282 )
2283
2283
2284 entries = names.items()
2284 entries = names.items()
2285 entries.sort()
2285 entries.sort()
2286
2286
2287 for abs, (rel, exact) in entries:
2287 for abs, (rel, exact) in entries:
2288 mfentry = mf.get(abs)
2288 mfentry = mf.get(abs)
2289 target = repo.wjoin(abs)
2289 target = repo.wjoin(abs)
2290 def handle(xlist, dobackup):
2290 def handle(xlist, dobackup):
2291 xlist[0].append(abs)
2291 xlist[0].append(abs)
2292 if dobackup and not opts['no_backup'] and util.lexists(target):
2292 if dobackup and not opts['no_backup'] and util.lexists(target):
2293 bakname = "%s.orig" % rel
2293 bakname = "%s.orig" % rel
2294 ui.note(_('saving current version of %s as %s\n') %
2294 ui.note(_('saving current version of %s as %s\n') %
2295 (rel, bakname))
2295 (rel, bakname))
2296 if not opts.get('dry_run'):
2296 if not opts.get('dry_run'):
2297 util.copyfile(target, bakname)
2297 util.copyfile(target, bakname)
2298 if ui.verbose or not exact:
2298 if ui.verbose or not exact:
2299 msg = xlist[1]
2299 msg = xlist[1]
2300 if not isinstance(msg, basestring):
2300 if not isinstance(msg, basestring):
2301 msg = msg(abs)
2301 msg = msg(abs)
2302 ui.status(msg % rel)
2302 ui.status(msg % rel)
2303 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2303 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2304 if abs not in table: continue
2304 if abs not in table: continue
2305 # file has changed in dirstate
2305 # file has changed in dirstate
2306 if mfentry:
2306 if mfentry:
2307 handle(hitlist, backuphit)
2307 handle(hitlist, backuphit)
2308 elif misslist is not None:
2308 elif misslist is not None:
2309 handle(misslist, backupmiss)
2309 handle(misslist, backupmiss)
2310 break
2310 break
2311 else:
2311 else:
2312 if abs not in repo.dirstate:
2312 if abs not in repo.dirstate:
2313 if mfentry:
2313 if mfentry:
2314 handle(add, True)
2314 handle(add, True)
2315 elif exact:
2315 elif exact:
2316 ui.warn(_('file not managed: %s\n') % rel)
2316 ui.warn(_('file not managed: %s\n') % rel)
2317 continue
2317 continue
2318 # file has not changed in dirstate
2318 # file has not changed in dirstate
2319 if node == parent:
2319 if node == parent:
2320 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2320 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2321 continue
2321 continue
2322 if pmf is None:
2322 if pmf is None:
2323 # only need parent manifest in this unlikely case,
2323 # only need parent manifest in this unlikely case,
2324 # so do not read by default
2324 # so do not read by default
2325 pmf = repo.changectx(parent).manifest()
2325 pmf = repo.changectx(parent).manifest()
2326 if abs in pmf:
2326 if abs in pmf:
2327 if mfentry:
2327 if mfentry:
2328 # if version of file is same in parent and target
2328 # if version of file is same in parent and target
2329 # manifests, do nothing
2329 # manifests, do nothing
2330 if (pmf[abs] != mfentry or
2330 if (pmf[abs] != mfentry or
2331 pmf.flags(abs) != mf.flags(abs)):
2331 pmf.flags(abs) != mf.flags(abs)):
2332 handle(revert, False)
2332 handle(revert, False)
2333 else:
2333 else:
2334 handle(remove, False)
2334 handle(remove, False)
2335
2335
2336 if not opts.get('dry_run'):
2336 if not opts.get('dry_run'):
2337 def checkout(f):
2337 def checkout(f):
2338 fc = ctx[f]
2338 fc = ctx[f]
2339 repo.wwrite(f, fc.data(), fc.fileflags())
2339 repo.wwrite(f, fc.data(), fc.fileflags())
2340
2340
2341 audit_path = util.path_auditor(repo.root)
2341 audit_path = util.path_auditor(repo.root)
2342 for f in remove[0]:
2342 for f in remove[0]:
2343 if repo.dirstate[f] == 'a':
2343 if repo.dirstate[f] == 'a':
2344 repo.dirstate.forget(f)
2344 repo.dirstate.forget(f)
2345 continue
2345 continue
2346 audit_path(f)
2346 audit_path(f)
2347 try:
2347 try:
2348 util.unlink(repo.wjoin(f))
2348 util.unlink(repo.wjoin(f))
2349 except OSError:
2349 except OSError:
2350 pass
2350 pass
2351 repo.dirstate.remove(f)
2351 repo.dirstate.remove(f)
2352
2352
2353 for f in revert[0]:
2353 for f in revert[0]:
2354 checkout(f)
2354 checkout(f)
2355
2355
2356 for f in add[0]:
2356 for f in add[0]:
2357 checkout(f)
2357 checkout(f)
2358 repo.dirstate.add(f)
2358 repo.dirstate.add(f)
2359
2359
2360 normal = repo.dirstate.normallookup
2360 normal = repo.dirstate.normallookup
2361 if node == parent and p2 == nullid:
2361 if node == parent and p2 == nullid:
2362 normal = repo.dirstate.normal
2362 normal = repo.dirstate.normal
2363 for f in undelete[0]:
2363 for f in undelete[0]:
2364 checkout(f)
2364 checkout(f)
2365 normal(f)
2365 normal(f)
2366
2366
2367 finally:
2367 finally:
2368 del wlock
2368 del wlock
2369
2369
2370 def rollback(ui, repo):
2370 def rollback(ui, repo):
2371 """roll back the last transaction
2371 """roll back the last transaction
2372
2372
2373 This command should be used with care. There is only one level of
2373 This command should be used with care. There is only one level of
2374 rollback, and there is no way to undo a rollback. It will also
2374 rollback, and there is no way to undo a rollback. It will also
2375 restore the dirstate at the time of the last transaction, losing
2375 restore the dirstate at the time of the last transaction, losing
2376 any dirstate changes since that time.
2376 any dirstate changes since that time.
2377
2377
2378 Transactions are used to encapsulate the effects of all commands
2378 Transactions are used to encapsulate the effects of all commands
2379 that create new changesets or propagate existing changesets into a
2379 that create new changesets or propagate existing changesets into a
2380 repository. For example, the following commands are transactional,
2380 repository. For example, the following commands are transactional,
2381 and their effects can be rolled back:
2381 and their effects can be rolled back:
2382
2382
2383 commit
2383 commit
2384 import
2384 import
2385 pull
2385 pull
2386 push (with this repository as destination)
2386 push (with this repository as destination)
2387 unbundle
2387 unbundle
2388
2388
2389 This command is not intended for use on public repositories. Once
2389 This command is not intended for use on public repositories. Once
2390 changes are visible for pull by other users, rolling a transaction
2390 changes are visible for pull by other users, rolling a transaction
2391 back locally is ineffective (someone else may already have pulled
2391 back locally is ineffective (someone else may already have pulled
2392 the changes). Furthermore, a race is possible with readers of the
2392 the changes). Furthermore, a race is possible with readers of the
2393 repository; for example an in-progress pull from the repository
2393 repository; for example an in-progress pull from the repository
2394 may fail if a rollback is performed.
2394 may fail if a rollback is performed.
2395 """
2395 """
2396 repo.rollback()
2396 repo.rollback()
2397
2397
2398 def root(ui, repo):
2398 def root(ui, repo):
2399 """print the root (top) of the current working dir
2399 """print the root (top) of the current working dir
2400
2400
2401 Print the root directory of the current repository.
2401 Print the root directory of the current repository.
2402 """
2402 """
2403 ui.write(repo.root + "\n")
2403 ui.write(repo.root + "\n")
2404
2404
2405 def serve(ui, repo, **opts):
2405 def serve(ui, repo, **opts):
2406 """export the repository via HTTP
2406 """export the repository via HTTP
2407
2407
2408 Start a local HTTP repository browser and pull server.
2408 Start a local HTTP repository browser and pull server.
2409
2409
2410 By default, the server logs accesses to stdout and errors to
2410 By default, the server logs accesses to stdout and errors to
2411 stderr. Use the "-A" and "-E" options to log to files.
2411 stderr. Use the "-A" and "-E" options to log to files.
2412 """
2412 """
2413
2413
2414 if opts["stdio"]:
2414 if opts["stdio"]:
2415 if repo is None:
2415 if repo is None:
2416 raise hg.RepoError(_("There is no Mercurial repository here"
2416 raise hg.RepoError(_("There is no Mercurial repository here"
2417 " (.hg not found)"))
2417 " (.hg not found)"))
2418 s = sshserver.sshserver(ui, repo)
2418 s = sshserver.sshserver(ui, repo)
2419 s.serve_forever()
2419 s.serve_forever()
2420
2420
2421 parentui = ui.parentui or ui
2421 parentui = ui.parentui or ui
2422 optlist = ("name templates style address port prefix ipv6"
2422 optlist = ("name templates style address port prefix ipv6"
2423 " accesslog errorlog webdir_conf certificate")
2423 " accesslog errorlog webdir_conf certificate")
2424 for o in optlist.split():
2424 for o in optlist.split():
2425 if opts[o]:
2425 if opts[o]:
2426 parentui.setconfig("web", o, str(opts[o]))
2426 parentui.setconfig("web", o, str(opts[o]))
2427 if (repo is not None) and (repo.ui != parentui):
2427 if (repo is not None) and (repo.ui != parentui):
2428 repo.ui.setconfig("web", o, str(opts[o]))
2428 repo.ui.setconfig("web", o, str(opts[o]))
2429
2429
2430 if repo is None and not ui.config("web", "webdir_conf"):
2430 if repo is None and not ui.config("web", "webdir_conf"):
2431 raise hg.RepoError(_("There is no Mercurial repository here"
2431 raise hg.RepoError(_("There is no Mercurial repository here"
2432 " (.hg not found)"))
2432 " (.hg not found)"))
2433
2433
2434 class service:
2434 class service:
2435 def init(self):
2435 def init(self):
2436 util.set_signal_handler()
2436 util.set_signal_handler()
2437 try:
2437 try:
2438 self.httpd = hgweb.server.create_server(parentui, repo)
2438 self.httpd = hgweb.server.create_server(parentui, repo)
2439 except socket.error, inst:
2439 except socket.error, inst:
2440 raise util.Abort(_('cannot start server: ') + inst.args[1])
2440 raise util.Abort(_('cannot start server: ') + inst.args[1])
2441
2441
2442 if not ui.verbose: return
2442 if not ui.verbose: return
2443
2443
2444 if self.httpd.prefix:
2444 if self.httpd.prefix:
2445 prefix = self.httpd.prefix.strip('/') + '/'
2445 prefix = self.httpd.prefix.strip('/') + '/'
2446 else:
2446 else:
2447 prefix = ''
2447 prefix = ''
2448
2448
2449 if self.httpd.port != 80:
2449 if self.httpd.port != 80:
2450 ui.status(_('listening at http://%s:%d/%s\n') %
2450 ui.status(_('listening at http://%s:%d/%s\n') %
2451 (self.httpd.addr, self.httpd.port, prefix))
2451 (self.httpd.addr, self.httpd.port, prefix))
2452 else:
2452 else:
2453 ui.status(_('listening at http://%s/%s\n') %
2453 ui.status(_('listening at http://%s/%s\n') %
2454 (self.httpd.addr, prefix))
2454 (self.httpd.addr, prefix))
2455
2455
2456 def run(self):
2456 def run(self):
2457 self.httpd.serve_forever()
2457 self.httpd.serve_forever()
2458
2458
2459 service = service()
2459 service = service()
2460
2460
2461 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2461 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2462
2462
2463 def status(ui, repo, *pats, **opts):
2463 def status(ui, repo, *pats, **opts):
2464 """show changed files in the working directory
2464 """show changed files in the working directory
2465
2465
2466 Show status of files in the repository. If names are given, only
2466 Show status of files in the repository. If names are given, only
2467 files that match are shown. Files that are clean or ignored or
2467 files that match are shown. Files that are clean or ignored or
2468 source of a copy/move operation, are not listed unless -c (clean),
2468 source of a copy/move operation, are not listed unless -c (clean),
2469 -i (ignored), -C (copies) or -A is given. Unless options described
2469 -i (ignored), -C (copies) or -A is given. Unless options described
2470 with "show only ..." are given, the options -mardu are used.
2470 with "show only ..." are given, the options -mardu are used.
2471
2471
2472 NOTE: status may appear to disagree with diff if permissions have
2472 NOTE: status may appear to disagree with diff if permissions have
2473 changed or a merge has occurred. The standard diff format does not
2473 changed or a merge has occurred. The standard diff format does not
2474 report permission changes and diff only reports changes relative
2474 report permission changes and diff only reports changes relative
2475 to one merge parent.
2475 to one merge parent.
2476
2476
2477 If one revision is given, it is used as the base revision.
2477 If one revision is given, it is used as the base revision.
2478 If two revisions are given, the difference between them is shown.
2478 If two revisions are given, the difference between them is shown.
2479
2479
2480 The codes used to show the status of files are:
2480 The codes used to show the status of files are:
2481 M = modified
2481 M = modified
2482 A = added
2482 A = added
2483 R = removed
2483 R = removed
2484 C = clean
2484 C = clean
2485 ! = deleted, but still tracked
2485 ! = deleted, but still tracked
2486 ? = not tracked
2486 ? = not tracked
2487 I = ignored
2487 I = ignored
2488 = the previous added file was copied from here
2488 = the previous added file was copied from here
2489 """
2489 """
2490
2490
2491 all = opts['all']
2491 all = opts['all']
2492 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2492 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2493
2493
2494 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2494 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2495 cwd = (pats and repo.getcwd()) or ''
2495 cwd = (pats and repo.getcwd()) or ''
2496 modified, added, removed, deleted, unknown, ignored, clean = [
2496 modified, added, removed, deleted, unknown, ignored, clean = [
2497 n for n in repo.status(node1=node1, node2=node2, files=files,
2497 n for n in repo.status(node1=node1, node2=node2, files=files,
2498 match=matchfn,
2498 match=matchfn,
2499 list_ignored=all or opts['ignored'],
2499 list_ignored=all or opts['ignored'],
2500 list_clean=all or opts['clean'])]
2500 list_clean=all or opts['clean'])]
2501
2501
2502 changetypes = (('modified', 'M', modified),
2502 changetypes = (('modified', 'M', modified),
2503 ('added', 'A', added),
2503 ('added', 'A', added),
2504 ('removed', 'R', removed),
2504 ('removed', 'R', removed),
2505 ('deleted', '!', deleted),
2505 ('deleted', '!', deleted),
2506 ('unknown', '?', unknown),
2506 ('unknown', '?', unknown),
2507 ('ignored', 'I', ignored))
2507 ('ignored', 'I', ignored))
2508
2508
2509 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2509 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2510
2510
2511 end = opts['print0'] and '\0' or '\n'
2511 end = opts['print0'] and '\0' or '\n'
2512
2512
2513 for opt, char, changes in ([ct for ct in explicit_changetypes
2513 for opt, char, changes in ([ct for ct in explicit_changetypes
2514 if all or opts[ct[0]]]
2514 if all or opts[ct[0]]]
2515 or changetypes):
2515 or changetypes):
2516 if opts['no_status']:
2516 if opts['no_status']:
2517 format = "%%s%s" % end
2517 format = "%%s%s" % end
2518 else:
2518 else:
2519 format = "%s %%s%s" % (char, end)
2519 format = "%s %%s%s" % (char, end)
2520
2520
2521 for f in changes:
2521 for f in changes:
2522 ui.write(format % repo.pathto(f, cwd))
2522 ui.write(format % repo.pathto(f, cwd))
2523 if ((all or opts.get('copies')) and not opts.get('no_status')):
2523 if ((all or opts.get('copies')) and not opts.get('no_status')):
2524 copied = repo.dirstate.copied(f)
2524 copied = repo.dirstate.copied(f)
2525 if copied:
2525 if copied:
2526 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2526 ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2527
2527
2528 def tag(ui, repo, name, rev_=None, **opts):
2528 def tag(ui, repo, name, rev_=None, **opts):
2529 """add a tag for the current or given revision
2529 """add a tag for the current or given revision
2530
2530
2531 Name a particular revision using <name>.
2531 Name a particular revision using <name>.
2532
2532
2533 Tags are used to name particular revisions of the repository and are
2533 Tags are used to name particular revisions of the repository and are
2534 very useful to compare different revision, to go back to significant
2534 very useful to compare different revision, to go back to significant
2535 earlier versions or to mark branch points as releases, etc.
2535 earlier versions or to mark branch points as releases, etc.
2536
2536
2537 If no revision is given, the parent of the working directory is used,
2537 If no revision is given, the parent of the working directory is used,
2538 or tip if no revision is checked out.
2538 or tip if no revision is checked out.
2539
2539
2540 To facilitate version control, distribution, and merging of tags,
2540 To facilitate version control, distribution, and merging of tags,
2541 they are stored as a file named ".hgtags" which is managed
2541 they are stored as a file named ".hgtags" which is managed
2542 similarly to other project files and can be hand-edited if
2542 similarly to other project files and can be hand-edited if
2543 necessary. The file '.hg/localtags' is used for local tags (not
2543 necessary. The file '.hg/localtags' is used for local tags (not
2544 shared among repositories).
2544 shared among repositories).
2545 """
2545 """
2546 if name in ['tip', '.', 'null']:
2546 if name in ['tip', '.', 'null']:
2547 raise util.Abort(_("the name '%s' is reserved") % name)
2547 raise util.Abort(_("the name '%s' is reserved") % name)
2548 if rev_ is not None:
2548 if rev_ is not None:
2549 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2549 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2550 "please use 'hg tag [-r REV] NAME' instead\n"))
2550 "please use 'hg tag [-r REV] NAME' instead\n"))
2551 if opts['rev']:
2551 if opts['rev']:
2552 raise util.Abort(_("use only one form to specify the revision"))
2552 raise util.Abort(_("use only one form to specify the revision"))
2553 if opts['rev'] and opts['remove']:
2553 if opts['rev'] and opts['remove']:
2554 raise util.Abort(_("--rev and --remove are incompatible"))
2554 raise util.Abort(_("--rev and --remove are incompatible"))
2555 if opts['rev']:
2555 if opts['rev']:
2556 rev_ = opts['rev']
2556 rev_ = opts['rev']
2557 message = opts['message']
2557 message = opts['message']
2558 if opts['remove']:
2558 if opts['remove']:
2559 tagtype = repo.tagtype(name)
2559 tagtype = repo.tagtype(name)
2560
2560
2561 if not tagtype:
2561 if not tagtype:
2562 raise util.Abort(_('tag %s does not exist') % name)
2562 raise util.Abort(_('tag %s does not exist') % name)
2563 if opts['local'] and tagtype == 'global':
2563 if opts['local'] and tagtype == 'global':
2564 raise util.Abort(_('%s tag is global') % name)
2564 raise util.Abort(_('%s tag is global') % name)
2565 if not opts['local'] and tagtype == 'local':
2565 if not opts['local'] and tagtype == 'local':
2566 raise util.Abort(_('%s tag is local') % name)
2566 raise util.Abort(_('%s tag is local') % name)
2567
2567
2568 rev_ = nullid
2568 rev_ = nullid
2569 if not message:
2569 if not message:
2570 message = _('Removed tag %s') % name
2570 message = _('Removed tag %s') % name
2571 elif name in repo.tags() and not opts['force']:
2571 elif name in repo.tags() and not opts['force']:
2572 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2572 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2573 % name)
2573 % name)
2574 if not rev_ and repo.dirstate.parents()[1] != nullid:
2574 if not rev_ and repo.dirstate.parents()[1] != nullid:
2575 raise util.Abort(_('uncommitted merge - please provide a '
2575 raise util.Abort(_('uncommitted merge - please provide a '
2576 'specific revision'))
2576 'specific revision'))
2577 r = repo.changectx(rev_).node()
2577 r = repo.changectx(rev_).node()
2578
2578
2579 if not message:
2579 if not message:
2580 message = _('Added tag %s for changeset %s') % (name, short(r))
2580 message = _('Added tag %s for changeset %s') % (name, short(r))
2581
2581
2582 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2582 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2583
2583
2584 def tags(ui, repo):
2584 def tags(ui, repo):
2585 """list repository tags
2585 """list repository tags
2586
2586
2587 List the repository tags.
2587 List the repository tags.
2588
2588
2589 This lists both regular and local tags. When the -v/--verbose switch
2589 This lists both regular and local tags. When the -v/--verbose switch
2590 is used, a third column "local" is printed for local tags.
2590 is used, a third column "local" is printed for local tags.
2591 """
2591 """
2592
2592
2593 l = repo.tagslist()
2593 l = repo.tagslist()
2594 l.reverse()
2594 l.reverse()
2595 hexfunc = ui.debugflag and hex or short
2595 hexfunc = ui.debugflag and hex or short
2596 tagtype = ""
2596 tagtype = ""
2597
2597
2598 for t, n in l:
2598 for t, n in l:
2599 if ui.quiet:
2599 if ui.quiet:
2600 ui.write("%s\n" % t)
2600 ui.write("%s\n" % t)
2601 continue
2601 continue
2602
2602
2603 try:
2603 try:
2604 hn = hexfunc(n)
2604 hn = hexfunc(n)
2605 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2605 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2606 except revlog.LookupError:
2606 except revlog.LookupError:
2607 r = " ?:%s" % hn
2607 r = " ?:%s" % hn
2608 else:
2608 else:
2609 spaces = " " * (30 - util.locallen(t))
2609 spaces = " " * (30 - util.locallen(t))
2610 if ui.verbose:
2610 if ui.verbose:
2611 if repo.tagtype(t) == 'local':
2611 if repo.tagtype(t) == 'local':
2612 tagtype = " local"
2612 tagtype = " local"
2613 else:
2613 else:
2614 tagtype = ""
2614 tagtype = ""
2615 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2615 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2616
2616
2617 def tip(ui, repo, **opts):
2617 def tip(ui, repo, **opts):
2618 """show the tip revision
2618 """show the tip revision
2619
2619
2620 Show the tip revision.
2620 Show the tip revision.
2621 """
2621 """
2622 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2622 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2623
2623
2624 def unbundle(ui, repo, fname1, *fnames, **opts):
2624 def unbundle(ui, repo, fname1, *fnames, **opts):
2625 """apply one or more changegroup files
2625 """apply one or more changegroup files
2626
2626
2627 Apply one or more compressed changegroup files generated by the
2627 Apply one or more compressed changegroup files generated by the
2628 bundle command.
2628 bundle command.
2629 """
2629 """
2630 fnames = (fname1,) + fnames
2630 fnames = (fname1,) + fnames
2631 for fname in fnames:
2631 for fname in fnames:
2632 if os.path.exists(fname):
2632 if os.path.exists(fname):
2633 f = open(fname, "rb")
2633 f = open(fname, "rb")
2634 else:
2634 else:
2635 f = urllib.urlopen(fname)
2635 f = urllib.urlopen(fname)
2636 gen = changegroup.readbundle(f, fname)
2636 gen = changegroup.readbundle(f, fname)
2637 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2637 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2638
2638
2639 return postincoming(ui, repo, modheads, opts['update'], None)
2639 return postincoming(ui, repo, modheads, opts['update'], None)
2640
2640
2641 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2641 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2642 """update working directory
2642 """update working directory
2643
2643
2644 Update the working directory to the specified revision, or the
2644 Update the working directory to the specified revision, or the
2645 tip of the current branch if none is specified.
2645 tip of the current branch if none is specified.
2646
2646
2647 If there are no outstanding changes in the working directory and
2647 If there are no outstanding changes in the working directory and
2648 there is a linear relationship between the current version and the
2648 there is a linear relationship between the current version and the
2649 requested version, the result is the requested version.
2649 requested version, the result is the requested version.
2650
2650
2651 To merge the working directory with another revision, use the
2651 To merge the working directory with another revision, use the
2652 merge command.
2652 merge command.
2653
2653
2654 By default, update will refuse to run if doing so would require
2654 By default, update will refuse to run if doing so would require
2655 discarding local changes.
2655 discarding local changes.
2656 """
2656 """
2657 if rev and node:
2657 if rev and node:
2658 raise util.Abort(_("please specify just one revision"))
2658 raise util.Abort(_("please specify just one revision"))
2659
2659
2660 if not rev:
2660 if not rev:
2661 rev = node
2661 rev = node
2662
2662
2663 if date:
2663 if date:
2664 if rev:
2664 if rev:
2665 raise util.Abort(_("you can't specify a revision and a date"))
2665 raise util.Abort(_("you can't specify a revision and a date"))
2666 rev = cmdutil.finddate(ui, repo, date)
2666 rev = cmdutil.finddate(ui, repo, date)
2667
2667
2668 if clean:
2668 if clean:
2669 return hg.clean(repo, rev)
2669 return hg.clean(repo, rev)
2670 else:
2670 else:
2671 return hg.update(repo, rev)
2671 return hg.update(repo, rev)
2672
2672
2673 def verify(ui, repo):
2673 def verify(ui, repo):
2674 """verify the integrity of the repository
2674 """verify the integrity of the repository
2675
2675
2676 Verify the integrity of the current repository.
2676 Verify the integrity of the current repository.
2677
2677
2678 This will perform an extensive check of the repository's
2678 This will perform an extensive check of the repository's
2679 integrity, validating the hashes and checksums of each entry in
2679 integrity, validating the hashes and checksums of each entry in
2680 the changelog, manifest, and tracked files, as well as the
2680 the changelog, manifest, and tracked files, as well as the
2681 integrity of their crosslinks and indices.
2681 integrity of their crosslinks and indices.
2682 """
2682 """
2683 return hg.verify(repo)
2683 return hg.verify(repo)
2684
2684
2685 def version_(ui):
2685 def version_(ui):
2686 """output version and copyright information"""
2686 """output version and copyright information"""
2687 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2687 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2688 % version.get_version())
2688 % version.get_version())
2689 ui.status(_(
2689 ui.status(_(
2690 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2690 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2691 "This is free software; see the source for copying conditions. "
2691 "This is free software; see the source for copying conditions. "
2692 "There is NO\nwarranty; "
2692 "There is NO\nwarranty; "
2693 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2693 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2694 ))
2694 ))
2695
2695
2696 # Command options and aliases are listed here, alphabetically
2696 # Command options and aliases are listed here, alphabetically
2697
2697
2698 globalopts = [
2698 globalopts = [
2699 ('R', 'repository', '',
2699 ('R', 'repository', '',
2700 _('repository root directory or symbolic path name')),
2700 _('repository root directory or symbolic path name')),
2701 ('', 'cwd', '', _('change working directory')),
2701 ('', 'cwd', '', _('change working directory')),
2702 ('y', 'noninteractive', None,
2702 ('y', 'noninteractive', None,
2703 _('do not prompt, assume \'yes\' for any required answers')),
2703 _('do not prompt, assume \'yes\' for any required answers')),
2704 ('q', 'quiet', None, _('suppress output')),
2704 ('q', 'quiet', None, _('suppress output')),
2705 ('v', 'verbose', None, _('enable additional output')),
2705 ('v', 'verbose', None, _('enable additional output')),
2706 ('', 'config', [], _('set/override config option')),
2706 ('', 'config', [], _('set/override config option')),
2707 ('', 'debug', None, _('enable debugging output')),
2707 ('', 'debug', None, _('enable debugging output')),
2708 ('', 'debugger', None, _('start debugger')),
2708 ('', 'debugger', None, _('start debugger')),
2709 ('', 'encoding', util._encoding, _('set the charset encoding')),
2709 ('', 'encoding', util._encoding, _('set the charset encoding')),
2710 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2710 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2711 ('', 'lsprof', None, _('print improved command execution profile')),
2711 ('', 'lsprof', None, _('print improved command execution profile')),
2712 ('', 'traceback', None, _('print traceback on exception')),
2712 ('', 'traceback', None, _('print traceback on exception')),
2713 ('', 'time', None, _('time how long the command takes')),
2713 ('', 'time', None, _('time how long the command takes')),
2714 ('', 'profile', None, _('print command execution profile')),
2714 ('', 'profile', None, _('print command execution profile')),
2715 ('', 'version', None, _('output version information and exit')),
2715 ('', 'version', None, _('output version information and exit')),
2716 ('h', 'help', None, _('display help and exit')),
2716 ('h', 'help', None, _('display help and exit')),
2717 ]
2717 ]
2718
2718
2719 dryrunopts = [('n', 'dry-run', None,
2719 dryrunopts = [('n', 'dry-run', None,
2720 _('do not perform actions, just print output'))]
2720 _('do not perform actions, just print output'))]
2721
2721
2722 remoteopts = [
2722 remoteopts = [
2723 ('e', 'ssh', '', _('specify ssh command to use')),
2723 ('e', 'ssh', '', _('specify ssh command to use')),
2724 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2724 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2725 ]
2725 ]
2726
2726
2727 walkopts = [
2727 walkopts = [
2728 ('I', 'include', [], _('include names matching the given patterns')),
2728 ('I', 'include', [], _('include names matching the given patterns')),
2729 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2729 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2730 ]
2730 ]
2731
2731
2732 commitopts = [
2732 commitopts = [
2733 ('m', 'message', '', _('use <text> as commit message')),
2733 ('m', 'message', '', _('use <text> as commit message')),
2734 ('l', 'logfile', '', _('read commit message from <file>')),
2734 ('l', 'logfile', '', _('read commit message from <file>')),
2735 ]
2735 ]
2736
2736
2737 commitopts2 = [
2737 commitopts2 = [
2738 ('d', 'date', '', _('record datecode as commit date')),
2738 ('d', 'date', '', _('record datecode as commit date')),
2739 ('u', 'user', '', _('record user as committer')),
2739 ('u', 'user', '', _('record user as committer')),
2740 ]
2740 ]
2741
2741
2742 table = {
2742 table = {
2743 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2743 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2744 "addremove":
2744 "addremove":
2745 (addremove,
2745 (addremove,
2746 [('s', 'similarity', '',
2746 [('s', 'similarity', '',
2747 _('guess renamed files by similarity (0<=s<=100)')),
2747 _('guess renamed files by similarity (0<=s<=100)')),
2748 ] + walkopts + dryrunopts,
2748 ] + walkopts + dryrunopts,
2749 _('hg addremove [OPTION]... [FILE]...')),
2749 _('hg addremove [OPTION]... [FILE]...')),
2750 "^annotate":
2750 "^annotate":
2751 (annotate,
2751 (annotate,
2752 [('r', 'rev', '', _('annotate the specified revision')),
2752 [('r', 'rev', '', _('annotate the specified revision')),
2753 ('f', 'follow', None, _('follow file copies and renames')),
2753 ('f', 'follow', None, _('follow file copies and renames')),
2754 ('a', 'text', None, _('treat all files as text')),
2754 ('a', 'text', None, _('treat all files as text')),
2755 ('u', 'user', None, _('list the author (long with -v)')),
2755 ('u', 'user', None, _('list the author (long with -v)')),
2756 ('d', 'date', None, _('list the date (short with -q)')),
2756 ('d', 'date', None, _('list the date (short with -q)')),
2757 ('n', 'number', None, _('list the revision number (default)')),
2757 ('n', 'number', None, _('list the revision number (default)')),
2758 ('c', 'changeset', None, _('list the changeset')),
2758 ('c', 'changeset', None, _('list the changeset')),
2759 ('l', 'line-number', None,
2759 ('l', 'line-number', None,
2760 _('show line number at the first appearance'))
2760 _('show line number at the first appearance'))
2761 ] + walkopts,
2761 ] + walkopts,
2762 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2762 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2763 "archive":
2763 "archive":
2764 (archive,
2764 (archive,
2765 [('', 'no-decode', None, _('do not pass files through decoders')),
2765 [('', 'no-decode', None, _('do not pass files through decoders')),
2766 ('p', 'prefix', '', _('directory prefix for files in archive')),
2766 ('p', 'prefix', '', _('directory prefix for files in archive')),
2767 ('r', 'rev', '', _('revision to distribute')),
2767 ('r', 'rev', '', _('revision to distribute')),
2768 ('t', 'type', '', _('type of distribution to create')),
2768 ('t', 'type', '', _('type of distribution to create')),
2769 ] + walkopts,
2769 ] + walkopts,
2770 _('hg archive [OPTION]... DEST')),
2770 _('hg archive [OPTION]... DEST')),
2771 "backout":
2771 "backout":
2772 (backout,
2772 (backout,
2773 [('', 'merge', None,
2773 [('', 'merge', None,
2774 _('merge with old dirstate parent after backout')),
2774 _('merge with old dirstate parent after backout')),
2775 ('', 'parent', '', _('parent to choose when backing out merge')),
2775 ('', 'parent', '', _('parent to choose when backing out merge')),
2776 ('r', 'rev', '', _('revision to backout')),
2776 ('r', 'rev', '', _('revision to backout')),
2777 ] + walkopts + commitopts + commitopts2,
2777 ] + walkopts + commitopts + commitopts2,
2778 _('hg backout [OPTION]... [-r] REV')),
2778 _('hg backout [OPTION]... [-r] REV')),
2779 "bisect":
2779 "bisect":
2780 (bisect,
2780 (bisect,
2781 [('r', 'reset', False, _('reset bisect state')),
2781 [('r', 'reset', False, _('reset bisect state')),
2782 ('g', 'good', False, _('mark changeset good')),
2782 ('g', 'good', False, _('mark changeset good')),
2783 ('b', 'bad', False, _('mark changeset bad')),
2783 ('b', 'bad', False, _('mark changeset bad')),
2784 ('s', 'skip', False, _('skip testing changeset')),
2784 ('s', 'skip', False, _('skip testing changeset')),
2785 ('U', 'noupdate', False, _('do not update to target'))],
2785 ('U', 'noupdate', False, _('do not update to target'))],
2786 _("hg bisect [-gbsr] [REV]")),
2786 _("hg bisect [-gbsr] [REV]")),
2787 "branch":
2787 "branch":
2788 (branch,
2788 (branch,
2789 [('f', 'force', None,
2789 [('f', 'force', None,
2790 _('set branch name even if it shadows an existing branch'))],
2790 _('set branch name even if it shadows an existing branch'))],
2791 _('hg branch [-f] [NAME]')),
2791 _('hg branch [-f] [NAME]')),
2792 "branches":
2792 "branches":
2793 (branches,
2793 (branches,
2794 [('a', 'active', False,
2794 [('a', 'active', False,
2795 _('show only branches that have unmerged heads'))],
2795 _('show only branches that have unmerged heads'))],
2796 _('hg branches [-a]')),
2796 _('hg branches [-a]')),
2797 "bundle":
2797 "bundle":
2798 (bundle,
2798 (bundle,
2799 [('f', 'force', None,
2799 [('f', 'force', None,
2800 _('run even when remote repository is unrelated')),
2800 _('run even when remote repository is unrelated')),
2801 ('r', 'rev', [],
2801 ('r', 'rev', [],
2802 _('a changeset you would like to bundle')),
2802 _('a changeset you would like to bundle')),
2803 ('', 'base', [],
2803 ('', 'base', [],
2804 _('a base changeset to specify instead of a destination')),
2804 _('a base changeset to specify instead of a destination')),
2805 ] + remoteopts,
2805 ] + remoteopts,
2806 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2806 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2807 "cat":
2807 "cat":
2808 (cat,
2808 (cat,
2809 [('o', 'output', '', _('print output to file with formatted name')),
2809 [('o', 'output', '', _('print output to file with formatted name')),
2810 ('r', 'rev', '', _('print the given revision')),
2810 ('r', 'rev', '', _('print the given revision')),
2811 ('', 'decode', None, _('apply any matching decode filter')),
2811 ('', 'decode', None, _('apply any matching decode filter')),
2812 ] + walkopts,
2812 ] + walkopts,
2813 _('hg cat [OPTION]... FILE...')),
2813 _('hg cat [OPTION]... FILE...')),
2814 "^clone":
2814 "^clone":
2815 (clone,
2815 (clone,
2816 [('U', 'noupdate', None, _('do not update the new working directory')),
2816 [('U', 'noupdate', None, _('do not update the new working directory')),
2817 ('r', 'rev', [],
2817 ('r', 'rev', [],
2818 _('a changeset you would like to have after cloning')),
2818 _('a changeset you would like to have after cloning')),
2819 ('', 'pull', None, _('use pull protocol to copy metadata')),
2819 ('', 'pull', None, _('use pull protocol to copy metadata')),
2820 ('', 'uncompressed', None,
2820 ('', 'uncompressed', None,
2821 _('use uncompressed transfer (fast over LAN)')),
2821 _('use uncompressed transfer (fast over LAN)')),
2822 ] + remoteopts,
2822 ] + remoteopts,
2823 _('hg clone [OPTION]... SOURCE [DEST]')),
2823 _('hg clone [OPTION]... SOURCE [DEST]')),
2824 "^commit|ci":
2824 "^commit|ci":
2825 (commit,
2825 (commit,
2826 [('A', 'addremove', None,
2826 [('A', 'addremove', None,
2827 _('mark new/missing files as added/removed before committing')),
2827 _('mark new/missing files as added/removed before committing')),
2828 ] + walkopts + commitopts + commitopts2,
2828 ] + walkopts + commitopts + commitopts2,
2829 _('hg commit [OPTION]... [FILE]...')),
2829 _('hg commit [OPTION]... [FILE]...')),
2830 "copy|cp":
2830 "copy|cp":
2831 (copy,
2831 (copy,
2832 [('A', 'after', None, _('record a copy that has already occurred')),
2832 [('A', 'after', None, _('record a copy that has already occurred')),
2833 ('f', 'force', None,
2833 ('f', 'force', None,
2834 _('forcibly copy over an existing managed file')),
2834 _('forcibly copy over an existing managed file')),
2835 ] + walkopts + dryrunopts,
2835 ] + walkopts + dryrunopts,
2836 _('hg copy [OPTION]... [SOURCE]... DEST')),
2836 _('hg copy [OPTION]... [SOURCE]... DEST')),
2837 "debugancestor": (debugancestor, [], _('hg debugancestor INDEX REV1 REV2')),
2837 "debugancestor": (debugancestor, [], _('hg debugancestor INDEX REV1 REV2')),
2838 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2838 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
2839 "debugcomplete":
2839 "debugcomplete":
2840 (debugcomplete,
2840 (debugcomplete,
2841 [('o', 'options', None, _('show the command options'))],
2841 [('o', 'options', None, _('show the command options'))],
2842 _('hg debugcomplete [-o] CMD')),
2842 _('hg debugcomplete [-o] CMD')),
2843 "debugdate":
2843 "debugdate":
2844 (debugdate,
2844 (debugdate,
2845 [('e', 'extended', None, _('try extended date formats'))],
2845 [('e', 'extended', None, _('try extended date formats'))],
2846 _('hg debugdate [-e] DATE [RANGE]')),
2846 _('hg debugdate [-e] DATE [RANGE]')),
2847 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2847 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
2848 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2848 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
2849 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2849 "debugindex": (debugindex, [], _('hg debugindex FILE')),
2850 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2850 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
2851 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2851 "debuginstall": (debuginstall, [], _('hg debuginstall')),
2852 "debugrawcommit|rawcommit":
2852 "debugrawcommit|rawcommit":
2853 (rawcommit,
2853 (rawcommit,
2854 [('p', 'parent', [], _('parent')),
2854 [('p', 'parent', [], _('parent')),
2855 ('F', 'files', '', _('file list'))
2855 ('F', 'files', '', _('file list'))
2856 ] + commitopts + commitopts2,
2856 ] + commitopts + commitopts2,
2857 _('hg debugrawcommit [OPTION]... [FILE]...')),
2857 _('hg debugrawcommit [OPTION]... [FILE]...')),
2858 "debugrebuildstate":
2858 "debugrebuildstate":
2859 (debugrebuildstate,
2859 (debugrebuildstate,
2860 [('r', 'rev', '', _('revision to rebuild to'))],
2860 [('r', 'rev', '', _('revision to rebuild to'))],
2861 _('hg debugrebuildstate [-r REV] [REV]')),
2861 _('hg debugrebuildstate [-r REV] [REV]')),
2862 "debugrename":
2862 "debugrename":
2863 (debugrename,
2863 (debugrename,
2864 [('r', 'rev', '', _('revision to debug'))],
2864 [('r', 'rev', '', _('revision to debug'))],
2865 _('hg debugrename [-r REV] FILE')),
2865 _('hg debugrename [-r REV] FILE')),
2866 "debugsetparents":
2866 "debugsetparents":
2867 (debugsetparents,
2867 (debugsetparents,
2868 [],
2868 [],
2869 _('hg debugsetparents REV1 [REV2]')),
2869 _('hg debugsetparents REV1 [REV2]')),
2870 "debugstate": (debugstate, [], _('hg debugstate')),
2870 "debugstate": (debugstate, [], _('hg debugstate')),
2871 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2871 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
2872 "^diff":
2872 "^diff":
2873 (diff,
2873 (diff,
2874 [('r', 'rev', [], _('revision')),
2874 [('r', 'rev', [], _('revision')),
2875 ('a', 'text', None, _('treat all files as text')),
2875 ('a', 'text', None, _('treat all files as text')),
2876 ('p', 'show-function', None,
2876 ('p', 'show-function', None,
2877 _('show which function each change is in')),
2877 _('show which function each change is in')),
2878 ('g', 'git', None, _('use git extended diff format')),
2878 ('g', 'git', None, _('use git extended diff format')),
2879 ('', 'nodates', None, _("don't include dates in diff headers")),
2879 ('', 'nodates', None, _("don't include dates in diff headers")),
2880 ('w', 'ignore-all-space', None,
2880 ('w', 'ignore-all-space', None,
2881 _('ignore white space when comparing lines')),
2881 _('ignore white space when comparing lines')),
2882 ('b', 'ignore-space-change', None,
2882 ('b', 'ignore-space-change', None,
2883 _('ignore changes in the amount of white space')),
2883 _('ignore changes in the amount of white space')),
2884 ('B', 'ignore-blank-lines', None,
2884 ('B', 'ignore-blank-lines', None,
2885 _('ignore changes whose lines are all blank')),
2885 _('ignore changes whose lines are all blank')),
2886 ('U', 'unified', 3,
2886 ('U', 'unified', 3,
2887 _('number of lines of context to show'))
2887 _('number of lines of context to show'))
2888 ] + walkopts,
2888 ] + walkopts,
2889 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2889 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2890 "^export":
2890 "^export":
2891 (export,
2891 (export,
2892 [('o', 'output', '', _('print output to file with formatted name')),
2892 [('o', 'output', '', _('print output to file with formatted name')),
2893 ('a', 'text', None, _('treat all files as text')),
2893 ('a', 'text', None, _('treat all files as text')),
2894 ('g', 'git', None, _('use git extended diff format')),
2894 ('g', 'git', None, _('use git extended diff format')),
2895 ('', 'nodates', None, _("don't include dates in diff headers")),
2895 ('', 'nodates', None, _("don't include dates in diff headers")),
2896 ('', 'switch-parent', None, _('diff against the second parent'))],
2896 ('', 'switch-parent', None, _('diff against the second parent'))],
2897 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2897 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2898 "grep":
2898 "grep":
2899 (grep,
2899 (grep,
2900 [('0', 'print0', None, _('end fields with NUL')),
2900 [('0', 'print0', None, _('end fields with NUL')),
2901 ('', 'all', None, _('print all revisions that match')),
2901 ('', 'all', None, _('print all revisions that match')),
2902 ('f', 'follow', None,
2902 ('f', 'follow', None,
2903 _('follow changeset history, or file history across copies and renames')),
2903 _('follow changeset history, or file history across copies and renames')),
2904 ('i', 'ignore-case', None, _('ignore case when matching')),
2904 ('i', 'ignore-case', None, _('ignore case when matching')),
2905 ('l', 'files-with-matches', None,
2905 ('l', 'files-with-matches', None,
2906 _('print only filenames and revs that match')),
2906 _('print only filenames and revs that match')),
2907 ('n', 'line-number', None, _('print matching line numbers')),
2907 ('n', 'line-number', None, _('print matching line numbers')),
2908 ('r', 'rev', [], _('search in given revision range')),
2908 ('r', 'rev', [], _('search in given revision range')),
2909 ('u', 'user', None, _('list the author (long with -v)')),
2909 ('u', 'user', None, _('list the author (long with -v)')),
2910 ('d', 'date', None, _('list the date (short with -q)')),
2910 ('d', 'date', None, _('list the date (short with -q)')),
2911 ] + walkopts,
2911 ] + walkopts,
2912 _('hg grep [OPTION]... PATTERN [FILE]...')),
2912 _('hg grep [OPTION]... PATTERN [FILE]...')),
2913 "heads":
2913 "heads":
2914 (heads,
2914 (heads,
2915 [('', 'style', '', _('display using template map file')),
2915 [('', 'style', '', _('display using template map file')),
2916 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2916 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2917 ('', 'template', '', _('display with template'))],
2917 ('', 'template', '', _('display with template'))],
2918 _('hg heads [-r REV] [REV]...')),
2918 _('hg heads [-r REV] [REV]...')),
2919 "help": (help_, [], _('hg help [COMMAND]')),
2919 "help": (help_, [], _('hg help [COMMAND]')),
2920 "identify|id":
2920 "identify|id":
2921 (identify,
2921 (identify,
2922 [('r', 'rev', '', _('identify the specified rev')),
2922 [('r', 'rev', '', _('identify the specified rev')),
2923 ('n', 'num', None, _('show local revision number')),
2923 ('n', 'num', None, _('show local revision number')),
2924 ('i', 'id', None, _('show global revision id')),
2924 ('i', 'id', None, _('show global revision id')),
2925 ('b', 'branch', None, _('show branch')),
2925 ('b', 'branch', None, _('show branch')),
2926 ('t', 'tags', None, _('show tags'))],
2926 ('t', 'tags', None, _('show tags'))],
2927 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2927 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2928 "import|patch":
2928 "import|patch":
2929 (import_,
2929 (import_,
2930 [('p', 'strip', 1,
2930 [('p', 'strip', 1,
2931 _('directory strip option for patch. This has the same\n'
2931 _('directory strip option for patch. This has the same\n'
2932 'meaning as the corresponding patch option')),
2932 'meaning as the corresponding patch option')),
2933 ('b', 'base', '', _('base path')),
2933 ('b', 'base', '', _('base path')),
2934 ('f', 'force', None,
2934 ('f', 'force', None,
2935 _('skip check for outstanding uncommitted changes')),
2935 _('skip check for outstanding uncommitted changes')),
2936 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2936 ('', 'no-commit', None, _("don't commit, just update the working directory")),
2937 ('', 'exact', None,
2937 ('', 'exact', None,
2938 _('apply patch to the nodes from which it was generated')),
2938 _('apply patch to the nodes from which it was generated')),
2939 ('', 'import-branch', None,
2939 ('', 'import-branch', None,
2940 _('Use any branch information in patch (implied by --exact)'))] +
2940 _('Use any branch information in patch (implied by --exact)'))] +
2941 commitopts + commitopts2,
2941 commitopts + commitopts2,
2942 _('hg import [OPTION]... PATCH...')),
2942 _('hg import [OPTION]... PATCH...')),
2943 "incoming|in":
2943 "incoming|in":
2944 (incoming,
2944 (incoming,
2945 [('M', 'no-merges', None, _('do not show merges')),
2945 [('M', 'no-merges', None, _('do not show merges')),
2946 ('f', 'force', None,
2946 ('f', 'force', None,
2947 _('run even when remote repository is unrelated')),
2947 _('run even when remote repository is unrelated')),
2948 ('', 'style', '', _('display using template map file')),
2948 ('', 'style', '', _('display using template map file')),
2949 ('n', 'newest-first', None, _('show newest record first')),
2949 ('n', 'newest-first', None, _('show newest record first')),
2950 ('', 'bundle', '', _('file to store the bundles into')),
2950 ('', 'bundle', '', _('file to store the bundles into')),
2951 ('p', 'patch', None, _('show patch')),
2951 ('p', 'patch', None, _('show patch')),
2952 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2952 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2953 ('', 'template', '', _('display with template')),
2953 ('', 'template', '', _('display with template')),
2954 ] + remoteopts,
2954 ] + remoteopts,
2955 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2955 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2956 ' [--bundle FILENAME] [SOURCE]')),
2956 ' [--bundle FILENAME] [SOURCE]')),
2957 "^init":
2957 "^init":
2958 (init,
2958 (init,
2959 remoteopts,
2959 remoteopts,
2960 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2960 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2961 "locate":
2961 "locate":
2962 (locate,
2962 (locate,
2963 [('r', 'rev', '', _('search the repository as it stood at rev')),
2963 [('r', 'rev', '', _('search the repository as it stood at rev')),
2964 ('0', 'print0', None,
2964 ('0', 'print0', None,
2965 _('end filenames with NUL, for use with xargs')),
2965 _('end filenames with NUL, for use with xargs')),
2966 ('f', 'fullpath', None,
2966 ('f', 'fullpath', None,
2967 _('print complete paths from the filesystem root')),
2967 _('print complete paths from the filesystem root')),
2968 ] + walkopts,
2968 ] + walkopts,
2969 _('hg locate [OPTION]... [PATTERN]...')),
2969 _('hg locate [OPTION]... [PATTERN]...')),
2970 "^log|history":
2970 "^log|history":
2971 (log,
2971 (log,
2972 [('f', 'follow', None,
2972 [('f', 'follow', None,
2973 _('follow changeset history, or file history across copies and renames')),
2973 _('follow changeset history, or file history across copies and renames')),
2974 ('', 'follow-first', None,
2974 ('', 'follow-first', None,
2975 _('only follow the first parent of merge changesets')),
2975 _('only follow the first parent of merge changesets')),
2976 ('d', 'date', '', _('show revs matching date spec')),
2976 ('d', 'date', '', _('show revs matching date spec')),
2977 ('C', 'copies', None, _('show copied files')),
2977 ('C', 'copies', None, _('show copied files')),
2978 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2978 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2979 ('l', 'limit', '', _('limit number of changes displayed')),
2979 ('l', 'limit', '', _('limit number of changes displayed')),
2980 ('r', 'rev', [], _('show the specified revision or range')),
2980 ('r', 'rev', [], _('show the specified revision or range')),
2981 ('', 'removed', None, _('include revs where files were removed')),
2981 ('', 'removed', None, _('include revs where files were removed')),
2982 ('M', 'no-merges', None, _('do not show merges')),
2982 ('M', 'no-merges', None, _('do not show merges')),
2983 ('', 'style', '', _('display using template map file')),
2983 ('', 'style', '', _('display using template map file')),
2984 ('m', 'only-merges', None, _('show only merges')),
2984 ('m', 'only-merges', None, _('show only merges')),
2985 ('b', 'only-branch', [],
2985 ('b', 'only-branch', [],
2986 _('show only changesets within the given named branch')),
2986 _('show only changesets within the given named branch')),
2987 ('p', 'patch', None, _('show patch')),
2987 ('p', 'patch', None, _('show patch')),
2988 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2988 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2989 ('', 'template', '', _('display with template')),
2989 ('', 'template', '', _('display with template')),
2990 ] + walkopts,
2990 ] + walkopts,
2991 _('hg log [OPTION]... [FILE]')),
2991 _('hg log [OPTION]... [FILE]')),
2992 "manifest":
2992 "manifest":
2993 (manifest,
2993 (manifest,
2994 [('r', 'rev', '', _('revision to display'))],
2994 [('r', 'rev', '', _('revision to display'))],
2995 _('hg manifest [-r REV]')),
2995 _('hg manifest [-r REV]')),
2996 "^merge":
2996 "^merge":
2997 (merge,
2997 (merge,
2998 [('f', 'force', None, _('force a merge with outstanding changes')),
2998 [('f', 'force', None, _('force a merge with outstanding changes')),
2999 ('r', 'rev', '', _('revision to merge')),
2999 ('r', 'rev', '', _('revision to merge')),
3000 ],
3000 ],
3001 _('hg merge [-f] [[-r] REV]')),
3001 _('hg merge [-f] [[-r] REV]')),
3002 "outgoing|out":
3002 "outgoing|out":
3003 (outgoing,
3003 (outgoing,
3004 [('M', 'no-merges', None, _('do not show merges')),
3004 [('M', 'no-merges', None, _('do not show merges')),
3005 ('f', 'force', None,
3005 ('f', 'force', None,
3006 _('run even when remote repository is unrelated')),
3006 _('run even when remote repository is unrelated')),
3007 ('p', 'patch', None, _('show patch')),
3007 ('p', 'patch', None, _('show patch')),
3008 ('', 'style', '', _('display using template map file')),
3008 ('', 'style', '', _('display using template map file')),
3009 ('r', 'rev', [], _('a specific revision you would like to push')),
3009 ('r', 'rev', [], _('a specific revision you would like to push')),
3010 ('n', 'newest-first', None, _('show newest record first')),
3010 ('n', 'newest-first', None, _('show newest record first')),
3011 ('', 'template', '', _('display with template')),
3011 ('', 'template', '', _('display with template')),
3012 ] + remoteopts,
3012 ] + remoteopts,
3013 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3013 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3014 "^parents":
3014 "^parents":
3015 (parents,
3015 (parents,
3016 [('r', 'rev', '', _('show parents from the specified rev')),
3016 [('r', 'rev', '', _('show parents from the specified rev')),
3017 ('', 'style', '', _('display using template map file')),
3017 ('', 'style', '', _('display using template map file')),
3018 ('', 'template', '', _('display with template'))],
3018 ('', 'template', '', _('display with template'))],
3019 _('hg parents [-r REV] [FILE]')),
3019 _('hg parents [-r REV] [FILE]')),
3020 "paths": (paths, [], _('hg paths [NAME]')),
3020 "paths": (paths, [], _('hg paths [NAME]')),
3021 "^pull":
3021 "^pull":
3022 (pull,
3022 (pull,
3023 [('u', 'update', None,
3023 [('u', 'update', None,
3024 _('update to new tip if changesets were pulled')),
3024 _('update to new tip if changesets were pulled')),
3025 ('f', 'force', None,
3025 ('f', 'force', None,
3026 _('run even when remote repository is unrelated')),
3026 _('run even when remote repository is unrelated')),
3027 ('r', 'rev', [],
3027 ('r', 'rev', [],
3028 _('a specific revision up to which you would like to pull')),
3028 _('a specific revision up to which you would like to pull')),
3029 ] + remoteopts,
3029 ] + remoteopts,
3030 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3030 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3031 "^push":
3031 "^push":
3032 (push,
3032 (push,
3033 [('f', 'force', None, _('force push')),
3033 [('f', 'force', None, _('force push')),
3034 ('r', 'rev', [], _('a specific revision you would like to push')),
3034 ('r', 'rev', [], _('a specific revision you would like to push')),
3035 ] + remoteopts,
3035 ] + remoteopts,
3036 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3036 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3037 "recover": (recover, [], _('hg recover')),
3037 "recover": (recover, [], _('hg recover')),
3038 "^remove|rm":
3038 "^remove|rm":
3039 (remove,
3039 (remove,
3040 [('A', 'after', None, _('record remove without deleting')),
3040 [('A', 'after', None, _('record remove without deleting')),
3041 ('f', 'force', None, _('remove file even if modified')),
3041 ('f', 'force', None, _('remove file even if modified')),
3042 ] + walkopts,
3042 ] + walkopts,
3043 _('hg remove [OPTION]... FILE...')),
3043 _('hg remove [OPTION]... FILE...')),
3044 "rename|mv":
3044 "rename|mv":
3045 (rename,
3045 (rename,
3046 [('A', 'after', None, _('record a rename that has already occurred')),
3046 [('A', 'after', None, _('record a rename that has already occurred')),
3047 ('f', 'force', None,
3047 ('f', 'force', None,
3048 _('forcibly copy over an existing managed file')),
3048 _('forcibly copy over an existing managed file')),
3049 ] + walkopts + dryrunopts,
3049 ] + walkopts + dryrunopts,
3050 _('hg rename [OPTION]... SOURCE... DEST')),
3050 _('hg rename [OPTION]... SOURCE... DEST')),
3051 "revert":
3051 "revert":
3052 (revert,
3052 (revert,
3053 [('a', 'all', None, _('revert all changes when no arguments given')),
3053 [('a', 'all', None, _('revert all changes when no arguments given')),
3054 ('d', 'date', '', _('tipmost revision matching date')),
3054 ('d', 'date', '', _('tipmost revision matching date')),
3055 ('r', 'rev', '', _('revision to revert to')),
3055 ('r', 'rev', '', _('revision to revert to')),
3056 ('', 'no-backup', None, _('do not save backup copies of files')),
3056 ('', 'no-backup', None, _('do not save backup copies of files')),
3057 ] + walkopts + dryrunopts,
3057 ] + walkopts + dryrunopts,
3058 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3058 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3059 "rollback": (rollback, [], _('hg rollback')),
3059 "rollback": (rollback, [], _('hg rollback')),
3060 "root": (root, [], _('hg root')),
3060 "root": (root, [], _('hg root')),
3061 "^serve":
3061 "^serve":
3062 (serve,
3062 (serve,
3063 [('A', 'accesslog', '', _('name of access log file to write to')),
3063 [('A', 'accesslog', '', _('name of access log file to write to')),
3064 ('d', 'daemon', None, _('run server in background')),
3064 ('d', 'daemon', None, _('run server in background')),
3065 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3065 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3066 ('E', 'errorlog', '', _('name of error log file to write to')),
3066 ('E', 'errorlog', '', _('name of error log file to write to')),
3067 ('p', 'port', 0, _('port to use (default: 8000)')),
3067 ('p', 'port', 0, _('port to use (default: 8000)')),
3068 ('a', 'address', '', _('address to use')),
3068 ('a', 'address', '', _('address to use')),
3069 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3069 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3070 ('n', 'name', '',
3070 ('n', 'name', '',
3071 _('name to show in web pages (default: working dir)')),
3071 _('name to show in web pages (default: working dir)')),
3072 ('', 'webdir-conf', '', _('name of the webdir config file'
3072 ('', 'webdir-conf', '', _('name of the webdir config file'
3073 ' (serve more than one repo)')),
3073 ' (serve more than one repo)')),
3074 ('', 'pid-file', '', _('name of file to write process ID to')),
3074 ('', 'pid-file', '', _('name of file to write process ID to')),
3075 ('', 'stdio', None, _('for remote clients')),
3075 ('', 'stdio', None, _('for remote clients')),
3076 ('t', 'templates', '', _('web templates to use')),
3076 ('t', 'templates', '', _('web templates to use')),
3077 ('', 'style', '', _('template style to use')),
3077 ('', 'style', '', _('template style to use')),
3078 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3078 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3079 ('', 'certificate', '', _('SSL certificate file'))],
3079 ('', 'certificate', '', _('SSL certificate file'))],
3080 _('hg serve [OPTION]...')),
3080 _('hg serve [OPTION]...')),
3081 "showconfig|debugconfig":
3081 "showconfig|debugconfig":
3082 (showconfig,
3082 (showconfig,
3083 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3083 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3084 _('hg showconfig [-u] [NAME]...')),
3084 _('hg showconfig [-u] [NAME]...')),
3085 "^status|st":
3085 "^status|st":
3086 (status,
3086 (status,
3087 [('A', 'all', None, _('show status of all files')),
3087 [('A', 'all', None, _('show status of all files')),
3088 ('m', 'modified', None, _('show only modified files')),
3088 ('m', 'modified', None, _('show only modified files')),
3089 ('a', 'added', None, _('show only added files')),
3089 ('a', 'added', None, _('show only added files')),
3090 ('r', 'removed', None, _('show only removed files')),
3090 ('r', 'removed', None, _('show only removed files')),
3091 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3091 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3092 ('c', 'clean', None, _('show only files without changes')),
3092 ('c', 'clean', None, _('show only files without changes')),
3093 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3093 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3094 ('i', 'ignored', None, _('show only ignored files')),
3094 ('i', 'ignored', None, _('show only ignored files')),
3095 ('n', 'no-status', None, _('hide status prefix')),
3095 ('n', 'no-status', None, _('hide status prefix')),
3096 ('C', 'copies', None, _('show source of copied files')),
3096 ('C', 'copies', None, _('show source of copied files')),
3097 ('0', 'print0', None,
3097 ('0', 'print0', None,
3098 _('end filenames with NUL, for use with xargs')),
3098 _('end filenames with NUL, for use with xargs')),
3099 ('', 'rev', [], _('show difference from revision')),
3099 ('', 'rev', [], _('show difference from revision')),
3100 ] + walkopts,
3100 ] + walkopts,
3101 _('hg status [OPTION]... [FILE]...')),
3101 _('hg status [OPTION]... [FILE]...')),
3102 "tag":
3102 "tag":
3103 (tag,
3103 (tag,
3104 [('f', 'force', None, _('replace existing tag')),
3104 [('f', 'force', None, _('replace existing tag')),
3105 ('l', 'local', None, _('make the tag local')),
3105 ('l', 'local', None, _('make the tag local')),
3106 ('r', 'rev', '', _('revision to tag')),
3106 ('r', 'rev', '', _('revision to tag')),
3107 ('', 'remove', None, _('remove a tag')),
3107 ('', 'remove', None, _('remove a tag')),
3108 # -l/--local is already there, commitopts cannot be used
3108 # -l/--local is already there, commitopts cannot be used
3109 ('m', 'message', '', _('use <text> as commit message')),
3109 ('m', 'message', '', _('use <text> as commit message')),
3110 ] + commitopts2,
3110 ] + commitopts2,
3111 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3111 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3112 "tags": (tags, [], _('hg tags')),
3112 "tags": (tags, [], _('hg tags')),
3113 "tip":
3113 "tip":
3114 (tip,
3114 (tip,
3115 [('', 'style', '', _('display using template map file')),
3115 [('', 'style', '', _('display using template map file')),
3116 ('p', 'patch', None, _('show patch')),
3116 ('p', 'patch', None, _('show patch')),
3117 ('', 'template', '', _('display with template'))],
3117 ('', 'template', '', _('display with template'))],
3118 _('hg tip [-p]')),
3118 _('hg tip [-p]')),
3119 "unbundle":
3119 "unbundle":
3120 (unbundle,
3120 (unbundle,
3121 [('u', 'update', None,
3121 [('u', 'update', None,
3122 _('update to new tip if changesets were unbundled'))],
3122 _('update to new tip if changesets were unbundled'))],
3123 _('hg unbundle [-u] FILE...')),
3123 _('hg unbundle [-u] FILE...')),
3124 "^update|up|checkout|co":
3124 "^update|up|checkout|co":
3125 (update,
3125 (update,
3126 [('C', 'clean', None, _('overwrite locally modified files')),
3126 [('C', 'clean', None, _('overwrite locally modified files')),
3127 ('d', 'date', '', _('tipmost revision matching date')),
3127 ('d', 'date', '', _('tipmost revision matching date')),
3128 ('r', 'rev', '', _('revision'))],
3128 ('r', 'rev', '', _('revision'))],
3129 _('hg update [-C] [-d DATE] [[-r] REV]')),
3129 _('hg update [-C] [-d DATE] [[-r] REV]')),
3130 "verify": (verify, [], _('hg verify')),
3130 "verify": (verify, [], _('hg verify')),
3131 "version": (version_, [], _('hg version')),
3131 "version": (version_, [], _('hg version')),
3132 }
3132 }
3133
3133
3134 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3134 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3135 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3135 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3136 optionalrepo = ("identify paths serve showconfig")
3136 optionalrepo = ("identify paths serve showconfig")
@@ -1,926 +1,929 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes, re
9 import os, mimetypes, re
10 from mercurial.node import *
10 from mercurial.node import *
11 from mercurial import mdiff, ui, hg, util, archival, patch, hook
11 from mercurial import mdiff, ui, hg, util, archival, patch, hook
12 from mercurial import revlog, templater, templatefilters
12 from mercurial import revlog, templater, templatefilters
13 from common import get_mtime, style_map, paritygen, countgen, get_contact
13 from common import get_mtime, style_map, paritygen, countgen, get_contact
14 from common import ErrorResponse
14 from common import ErrorResponse
15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
15 from common import HTTP_OK, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
16 from request import wsgirequest
16 from request import wsgirequest
17 import webcommands, protocol
17 import webcommands, protocol
18
18
19 shortcuts = {
19 shortcuts = {
20 'cl': [('cmd', ['changelog']), ('rev', None)],
20 'cl': [('cmd', ['changelog']), ('rev', None)],
21 'sl': [('cmd', ['shortlog']), ('rev', None)],
21 'sl': [('cmd', ['shortlog']), ('rev', None)],
22 'cs': [('cmd', ['changeset']), ('node', None)],
22 'cs': [('cmd', ['changeset']), ('node', None)],
23 'f': [('cmd', ['file']), ('filenode', None)],
23 'f': [('cmd', ['file']), ('filenode', None)],
24 'fl': [('cmd', ['filelog']), ('filenode', None)],
24 'fl': [('cmd', ['filelog']), ('filenode', None)],
25 'fd': [('cmd', ['filediff']), ('node', None)],
25 'fd': [('cmd', ['filediff']), ('node', None)],
26 'fa': [('cmd', ['annotate']), ('filenode', None)],
26 'fa': [('cmd', ['annotate']), ('filenode', None)],
27 'mf': [('cmd', ['manifest']), ('manifest', None)],
27 'mf': [('cmd', ['manifest']), ('manifest', None)],
28 'ca': [('cmd', ['archive']), ('node', None)],
28 'ca': [('cmd', ['archive']), ('node', None)],
29 'tags': [('cmd', ['tags'])],
29 'tags': [('cmd', ['tags'])],
30 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
30 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
31 'static': [('cmd', ['static']), ('file', None)]
31 'static': [('cmd', ['static']), ('file', None)]
32 }
32 }
33
33
34 def _up(p):
34 def _up(p):
35 if p[0] != "/":
35 if p[0] != "/":
36 p = "/" + p
36 p = "/" + p
37 if p[-1] == "/":
37 if p[-1] == "/":
38 p = p[:-1]
38 p = p[:-1]
39 up = os.path.dirname(p)
39 up = os.path.dirname(p)
40 if up == "/":
40 if up == "/":
41 return "/"
41 return "/"
42 return up + "/"
42 return up + "/"
43
43
44 def revnavgen(pos, pagelen, limit, nodefunc):
44 def revnavgen(pos, pagelen, limit, nodefunc):
45 def seq(factor, limit=None):
45 def seq(factor, limit=None):
46 if limit:
46 if limit:
47 yield limit
47 yield limit
48 if limit >= 20 and limit <= 40:
48 if limit >= 20 and limit <= 40:
49 yield 50
49 yield 50
50 else:
50 else:
51 yield 1 * factor
51 yield 1 * factor
52 yield 3 * factor
52 yield 3 * factor
53 for f in seq(factor * 10):
53 for f in seq(factor * 10):
54 yield f
54 yield f
55
55
56 def nav(**map):
56 def nav(**map):
57 l = []
57 l = []
58 last = 0
58 last = 0
59 for f in seq(1, pagelen):
59 for f in seq(1, pagelen):
60 if f < pagelen or f <= last:
60 if f < pagelen or f <= last:
61 continue
61 continue
62 if f > limit:
62 if f > limit:
63 break
63 break
64 last = f
64 last = f
65 if pos + f < limit:
65 if pos + f < limit:
66 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
66 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
67 if pos - f >= 0:
67 if pos - f >= 0:
68 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
68 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
69
69
70 try:
70 try:
71 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
71 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
72
72
73 for label, node in l:
73 for label, node in l:
74 yield {"label": label, "node": node}
74 yield {"label": label, "node": node}
75
75
76 yield {"label": "tip", "node": "tip"}
76 yield {"label": "tip", "node": "tip"}
77 except hg.RepoError:
77 except hg.RepoError:
78 pass
78 pass
79
79
80 return nav
80 return nav
81
81
82 class hgweb(object):
82 class hgweb(object):
83 def __init__(self, repo, name=None):
83 def __init__(self, repo, name=None):
84 if isinstance(repo, str):
84 if isinstance(repo, str):
85 parentui = ui.ui(report_untrusted=False, interactive=False)
85 parentui = ui.ui(report_untrusted=False, interactive=False)
86 self.repo = hg.repository(parentui, repo)
86 self.repo = hg.repository(parentui, repo)
87 else:
87 else:
88 self.repo = repo
88 self.repo = repo
89
89
90 hook.redirect(True)
90 hook.redirect(True)
91 self.mtime = -1
91 self.mtime = -1
92 self.reponame = name
92 self.reponame = name
93 self.archives = 'zip', 'gz', 'bz2'
93 self.archives = 'zip', 'gz', 'bz2'
94 self.stripecount = 1
94 self.stripecount = 1
95 # a repo owner may set web.templates in .hg/hgrc to get any file
95 # a repo owner may set web.templates in .hg/hgrc to get any file
96 # readable by the user running the CGI script
96 # readable by the user running the CGI script
97 self.templatepath = self.config("web", "templates",
97 self.templatepath = self.config("web", "templates",
98 templater.templatepath(),
98 templater.templatepath(),
99 untrusted=False)
99 untrusted=False)
100
100
101 # The CGI scripts are often run by a user different from the repo owner.
101 # The CGI scripts are often run by a user different from the repo owner.
102 # Trust the settings from the .hg/hgrc files by default.
102 # Trust the settings from the .hg/hgrc files by default.
103 def config(self, section, name, default=None, untrusted=True):
103 def config(self, section, name, default=None, untrusted=True):
104 return self.repo.ui.config(section, name, default,
104 return self.repo.ui.config(section, name, default,
105 untrusted=untrusted)
105 untrusted=untrusted)
106
106
107 def configbool(self, section, name, default=False, untrusted=True):
107 def configbool(self, section, name, default=False, untrusted=True):
108 return self.repo.ui.configbool(section, name, default,
108 return self.repo.ui.configbool(section, name, default,
109 untrusted=untrusted)
109 untrusted=untrusted)
110
110
111 def configlist(self, section, name, default=None, untrusted=True):
111 def configlist(self, section, name, default=None, untrusted=True):
112 return self.repo.ui.configlist(section, name, default,
112 return self.repo.ui.configlist(section, name, default,
113 untrusted=untrusted)
113 untrusted=untrusted)
114
114
115 def refresh(self):
115 def refresh(self):
116 mtime = get_mtime(self.repo.root)
116 mtime = get_mtime(self.repo.root)
117 if mtime != self.mtime:
117 if mtime != self.mtime:
118 self.mtime = mtime
118 self.mtime = mtime
119 self.repo = hg.repository(self.repo.ui, self.repo.root)
119 self.repo = hg.repository(self.repo.ui, self.repo.root)
120 self.maxchanges = int(self.config("web", "maxchanges", 10))
120 self.maxchanges = int(self.config("web", "maxchanges", 10))
121 self.stripecount = int(self.config("web", "stripes", 1))
121 self.stripecount = int(self.config("web", "stripes", 1))
122 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
122 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
123 self.maxfiles = int(self.config("web", "maxfiles", 10))
123 self.maxfiles = int(self.config("web", "maxfiles", 10))
124 self.allowpull = self.configbool("web", "allowpull", True)
124 self.allowpull = self.configbool("web", "allowpull", True)
125 self.encoding = self.config("web", "encoding", util._encoding)
125 self.encoding = self.config("web", "encoding", util._encoding)
126
126
127 def run(self):
127 def run(self):
128 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
128 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
129 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
129 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
130 import mercurial.hgweb.wsgicgi as wsgicgi
130 import mercurial.hgweb.wsgicgi as wsgicgi
131 wsgicgi.launch(self)
131 wsgicgi.launch(self)
132
132
133 def __call__(self, env, respond):
133 def __call__(self, env, respond):
134 req = wsgirequest(env, respond)
134 req = wsgirequest(env, respond)
135 self.run_wsgi(req)
135 self.run_wsgi(req)
136 return req
136 return req
137
137
138 def run_wsgi(self, req):
138 def run_wsgi(self, req):
139
139
140 self.refresh()
140 self.refresh()
141
141
142 # expand form shortcuts
142 # expand form shortcuts
143
143
144 for k in shortcuts.iterkeys():
144 for k in shortcuts.iterkeys():
145 if k in req.form:
145 if k in req.form:
146 for name, value in shortcuts[k]:
146 for name, value in shortcuts[k]:
147 if value is None:
147 if value is None:
148 value = req.form[k]
148 value = req.form[k]
149 req.form[name] = value
149 req.form[name] = value
150 del req.form[k]
150 del req.form[k]
151
151
152 # work with CGI variables to create coherent structure
152 # work with CGI variables to create coherent structure
153 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
153 # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME
154
154
155 req.url = req.env['SCRIPT_NAME']
155 req.url = req.env['SCRIPT_NAME']
156 if not req.url.endswith('/'):
156 if not req.url.endswith('/'):
157 req.url += '/'
157 req.url += '/'
158 if 'REPO_NAME' in req.env:
158 if 'REPO_NAME' in req.env:
159 req.url += req.env['REPO_NAME'] + '/'
159 req.url += req.env['REPO_NAME'] + '/'
160
160
161 if req.env.get('PATH_INFO'):
161 if req.env.get('PATH_INFO'):
162 parts = req.env.get('PATH_INFO').strip('/').split('/')
162 parts = req.env.get('PATH_INFO').strip('/').split('/')
163 repo_parts = req.env.get('REPO_NAME', '').split('/')
163 repo_parts = req.env.get('REPO_NAME', '').split('/')
164 if parts[:len(repo_parts)] == repo_parts:
164 if parts[:len(repo_parts)] == repo_parts:
165 parts = parts[len(repo_parts):]
165 parts = parts[len(repo_parts):]
166 query = '/'.join(parts)
166 query = '/'.join(parts)
167 else:
167 else:
168 query = req.env['QUERY_STRING'].split('&', 1)[0]
168 query = req.env['QUERY_STRING'].split('&', 1)[0]
169 query = query.split(';', 1)[0]
169 query = query.split(';', 1)[0]
170
170
171 # translate user-visible url structure to internal structure
171 # translate user-visible url structure to internal structure
172
172
173 args = query.split('/', 2)
173 args = query.split('/', 2)
174 if 'cmd' not in req.form and args and args[0]:
174 if 'cmd' not in req.form and args and args[0]:
175
175
176 cmd = args.pop(0)
176 cmd = args.pop(0)
177 style = cmd.rfind('-')
177 style = cmd.rfind('-')
178 if style != -1:
178 if style != -1:
179 req.form['style'] = [cmd[:style]]
179 req.form['style'] = [cmd[:style]]
180 cmd = cmd[style+1:]
180 cmd = cmd[style+1:]
181
181
182 # avoid accepting e.g. style parameter as command
182 # avoid accepting e.g. style parameter as command
183 if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
183 if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
184 req.form['cmd'] = [cmd]
184 req.form['cmd'] = [cmd]
185
185
186 if args and args[0]:
186 if args and args[0]:
187 node = args.pop(0)
187 node = args.pop(0)
188 req.form['node'] = [node]
188 req.form['node'] = [node]
189 if args:
189 if args:
190 req.form['file'] = args
190 req.form['file'] = args
191
191
192 if cmd == 'static':
192 if cmd == 'static':
193 req.form['file'] = req.form['node']
193 req.form['file'] = req.form['node']
194 elif cmd == 'archive':
194 elif cmd == 'archive':
195 fn = req.form['node'][0]
195 fn = req.form['node'][0]
196 for type_, spec in self.archive_specs.iteritems():
196 for type_, spec in self.archive_specs.iteritems():
197 ext = spec[2]
197 ext = spec[2]
198 if fn.endswith(ext):
198 if fn.endswith(ext):
199 req.form['node'] = [fn[:-len(ext)]]
199 req.form['node'] = [fn[:-len(ext)]]
200 req.form['type'] = [type_]
200 req.form['type'] = [type_]
201
201
202 # actually process the request
202 # process this if it's a protocol request
203
204 cmd = req.form.get('cmd', [''])[0]
205 if cmd in protocol.__all__:
206 method = getattr(protocol, cmd)
207 method(self, req)
208 return
209
210 # process the web interface request
203
211
204 try:
212 try:
205
213
206 cmd = req.form.get('cmd', [''])[0]
214 tmpl = self.templater(req)
207 if cmd in protocol.__all__:
215 ctype = tmpl('mimetype', encoding=self.encoding)
208 method = getattr(protocol, cmd)
216 ctype = templater.stringify(ctype)
209 method(self, req)
217
210 else:
218 if cmd == '':
211 tmpl = self.templater(req)
219 req.form['cmd'] = [tmpl.cache['default']]
212 ctype = tmpl('mimetype', encoding=self.encoding)
220 cmd = req.form['cmd'][0]
213 ctype = templater.stringify(ctype)
214
215 if cmd == '':
216 req.form['cmd'] = [tmpl.cache['default']]
217 cmd = req.form['cmd'][0]
218
221
219 if cmd not in webcommands.__all__:
222 if cmd not in webcommands.__all__:
220 msg = 'No such method: %s' % cmd
223 msg = 'No such method: %s' % cmd
221 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
224 raise ErrorResponse(HTTP_BAD_REQUEST, msg)
222 elif cmd == 'file' and 'raw' in req.form.get('style', []):
225 elif cmd == 'file' and 'raw' in req.form.get('style', []):
223 self.ctype = ctype
226 self.ctype = ctype
224 content = webcommands.rawfile(self, req, tmpl)
227 content = webcommands.rawfile(self, req, tmpl)
225 else:
228 else:
226 content = getattr(webcommands, cmd)(self, req, tmpl)
229 content = getattr(webcommands, cmd)(self, req, tmpl)
227 req.respond(HTTP_OK, ctype)
230 req.respond(HTTP_OK, ctype)
228
231
229 req.write(content)
232 req.write(content)
230 del tmpl
233 del tmpl
231
234
232 except revlog.LookupError, err:
235 except revlog.LookupError, err:
233 req.respond(HTTP_NOT_FOUND, ctype)
236 req.respond(HTTP_NOT_FOUND, ctype)
234 req.write(tmpl('error', error='revision not found: %s' % err.name))
237 req.write(tmpl('error', error='revision not found: %s' % err.name))
235 except (hg.RepoError, revlog.RevlogError), inst:
238 except (hg.RepoError, revlog.RevlogError), inst:
236 req.respond(HTTP_SERVER_ERROR, ctype)
239 req.respond(HTTP_SERVER_ERROR, ctype)
237 req.write(tmpl('error', error=str(inst)))
240 req.write(tmpl('error', error=str(inst)))
238 except ErrorResponse, inst:
241 except ErrorResponse, inst:
239 req.respond(inst.code, ctype)
242 req.respond(inst.code, ctype)
240 req.write(tmpl('error', error=inst.message))
243 req.write(tmpl('error', error=inst.message))
241
244
242 def templater(self, req):
245 def templater(self, req):
243
246
244 # determine scheme, port and server name
247 # determine scheme, port and server name
245 # this is needed to create absolute urls
248 # this is needed to create absolute urls
246
249
247 proto = req.env.get('wsgi.url_scheme')
250 proto = req.env.get('wsgi.url_scheme')
248 if proto == 'https':
251 if proto == 'https':
249 proto = 'https'
252 proto = 'https'
250 default_port = "443"
253 default_port = "443"
251 else:
254 else:
252 proto = 'http'
255 proto = 'http'
253 default_port = "80"
256 default_port = "80"
254
257
255 port = req.env["SERVER_PORT"]
258 port = req.env["SERVER_PORT"]
256 port = port != default_port and (":" + port) or ""
259 port = port != default_port and (":" + port) or ""
257 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
260 urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
258 staticurl = self.config("web", "staticurl") or req.url + 'static/'
261 staticurl = self.config("web", "staticurl") or req.url + 'static/'
259 if not staticurl.endswith('/'):
262 if not staticurl.endswith('/'):
260 staticurl += '/'
263 staticurl += '/'
261
264
262 # some functions for the templater
265 # some functions for the templater
263
266
264 def header(**map):
267 def header(**map):
265 yield tmpl('header', encoding=self.encoding, **map)
268 yield tmpl('header', encoding=self.encoding, **map)
266
269
267 def footer(**map):
270 def footer(**map):
268 yield tmpl("footer", **map)
271 yield tmpl("footer", **map)
269
272
270 def motd(**map):
273 def motd(**map):
271 yield self.config("web", "motd", "")
274 yield self.config("web", "motd", "")
272
275
273 def sessionvars(**map):
276 def sessionvars(**map):
274 fields = []
277 fields = []
275 if 'style' in req.form:
278 if 'style' in req.form:
276 style = req.form['style'][0]
279 style = req.form['style'][0]
277 if style != self.config('web', 'style', ''):
280 if style != self.config('web', 'style', ''):
278 fields.append(('style', style))
281 fields.append(('style', style))
279
282
280 separator = req.url[-1] == '?' and ';' or '?'
283 separator = req.url[-1] == '?' and ';' or '?'
281 for name, value in fields:
284 for name, value in fields:
282 yield dict(name=name, value=value, separator=separator)
285 yield dict(name=name, value=value, separator=separator)
283 separator = ';'
286 separator = ';'
284
287
285 # figure out which style to use
288 # figure out which style to use
286
289
287 style = self.config("web", "style", "")
290 style = self.config("web", "style", "")
288 if 'style' in req.form:
291 if 'style' in req.form:
289 style = req.form['style'][0]
292 style = req.form['style'][0]
290 mapfile = style_map(self.templatepath, style)
293 mapfile = style_map(self.templatepath, style)
291
294
292 if not self.reponame:
295 if not self.reponame:
293 self.reponame = (self.config("web", "name")
296 self.reponame = (self.config("web", "name")
294 or req.env.get('REPO_NAME')
297 or req.env.get('REPO_NAME')
295 or req.url.strip('/') or self.repo.root)
298 or req.url.strip('/') or self.repo.root)
296
299
297 # create the templater
300 # create the templater
298
301
299 tmpl = templater.templater(mapfile, templatefilters.filters,
302 tmpl = templater.templater(mapfile, templatefilters.filters,
300 defaults={"url": req.url,
303 defaults={"url": req.url,
301 "staticurl": staticurl,
304 "staticurl": staticurl,
302 "urlbase": urlbase,
305 "urlbase": urlbase,
303 "repo": self.reponame,
306 "repo": self.reponame,
304 "header": header,
307 "header": header,
305 "footer": footer,
308 "footer": footer,
306 "motd": motd,
309 "motd": motd,
307 "sessionvars": sessionvars
310 "sessionvars": sessionvars
308 })
311 })
309 return tmpl
312 return tmpl
310
313
311 def archivelist(self, nodeid):
314 def archivelist(self, nodeid):
312 allowed = self.configlist("web", "allow_archive")
315 allowed = self.configlist("web", "allow_archive")
313 for i, spec in self.archive_specs.iteritems():
316 for i, spec in self.archive_specs.iteritems():
314 if i in allowed or self.configbool("web", "allow" + i):
317 if i in allowed or self.configbool("web", "allow" + i):
315 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
318 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
316
319
317 def listfilediffs(self, tmpl, files, changeset):
320 def listfilediffs(self, tmpl, files, changeset):
318 for f in files[:self.maxfiles]:
321 for f in files[:self.maxfiles]:
319 yield tmpl("filedifflink", node=hex(changeset), file=f)
322 yield tmpl("filedifflink", node=hex(changeset), file=f)
320 if len(files) > self.maxfiles:
323 if len(files) > self.maxfiles:
321 yield tmpl("fileellipses")
324 yield tmpl("fileellipses")
322
325
323 def siblings(self, siblings=[], hiderev=None, **args):
326 def siblings(self, siblings=[], hiderev=None, **args):
324 siblings = [s for s in siblings if s.node() != nullid]
327 siblings = [s for s in siblings if s.node() != nullid]
325 if len(siblings) == 1 and siblings[0].rev() == hiderev:
328 if len(siblings) == 1 and siblings[0].rev() == hiderev:
326 return
329 return
327 for s in siblings:
330 for s in siblings:
328 d = {'node': hex(s.node()), 'rev': s.rev()}
331 d = {'node': hex(s.node()), 'rev': s.rev()}
329 if hasattr(s, 'path'):
332 if hasattr(s, 'path'):
330 d['file'] = s.path()
333 d['file'] = s.path()
331 d.update(args)
334 d.update(args)
332 yield d
335 yield d
333
336
334 def renamelink(self, fl, node):
337 def renamelink(self, fl, node):
335 r = fl.renamed(node)
338 r = fl.renamed(node)
336 if r:
339 if r:
337 return [dict(file=r[0], node=hex(r[1]))]
340 return [dict(file=r[0], node=hex(r[1]))]
338 return []
341 return []
339
342
340 def nodetagsdict(self, node):
343 def nodetagsdict(self, node):
341 return [{"name": i} for i in self.repo.nodetags(node)]
344 return [{"name": i} for i in self.repo.nodetags(node)]
342
345
343 def nodebranchdict(self, ctx):
346 def nodebranchdict(self, ctx):
344 branches = []
347 branches = []
345 branch = ctx.branch()
348 branch = ctx.branch()
346 # If this is an empty repo, ctx.node() == nullid,
349 # If this is an empty repo, ctx.node() == nullid,
347 # ctx.branch() == 'default', but branchtags() is
350 # ctx.branch() == 'default', but branchtags() is
348 # an empty dict. Using dict.get avoids a traceback.
351 # an empty dict. Using dict.get avoids a traceback.
349 if self.repo.branchtags().get(branch) == ctx.node():
352 if self.repo.branchtags().get(branch) == ctx.node():
350 branches.append({"name": branch})
353 branches.append({"name": branch})
351 return branches
354 return branches
352
355
353 def showtag(self, tmpl, t1, node=nullid, **args):
356 def showtag(self, tmpl, t1, node=nullid, **args):
354 for t in self.repo.nodetags(node):
357 for t in self.repo.nodetags(node):
355 yield tmpl(t1, tag=t, **args)
358 yield tmpl(t1, tag=t, **args)
356
359
357 def diff(self, tmpl, node1, node2, files):
360 def diff(self, tmpl, node1, node2, files):
358 def filterfiles(filters, files):
361 def filterfiles(filters, files):
359 l = [x for x in files if x in filters]
362 l = [x for x in files if x in filters]
360
363
361 for t in filters:
364 for t in filters:
362 if t and t[-1] != os.sep:
365 if t and t[-1] != os.sep:
363 t += os.sep
366 t += os.sep
364 l += [x for x in files if x.startswith(t)]
367 l += [x for x in files if x.startswith(t)]
365 return l
368 return l
366
369
367 parity = paritygen(self.stripecount)
370 parity = paritygen(self.stripecount)
368 def diffblock(diff, f, fn):
371 def diffblock(diff, f, fn):
369 yield tmpl("diffblock",
372 yield tmpl("diffblock",
370 lines=prettyprintlines(diff),
373 lines=prettyprintlines(diff),
371 parity=parity.next(),
374 parity=parity.next(),
372 file=f,
375 file=f,
373 filenode=hex(fn or nullid))
376 filenode=hex(fn or nullid))
374
377
375 blockcount = countgen()
378 blockcount = countgen()
376 def prettyprintlines(diff):
379 def prettyprintlines(diff):
377 blockno = blockcount.next()
380 blockno = blockcount.next()
378 for lineno, l in enumerate(diff.splitlines(1)):
381 for lineno, l in enumerate(diff.splitlines(1)):
379 if blockno == 0:
382 if blockno == 0:
380 lineno = lineno + 1
383 lineno = lineno + 1
381 else:
384 else:
382 lineno = "%d.%d" % (blockno, lineno + 1)
385 lineno = "%d.%d" % (blockno, lineno + 1)
383 if l.startswith('+'):
386 if l.startswith('+'):
384 ltype = "difflineplus"
387 ltype = "difflineplus"
385 elif l.startswith('-'):
388 elif l.startswith('-'):
386 ltype = "difflineminus"
389 ltype = "difflineminus"
387 elif l.startswith('@'):
390 elif l.startswith('@'):
388 ltype = "difflineat"
391 ltype = "difflineat"
389 else:
392 else:
390 ltype = "diffline"
393 ltype = "diffline"
391 yield tmpl(ltype,
394 yield tmpl(ltype,
392 line=l,
395 line=l,
393 lineid="l%s" % lineno,
396 lineid="l%s" % lineno,
394 linenumber="% 8s" % lineno)
397 linenumber="% 8s" % lineno)
395
398
396 r = self.repo
399 r = self.repo
397 c1 = r.changectx(node1)
400 c1 = r.changectx(node1)
398 c2 = r.changectx(node2)
401 c2 = r.changectx(node2)
399 date1 = util.datestr(c1.date())
402 date1 = util.datestr(c1.date())
400 date2 = util.datestr(c2.date())
403 date2 = util.datestr(c2.date())
401
404
402 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
405 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
403 if files:
406 if files:
404 modified, added, removed = map(lambda x: filterfiles(files, x),
407 modified, added, removed = map(lambda x: filterfiles(files, x),
405 (modified, added, removed))
408 (modified, added, removed))
406
409
407 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
410 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
408 for f in modified:
411 for f in modified:
409 to = c1.filectx(f).data()
412 to = c1.filectx(f).data()
410 tn = c2.filectx(f).data()
413 tn = c2.filectx(f).data()
411 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
414 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
412 opts=diffopts), f, tn)
415 opts=diffopts), f, tn)
413 for f in added:
416 for f in added:
414 to = None
417 to = None
415 tn = c2.filectx(f).data()
418 tn = c2.filectx(f).data()
416 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
419 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
417 opts=diffopts), f, tn)
420 opts=diffopts), f, tn)
418 for f in removed:
421 for f in removed:
419 to = c1.filectx(f).data()
422 to = c1.filectx(f).data()
420 tn = None
423 tn = None
421 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
424 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
422 opts=diffopts), f, tn)
425 opts=diffopts), f, tn)
423
426
424 def changelog(self, tmpl, ctx, shortlog=False):
427 def changelog(self, tmpl, ctx, shortlog=False):
425 def changelist(limit=0,**map):
428 def changelist(limit=0,**map):
426 cl = self.repo.changelog
429 cl = self.repo.changelog
427 l = [] # build a list in forward order for efficiency
430 l = [] # build a list in forward order for efficiency
428 for i in xrange(start, end):
431 for i in xrange(start, end):
429 ctx = self.repo.changectx(i)
432 ctx = self.repo.changectx(i)
430 n = ctx.node()
433 n = ctx.node()
431
434
432 l.insert(0, {"parity": parity.next(),
435 l.insert(0, {"parity": parity.next(),
433 "author": ctx.user(),
436 "author": ctx.user(),
434 "parent": self.siblings(ctx.parents(), i - 1),
437 "parent": self.siblings(ctx.parents(), i - 1),
435 "child": self.siblings(ctx.children(), i + 1),
438 "child": self.siblings(ctx.children(), i + 1),
436 "changelogtag": self.showtag("changelogtag",n),
439 "changelogtag": self.showtag("changelogtag",n),
437 "desc": ctx.description(),
440 "desc": ctx.description(),
438 "date": ctx.date(),
441 "date": ctx.date(),
439 "files": self.listfilediffs(tmpl, ctx.files(), n),
442 "files": self.listfilediffs(tmpl, ctx.files(), n),
440 "rev": i,
443 "rev": i,
441 "node": hex(n),
444 "node": hex(n),
442 "tags": self.nodetagsdict(n),
445 "tags": self.nodetagsdict(n),
443 "branches": self.nodebranchdict(ctx)})
446 "branches": self.nodebranchdict(ctx)})
444
447
445 if limit > 0:
448 if limit > 0:
446 l = l[:limit]
449 l = l[:limit]
447
450
448 for e in l:
451 for e in l:
449 yield e
452 yield e
450
453
451 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
454 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
452 cl = self.repo.changelog
455 cl = self.repo.changelog
453 count = cl.count()
456 count = cl.count()
454 pos = ctx.rev()
457 pos = ctx.rev()
455 start = max(0, pos - maxchanges + 1)
458 start = max(0, pos - maxchanges + 1)
456 end = min(count, start + maxchanges)
459 end = min(count, start + maxchanges)
457 pos = end - 1
460 pos = end - 1
458 parity = paritygen(self.stripecount, offset=start-end)
461 parity = paritygen(self.stripecount, offset=start-end)
459
462
460 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
463 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
461
464
462 return tmpl(shortlog and 'shortlog' or 'changelog',
465 return tmpl(shortlog and 'shortlog' or 'changelog',
463 changenav=changenav,
466 changenav=changenav,
464 node=hex(cl.tip()),
467 node=hex(cl.tip()),
465 rev=pos, changesets=count,
468 rev=pos, changesets=count,
466 entries=lambda **x: changelist(limit=0,**x),
469 entries=lambda **x: changelist(limit=0,**x),
467 latestentry=lambda **x: changelist(limit=1,**x),
470 latestentry=lambda **x: changelist(limit=1,**x),
468 archives=self.archivelist("tip"))
471 archives=self.archivelist("tip"))
469
472
470 def search(self, tmpl, query):
473 def search(self, tmpl, query):
471
474
472 def changelist(**map):
475 def changelist(**map):
473 cl = self.repo.changelog
476 cl = self.repo.changelog
474 count = 0
477 count = 0
475 qw = query.lower().split()
478 qw = query.lower().split()
476
479
477 def revgen():
480 def revgen():
478 for i in xrange(cl.count() - 1, 0, -100):
481 for i in xrange(cl.count() - 1, 0, -100):
479 l = []
482 l = []
480 for j in xrange(max(0, i - 100), i + 1):
483 for j in xrange(max(0, i - 100), i + 1):
481 ctx = self.repo.changectx(j)
484 ctx = self.repo.changectx(j)
482 l.append(ctx)
485 l.append(ctx)
483 l.reverse()
486 l.reverse()
484 for e in l:
487 for e in l:
485 yield e
488 yield e
486
489
487 for ctx in revgen():
490 for ctx in revgen():
488 miss = 0
491 miss = 0
489 for q in qw:
492 for q in qw:
490 if not (q in ctx.user().lower() or
493 if not (q in ctx.user().lower() or
491 q in ctx.description().lower() or
494 q in ctx.description().lower() or
492 q in " ".join(ctx.files()).lower()):
495 q in " ".join(ctx.files()).lower()):
493 miss = 1
496 miss = 1
494 break
497 break
495 if miss:
498 if miss:
496 continue
499 continue
497
500
498 count += 1
501 count += 1
499 n = ctx.node()
502 n = ctx.node()
500
503
501 yield tmpl('searchentry',
504 yield tmpl('searchentry',
502 parity=parity.next(),
505 parity=parity.next(),
503 author=ctx.user(),
506 author=ctx.user(),
504 parent=self.siblings(ctx.parents()),
507 parent=self.siblings(ctx.parents()),
505 child=self.siblings(ctx.children()),
508 child=self.siblings(ctx.children()),
506 changelogtag=self.showtag("changelogtag",n),
509 changelogtag=self.showtag("changelogtag",n),
507 desc=ctx.description(),
510 desc=ctx.description(),
508 date=ctx.date(),
511 date=ctx.date(),
509 files=self.listfilediffs(tmpl, ctx.files(), n),
512 files=self.listfilediffs(tmpl, ctx.files(), n),
510 rev=ctx.rev(),
513 rev=ctx.rev(),
511 node=hex(n),
514 node=hex(n),
512 tags=self.nodetagsdict(n),
515 tags=self.nodetagsdict(n),
513 branches=self.nodebranchdict(ctx))
516 branches=self.nodebranchdict(ctx))
514
517
515 if count >= self.maxchanges:
518 if count >= self.maxchanges:
516 break
519 break
517
520
518 cl = self.repo.changelog
521 cl = self.repo.changelog
519 parity = paritygen(self.stripecount)
522 parity = paritygen(self.stripecount)
520
523
521 return tmpl('search',
524 return tmpl('search',
522 query=query,
525 query=query,
523 node=hex(cl.tip()),
526 node=hex(cl.tip()),
524 entries=changelist,
527 entries=changelist,
525 archives=self.archivelist("tip"))
528 archives=self.archivelist("tip"))
526
529
527 def changeset(self, tmpl, ctx):
530 def changeset(self, tmpl, ctx):
528 n = ctx.node()
531 n = ctx.node()
529 parents = ctx.parents()
532 parents = ctx.parents()
530 p1 = parents[0].node()
533 p1 = parents[0].node()
531
534
532 files = []
535 files = []
533 parity = paritygen(self.stripecount)
536 parity = paritygen(self.stripecount)
534 for f in ctx.files():
537 for f in ctx.files():
535 files.append(tmpl("filenodelink",
538 files.append(tmpl("filenodelink",
536 node=hex(n), file=f,
539 node=hex(n), file=f,
537 parity=parity.next()))
540 parity=parity.next()))
538
541
539 def diff(**map):
542 def diff(**map):
540 yield self.diff(tmpl, p1, n, None)
543 yield self.diff(tmpl, p1, n, None)
541
544
542 return tmpl('changeset',
545 return tmpl('changeset',
543 diff=diff,
546 diff=diff,
544 rev=ctx.rev(),
547 rev=ctx.rev(),
545 node=hex(n),
548 node=hex(n),
546 parent=self.siblings(parents),
549 parent=self.siblings(parents),
547 child=self.siblings(ctx.children()),
550 child=self.siblings(ctx.children()),
548 changesettag=self.showtag("changesettag",n),
551 changesettag=self.showtag("changesettag",n),
549 author=ctx.user(),
552 author=ctx.user(),
550 desc=ctx.description(),
553 desc=ctx.description(),
551 date=ctx.date(),
554 date=ctx.date(),
552 files=files,
555 files=files,
553 archives=self.archivelist(hex(n)),
556 archives=self.archivelist(hex(n)),
554 tags=self.nodetagsdict(n),
557 tags=self.nodetagsdict(n),
555 branches=self.nodebranchdict(ctx))
558 branches=self.nodebranchdict(ctx))
556
559
557 def filelog(self, tmpl, fctx):
560 def filelog(self, tmpl, fctx):
558 f = fctx.path()
561 f = fctx.path()
559 fl = fctx.filelog()
562 fl = fctx.filelog()
560 count = fl.count()
563 count = fl.count()
561 pagelen = self.maxshortchanges
564 pagelen = self.maxshortchanges
562 pos = fctx.filerev()
565 pos = fctx.filerev()
563 start = max(0, pos - pagelen + 1)
566 start = max(0, pos - pagelen + 1)
564 end = min(count, start + pagelen)
567 end = min(count, start + pagelen)
565 pos = end - 1
568 pos = end - 1
566 parity = paritygen(self.stripecount, offset=start-end)
569 parity = paritygen(self.stripecount, offset=start-end)
567
570
568 def entries(limit=0, **map):
571 def entries(limit=0, **map):
569 l = []
572 l = []
570
573
571 for i in xrange(start, end):
574 for i in xrange(start, end):
572 ctx = fctx.filectx(i)
575 ctx = fctx.filectx(i)
573 n = fl.node(i)
576 n = fl.node(i)
574
577
575 l.insert(0, {"parity": parity.next(),
578 l.insert(0, {"parity": parity.next(),
576 "filerev": i,
579 "filerev": i,
577 "file": f,
580 "file": f,
578 "node": hex(ctx.node()),
581 "node": hex(ctx.node()),
579 "author": ctx.user(),
582 "author": ctx.user(),
580 "date": ctx.date(),
583 "date": ctx.date(),
581 "rename": self.renamelink(fl, n),
584 "rename": self.renamelink(fl, n),
582 "parent": self.siblings(fctx.parents()),
585 "parent": self.siblings(fctx.parents()),
583 "child": self.siblings(fctx.children()),
586 "child": self.siblings(fctx.children()),
584 "desc": ctx.description()})
587 "desc": ctx.description()})
585
588
586 if limit > 0:
589 if limit > 0:
587 l = l[:limit]
590 l = l[:limit]
588
591
589 for e in l:
592 for e in l:
590 yield e
593 yield e
591
594
592 nodefunc = lambda x: fctx.filectx(fileid=x)
595 nodefunc = lambda x: fctx.filectx(fileid=x)
593 nav = revnavgen(pos, pagelen, count, nodefunc)
596 nav = revnavgen(pos, pagelen, count, nodefunc)
594 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
597 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
595 entries=lambda **x: entries(limit=0, **x),
598 entries=lambda **x: entries(limit=0, **x),
596 latestentry=lambda **x: entries(limit=1, **x))
599 latestentry=lambda **x: entries(limit=1, **x))
597
600
598 def filerevision(self, tmpl, fctx):
601 def filerevision(self, tmpl, fctx):
599 f = fctx.path()
602 f = fctx.path()
600 text = fctx.data()
603 text = fctx.data()
601 fl = fctx.filelog()
604 fl = fctx.filelog()
602 n = fctx.filenode()
605 n = fctx.filenode()
603 parity = paritygen(self.stripecount)
606 parity = paritygen(self.stripecount)
604
607
605 if util.binary(text):
608 if util.binary(text):
606 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
609 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
607 text = '(binary:%s)' % mt
610 text = '(binary:%s)' % mt
608
611
609 def lines():
612 def lines():
610 for lineno, t in enumerate(text.splitlines(1)):
613 for lineno, t in enumerate(text.splitlines(1)):
611 yield {"line": t,
614 yield {"line": t,
612 "lineid": "l%d" % (lineno + 1),
615 "lineid": "l%d" % (lineno + 1),
613 "linenumber": "% 6d" % (lineno + 1),
616 "linenumber": "% 6d" % (lineno + 1),
614 "parity": parity.next()}
617 "parity": parity.next()}
615
618
616 return tmpl("filerevision",
619 return tmpl("filerevision",
617 file=f,
620 file=f,
618 path=_up(f),
621 path=_up(f),
619 text=lines(),
622 text=lines(),
620 rev=fctx.rev(),
623 rev=fctx.rev(),
621 node=hex(fctx.node()),
624 node=hex(fctx.node()),
622 author=fctx.user(),
625 author=fctx.user(),
623 date=fctx.date(),
626 date=fctx.date(),
624 desc=fctx.description(),
627 desc=fctx.description(),
625 parent=self.siblings(fctx.parents()),
628 parent=self.siblings(fctx.parents()),
626 child=self.siblings(fctx.children()),
629 child=self.siblings(fctx.children()),
627 rename=self.renamelink(fl, n),
630 rename=self.renamelink(fl, n),
628 permissions=fctx.manifest().flags(f))
631 permissions=fctx.manifest().flags(f))
629
632
630 def fileannotate(self, tmpl, fctx):
633 def fileannotate(self, tmpl, fctx):
631 f = fctx.path()
634 f = fctx.path()
632 n = fctx.filenode()
635 n = fctx.filenode()
633 fl = fctx.filelog()
636 fl = fctx.filelog()
634 parity = paritygen(self.stripecount)
637 parity = paritygen(self.stripecount)
635
638
636 def annotate(**map):
639 def annotate(**map):
637 last = None
640 last = None
638 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
641 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
639 for lineno, ((f, targetline), l) in lines:
642 for lineno, ((f, targetline), l) in lines:
640 fnode = f.filenode()
643 fnode = f.filenode()
641 name = self.repo.ui.shortuser(f.user())
644 name = self.repo.ui.shortuser(f.user())
642
645
643 if last != fnode:
646 if last != fnode:
644 last = fnode
647 last = fnode
645
648
646 yield {"parity": parity.next(),
649 yield {"parity": parity.next(),
647 "node": hex(f.node()),
650 "node": hex(f.node()),
648 "rev": f.rev(),
651 "rev": f.rev(),
649 "author": name,
652 "author": name,
650 "file": f.path(),
653 "file": f.path(),
651 "targetline": targetline,
654 "targetline": targetline,
652 "line": l,
655 "line": l,
653 "lineid": "l%d" % (lineno + 1),
656 "lineid": "l%d" % (lineno + 1),
654 "linenumber": "% 6d" % (lineno + 1)}
657 "linenumber": "% 6d" % (lineno + 1)}
655
658
656 return tmpl("fileannotate",
659 return tmpl("fileannotate",
657 file=f,
660 file=f,
658 annotate=annotate,
661 annotate=annotate,
659 path=_up(f),
662 path=_up(f),
660 rev=fctx.rev(),
663 rev=fctx.rev(),
661 node=hex(fctx.node()),
664 node=hex(fctx.node()),
662 author=fctx.user(),
665 author=fctx.user(),
663 date=fctx.date(),
666 date=fctx.date(),
664 desc=fctx.description(),
667 desc=fctx.description(),
665 rename=self.renamelink(fl, n),
668 rename=self.renamelink(fl, n),
666 parent=self.siblings(fctx.parents()),
669 parent=self.siblings(fctx.parents()),
667 child=self.siblings(fctx.children()),
670 child=self.siblings(fctx.children()),
668 permissions=fctx.manifest().flags(f))
671 permissions=fctx.manifest().flags(f))
669
672
670 def manifest(self, tmpl, ctx, path):
673 def manifest(self, tmpl, ctx, path):
671 mf = ctx.manifest()
674 mf = ctx.manifest()
672 node = ctx.node()
675 node = ctx.node()
673
676
674 files = {}
677 files = {}
675 parity = paritygen(self.stripecount)
678 parity = paritygen(self.stripecount)
676
679
677 if path and path[-1] != "/":
680 if path and path[-1] != "/":
678 path += "/"
681 path += "/"
679 l = len(path)
682 l = len(path)
680 abspath = "/" + path
683 abspath = "/" + path
681
684
682 for f, n in mf.items():
685 for f, n in mf.items():
683 if f[:l] != path:
686 if f[:l] != path:
684 continue
687 continue
685 remain = f[l:]
688 remain = f[l:]
686 if "/" in remain:
689 if "/" in remain:
687 short = remain[:remain.index("/") + 1] # bleah
690 short = remain[:remain.index("/") + 1] # bleah
688 files[short] = (f, None)
691 files[short] = (f, None)
689 else:
692 else:
690 short = os.path.basename(remain)
693 short = os.path.basename(remain)
691 files[short] = (f, n)
694 files[short] = (f, n)
692
695
693 if not files:
696 if not files:
694 raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: ' + path)
697 raise ErrorResponse(HTTP_NOT_FOUND, 'Path not found: ' + path)
695
698
696 def filelist(**map):
699 def filelist(**map):
697 fl = files.keys()
700 fl = files.keys()
698 fl.sort()
701 fl.sort()
699 for f in fl:
702 for f in fl:
700 full, fnode = files[f]
703 full, fnode = files[f]
701 if not fnode:
704 if not fnode:
702 continue
705 continue
703
706
704 fctx = ctx.filectx(full)
707 fctx = ctx.filectx(full)
705 yield {"file": full,
708 yield {"file": full,
706 "parity": parity.next(),
709 "parity": parity.next(),
707 "basename": f,
710 "basename": f,
708 "date": fctx.changectx().date(),
711 "date": fctx.changectx().date(),
709 "size": fctx.size(),
712 "size": fctx.size(),
710 "permissions": mf.flags(full)}
713 "permissions": mf.flags(full)}
711
714
712 def dirlist(**map):
715 def dirlist(**map):
713 fl = files.keys()
716 fl = files.keys()
714 fl.sort()
717 fl.sort()
715 for f in fl:
718 for f in fl:
716 full, fnode = files[f]
719 full, fnode = files[f]
717 if fnode:
720 if fnode:
718 continue
721 continue
719
722
720 yield {"parity": parity.next(),
723 yield {"parity": parity.next(),
721 "path": "%s%s" % (abspath, f),
724 "path": "%s%s" % (abspath, f),
722 "basename": f[:-1]}
725 "basename": f[:-1]}
723
726
724 return tmpl("manifest",
727 return tmpl("manifest",
725 rev=ctx.rev(),
728 rev=ctx.rev(),
726 node=hex(node),
729 node=hex(node),
727 path=abspath,
730 path=abspath,
728 up=_up(abspath),
731 up=_up(abspath),
729 upparity=parity.next(),
732 upparity=parity.next(),
730 fentries=filelist,
733 fentries=filelist,
731 dentries=dirlist,
734 dentries=dirlist,
732 archives=self.archivelist(hex(node)),
735 archives=self.archivelist(hex(node)),
733 tags=self.nodetagsdict(node),
736 tags=self.nodetagsdict(node),
734 branches=self.nodebranchdict(ctx))
737 branches=self.nodebranchdict(ctx))
735
738
736 def tags(self, tmpl):
739 def tags(self, tmpl):
737 i = self.repo.tagslist()
740 i = self.repo.tagslist()
738 i.reverse()
741 i.reverse()
739 parity = paritygen(self.stripecount)
742 parity = paritygen(self.stripecount)
740
743
741 def entries(notip=False,limit=0, **map):
744 def entries(notip=False,limit=0, **map):
742 count = 0
745 count = 0
743 for k, n in i:
746 for k, n in i:
744 if notip and k == "tip":
747 if notip and k == "tip":
745 continue
748 continue
746 if limit > 0 and count >= limit:
749 if limit > 0 and count >= limit:
747 continue
750 continue
748 count = count + 1
751 count = count + 1
749 yield {"parity": parity.next(),
752 yield {"parity": parity.next(),
750 "tag": k,
753 "tag": k,
751 "date": self.repo.changectx(n).date(),
754 "date": self.repo.changectx(n).date(),
752 "node": hex(n)}
755 "node": hex(n)}
753
756
754 return tmpl("tags",
757 return tmpl("tags",
755 node=hex(self.repo.changelog.tip()),
758 node=hex(self.repo.changelog.tip()),
756 entries=lambda **x: entries(False,0, **x),
759 entries=lambda **x: entries(False,0, **x),
757 entriesnotip=lambda **x: entries(True,0, **x),
760 entriesnotip=lambda **x: entries(True,0, **x),
758 latestentry=lambda **x: entries(True,1, **x))
761 latestentry=lambda **x: entries(True,1, **x))
759
762
760 def summary(self, tmpl):
763 def summary(self, tmpl):
761 i = self.repo.tagslist()
764 i = self.repo.tagslist()
762 i.reverse()
765 i.reverse()
763
766
764 def tagentries(**map):
767 def tagentries(**map):
765 parity = paritygen(self.stripecount)
768 parity = paritygen(self.stripecount)
766 count = 0
769 count = 0
767 for k, n in i:
770 for k, n in i:
768 if k == "tip": # skip tip
771 if k == "tip": # skip tip
769 continue;
772 continue;
770
773
771 count += 1
774 count += 1
772 if count > 10: # limit to 10 tags
775 if count > 10: # limit to 10 tags
773 break;
776 break;
774
777
775 yield tmpl("tagentry",
778 yield tmpl("tagentry",
776 parity=parity.next(),
779 parity=parity.next(),
777 tag=k,
780 tag=k,
778 node=hex(n),
781 node=hex(n),
779 date=self.repo.changectx(n).date())
782 date=self.repo.changectx(n).date())
780
783
781
784
782 def branches(**map):
785 def branches(**map):
783 parity = paritygen(self.stripecount)
786 parity = paritygen(self.stripecount)
784
787
785 b = self.repo.branchtags()
788 b = self.repo.branchtags()
786 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
789 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
787 l.sort()
790 l.sort()
788
791
789 for r,n,t in l:
792 for r,n,t in l:
790 ctx = self.repo.changectx(n)
793 ctx = self.repo.changectx(n)
791
794
792 yield {'parity': parity.next(),
795 yield {'parity': parity.next(),
793 'branch': t,
796 'branch': t,
794 'node': hex(n),
797 'node': hex(n),
795 'date': ctx.date()}
798 'date': ctx.date()}
796
799
797 def changelist(**map):
800 def changelist(**map):
798 parity = paritygen(self.stripecount, offset=start-end)
801 parity = paritygen(self.stripecount, offset=start-end)
799 l = [] # build a list in forward order for efficiency
802 l = [] # build a list in forward order for efficiency
800 for i in xrange(start, end):
803 for i in xrange(start, end):
801 ctx = self.repo.changectx(i)
804 ctx = self.repo.changectx(i)
802 n = ctx.node()
805 n = ctx.node()
803 hn = hex(n)
806 hn = hex(n)
804
807
805 l.insert(0, tmpl(
808 l.insert(0, tmpl(
806 'shortlogentry',
809 'shortlogentry',
807 parity=parity.next(),
810 parity=parity.next(),
808 author=ctx.user(),
811 author=ctx.user(),
809 desc=ctx.description(),
812 desc=ctx.description(),
810 date=ctx.date(),
813 date=ctx.date(),
811 rev=i,
814 rev=i,
812 node=hn,
815 node=hn,
813 tags=self.nodetagsdict(n),
816 tags=self.nodetagsdict(n),
814 branches=self.nodebranchdict(ctx)))
817 branches=self.nodebranchdict(ctx)))
815
818
816 yield l
819 yield l
817
820
818 cl = self.repo.changelog
821 cl = self.repo.changelog
819 count = cl.count()
822 count = cl.count()
820 start = max(0, count - self.maxchanges)
823 start = max(0, count - self.maxchanges)
821 end = min(count, start + self.maxchanges)
824 end = min(count, start + self.maxchanges)
822
825
823 return tmpl("summary",
826 return tmpl("summary",
824 desc=self.config("web", "description", "unknown"),
827 desc=self.config("web", "description", "unknown"),
825 owner=get_contact(self.config) or "unknown",
828 owner=get_contact(self.config) or "unknown",
826 lastchange=cl.read(cl.tip())[2],
829 lastchange=cl.read(cl.tip())[2],
827 tags=tagentries,
830 tags=tagentries,
828 branches=branches,
831 branches=branches,
829 shortlog=changelist,
832 shortlog=changelist,
830 node=hex(cl.tip()),
833 node=hex(cl.tip()),
831 archives=self.archivelist("tip"))
834 archives=self.archivelist("tip"))
832
835
833 def filediff(self, tmpl, fctx):
836 def filediff(self, tmpl, fctx):
834 n = fctx.node()
837 n = fctx.node()
835 path = fctx.path()
838 path = fctx.path()
836 parents = fctx.parents()
839 parents = fctx.parents()
837 p1 = parents and parents[0].node() or nullid
840 p1 = parents and parents[0].node() or nullid
838
841
839 def diff(**map):
842 def diff(**map):
840 yield self.diff(tmpl, p1, n, [path])
843 yield self.diff(tmpl, p1, n, [path])
841
844
842 return tmpl("filediff",
845 return tmpl("filediff",
843 file=path,
846 file=path,
844 node=hex(n),
847 node=hex(n),
845 rev=fctx.rev(),
848 rev=fctx.rev(),
846 parent=self.siblings(parents),
849 parent=self.siblings(parents),
847 child=self.siblings(fctx.children()),
850 child=self.siblings(fctx.children()),
848 diff=diff)
851 diff=diff)
849
852
850 archive_specs = {
853 archive_specs = {
851 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
854 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
852 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
855 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
853 'zip': ('application/zip', 'zip', '.zip', None),
856 'zip': ('application/zip', 'zip', '.zip', None),
854 }
857 }
855
858
856 def archive(self, tmpl, req, key, type_):
859 def archive(self, tmpl, req, key, type_):
857 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
860 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
858 cnode = self.repo.lookup(key)
861 cnode = self.repo.lookup(key)
859 arch_version = key
862 arch_version = key
860 if cnode == key or key == 'tip':
863 if cnode == key or key == 'tip':
861 arch_version = short(cnode)
864 arch_version = short(cnode)
862 name = "%s-%s" % (reponame, arch_version)
865 name = "%s-%s" % (reponame, arch_version)
863 mimetype, artype, extension, encoding = self.archive_specs[type_]
866 mimetype, artype, extension, encoding = self.archive_specs[type_]
864 headers = [
867 headers = [
865 ('Content-Type', mimetype),
868 ('Content-Type', mimetype),
866 ('Content-Disposition', 'attachment; filename=%s%s' %
869 ('Content-Disposition', 'attachment; filename=%s%s' %
867 (name, extension))
870 (name, extension))
868 ]
871 ]
869 if encoding:
872 if encoding:
870 headers.append(('Content-Encoding', encoding))
873 headers.append(('Content-Encoding', encoding))
871 req.header(headers)
874 req.header(headers)
872 req.respond(HTTP_OK)
875 req.respond(HTTP_OK)
873 archival.archive(self.repo, req, cnode, artype, prefix=name)
876 archival.archive(self.repo, req, cnode, artype, prefix=name)
874
877
875 # add tags to things
878 # add tags to things
876 # tags -> list of changesets corresponding to tags
879 # tags -> list of changesets corresponding to tags
877 # find tag, changeset, file
880 # find tag, changeset, file
878
881
879 def cleanpath(self, path):
882 def cleanpath(self, path):
880 path = path.lstrip('/')
883 path = path.lstrip('/')
881 return util.canonpath(self.repo.root, '', path)
884 return util.canonpath(self.repo.root, '', path)
882
885
883 def changectx(self, req):
886 def changectx(self, req):
884 if 'node' in req.form:
887 if 'node' in req.form:
885 changeid = req.form['node'][0]
888 changeid = req.form['node'][0]
886 elif 'manifest' in req.form:
889 elif 'manifest' in req.form:
887 changeid = req.form['manifest'][0]
890 changeid = req.form['manifest'][0]
888 else:
891 else:
889 changeid = self.repo.changelog.count() - 1
892 changeid = self.repo.changelog.count() - 1
890
893
891 try:
894 try:
892 ctx = self.repo.changectx(changeid)
895 ctx = self.repo.changectx(changeid)
893 except hg.RepoError:
896 except hg.RepoError:
894 man = self.repo.manifest
897 man = self.repo.manifest
895 mn = man.lookup(changeid)
898 mn = man.lookup(changeid)
896 ctx = self.repo.changectx(man.linkrev(mn))
899 ctx = self.repo.changectx(man.linkrev(mn))
897
900
898 return ctx
901 return ctx
899
902
900 def filectx(self, req):
903 def filectx(self, req):
901 path = self.cleanpath(req.form['file'][0])
904 path = self.cleanpath(req.form['file'][0])
902 if 'node' in req.form:
905 if 'node' in req.form:
903 changeid = req.form['node'][0]
906 changeid = req.form['node'][0]
904 else:
907 else:
905 changeid = req.form['filenode'][0]
908 changeid = req.form['filenode'][0]
906 try:
909 try:
907 ctx = self.repo.changectx(changeid)
910 ctx = self.repo.changectx(changeid)
908 fctx = ctx.filectx(path)
911 fctx = ctx.filectx(path)
909 except hg.RepoError:
912 except hg.RepoError:
910 fctx = self.repo.filectx(path, fileid=changeid)
913 fctx = self.repo.filectx(path, fileid=changeid)
911
914
912 return fctx
915 return fctx
913
916
914 def check_perm(self, req, op, default):
917 def check_perm(self, req, op, default):
915 '''check permission for operation based on user auth.
918 '''check permission for operation based on user auth.
916 return true if op allowed, else false.
919 return true if op allowed, else false.
917 default is policy to use if no config given.'''
920 default is policy to use if no config given.'''
918
921
919 user = req.env.get('REMOTE_USER')
922 user = req.env.get('REMOTE_USER')
920
923
921 deny = self.configlist('web', 'deny_' + op)
924 deny = self.configlist('web', 'deny_' + op)
922 if deny and (not user or deny == ['*'] or user in deny):
925 if deny and (not user or deny == ['*'] or user in deny):
923 return False
926 return False
924
927
925 allow = self.configlist('web', 'allow_' + op)
928 allow = self.configlist('web', 'allow_' + op)
926 return (allow and (allow == ['*'] or user in allow)) or default
929 return (allow and (allow == ['*'] or user in allow)) or default
@@ -1,151 +1,136 b''
1 # repair.py - functions for repository repair for mercurial
1 # repair.py - functions for repository repair for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 # Copyright 2007 Matt Mackall
4 # Copyright 2007 Matt Mackall
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import changegroup, os
9 import changegroup, os
10 from node import *
10 from node import *
11
11
12 def _limitheads(cl, stoprev):
13 """return the list of all revs >= stoprev that have no children"""
14 seen = {}
15 heads = []
16
17 for r in xrange(cl.count() - 1, stoprev - 1, -1):
18 if r not in seen:
19 heads.append(r)
20 for p in cl.parentrevs(r):
21 seen[p] = 1
22 return heads
23
24 def _bundle(repo, bases, heads, node, suffix, extranodes=None):
12 def _bundle(repo, bases, heads, node, suffix, extranodes=None):
25 """create a bundle with the specified revisions as a backup"""
13 """create a bundle with the specified revisions as a backup"""
26 cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
14 cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
27 backupdir = repo.join("strip-backup")
15 backupdir = repo.join("strip-backup")
28 if not os.path.isdir(backupdir):
16 if not os.path.isdir(backupdir):
29 os.mkdir(backupdir)
17 os.mkdir(backupdir)
30 name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
18 name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
31 repo.ui.warn("saving bundle to %s\n" % name)
19 repo.ui.warn("saving bundle to %s\n" % name)
32 return changegroup.writebundle(cg, name, "HG10BZ")
20 return changegroup.writebundle(cg, name, "HG10BZ")
33
21
34 def _collectfiles(repo, striprev):
22 def _collectfiles(repo, striprev):
35 """find out the filelogs affected by the strip"""
23 """find out the filelogs affected by the strip"""
36 files = {}
24 files = {}
37
25
38 for x in xrange(striprev, repo.changelog.count()):
26 for x in xrange(striprev, repo.changelog.count()):
39 for name in repo.changectx(x).files():
27 for name in repo.changectx(x).files():
40 if name in files:
28 if name in files:
41 continue
29 continue
42 files[name] = 1
30 files[name] = 1
43
31
44 files = files.keys()
32 files = files.keys()
45 files.sort()
33 files.sort()
46 return files
34 return files
47
35
48 def _collectextranodes(repo, files, link):
36 def _collectextranodes(repo, files, link):
49 """return the nodes that have to be saved before the strip"""
37 """return the nodes that have to be saved before the strip"""
50 def collectone(revlog):
38 def collectone(revlog):
51 extra = []
39 extra = []
52 startrev = count = revlog.count()
40 startrev = count = revlog.count()
53 # find the truncation point of the revlog
41 # find the truncation point of the revlog
54 for i in xrange(0, count):
42 for i in xrange(0, count):
55 node = revlog.node(i)
43 node = revlog.node(i)
56 lrev = revlog.linkrev(node)
44 lrev = revlog.linkrev(node)
57 if lrev >= link:
45 if lrev >= link:
58 startrev = i + 1
46 startrev = i + 1
59 break
47 break
60
48
61 # see if any revision after that point has a linkrev less than link
49 # see if any revision after that point has a linkrev less than link
62 # (we have to manually save these guys)
50 # (we have to manually save these guys)
63 for i in xrange(startrev, count):
51 for i in xrange(startrev, count):
64 node = revlog.node(i)
52 node = revlog.node(i)
65 lrev = revlog.linkrev(node)
53 lrev = revlog.linkrev(node)
66 if lrev < link:
54 if lrev < link:
67 extra.append((node, cl.node(lrev)))
55 extra.append((node, cl.node(lrev)))
68
56
69 return extra
57 return extra
70
58
71 extranodes = {}
59 extranodes = {}
72 cl = repo.changelog
60 cl = repo.changelog
73 extra = collectone(repo.manifest)
61 extra = collectone(repo.manifest)
74 if extra:
62 if extra:
75 extranodes[1] = extra
63 extranodes[1] = extra
76 for fname in files:
64 for fname in files:
77 f = repo.file(fname)
65 f = repo.file(fname)
78 extra = collectone(f)
66 extra = collectone(f)
79 if extra:
67 if extra:
80 extranodes[fname] = extra
68 extranodes[fname] = extra
81
69
82 return extranodes
70 return extranodes
83
71
84 def strip(ui, repo, node, backup="all"):
72 def strip(ui, repo, node, backup="all"):
85 cl = repo.changelog
73 cl = repo.changelog
86 # TODO delete the undo files, and handle undo of merge sets
74 # TODO delete the undo files, and handle undo of merge sets
87 pp = cl.parents(node)
75 pp = cl.parents(node)
88 striprev = cl.rev(node)
76 striprev = cl.rev(node)
89
77
90 # save is a list of all the branches we are truncating away
78 # Some revisions with rev > striprev may not be descendants of striprev.
91 # that we actually want to keep. changegroup will be used
79 # We have to find these revisions and put them in a bundle, so that
92 # to preserve them and add them back after the truncate
80 # we can restore them after the truncations.
93 saveheads = []
81 # To create the bundle we use repo.changegroupsubset which requires
94 savebases = {}
82 # the list of heads and bases of the set of interesting revisions.
95
83 # (head = revision in the set that has no descendant in the set;
96 heads = [cl.node(r) for r in _limitheads(cl, striprev)]
84 # base = revision in the set that has no ancestor in the set)
97 seen = {}
85 tostrip = {striprev: 1}
86 saveheads = {}
87 savebases = []
88 for r in xrange(striprev + 1, cl.count()):
89 parents = cl.parentrevs(r)
90 if parents[0] in tostrip or parents[1] in tostrip:
91 # r is a descendant of striprev
92 tostrip[r] = 1
93 # if this is a merge and one of the parents does not descend
94 # from striprev, mark that parent as a savehead.
95 if parents[1] != nullrev:
96 for p in parents:
97 if p not in tostrip and p > striprev:
98 saveheads[p] = 1
99 else:
100 # if no parents of this revision will be stripped, mark it as
101 # a savebase
102 if parents[0] < striprev and parents[1] < striprev:
103 savebases.append(cl.node(r))
98
104
99 # search through all the heads, finding those where the revision
105 for p in parents:
100 # we want to strip away is an ancestor. Also look for merges
106 if p in saveheads:
101 # that might be turned into new heads by the strip.
107 del saveheads[p]
102 while heads:
108 saveheads[r] = 1
103 h = heads.pop()
104 n = h
105 while True:
106 seen[n] = 1
107 pp = cl.parents(n)
108 if pp[1] != nullid:
109 for p in pp:
110 if cl.rev(p) > striprev and p not in seen:
111 heads.append(p)
112 if pp[0] == nullid:
113 break
114 if cl.rev(pp[0]) < striprev:
115 break
116 n = pp[0]
117 if n == node:
118 break
119 r = cl.reachable(h, node)
120 if node not in r:
121 saveheads.append(h)
122 for x in r:
123 if cl.rev(x) > striprev:
124 savebases[x] = 1
125
109
110 saveheads = [cl.node(r) for r in saveheads]
126 files = _collectfiles(repo, striprev)
111 files = _collectfiles(repo, striprev)
127
112
128 extranodes = _collectextranodes(repo, files, striprev)
113 extranodes = _collectextranodes(repo, files, striprev)
129
114
130 # create a changegroup for all the branches we need to keep
115 # create a changegroup for all the branches we need to keep
131 if backup == "all":
116 if backup == "all":
132 _bundle(repo, [node], cl.heads(), node, 'backup')
117 _bundle(repo, [node], cl.heads(), node, 'backup')
133 if saveheads or extranodes:
118 if saveheads or extranodes:
134 chgrpfile = _bundle(repo, savebases.keys(), saveheads, node, 'temp',
119 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
135 extranodes)
120 extranodes)
136
121
137 cl.strip(striprev)
122 cl.strip(striprev)
138 repo.manifest.strip(striprev)
123 repo.manifest.strip(striprev)
139 for name in files:
124 for name in files:
140 f = repo.file(name)
125 f = repo.file(name)
141 f.strip(striprev)
126 f.strip(striprev)
142
127
143 if saveheads or extranodes:
128 if saveheads or extranodes:
144 ui.status("adding branch\n")
129 ui.status("adding branch\n")
145 f = open(chgrpfile, "rb")
130 f = open(chgrpfile, "rb")
146 gen = changegroup.readbundle(f, chgrpfile)
131 gen = changegroup.readbundle(f, chgrpfile)
147 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
132 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
148 f.close()
133 f.close()
149 if backup != "strip":
134 if backup != "strip":
150 os.unlink(chgrpfile)
135 os.unlink(chgrpfile)
151
136
@@ -1,1319 +1,1319 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
7 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import _
14 from i18n import _
15 import binascii, changegroup, errno, ancestor, mdiff, os
15 import binascii, changegroup, errno, ancestor, mdiff, os
16 import sha, struct, util, zlib
16 import sha, struct, util, zlib
17
17
18 _pack = struct.pack
18 _pack = struct.pack
19 _unpack = struct.unpack
19 _unpack = struct.unpack
20 _compress = zlib.compress
20 _compress = zlib.compress
21 _decompress = zlib.decompress
21 _decompress = zlib.decompress
22 _sha = sha.new
22 _sha = sha.new
23
23
24 # revlog flags
24 # revlog flags
25 REVLOGV0 = 0
25 REVLOGV0 = 0
26 REVLOGNG = 1
26 REVLOGNG = 1
27 REVLOGNGINLINEDATA = (1 << 16)
27 REVLOGNGINLINEDATA = (1 << 16)
28 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
28 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
29 REVLOG_DEFAULT_FORMAT = REVLOGNG
29 REVLOG_DEFAULT_FORMAT = REVLOGNG
30 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
30 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
31
31
32 class RevlogError(Exception):
32 class RevlogError(Exception):
33 pass
33 pass
34
34
35 class LookupError(RevlogError):
35 class LookupError(RevlogError):
36 def __init__(self, name, message=None):
36 def __init__(self, name, message=None):
37 if message is None:
37 if message is None:
38 message = _('not found: %s') % name
38 message = _('not found: %s') % name
39 RevlogError.__init__(self, message)
39 RevlogError.__init__(self, message)
40 self.name = name
40 self.name = name
41
41
42 def getoffset(q):
42 def getoffset(q):
43 return int(q >> 16)
43 return int(q >> 16)
44
44
45 def gettype(q):
45 def gettype(q):
46 return int(q & 0xFFFF)
46 return int(q & 0xFFFF)
47
47
48 def offset_type(offset, type):
48 def offset_type(offset, type):
49 return long(long(offset) << 16 | type)
49 return long(long(offset) << 16 | type)
50
50
51 def hash(text, p1, p2):
51 def hash(text, p1, p2):
52 """generate a hash from the given text and its parent hashes
52 """generate a hash from the given text and its parent hashes
53
53
54 This hash combines both the current file contents and its history
54 This hash combines both the current file contents and its history
55 in a manner that makes it easy to distinguish nodes with the same
55 in a manner that makes it easy to distinguish nodes with the same
56 content in the revision graph.
56 content in the revision graph.
57 """
57 """
58 l = [p1, p2]
58 l = [p1, p2]
59 l.sort()
59 l.sort()
60 s = _sha(l[0])
60 s = _sha(l[0])
61 s.update(l[1])
61 s.update(l[1])
62 s.update(text)
62 s.update(text)
63 return s.digest()
63 return s.digest()
64
64
65 def compress(text):
65 def compress(text):
66 """ generate a possibly-compressed representation of text """
66 """ generate a possibly-compressed representation of text """
67 if not text:
67 if not text:
68 return ("", text)
68 return ("", text)
69 l = len(text)
69 l = len(text)
70 bin = None
70 bin = None
71 if l < 44:
71 if l < 44:
72 pass
72 pass
73 elif l > 1000000:
73 elif l > 1000000:
74 # zlib makes an internal copy, thus doubling memory usage for
74 # zlib makes an internal copy, thus doubling memory usage for
75 # large files, so lets do this in pieces
75 # large files, so lets do this in pieces
76 z = zlib.compressobj()
76 z = zlib.compressobj()
77 p = []
77 p = []
78 pos = 0
78 pos = 0
79 while pos < l:
79 while pos < l:
80 pos2 = pos + 2**20
80 pos2 = pos + 2**20
81 p.append(z.compress(text[pos:pos2]))
81 p.append(z.compress(text[pos:pos2]))
82 pos = pos2
82 pos = pos2
83 p.append(z.flush())
83 p.append(z.flush())
84 if sum(map(len, p)) < l:
84 if sum(map(len, p)) < l:
85 bin = "".join(p)
85 bin = "".join(p)
86 else:
86 else:
87 bin = _compress(text)
87 bin = _compress(text)
88 if bin is None or len(bin) > l:
88 if bin is None or len(bin) > l:
89 if text[0] == '\0':
89 if text[0] == '\0':
90 return ("", text)
90 return ("", text)
91 return ('u', text)
91 return ('u', text)
92 return ("", bin)
92 return ("", bin)
93
93
94 def decompress(bin):
94 def decompress(bin):
95 """ decompress the given input """
95 """ decompress the given input """
96 if not bin:
96 if not bin:
97 return bin
97 return bin
98 t = bin[0]
98 t = bin[0]
99 if t == '\0':
99 if t == '\0':
100 return bin
100 return bin
101 if t == 'x':
101 if t == 'x':
102 return _decompress(bin)
102 return _decompress(bin)
103 if t == 'u':
103 if t == 'u':
104 return bin[1:]
104 return bin[1:]
105 raise RevlogError(_("unknown compression type %r") % t)
105 raise RevlogError(_("unknown compression type %r") % t)
106
106
107 class lazyparser(object):
107 class lazyparser(object):
108 """
108 """
109 this class avoids the need to parse the entirety of large indices
109 this class avoids the need to parse the entirety of large indices
110 """
110 """
111
111
112 # lazyparser is not safe to use on windows if win32 extensions not
112 # lazyparser is not safe to use on windows if win32 extensions not
113 # available. it keeps file handle open, which make it not possible
113 # available. it keeps file handle open, which make it not possible
114 # to break hardlinks on local cloned repos.
114 # to break hardlinks on local cloned repos.
115
115
116 def __init__(self, dataf, size):
116 def __init__(self, dataf, size):
117 self.dataf = dataf
117 self.dataf = dataf
118 self.s = struct.calcsize(indexformatng)
118 self.s = struct.calcsize(indexformatng)
119 self.datasize = size
119 self.datasize = size
120 self.l = size/self.s
120 self.l = size/self.s
121 self.index = [None] * self.l
121 self.index = [None] * self.l
122 self.map = {nullid: nullrev}
122 self.map = {nullid: nullrev}
123 self.allmap = 0
123 self.allmap = 0
124 self.all = 0
124 self.all = 0
125 self.mapfind_count = 0
125 self.mapfind_count = 0
126
126
127 def loadmap(self):
127 def loadmap(self):
128 """
128 """
129 during a commit, we need to make sure the rev being added is
129 during a commit, we need to make sure the rev being added is
130 not a duplicate. This requires loading the entire index,
130 not a duplicate. This requires loading the entire index,
131 which is fairly slow. loadmap can load up just the node map,
131 which is fairly slow. loadmap can load up just the node map,
132 which takes much less time.
132 which takes much less time.
133 """
133 """
134 if self.allmap:
134 if self.allmap:
135 return
135 return
136 end = self.datasize
136 end = self.datasize
137 self.allmap = 1
137 self.allmap = 1
138 cur = 0
138 cur = 0
139 count = 0
139 count = 0
140 blocksize = self.s * 256
140 blocksize = self.s * 256
141 self.dataf.seek(0)
141 self.dataf.seek(0)
142 while cur < end:
142 while cur < end:
143 data = self.dataf.read(blocksize)
143 data = self.dataf.read(blocksize)
144 off = 0
144 off = 0
145 for x in xrange(256):
145 for x in xrange(256):
146 n = data[off + ngshaoffset:off + ngshaoffset + 20]
146 n = data[off + ngshaoffset:off + ngshaoffset + 20]
147 self.map[n] = count
147 self.map[n] = count
148 count += 1
148 count += 1
149 if count >= self.l:
149 if count >= self.l:
150 break
150 break
151 off += self.s
151 off += self.s
152 cur += blocksize
152 cur += blocksize
153
153
154 def loadblock(self, blockstart, blocksize, data=None):
154 def loadblock(self, blockstart, blocksize, data=None):
155 if self.all:
155 if self.all:
156 return
156 return
157 if data is None:
157 if data is None:
158 self.dataf.seek(blockstart)
158 self.dataf.seek(blockstart)
159 if blockstart + blocksize > self.datasize:
159 if blockstart + blocksize > self.datasize:
160 # the revlog may have grown since we've started running,
160 # the revlog may have grown since we've started running,
161 # but we don't have space in self.index for more entries.
161 # but we don't have space in self.index for more entries.
162 # limit blocksize so that we don't get too much data.
162 # limit blocksize so that we don't get too much data.
163 blocksize = max(self.datasize - blockstart, 0)
163 blocksize = max(self.datasize - blockstart, 0)
164 data = self.dataf.read(blocksize)
164 data = self.dataf.read(blocksize)
165 lend = len(data) / self.s
165 lend = len(data) / self.s
166 i = blockstart / self.s
166 i = blockstart / self.s
167 off = 0
167 off = 0
168 # lazyindex supports __delitem__
168 # lazyindex supports __delitem__
169 if lend > len(self.index) - i:
169 if lend > len(self.index) - i:
170 lend = len(self.index) - i
170 lend = len(self.index) - i
171 for x in xrange(lend):
171 for x in xrange(lend):
172 if self.index[i + x] == None:
172 if self.index[i + x] == None:
173 b = data[off : off + self.s]
173 b = data[off : off + self.s]
174 self.index[i + x] = b
174 self.index[i + x] = b
175 n = b[ngshaoffset:ngshaoffset + 20]
175 n = b[ngshaoffset:ngshaoffset + 20]
176 self.map[n] = i + x
176 self.map[n] = i + x
177 off += self.s
177 off += self.s
178
178
179 def findnode(self, node):
179 def findnode(self, node):
180 """search backwards through the index file for a specific node"""
180 """search backwards through the index file for a specific node"""
181 if self.allmap:
181 if self.allmap:
182 return None
182 return None
183
183
184 # hg log will cause many many searches for the manifest
184 # hg log will cause many many searches for the manifest
185 # nodes. After we get called a few times, just load the whole
185 # nodes. After we get called a few times, just load the whole
186 # thing.
186 # thing.
187 if self.mapfind_count > 8:
187 if self.mapfind_count > 8:
188 self.loadmap()
188 self.loadmap()
189 if node in self.map:
189 if node in self.map:
190 return node
190 return node
191 return None
191 return None
192 self.mapfind_count += 1
192 self.mapfind_count += 1
193 last = self.l - 1
193 last = self.l - 1
194 while self.index[last] != None:
194 while self.index[last] != None:
195 if last == 0:
195 if last == 0:
196 self.all = 1
196 self.all = 1
197 self.allmap = 1
197 self.allmap = 1
198 return None
198 return None
199 last -= 1
199 last -= 1
200 end = (last + 1) * self.s
200 end = (last + 1) * self.s
201 blocksize = self.s * 256
201 blocksize = self.s * 256
202 while end >= 0:
202 while end >= 0:
203 start = max(end - blocksize, 0)
203 start = max(end - blocksize, 0)
204 self.dataf.seek(start)
204 self.dataf.seek(start)
205 data = self.dataf.read(end - start)
205 data = self.dataf.read(end - start)
206 findend = end - start
206 findend = end - start
207 while True:
207 while True:
208 # we're searching backwards, so we have to make sure
208 # we're searching backwards, so we have to make sure
209 # we don't find a changeset where this node is a parent
209 # we don't find a changeset where this node is a parent
210 off = data.find(node, 0, findend)
210 off = data.find(node, 0, findend)
211 findend = off
211 findend = off
212 if off >= 0:
212 if off >= 0:
213 i = off / self.s
213 i = off / self.s
214 off = i * self.s
214 off = i * self.s
215 n = data[off + ngshaoffset:off + ngshaoffset + 20]
215 n = data[off + ngshaoffset:off + ngshaoffset + 20]
216 if n == node:
216 if n == node:
217 self.map[n] = i + start / self.s
217 self.map[n] = i + start / self.s
218 return node
218 return node
219 else:
219 else:
220 break
220 break
221 end -= blocksize
221 end -= blocksize
222 return None
222 return None
223
223
224 def loadindex(self, i=None, end=None):
224 def loadindex(self, i=None, end=None):
225 if self.all:
225 if self.all:
226 return
226 return
227 all = False
227 all = False
228 if i == None:
228 if i == None:
229 blockstart = 0
229 blockstart = 0
230 blocksize = (65536 / self.s) * self.s
230 blocksize = (65536 / self.s) * self.s
231 end = self.datasize
231 end = self.datasize
232 all = True
232 all = True
233 else:
233 else:
234 if end:
234 if end:
235 blockstart = i * self.s
235 blockstart = i * self.s
236 end = end * self.s
236 end = end * self.s
237 blocksize = end - blockstart
237 blocksize = end - blockstart
238 else:
238 else:
239 blockstart = (i & ~1023) * self.s
239 blockstart = (i & ~1023) * self.s
240 blocksize = self.s * 1024
240 blocksize = self.s * 1024
241 end = blockstart + blocksize
241 end = blockstart + blocksize
242 while blockstart < end:
242 while blockstart < end:
243 self.loadblock(blockstart, blocksize)
243 self.loadblock(blockstart, blocksize)
244 blockstart += blocksize
244 blockstart += blocksize
245 if all:
245 if all:
246 self.all = True
246 self.all = True
247
247
248 class lazyindex(object):
248 class lazyindex(object):
249 """a lazy version of the index array"""
249 """a lazy version of the index array"""
250 def __init__(self, parser):
250 def __init__(self, parser):
251 self.p = parser
251 self.p = parser
252 def __len__(self):
252 def __len__(self):
253 return len(self.p.index)
253 return len(self.p.index)
254 def load(self, pos):
254 def load(self, pos):
255 if pos < 0:
255 if pos < 0:
256 pos += len(self.p.index)
256 pos += len(self.p.index)
257 self.p.loadindex(pos)
257 self.p.loadindex(pos)
258 return self.p.index[pos]
258 return self.p.index[pos]
259 def __getitem__(self, pos):
259 def __getitem__(self, pos):
260 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
260 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
261 def __setitem__(self, pos, item):
261 def __setitem__(self, pos, item):
262 self.p.index[pos] = _pack(indexformatng, *item)
262 self.p.index[pos] = _pack(indexformatng, *item)
263 def __delitem__(self, pos):
263 def __delitem__(self, pos):
264 del self.p.index[pos]
264 del self.p.index[pos]
265 def insert(self, pos, e):
265 def insert(self, pos, e):
266 self.p.index.insert(pos, _pack(indexformatng, *e))
266 self.p.index.insert(pos, _pack(indexformatng, *e))
267 def append(self, e):
267 def append(self, e):
268 self.p.index.append(_pack(indexformatng, *e))
268 self.p.index.append(_pack(indexformatng, *e))
269
269
270 class lazymap(object):
270 class lazymap(object):
271 """a lazy version of the node map"""
271 """a lazy version of the node map"""
272 def __init__(self, parser):
272 def __init__(self, parser):
273 self.p = parser
273 self.p = parser
274 def load(self, key):
274 def load(self, key):
275 n = self.p.findnode(key)
275 n = self.p.findnode(key)
276 if n == None:
276 if n == None:
277 raise KeyError(key)
277 raise KeyError(key)
278 def __contains__(self, key):
278 def __contains__(self, key):
279 if key in self.p.map:
279 if key in self.p.map:
280 return True
280 return True
281 self.p.loadmap()
281 self.p.loadmap()
282 return key in self.p.map
282 return key in self.p.map
283 def __iter__(self):
283 def __iter__(self):
284 yield nullid
284 yield nullid
285 for i in xrange(self.p.l):
285 for i in xrange(self.p.l):
286 ret = self.p.index[i]
286 ret = self.p.index[i]
287 if not ret:
287 if not ret:
288 self.p.loadindex(i)
288 self.p.loadindex(i)
289 ret = self.p.index[i]
289 ret = self.p.index[i]
290 if isinstance(ret, str):
290 if isinstance(ret, str):
291 ret = _unpack(indexformatng, ret)
291 ret = _unpack(indexformatng, ret)
292 yield ret[7]
292 yield ret[7]
293 def __getitem__(self, key):
293 def __getitem__(self, key):
294 try:
294 try:
295 return self.p.map[key]
295 return self.p.map[key]
296 except KeyError:
296 except KeyError:
297 try:
297 try:
298 self.load(key)
298 self.load(key)
299 return self.p.map[key]
299 return self.p.map[key]
300 except KeyError:
300 except KeyError:
301 raise KeyError("node " + hex(key))
301 raise KeyError("node " + hex(key))
302 def __setitem__(self, key, val):
302 def __setitem__(self, key, val):
303 self.p.map[key] = val
303 self.p.map[key] = val
304 def __delitem__(self, key):
304 def __delitem__(self, key):
305 del self.p.map[key]
305 del self.p.map[key]
306
306
307 indexformatv0 = ">4l20s20s20s"
307 indexformatv0 = ">4l20s20s20s"
308 v0shaoffset = 56
308 v0shaoffset = 56
309
309
310 class revlogoldio(object):
310 class revlogoldio(object):
311 def __init__(self):
311 def __init__(self):
312 self.size = struct.calcsize(indexformatv0)
312 self.size = struct.calcsize(indexformatv0)
313
313
314 def parseindex(self, fp, inline):
314 def parseindex(self, fp, inline):
315 s = self.size
315 s = self.size
316 index = []
316 index = []
317 nodemap = {nullid: nullrev}
317 nodemap = {nullid: nullrev}
318 n = off = 0
318 n = off = 0
319 data = fp.read()
319 data = fp.read()
320 l = len(data)
320 l = len(data)
321 while off + s <= l:
321 while off + s <= l:
322 cur = data[off:off + s]
322 cur = data[off:off + s]
323 off += s
323 off += s
324 e = _unpack(indexformatv0, cur)
324 e = _unpack(indexformatv0, cur)
325 # transform to revlogv1 format
325 # transform to revlogv1 format
326 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
326 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
327 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
327 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
328 index.append(e2)
328 index.append(e2)
329 nodemap[e[6]] = n
329 nodemap[e[6]] = n
330 n += 1
330 n += 1
331
331
332 return index, nodemap, None
332 return index, nodemap, None
333
333
334 def packentry(self, entry, node, version, rev):
334 def packentry(self, entry, node, version, rev):
335 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
335 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
336 node(entry[5]), node(entry[6]), entry[7])
336 node(entry[5]), node(entry[6]), entry[7])
337 return _pack(indexformatv0, *e2)
337 return _pack(indexformatv0, *e2)
338
338
339 # index ng:
339 # index ng:
340 # 6 bytes offset
340 # 6 bytes offset
341 # 2 bytes flags
341 # 2 bytes flags
342 # 4 bytes compressed length
342 # 4 bytes compressed length
343 # 4 bytes uncompressed length
343 # 4 bytes uncompressed length
344 # 4 bytes: base rev
344 # 4 bytes: base rev
345 # 4 bytes link rev
345 # 4 bytes link rev
346 # 4 bytes parent 1 rev
346 # 4 bytes parent 1 rev
347 # 4 bytes parent 2 rev
347 # 4 bytes parent 2 rev
348 # 32 bytes: nodeid
348 # 32 bytes: nodeid
349 indexformatng = ">Qiiiiii20s12x"
349 indexformatng = ">Qiiiiii20s12x"
350 ngshaoffset = 32
350 ngshaoffset = 32
351 versionformat = ">I"
351 versionformat = ">I"
352
352
353 class revlogio(object):
353 class revlogio(object):
354 def __init__(self):
354 def __init__(self):
355 self.size = struct.calcsize(indexformatng)
355 self.size = struct.calcsize(indexformatng)
356
356
357 def parseindex(self, fp, inline):
357 def parseindex(self, fp, inline):
358 try:
358 try:
359 size = util.fstat(fp).st_size
359 size = util.fstat(fp).st_size
360 except AttributeError:
360 except AttributeError:
361 size = 0
361 size = 0
362
362
363 if util.openhardlinks() and not inline and size > 1000000:
363 if util.openhardlinks() and not inline and size > 1000000:
364 # big index, let's parse it on demand
364 # big index, let's parse it on demand
365 parser = lazyparser(fp, size)
365 parser = lazyparser(fp, size)
366 index = lazyindex(parser)
366 index = lazyindex(parser)
367 nodemap = lazymap(parser)
367 nodemap = lazymap(parser)
368 e = list(index[0])
368 e = list(index[0])
369 type = gettype(e[0])
369 type = gettype(e[0])
370 e[0] = offset_type(0, type)
370 e[0] = offset_type(0, type)
371 index[0] = e
371 index[0] = e
372 return index, nodemap, None
372 return index, nodemap, None
373
373
374 s = self.size
374 s = self.size
375 cache = None
375 cache = None
376 index = []
376 index = []
377 nodemap = {nullid: nullrev}
377 nodemap = {nullid: nullrev}
378 n = off = 0
378 n = off = 0
379 # if we're not using lazymap, always read the whole index
379 # if we're not using lazymap, always read the whole index
380 data = fp.read()
380 data = fp.read()
381 l = len(data) - s
381 l = len(data) - s
382 append = index.append
382 append = index.append
383 if inline:
383 if inline:
384 cache = (0, data)
384 cache = (0, data)
385 while off <= l:
385 while off <= l:
386 e = _unpack(indexformatng, data[off:off + s])
386 e = _unpack(indexformatng, data[off:off + s])
387 nodemap[e[7]] = n
387 nodemap[e[7]] = n
388 append(e)
388 append(e)
389 n += 1
389 n += 1
390 if e[1] < 0:
390 if e[1] < 0:
391 break
391 break
392 off += e[1] + s
392 off += e[1] + s
393 else:
393 else:
394 while off <= l:
394 while off <= l:
395 e = _unpack(indexformatng, data[off:off + s])
395 e = _unpack(indexformatng, data[off:off + s])
396 nodemap[e[7]] = n
396 nodemap[e[7]] = n
397 append(e)
397 append(e)
398 n += 1
398 n += 1
399 off += s
399 off += s
400
400
401 e = list(index[0])
401 e = list(index[0])
402 type = gettype(e[0])
402 type = gettype(e[0])
403 e[0] = offset_type(0, type)
403 e[0] = offset_type(0, type)
404 index[0] = e
404 index[0] = e
405
405
406 return index, nodemap, cache
406 return index, nodemap, cache
407
407
408 def packentry(self, entry, node, version, rev):
408 def packentry(self, entry, node, version, rev):
409 p = _pack(indexformatng, *entry)
409 p = _pack(indexformatng, *entry)
410 if rev == 0:
410 if rev == 0:
411 p = _pack(versionformat, version) + p[4:]
411 p = _pack(versionformat, version) + p[4:]
412 return p
412 return p
413
413
414 class revlog(object):
414 class revlog(object):
415 """
415 """
416 the underlying revision storage object
416 the underlying revision storage object
417
417
418 A revlog consists of two parts, an index and the revision data.
418 A revlog consists of two parts, an index and the revision data.
419
419
420 The index is a file with a fixed record size containing
420 The index is a file with a fixed record size containing
421 information on each revision, includings its nodeid (hash), the
421 information on each revision, includings its nodeid (hash), the
422 nodeids of its parents, the position and offset of its data within
422 nodeids of its parents, the position and offset of its data within
423 the data file, and the revision it's based on. Finally, each entry
423 the data file, and the revision it's based on. Finally, each entry
424 contains a linkrev entry that can serve as a pointer to external
424 contains a linkrev entry that can serve as a pointer to external
425 data.
425 data.
426
426
427 The revision data itself is a linear collection of data chunks.
427 The revision data itself is a linear collection of data chunks.
428 Each chunk represents a revision and is usually represented as a
428 Each chunk represents a revision and is usually represented as a
429 delta against the previous chunk. To bound lookup time, runs of
429 delta against the previous chunk. To bound lookup time, runs of
430 deltas are limited to about 2 times the length of the original
430 deltas are limited to about 2 times the length of the original
431 version data. This makes retrieval of a version proportional to
431 version data. This makes retrieval of a version proportional to
432 its size, or O(1) relative to the number of revisions.
432 its size, or O(1) relative to the number of revisions.
433
433
434 Both pieces of the revlog are written to in an append-only
434 Both pieces of the revlog are written to in an append-only
435 fashion, which means we never need to rewrite a file to insert or
435 fashion, which means we never need to rewrite a file to insert or
436 remove data, and can use some simple techniques to avoid the need
436 remove data, and can use some simple techniques to avoid the need
437 for locking while reading.
437 for locking while reading.
438 """
438 """
439 def __init__(self, opener, indexfile):
439 def __init__(self, opener, indexfile):
440 """
440 """
441 create a revlog object
441 create a revlog object
442
442
443 opener is a function that abstracts the file opening operation
443 opener is a function that abstracts the file opening operation
444 and can be used to implement COW semantics or the like.
444 and can be used to implement COW semantics or the like.
445 """
445 """
446 self.indexfile = indexfile
446 self.indexfile = indexfile
447 self.datafile = indexfile[:-2] + ".d"
447 self.datafile = indexfile[:-2] + ".d"
448 self.opener = opener
448 self.opener = opener
449 self._cache = None
449 self._cache = None
450 self._chunkcache = None
450 self._chunkcache = None
451 self.nodemap = {nullid: nullrev}
451 self.nodemap = {nullid: nullrev}
452 self.index = []
452 self.index = []
453
453
454 v = REVLOG_DEFAULT_VERSION
454 v = REVLOG_DEFAULT_VERSION
455 if hasattr(opener, "defversion"):
455 if hasattr(opener, "defversion"):
456 v = opener.defversion
456 v = opener.defversion
457 if v & REVLOGNG:
457 if v & REVLOGNG:
458 v |= REVLOGNGINLINEDATA
458 v |= REVLOGNGINLINEDATA
459
459
460 i = ""
460 i = ""
461 try:
461 try:
462 f = self.opener(self.indexfile)
462 f = self.opener(self.indexfile)
463 i = f.read(4)
463 i = f.read(4)
464 f.seek(0)
464 f.seek(0)
465 if len(i) > 0:
465 if len(i) > 0:
466 v = struct.unpack(versionformat, i)[0]
466 v = struct.unpack(versionformat, i)[0]
467 except IOError, inst:
467 except IOError, inst:
468 if inst.errno != errno.ENOENT:
468 if inst.errno != errno.ENOENT:
469 raise
469 raise
470
470
471 self.version = v
471 self.version = v
472 self._inline = v & REVLOGNGINLINEDATA
472 self._inline = v & REVLOGNGINLINEDATA
473 flags = v & ~0xFFFF
473 flags = v & ~0xFFFF
474 fmt = v & 0xFFFF
474 fmt = v & 0xFFFF
475 if fmt == REVLOGV0 and flags:
475 if fmt == REVLOGV0 and flags:
476 raise RevlogError(_("index %s unknown flags %#04x for format v0")
476 raise RevlogError(_("index %s unknown flags %#04x for format v0")
477 % (self.indexfile, flags >> 16))
477 % (self.indexfile, flags >> 16))
478 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
478 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
479 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
479 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
480 % (self.indexfile, flags >> 16))
480 % (self.indexfile, flags >> 16))
481 elif fmt > REVLOGNG:
481 elif fmt > REVLOGNG:
482 raise RevlogError(_("index %s unknown format %d")
482 raise RevlogError(_("index %s unknown format %d")
483 % (self.indexfile, fmt))
483 % (self.indexfile, fmt))
484
484
485 self._io = revlogio()
485 self._io = revlogio()
486 if self.version == REVLOGV0:
486 if self.version == REVLOGV0:
487 self._io = revlogoldio()
487 self._io = revlogoldio()
488 if i:
488 if i:
489 d = self._io.parseindex(f, self._inline)
489 d = self._io.parseindex(f, self._inline)
490 self.index, self.nodemap, self._chunkcache = d
490 self.index, self.nodemap, self._chunkcache = d
491
491
492 # add the magic null revision at -1
492 # add the magic null revision at -1
493 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
493 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
494
494
495 def _loadindex(self, start, end):
495 def _loadindex(self, start, end):
496 """load a block of indexes all at once from the lazy parser"""
496 """load a block of indexes all at once from the lazy parser"""
497 if isinstance(self.index, lazyindex):
497 if isinstance(self.index, lazyindex):
498 self.index.p.loadindex(start, end)
498 self.index.p.loadindex(start, end)
499
499
500 def _loadindexmap(self):
500 def _loadindexmap(self):
501 """loads both the map and the index from the lazy parser"""
501 """loads both the map and the index from the lazy parser"""
502 if isinstance(self.index, lazyindex):
502 if isinstance(self.index, lazyindex):
503 p = self.index.p
503 p = self.index.p
504 p.loadindex()
504 p.loadindex()
505 self.nodemap = p.map
505 self.nodemap = p.map
506
506
507 def _loadmap(self):
507 def _loadmap(self):
508 """loads the map from the lazy parser"""
508 """loads the map from the lazy parser"""
509 if isinstance(self.nodemap, lazymap):
509 if isinstance(self.nodemap, lazymap):
510 self.nodemap.p.loadmap()
510 self.nodemap.p.loadmap()
511 self.nodemap = self.nodemap.p.map
511 self.nodemap = self.nodemap.p.map
512
512
513 def tip(self):
513 def tip(self):
514 return self.node(len(self.index) - 2)
514 return self.node(len(self.index) - 2)
515 def count(self):
515 def count(self):
516 return len(self.index) - 1
516 return len(self.index) - 1
517
517
518 def rev(self, node):
518 def rev(self, node):
519 try:
519 try:
520 return self.nodemap[node]
520 return self.nodemap[node]
521 except KeyError:
521 except KeyError:
522 raise LookupError(hex(node), _('%s: no node %s') % (self.indexfile, hex(node)))
522 raise LookupError(hex(node), _('%s: no node %s') % (self.indexfile, hex(node)))
523 def node(self, rev):
523 def node(self, rev):
524 return self.index[rev][7]
524 return self.index[rev][7]
525 def linkrev(self, node):
525 def linkrev(self, node):
526 return self.index[self.rev(node)][4]
526 return self.index[self.rev(node)][4]
527 def parents(self, node):
527 def parents(self, node):
528 d = self.index[self.rev(node)][5:7]
528 d = self.index[self.rev(node)][5:7]
529 return (self.node(d[0]), self.node(d[1]))
529 return (self.node(d[0]), self.node(d[1]))
530 def parentrevs(self, rev):
530 def parentrevs(self, rev):
531 return self.index[rev][5:7]
531 return self.index[rev][5:7]
532 def start(self, rev):
532 def start(self, rev):
533 return int(self.index[rev][0] >> 16)
533 return int(self.index[rev][0] >> 16)
534 def end(self, rev):
534 def end(self, rev):
535 return self.start(rev) + self.length(rev)
535 return self.start(rev) + self.length(rev)
536 def length(self, rev):
536 def length(self, rev):
537 return self.index[rev][1]
537 return self.index[rev][1]
538 def base(self, rev):
538 def base(self, rev):
539 return self.index[rev][3]
539 return self.index[rev][3]
540
540
541 def size(self, rev):
541 def size(self, rev):
542 """return the length of the uncompressed text for a given revision"""
542 """return the length of the uncompressed text for a given revision"""
543 l = self.index[rev][2]
543 l = self.index[rev][2]
544 if l >= 0:
544 if l >= 0:
545 return l
545 return l
546
546
547 t = self.revision(self.node(rev))
547 t = self.revision(self.node(rev))
548 return len(t)
548 return len(t)
549
549
550 # alternate implementation, The advantage to this code is it
550 # alternate implementation, The advantage to this code is it
551 # will be faster for a single revision. But, the results are not
551 # will be faster for a single revision. But, the results are not
552 # cached, so finding the size of every revision will be slower.
552 # cached, so finding the size of every revision will be slower.
553 """
553 """
554 if self.cache and self.cache[1] == rev:
554 if self.cache and self.cache[1] == rev:
555 return len(self.cache[2])
555 return len(self.cache[2])
556
556
557 base = self.base(rev)
557 base = self.base(rev)
558 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
558 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
559 base = self.cache[1]
559 base = self.cache[1]
560 text = self.cache[2]
560 text = self.cache[2]
561 else:
561 else:
562 text = self.revision(self.node(base))
562 text = self.revision(self.node(base))
563
563
564 l = len(text)
564 l = len(text)
565 for x in xrange(base + 1, rev + 1):
565 for x in xrange(base + 1, rev + 1):
566 l = mdiff.patchedsize(l, self.chunk(x))
566 l = mdiff.patchedsize(l, self.chunk(x))
567 return l
567 return l
568 """
568 """
569
569
570 def reachable(self, node, stop=None):
570 def reachable(self, node, stop=None):
571 """return a hash of all nodes ancestral to a given node, including
571 """return a hash of all nodes ancestral to a given node, including
572 the node itself, stopping when stop is matched"""
572 the node itself, stopping when stop is matched"""
573 reachable = {}
573 reachable = {}
574 visit = [node]
574 visit = [node]
575 reachable[node] = 1
575 reachable[node] = 1
576 if stop:
576 if stop:
577 stopn = self.rev(stop)
577 stopn = self.rev(stop)
578 else:
578 else:
579 stopn = 0
579 stopn = 0
580 while visit:
580 while visit:
581 n = visit.pop(0)
581 n = visit.pop(0)
582 if n == stop:
582 if n == stop:
583 continue
583 continue
584 if n == nullid:
584 if n == nullid:
585 continue
585 continue
586 for p in self.parents(n):
586 for p in self.parents(n):
587 if self.rev(p) < stopn:
587 if self.rev(p) < stopn:
588 continue
588 continue
589 if p not in reachable:
589 if p not in reachable:
590 reachable[p] = 1
590 reachable[p] = 1
591 visit.append(p)
591 visit.append(p)
592 return reachable
592 return reachable
593
593
594 def nodesbetween(self, roots=None, heads=None):
594 def nodesbetween(self, roots=None, heads=None):
595 """Return a tuple containing three elements. Elements 1 and 2 contain
595 """Return a tuple containing three elements. Elements 1 and 2 contain
596 a final list bases and heads after all the unreachable ones have been
596 a final list bases and heads after all the unreachable ones have been
597 pruned. Element 0 contains a topologically sorted list of all
597 pruned. Element 0 contains a topologically sorted list of all
598
598
599 nodes that satisfy these constraints:
599 nodes that satisfy these constraints:
600 1. All nodes must be descended from a node in roots (the nodes on
600 1. All nodes must be descended from a node in roots (the nodes on
601 roots are considered descended from themselves).
601 roots are considered descended from themselves).
602 2. All nodes must also be ancestors of a node in heads (the nodes in
602 2. All nodes must also be ancestors of a node in heads (the nodes in
603 heads are considered to be their own ancestors).
603 heads are considered to be their own ancestors).
604
604
605 If roots is unspecified, nullid is assumed as the only root.
605 If roots is unspecified, nullid is assumed as the only root.
606 If heads is unspecified, it is taken to be the output of the
606 If heads is unspecified, it is taken to be the output of the
607 heads method (i.e. a list of all nodes in the repository that
607 heads method (i.e. a list of all nodes in the repository that
608 have no children)."""
608 have no children)."""
609 nonodes = ([], [], [])
609 nonodes = ([], [], [])
610 if roots is not None:
610 if roots is not None:
611 roots = list(roots)
611 roots = list(roots)
612 if not roots:
612 if not roots:
613 return nonodes
613 return nonodes
614 lowestrev = min([self.rev(n) for n in roots])
614 lowestrev = min([self.rev(n) for n in roots])
615 else:
615 else:
616 roots = [nullid] # Everybody's a descendent of nullid
616 roots = [nullid] # Everybody's a descendent of nullid
617 lowestrev = nullrev
617 lowestrev = nullrev
618 if (lowestrev == nullrev) and (heads is None):
618 if (lowestrev == nullrev) and (heads is None):
619 # We want _all_ the nodes!
619 # We want _all_ the nodes!
620 return ([self.node(r) for r in xrange(0, self.count())],
620 return ([self.node(r) for r in xrange(0, self.count())],
621 [nullid], list(self.heads()))
621 [nullid], list(self.heads()))
622 if heads is None:
622 if heads is None:
623 # All nodes are ancestors, so the latest ancestor is the last
623 # All nodes are ancestors, so the latest ancestor is the last
624 # node.
624 # node.
625 highestrev = self.count() - 1
625 highestrev = self.count() - 1
626 # Set ancestors to None to signal that every node is an ancestor.
626 # Set ancestors to None to signal that every node is an ancestor.
627 ancestors = None
627 ancestors = None
628 # Set heads to an empty dictionary for later discovery of heads
628 # Set heads to an empty dictionary for later discovery of heads
629 heads = {}
629 heads = {}
630 else:
630 else:
631 heads = list(heads)
631 heads = list(heads)
632 if not heads:
632 if not heads:
633 return nonodes
633 return nonodes
634 ancestors = {}
634 ancestors = {}
635 # Turn heads into a dictionary so we can remove 'fake' heads.
635 # Turn heads into a dictionary so we can remove 'fake' heads.
636 # Also, later we will be using it to filter out the heads we can't
636 # Also, later we will be using it to filter out the heads we can't
637 # find from roots.
637 # find from roots.
638 heads = dict.fromkeys(heads, 0)
638 heads = dict.fromkeys(heads, 0)
639 # Start at the top and keep marking parents until we're done.
639 # Start at the top and keep marking parents until we're done.
640 nodestotag = heads.keys()
640 nodestotag = heads.keys()
641 # Remember where the top was so we can use it as a limit later.
641 # Remember where the top was so we can use it as a limit later.
642 highestrev = max([self.rev(n) for n in nodestotag])
642 highestrev = max([self.rev(n) for n in nodestotag])
643 while nodestotag:
643 while nodestotag:
644 # grab a node to tag
644 # grab a node to tag
645 n = nodestotag.pop()
645 n = nodestotag.pop()
646 # Never tag nullid
646 # Never tag nullid
647 if n == nullid:
647 if n == nullid:
648 continue
648 continue
649 # A node's revision number represents its place in a
649 # A node's revision number represents its place in a
650 # topologically sorted list of nodes.
650 # topologically sorted list of nodes.
651 r = self.rev(n)
651 r = self.rev(n)
652 if r >= lowestrev:
652 if r >= lowestrev:
653 if n not in ancestors:
653 if n not in ancestors:
654 # If we are possibly a descendent of one of the roots
654 # If we are possibly a descendent of one of the roots
655 # and we haven't already been marked as an ancestor
655 # and we haven't already been marked as an ancestor
656 ancestors[n] = 1 # Mark as ancestor
656 ancestors[n] = 1 # Mark as ancestor
657 # Add non-nullid parents to list of nodes to tag.
657 # Add non-nullid parents to list of nodes to tag.
658 nodestotag.extend([p for p in self.parents(n) if
658 nodestotag.extend([p for p in self.parents(n) if
659 p != nullid])
659 p != nullid])
660 elif n in heads: # We've seen it before, is it a fake head?
660 elif n in heads: # We've seen it before, is it a fake head?
661 # So it is, real heads should not be the ancestors of
661 # So it is, real heads should not be the ancestors of
662 # any other heads.
662 # any other heads.
663 heads.pop(n)
663 heads.pop(n)
664 if not ancestors:
664 if not ancestors:
665 return nonodes
665 return nonodes
666 # Now that we have our set of ancestors, we want to remove any
666 # Now that we have our set of ancestors, we want to remove any
667 # roots that are not ancestors.
667 # roots that are not ancestors.
668
668
669 # If one of the roots was nullid, everything is included anyway.
669 # If one of the roots was nullid, everything is included anyway.
670 if lowestrev > nullrev:
670 if lowestrev > nullrev:
671 # But, since we weren't, let's recompute the lowest rev to not
671 # But, since we weren't, let's recompute the lowest rev to not
672 # include roots that aren't ancestors.
672 # include roots that aren't ancestors.
673
673
674 # Filter out roots that aren't ancestors of heads
674 # Filter out roots that aren't ancestors of heads
675 roots = [n for n in roots if n in ancestors]
675 roots = [n for n in roots if n in ancestors]
676 # Recompute the lowest revision
676 # Recompute the lowest revision
677 if roots:
677 if roots:
678 lowestrev = min([self.rev(n) for n in roots])
678 lowestrev = min([self.rev(n) for n in roots])
679 else:
679 else:
680 # No more roots? Return empty list
680 # No more roots? Return empty list
681 return nonodes
681 return nonodes
682 else:
682 else:
683 # We are descending from nullid, and don't need to care about
683 # We are descending from nullid, and don't need to care about
684 # any other roots.
684 # any other roots.
685 lowestrev = nullrev
685 lowestrev = nullrev
686 roots = [nullid]
686 roots = [nullid]
687 # Transform our roots list into a 'set' (i.e. a dictionary where the
687 # Transform our roots list into a 'set' (i.e. a dictionary where the
688 # values don't matter.
688 # values don't matter.
689 descendents = dict.fromkeys(roots, 1)
689 descendents = dict.fromkeys(roots, 1)
690 # Also, keep the original roots so we can filter out roots that aren't
690 # Also, keep the original roots so we can filter out roots that aren't
691 # 'real' roots (i.e. are descended from other roots).
691 # 'real' roots (i.e. are descended from other roots).
692 roots = descendents.copy()
692 roots = descendents.copy()
693 # Our topologically sorted list of output nodes.
693 # Our topologically sorted list of output nodes.
694 orderedout = []
694 orderedout = []
695 # Don't start at nullid since we don't want nullid in our output list,
695 # Don't start at nullid since we don't want nullid in our output list,
696 # and if nullid shows up in descedents, empty parents will look like
696 # and if nullid shows up in descedents, empty parents will look like
697 # they're descendents.
697 # they're descendents.
698 for r in xrange(max(lowestrev, 0), highestrev + 1):
698 for r in xrange(max(lowestrev, 0), highestrev + 1):
699 n = self.node(r)
699 n = self.node(r)
700 isdescendent = False
700 isdescendent = False
701 if lowestrev == nullrev: # Everybody is a descendent of nullid
701 if lowestrev == nullrev: # Everybody is a descendent of nullid
702 isdescendent = True
702 isdescendent = True
703 elif n in descendents:
703 elif n in descendents:
704 # n is already a descendent
704 # n is already a descendent
705 isdescendent = True
705 isdescendent = True
706 # This check only needs to be done here because all the roots
706 # This check only needs to be done here because all the roots
707 # will start being marked is descendents before the loop.
707 # will start being marked is descendents before the loop.
708 if n in roots:
708 if n in roots:
709 # If n was a root, check if it's a 'real' root.
709 # If n was a root, check if it's a 'real' root.
710 p = tuple(self.parents(n))
710 p = tuple(self.parents(n))
711 # If any of its parents are descendents, it's not a root.
711 # If any of its parents are descendents, it's not a root.
712 if (p[0] in descendents) or (p[1] in descendents):
712 if (p[0] in descendents) or (p[1] in descendents):
713 roots.pop(n)
713 roots.pop(n)
714 else:
714 else:
715 p = tuple(self.parents(n))
715 p = tuple(self.parents(n))
716 # A node is a descendent if either of its parents are
716 # A node is a descendent if either of its parents are
717 # descendents. (We seeded the dependents list with the roots
717 # descendents. (We seeded the dependents list with the roots
718 # up there, remember?)
718 # up there, remember?)
719 if (p[0] in descendents) or (p[1] in descendents):
719 if (p[0] in descendents) or (p[1] in descendents):
720 descendents[n] = 1
720 descendents[n] = 1
721 isdescendent = True
721 isdescendent = True
722 if isdescendent and ((ancestors is None) or (n in ancestors)):
722 if isdescendent and ((ancestors is None) or (n in ancestors)):
723 # Only include nodes that are both descendents and ancestors.
723 # Only include nodes that are both descendents and ancestors.
724 orderedout.append(n)
724 orderedout.append(n)
725 if (ancestors is not None) and (n in heads):
725 if (ancestors is not None) and (n in heads):
726 # We're trying to figure out which heads are reachable
726 # We're trying to figure out which heads are reachable
727 # from roots.
727 # from roots.
728 # Mark this head as having been reached
728 # Mark this head as having been reached
729 heads[n] = 1
729 heads[n] = 1
730 elif ancestors is None:
730 elif ancestors is None:
731 # Otherwise, we're trying to discover the heads.
731 # Otherwise, we're trying to discover the heads.
732 # Assume this is a head because if it isn't, the next step
732 # Assume this is a head because if it isn't, the next step
733 # will eventually remove it.
733 # will eventually remove it.
734 heads[n] = 1
734 heads[n] = 1
735 # But, obviously its parents aren't.
735 # But, obviously its parents aren't.
736 for p in self.parents(n):
736 for p in self.parents(n):
737 heads.pop(p, None)
737 heads.pop(p, None)
738 heads = [n for n in heads.iterkeys() if heads[n] != 0]
738 heads = [n for n in heads.iterkeys() if heads[n] != 0]
739 roots = roots.keys()
739 roots = roots.keys()
740 assert orderedout
740 assert orderedout
741 assert roots
741 assert roots
742 assert heads
742 assert heads
743 return (orderedout, roots, heads)
743 return (orderedout, roots, heads)
744
744
745 def heads(self, start=None, stop=None):
745 def heads(self, start=None, stop=None):
746 """return the list of all nodes that have no children
746 """return the list of all nodes that have no children
747
747
748 if start is specified, only heads that are descendants of
748 if start is specified, only heads that are descendants of
749 start will be returned
749 start will be returned
750 if stop is specified, it will consider all the revs from stop
750 if stop is specified, it will consider all the revs from stop
751 as if they had no children
751 as if they had no children
752 """
752 """
753 if start is None and stop is None:
753 if start is None and stop is None:
754 count = self.count()
754 count = self.count()
755 if not count:
755 if not count:
756 return [nullid]
756 return [nullid]
757 ishead = [1] * (count + 1)
757 ishead = [1] * (count + 1)
758 index = self.index
758 index = self.index
759 for r in xrange(count):
759 for r in xrange(count):
760 e = index[r]
760 e = index[r]
761 ishead[e[5]] = ishead[e[6]] = 0
761 ishead[e[5]] = ishead[e[6]] = 0
762 return [self.node(r) for r in xrange(count) if ishead[r]]
762 return [self.node(r) for r in xrange(count) if ishead[r]]
763
763
764 if start is None:
764 if start is None:
765 start = nullid
765 start = nullid
766 if stop is None:
766 if stop is None:
767 stop = []
767 stop = []
768 stoprevs = dict.fromkeys([self.rev(n) for n in stop])
768 stoprevs = dict.fromkeys([self.rev(n) for n in stop])
769 startrev = self.rev(start)
769 startrev = self.rev(start)
770 reachable = {startrev: 1}
770 reachable = {startrev: 1}
771 heads = {startrev: 1}
771 heads = {startrev: 1}
772
772
773 parentrevs = self.parentrevs
773 parentrevs = self.parentrevs
774 for r in xrange(startrev + 1, self.count()):
774 for r in xrange(startrev + 1, self.count()):
775 for p in parentrevs(r):
775 for p in parentrevs(r):
776 if p in reachable:
776 if p in reachable:
777 if r not in stoprevs:
777 if r not in stoprevs:
778 reachable[r] = 1
778 reachable[r] = 1
779 heads[r] = 1
779 heads[r] = 1
780 if p in heads and p not in stoprevs:
780 if p in heads and p not in stoprevs:
781 del heads[p]
781 del heads[p]
782
782
783 return [self.node(r) for r in heads]
783 return [self.node(r) for r in heads]
784
784
785 def children(self, node):
785 def children(self, node):
786 """find the children of a given node"""
786 """find the children of a given node"""
787 c = []
787 c = []
788 p = self.rev(node)
788 p = self.rev(node)
789 for r in range(p + 1, self.count()):
789 for r in range(p + 1, self.count()):
790 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
790 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
791 if prevs:
791 if prevs:
792 for pr in prevs:
792 for pr in prevs:
793 if pr == p:
793 if pr == p:
794 c.append(self.node(r))
794 c.append(self.node(r))
795 elif p == nullrev:
795 elif p == nullrev:
796 c.append(self.node(r))
796 c.append(self.node(r))
797 return c
797 return c
798
798
799 def _match(self, id):
799 def _match(self, id):
800 if isinstance(id, (long, int)):
800 if isinstance(id, (long, int)):
801 # rev
801 # rev
802 return self.node(id)
802 return self.node(id)
803 if len(id) == 20:
803 if len(id) == 20:
804 # possibly a binary node
804 # possibly a binary node
805 # odds of a binary node being all hex in ASCII are 1 in 10**25
805 # odds of a binary node being all hex in ASCII are 1 in 10**25
806 try:
806 try:
807 node = id
807 node = id
808 r = self.rev(node) # quick search the index
808 r = self.rev(node) # quick search the index
809 return node
809 return node
810 except LookupError:
810 except LookupError:
811 pass # may be partial hex id
811 pass # may be partial hex id
812 try:
812 try:
813 # str(rev)
813 # str(rev)
814 rev = int(id)
814 rev = int(id)
815 if str(rev) != id:
815 if str(rev) != id:
816 raise ValueError
816 raise ValueError
817 if rev < 0:
817 if rev < 0:
818 rev = self.count() + rev
818 rev = self.count() + rev
819 if rev < 0 or rev >= self.count():
819 if rev < 0 or rev >= self.count():
820 raise ValueError
820 raise ValueError
821 return self.node(rev)
821 return self.node(rev)
822 except (ValueError, OverflowError):
822 except (ValueError, OverflowError):
823 pass
823 pass
824 if len(id) == 40:
824 if len(id) == 40:
825 try:
825 try:
826 # a full hex nodeid?
826 # a full hex nodeid?
827 node = bin(id)
827 node = bin(id)
828 r = self.rev(node)
828 r = self.rev(node)
829 return node
829 return node
830 except TypeError:
830 except TypeError:
831 pass
831 pass
832
832
833 def _partialmatch(self, id):
833 def _partialmatch(self, id):
834 if len(id) < 40:
834 if len(id) < 40:
835 try:
835 try:
836 # hex(node)[:...]
836 # hex(node)[:...]
837 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
837 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
838 node = None
838 node = None
839 for n in self.nodemap:
839 for n in self.nodemap:
840 if n.startswith(bin_id) and hex(n).startswith(id):
840 if n.startswith(bin_id) and hex(n).startswith(id):
841 if node is not None:
841 if node is not None:
842 raise LookupError(hex(node),
842 raise LookupError(hex(node),
843 _("Ambiguous identifier"))
843 _("Ambiguous identifier"))
844 node = n
844 node = n
845 if node is not None:
845 if node is not None:
846 return node
846 return node
847 except TypeError:
847 except TypeError:
848 pass
848 pass
849
849
850 def lookup(self, id):
850 def lookup(self, id):
851 """locate a node based on:
851 """locate a node based on:
852 - revision number or str(revision number)
852 - revision number or str(revision number)
853 - nodeid or subset of hex nodeid
853 - nodeid or subset of hex nodeid
854 """
854 """
855 n = self._match(id)
855 n = self._match(id)
856 if n is not None:
856 if n is not None:
857 return n
857 return n
858 n = self._partialmatch(id)
858 n = self._partialmatch(id)
859 if n:
859 if n:
860 return n
860 return n
861
861
862 raise LookupError(id, _("No match found"))
862 raise LookupError(id, _("No match found"))
863
863
864 def cmp(self, node, text):
864 def cmp(self, node, text):
865 """compare text with a given file revision"""
865 """compare text with a given file revision"""
866 p1, p2 = self.parents(node)
866 p1, p2 = self.parents(node)
867 return hash(text, p1, p2) != node
867 return hash(text, p1, p2) != node
868
868
869 def chunk(self, rev, df=None):
869 def chunk(self, rev, df=None):
870 def loadcache(df):
870 def loadcache(df):
871 if not df:
871 if not df:
872 if self._inline:
872 if self._inline:
873 df = self.opener(self.indexfile)
873 df = self.opener(self.indexfile)
874 else:
874 else:
875 df = self.opener(self.datafile)
875 df = self.opener(self.datafile)
876 df.seek(start)
876 df.seek(start)
877 self._chunkcache = (start, df.read(cache_length))
877 self._chunkcache = (start, df.read(cache_length))
878
878
879 start, length = self.start(rev), self.length(rev)
879 start, length = self.start(rev), self.length(rev)
880 if self._inline:
880 if self._inline:
881 start += (rev + 1) * self._io.size
881 start += (rev + 1) * self._io.size
882 end = start + length
882 end = start + length
883
883
884 offset = 0
884 offset = 0
885 if not self._chunkcache:
885 if not self._chunkcache:
886 cache_length = max(65536, length)
886 cache_length = max(65536, length)
887 loadcache(df)
887 loadcache(df)
888 else:
888 else:
889 cache_start = self._chunkcache[0]
889 cache_start = self._chunkcache[0]
890 cache_length = len(self._chunkcache[1])
890 cache_length = len(self._chunkcache[1])
891 cache_end = cache_start + cache_length
891 cache_end = cache_start + cache_length
892 if start >= cache_start and end <= cache_end:
892 if start >= cache_start and end <= cache_end:
893 # it is cached
893 # it is cached
894 offset = start - cache_start
894 offset = start - cache_start
895 else:
895 else:
896 cache_length = max(65536, length)
896 cache_length = max(65536, length)
897 loadcache(df)
897 loadcache(df)
898
898
899 # avoid copying large chunks
899 # avoid copying large chunks
900 c = self._chunkcache[1]
900 c = self._chunkcache[1]
901 if cache_length != length:
901 if cache_length != length:
902 c = c[offset:offset + length]
902 c = c[offset:offset + length]
903
903
904 return decompress(c)
904 return decompress(c)
905
905
906 def delta(self, node):
906 def delta(self, node):
907 """return or calculate a delta between a node and its predecessor"""
907 """return or calculate a delta between a node and its predecessor"""
908 r = self.rev(node)
908 r = self.rev(node)
909 return self.revdiff(r - 1, r)
909 return self.revdiff(r - 1, r)
910
910
911 def revdiff(self, rev1, rev2):
911 def revdiff(self, rev1, rev2):
912 """return or calculate a delta between two revisions"""
912 """return or calculate a delta between two revisions"""
913 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
913 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
914 return self.chunk(rev2)
914 return self.chunk(rev2)
915
915
916 return mdiff.textdiff(self.revision(self.node(rev1)),
916 return mdiff.textdiff(self.revision(self.node(rev1)),
917 self.revision(self.node(rev2)))
917 self.revision(self.node(rev2)))
918
918
919 def revision(self, node):
919 def revision(self, node):
920 """return an uncompressed revision of a given"""
920 """return an uncompressed revision of a given"""
921 if node == nullid:
921 if node == nullid:
922 return ""
922 return ""
923 if self._cache and self._cache[0] == node:
923 if self._cache and self._cache[0] == node:
924 return str(self._cache[2])
924 return str(self._cache[2])
925
925
926 # look up what we need to read
926 # look up what we need to read
927 text = None
927 text = None
928 rev = self.rev(node)
928 rev = self.rev(node)
929 base = self.base(rev)
929 base = self.base(rev)
930
930
931 # check rev flags
931 # check rev flags
932 if self.index[rev][0] & 0xFFFF:
932 if self.index[rev][0] & 0xFFFF:
933 raise RevlogError(_('incompatible revision flag %x') %
933 raise RevlogError(_('incompatible revision flag %x') %
934 (self.index[rev][0] & 0xFFFF))
934 (self.index[rev][0] & 0xFFFF))
935
935
936 if self._inline:
936 df = None
937 # we probably have the whole chunk cached
938 df = None
939 else:
940 df = self.opener(self.datafile)
941
937
942 # do we have useful data cached?
938 # do we have useful data cached?
943 if self._cache and self._cache[1] >= base and self._cache[1] < rev:
939 if self._cache and self._cache[1] >= base and self._cache[1] < rev:
944 base = self._cache[1]
940 base = self._cache[1]
945 text = str(self._cache[2])
941 text = str(self._cache[2])
946 self._loadindex(base, rev + 1)
942 self._loadindex(base, rev + 1)
943 if not self._inline and rev > base + 1:
944 df = self.opener(self.datafile)
947 else:
945 else:
948 self._loadindex(base, rev + 1)
946 self._loadindex(base, rev + 1)
947 if not self._inline and rev > base:
948 df = self.opener(self.datafile)
949 text = self.chunk(base, df=df)
949 text = self.chunk(base, df=df)
950
950
951 bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
951 bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
952 text = mdiff.patches(text, bins)
952 text = mdiff.patches(text, bins)
953 p1, p2 = self.parents(node)
953 p1, p2 = self.parents(node)
954 if node != hash(text, p1, p2):
954 if node != hash(text, p1, p2):
955 raise RevlogError(_("integrity check failed on %s:%d")
955 raise RevlogError(_("integrity check failed on %s:%d")
956 % (self.datafile, rev))
956 % (self.datafile, rev))
957
957
958 self._cache = (node, rev, text)
958 self._cache = (node, rev, text)
959 return text
959 return text
960
960
961 def checkinlinesize(self, tr, fp=None):
961 def checkinlinesize(self, tr, fp=None):
962 if not self._inline:
962 if not self._inline:
963 return
963 return
964 if not fp:
964 if not fp:
965 fp = self.opener(self.indexfile, 'r')
965 fp = self.opener(self.indexfile, 'r')
966 fp.seek(0, 2)
966 fp.seek(0, 2)
967 size = fp.tell()
967 size = fp.tell()
968 if size < 131072:
968 if size < 131072:
969 return
969 return
970 trinfo = tr.find(self.indexfile)
970 trinfo = tr.find(self.indexfile)
971 if trinfo == None:
971 if trinfo == None:
972 raise RevlogError(_("%s not found in the transaction")
972 raise RevlogError(_("%s not found in the transaction")
973 % self.indexfile)
973 % self.indexfile)
974
974
975 trindex = trinfo[2]
975 trindex = trinfo[2]
976 dataoff = self.start(trindex)
976 dataoff = self.start(trindex)
977
977
978 tr.add(self.datafile, dataoff)
978 tr.add(self.datafile, dataoff)
979 df = self.opener(self.datafile, 'w')
979 df = self.opener(self.datafile, 'w')
980 calc = self._io.size
980 calc = self._io.size
981 for r in xrange(self.count()):
981 for r in xrange(self.count()):
982 start = self.start(r) + (r + 1) * calc
982 start = self.start(r) + (r + 1) * calc
983 length = self.length(r)
983 length = self.length(r)
984 fp.seek(start)
984 fp.seek(start)
985 d = fp.read(length)
985 d = fp.read(length)
986 df.write(d)
986 df.write(d)
987 fp.close()
987 fp.close()
988 df.close()
988 df.close()
989 fp = self.opener(self.indexfile, 'w', atomictemp=True)
989 fp = self.opener(self.indexfile, 'w', atomictemp=True)
990 self.version &= ~(REVLOGNGINLINEDATA)
990 self.version &= ~(REVLOGNGINLINEDATA)
991 self._inline = False
991 self._inline = False
992 for i in xrange(self.count()):
992 for i in xrange(self.count()):
993 e = self._io.packentry(self.index[i], self.node, self.version, i)
993 e = self._io.packentry(self.index[i], self.node, self.version, i)
994 fp.write(e)
994 fp.write(e)
995
995
996 # if we don't call rename, the temp file will never replace the
996 # if we don't call rename, the temp file will never replace the
997 # real index
997 # real index
998 fp.rename()
998 fp.rename()
999
999
1000 tr.replace(self.indexfile, trindex * calc)
1000 tr.replace(self.indexfile, trindex * calc)
1001 self._chunkcache = None
1001 self._chunkcache = None
1002
1002
1003 def addrevision(self, text, transaction, link, p1, p2, d=None):
1003 def addrevision(self, text, transaction, link, p1, p2, d=None):
1004 """add a revision to the log
1004 """add a revision to the log
1005
1005
1006 text - the revision data to add
1006 text - the revision data to add
1007 transaction - the transaction object used for rollback
1007 transaction - the transaction object used for rollback
1008 link - the linkrev data to add
1008 link - the linkrev data to add
1009 p1, p2 - the parent nodeids of the revision
1009 p1, p2 - the parent nodeids of the revision
1010 d - an optional precomputed delta
1010 d - an optional precomputed delta
1011 """
1011 """
1012 dfh = None
1012 dfh = None
1013 if not self._inline:
1013 if not self._inline:
1014 dfh = self.opener(self.datafile, "a")
1014 dfh = self.opener(self.datafile, "a")
1015 ifh = self.opener(self.indexfile, "a+")
1015 ifh = self.opener(self.indexfile, "a+")
1016 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1016 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1017
1017
1018 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1018 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1019 node = hash(text, p1, p2)
1019 node = hash(text, p1, p2)
1020 if node in self.nodemap:
1020 if node in self.nodemap:
1021 return node
1021 return node
1022
1022
1023 curr = self.count()
1023 curr = self.count()
1024 prev = curr - 1
1024 prev = curr - 1
1025 base = self.base(prev)
1025 base = self.base(prev)
1026 offset = self.end(prev)
1026 offset = self.end(prev)
1027
1027
1028 if curr:
1028 if curr:
1029 if not d:
1029 if not d:
1030 ptext = self.revision(self.node(prev))
1030 ptext = self.revision(self.node(prev))
1031 d = mdiff.textdiff(ptext, text)
1031 d = mdiff.textdiff(ptext, text)
1032 data = compress(d)
1032 data = compress(d)
1033 l = len(data[1]) + len(data[0])
1033 l = len(data[1]) + len(data[0])
1034 dist = l + offset - self.start(base)
1034 dist = l + offset - self.start(base)
1035
1035
1036 # full versions are inserted when the needed deltas
1036 # full versions are inserted when the needed deltas
1037 # become comparable to the uncompressed text
1037 # become comparable to the uncompressed text
1038 if not curr or dist > len(text) * 2:
1038 if not curr or dist > len(text) * 2:
1039 data = compress(text)
1039 data = compress(text)
1040 l = len(data[1]) + len(data[0])
1040 l = len(data[1]) + len(data[0])
1041 base = curr
1041 base = curr
1042
1042
1043 e = (offset_type(offset, 0), l, len(text),
1043 e = (offset_type(offset, 0), l, len(text),
1044 base, link, self.rev(p1), self.rev(p2), node)
1044 base, link, self.rev(p1), self.rev(p2), node)
1045 self.index.insert(-1, e)
1045 self.index.insert(-1, e)
1046 self.nodemap[node] = curr
1046 self.nodemap[node] = curr
1047
1047
1048 entry = self._io.packentry(e, self.node, self.version, curr)
1048 entry = self._io.packentry(e, self.node, self.version, curr)
1049 if not self._inline:
1049 if not self._inline:
1050 transaction.add(self.datafile, offset)
1050 transaction.add(self.datafile, offset)
1051 transaction.add(self.indexfile, curr * len(entry))
1051 transaction.add(self.indexfile, curr * len(entry))
1052 if data[0]:
1052 if data[0]:
1053 dfh.write(data[0])
1053 dfh.write(data[0])
1054 dfh.write(data[1])
1054 dfh.write(data[1])
1055 dfh.flush()
1055 dfh.flush()
1056 ifh.write(entry)
1056 ifh.write(entry)
1057 else:
1057 else:
1058 offset += curr * self._io.size
1058 offset += curr * self._io.size
1059 transaction.add(self.indexfile, offset, curr)
1059 transaction.add(self.indexfile, offset, curr)
1060 ifh.write(entry)
1060 ifh.write(entry)
1061 ifh.write(data[0])
1061 ifh.write(data[0])
1062 ifh.write(data[1])
1062 ifh.write(data[1])
1063 self.checkinlinesize(transaction, ifh)
1063 self.checkinlinesize(transaction, ifh)
1064
1064
1065 self._cache = (node, curr, text)
1065 self._cache = (node, curr, text)
1066 return node
1066 return node
1067
1067
1068 def ancestor(self, a, b):
1068 def ancestor(self, a, b):
1069 """calculate the least common ancestor of nodes a and b"""
1069 """calculate the least common ancestor of nodes a and b"""
1070
1070
1071 def parents(rev):
1071 def parents(rev):
1072 return [p for p in self.parentrevs(rev) if p != nullrev]
1072 return [p for p in self.parentrevs(rev) if p != nullrev]
1073
1073
1074 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1074 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1075 if c is None:
1075 if c is None:
1076 return nullid
1076 return nullid
1077
1077
1078 return self.node(c)
1078 return self.node(c)
1079
1079
1080 def group(self, nodelist, lookup, infocollect=None):
1080 def group(self, nodelist, lookup, infocollect=None):
1081 """calculate a delta group
1081 """calculate a delta group
1082
1082
1083 Given a list of changeset revs, return a set of deltas and
1083 Given a list of changeset revs, return a set of deltas and
1084 metadata corresponding to nodes. the first delta is
1084 metadata corresponding to nodes. the first delta is
1085 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1085 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1086 have this parent as it has all history before these
1086 have this parent as it has all history before these
1087 changesets. parent is parent[0]
1087 changesets. parent is parent[0]
1088 """
1088 """
1089 revs = [self.rev(n) for n in nodelist]
1089 revs = [self.rev(n) for n in nodelist]
1090
1090
1091 # if we don't have any revisions touched by these changesets, bail
1091 # if we don't have any revisions touched by these changesets, bail
1092 if not revs:
1092 if not revs:
1093 yield changegroup.closechunk()
1093 yield changegroup.closechunk()
1094 return
1094 return
1095
1095
1096 # add the parent of the first rev
1096 # add the parent of the first rev
1097 p = self.parents(self.node(revs[0]))[0]
1097 p = self.parents(self.node(revs[0]))[0]
1098 revs.insert(0, self.rev(p))
1098 revs.insert(0, self.rev(p))
1099
1099
1100 # build deltas
1100 # build deltas
1101 for d in xrange(0, len(revs) - 1):
1101 for d in xrange(0, len(revs) - 1):
1102 a, b = revs[d], revs[d + 1]
1102 a, b = revs[d], revs[d + 1]
1103 nb = self.node(b)
1103 nb = self.node(b)
1104
1104
1105 if infocollect is not None:
1105 if infocollect is not None:
1106 infocollect(nb)
1106 infocollect(nb)
1107
1107
1108 p = self.parents(nb)
1108 p = self.parents(nb)
1109 meta = nb + p[0] + p[1] + lookup(nb)
1109 meta = nb + p[0] + p[1] + lookup(nb)
1110 if a == -1:
1110 if a == -1:
1111 d = self.revision(nb)
1111 d = self.revision(nb)
1112 meta += mdiff.trivialdiffheader(len(d))
1112 meta += mdiff.trivialdiffheader(len(d))
1113 else:
1113 else:
1114 d = self.revdiff(a, b)
1114 d = self.revdiff(a, b)
1115 yield changegroup.chunkheader(len(meta) + len(d))
1115 yield changegroup.chunkheader(len(meta) + len(d))
1116 yield meta
1116 yield meta
1117 if len(d) > 2**20:
1117 if len(d) > 2**20:
1118 pos = 0
1118 pos = 0
1119 while pos < len(d):
1119 while pos < len(d):
1120 pos2 = pos + 2 ** 18
1120 pos2 = pos + 2 ** 18
1121 yield d[pos:pos2]
1121 yield d[pos:pos2]
1122 pos = pos2
1122 pos = pos2
1123 else:
1123 else:
1124 yield d
1124 yield d
1125
1125
1126 yield changegroup.closechunk()
1126 yield changegroup.closechunk()
1127
1127
1128 def addgroup(self, revs, linkmapper, transaction, unique=0):
1128 def addgroup(self, revs, linkmapper, transaction, unique=0):
1129 """
1129 """
1130 add a delta group
1130 add a delta group
1131
1131
1132 given a set of deltas, add them to the revision log. the
1132 given a set of deltas, add them to the revision log. the
1133 first delta is against its parent, which should be in our
1133 first delta is against its parent, which should be in our
1134 log, the rest are against the previous delta.
1134 log, the rest are against the previous delta.
1135 """
1135 """
1136
1136
1137 #track the base of the current delta log
1137 #track the base of the current delta log
1138 r = self.count()
1138 r = self.count()
1139 t = r - 1
1139 t = r - 1
1140 node = None
1140 node = None
1141
1141
1142 base = prev = nullrev
1142 base = prev = nullrev
1143 start = end = textlen = 0
1143 start = end = textlen = 0
1144 if r:
1144 if r:
1145 end = self.end(t)
1145 end = self.end(t)
1146
1146
1147 ifh = self.opener(self.indexfile, "a+")
1147 ifh = self.opener(self.indexfile, "a+")
1148 isize = r * self._io.size
1148 isize = r * self._io.size
1149 if self._inline:
1149 if self._inline:
1150 transaction.add(self.indexfile, end + isize, r)
1150 transaction.add(self.indexfile, end + isize, r)
1151 dfh = None
1151 dfh = None
1152 else:
1152 else:
1153 transaction.add(self.indexfile, isize, r)
1153 transaction.add(self.indexfile, isize, r)
1154 transaction.add(self.datafile, end)
1154 transaction.add(self.datafile, end)
1155 dfh = self.opener(self.datafile, "a")
1155 dfh = self.opener(self.datafile, "a")
1156
1156
1157 # loop through our set of deltas
1157 # loop through our set of deltas
1158 chain = None
1158 chain = None
1159 for chunk in revs:
1159 for chunk in revs:
1160 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1160 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1161 link = linkmapper(cs)
1161 link = linkmapper(cs)
1162 if node in self.nodemap:
1162 if node in self.nodemap:
1163 # this can happen if two branches make the same change
1163 # this can happen if two branches make the same change
1164 # if unique:
1164 # if unique:
1165 # raise RevlogError(_("already have %s") % hex(node[:4]))
1165 # raise RevlogError(_("already have %s") % hex(node[:4]))
1166 chain = node
1166 chain = node
1167 continue
1167 continue
1168 delta = buffer(chunk, 80)
1168 delta = buffer(chunk, 80)
1169 del chunk
1169 del chunk
1170
1170
1171 for p in (p1, p2):
1171 for p in (p1, p2):
1172 if not p in self.nodemap:
1172 if not p in self.nodemap:
1173 raise LookupError(hex(p), _("unknown parent %s") % short(p))
1173 raise LookupError(hex(p), _("unknown parent %s") % short(p))
1174
1174
1175 if not chain:
1175 if not chain:
1176 # retrieve the parent revision of the delta chain
1176 # retrieve the parent revision of the delta chain
1177 chain = p1
1177 chain = p1
1178 if not chain in self.nodemap:
1178 if not chain in self.nodemap:
1179 raise LookupError(hex(chain), _("unknown base %s") % short(chain[:4]))
1179 raise LookupError(hex(chain), _("unknown base %s") % short(chain[:4]))
1180
1180
1181 # full versions are inserted when the needed deltas become
1181 # full versions are inserted when the needed deltas become
1182 # comparable to the uncompressed text or when the previous
1182 # comparable to the uncompressed text or when the previous
1183 # version is not the one we have a delta against. We use
1183 # version is not the one we have a delta against. We use
1184 # the size of the previous full rev as a proxy for the
1184 # the size of the previous full rev as a proxy for the
1185 # current size.
1185 # current size.
1186
1186
1187 if chain == prev:
1187 if chain == prev:
1188 cdelta = compress(delta)
1188 cdelta = compress(delta)
1189 cdeltalen = len(cdelta[0]) + len(cdelta[1])
1189 cdeltalen = len(cdelta[0]) + len(cdelta[1])
1190 textlen = mdiff.patchedsize(textlen, delta)
1190 textlen = mdiff.patchedsize(textlen, delta)
1191
1191
1192 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1192 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1193 # flush our writes here so we can read it in revision
1193 # flush our writes here so we can read it in revision
1194 if dfh:
1194 if dfh:
1195 dfh.flush()
1195 dfh.flush()
1196 ifh.flush()
1196 ifh.flush()
1197 text = self.revision(chain)
1197 text = self.revision(chain)
1198 if len(text) == 0:
1198 if len(text) == 0:
1199 # skip over trivial delta header
1199 # skip over trivial delta header
1200 text = buffer(delta, 12)
1200 text = buffer(delta, 12)
1201 else:
1201 else:
1202 text = mdiff.patches(text, [delta])
1202 text = mdiff.patches(text, [delta])
1203 del delta
1203 del delta
1204 chk = self._addrevision(text, transaction, link, p1, p2, None,
1204 chk = self._addrevision(text, transaction, link, p1, p2, None,
1205 ifh, dfh)
1205 ifh, dfh)
1206 if not dfh and not self._inline:
1206 if not dfh and not self._inline:
1207 # addrevision switched from inline to conventional
1207 # addrevision switched from inline to conventional
1208 # reopen the index
1208 # reopen the index
1209 dfh = self.opener(self.datafile, "a")
1209 dfh = self.opener(self.datafile, "a")
1210 ifh = self.opener(self.indexfile, "a")
1210 ifh = self.opener(self.indexfile, "a")
1211 if chk != node:
1211 if chk != node:
1212 raise RevlogError(_("consistency error adding group"))
1212 raise RevlogError(_("consistency error adding group"))
1213 textlen = len(text)
1213 textlen = len(text)
1214 else:
1214 else:
1215 e = (offset_type(end, 0), cdeltalen, textlen, base,
1215 e = (offset_type(end, 0), cdeltalen, textlen, base,
1216 link, self.rev(p1), self.rev(p2), node)
1216 link, self.rev(p1), self.rev(p2), node)
1217 self.index.insert(-1, e)
1217 self.index.insert(-1, e)
1218 self.nodemap[node] = r
1218 self.nodemap[node] = r
1219 entry = self._io.packentry(e, self.node, self.version, r)
1219 entry = self._io.packentry(e, self.node, self.version, r)
1220 if self._inline:
1220 if self._inline:
1221 ifh.write(entry)
1221 ifh.write(entry)
1222 ifh.write(cdelta[0])
1222 ifh.write(cdelta[0])
1223 ifh.write(cdelta[1])
1223 ifh.write(cdelta[1])
1224 self.checkinlinesize(transaction, ifh)
1224 self.checkinlinesize(transaction, ifh)
1225 if not self._inline:
1225 if not self._inline:
1226 dfh = self.opener(self.datafile, "a")
1226 dfh = self.opener(self.datafile, "a")
1227 ifh = self.opener(self.indexfile, "a")
1227 ifh = self.opener(self.indexfile, "a")
1228 else:
1228 else:
1229 dfh.write(cdelta[0])
1229 dfh.write(cdelta[0])
1230 dfh.write(cdelta[1])
1230 dfh.write(cdelta[1])
1231 ifh.write(entry)
1231 ifh.write(entry)
1232
1232
1233 t, r, chain, prev = r, r + 1, node, node
1233 t, r, chain, prev = r, r + 1, node, node
1234 base = self.base(t)
1234 base = self.base(t)
1235 start = self.start(base)
1235 start = self.start(base)
1236 end = self.end(t)
1236 end = self.end(t)
1237
1237
1238 return node
1238 return node
1239
1239
1240 def strip(self, minlink):
1240 def strip(self, minlink):
1241 """truncate the revlog on the first revision with a linkrev >= minlink
1241 """truncate the revlog on the first revision with a linkrev >= minlink
1242
1242
1243 This function is called when we're stripping revision minlink and
1243 This function is called when we're stripping revision minlink and
1244 its descendants from the repository.
1244 its descendants from the repository.
1245
1245
1246 We have to remove all revisions with linkrev >= minlink, because
1246 We have to remove all revisions with linkrev >= minlink, because
1247 the equivalent changelog revisions will be renumbered after the
1247 the equivalent changelog revisions will be renumbered after the
1248 strip.
1248 strip.
1249
1249
1250 So we truncate the revlog on the first of these revisions, and
1250 So we truncate the revlog on the first of these revisions, and
1251 trust that the caller has saved the revisions that shouldn't be
1251 trust that the caller has saved the revisions that shouldn't be
1252 removed and that it'll readd them after this truncation.
1252 removed and that it'll readd them after this truncation.
1253 """
1253 """
1254 if self.count() == 0:
1254 if self.count() == 0:
1255 return
1255 return
1256
1256
1257 if isinstance(self.index, lazyindex):
1257 if isinstance(self.index, lazyindex):
1258 self._loadindexmap()
1258 self._loadindexmap()
1259
1259
1260 for rev in xrange(0, self.count()):
1260 for rev in xrange(0, self.count()):
1261 if self.index[rev][4] >= minlink:
1261 if self.index[rev][4] >= minlink:
1262 break
1262 break
1263 else:
1263 else:
1264 return
1264 return
1265
1265
1266 # first truncate the files on disk
1266 # first truncate the files on disk
1267 end = self.start(rev)
1267 end = self.start(rev)
1268 if not self._inline:
1268 if not self._inline:
1269 df = self.opener(self.datafile, "a")
1269 df = self.opener(self.datafile, "a")
1270 df.truncate(end)
1270 df.truncate(end)
1271 end = rev * self._io.size
1271 end = rev * self._io.size
1272 else:
1272 else:
1273 end += rev * self._io.size
1273 end += rev * self._io.size
1274
1274
1275 indexf = self.opener(self.indexfile, "a")
1275 indexf = self.opener(self.indexfile, "a")
1276 indexf.truncate(end)
1276 indexf.truncate(end)
1277
1277
1278 # then reset internal state in memory to forget those revisions
1278 # then reset internal state in memory to forget those revisions
1279 self._cache = None
1279 self._cache = None
1280 self._chunkcache = None
1280 self._chunkcache = None
1281 for x in xrange(rev, self.count()):
1281 for x in xrange(rev, self.count()):
1282 del self.nodemap[self.node(x)]
1282 del self.nodemap[self.node(x)]
1283
1283
1284 del self.index[rev:-1]
1284 del self.index[rev:-1]
1285
1285
1286 def checksize(self):
1286 def checksize(self):
1287 expected = 0
1287 expected = 0
1288 if self.count():
1288 if self.count():
1289 expected = max(0, self.end(self.count() - 1))
1289 expected = max(0, self.end(self.count() - 1))
1290
1290
1291 try:
1291 try:
1292 f = self.opener(self.datafile)
1292 f = self.opener(self.datafile)
1293 f.seek(0, 2)
1293 f.seek(0, 2)
1294 actual = f.tell()
1294 actual = f.tell()
1295 dd = actual - expected
1295 dd = actual - expected
1296 except IOError, inst:
1296 except IOError, inst:
1297 if inst.errno != errno.ENOENT:
1297 if inst.errno != errno.ENOENT:
1298 raise
1298 raise
1299 dd = 0
1299 dd = 0
1300
1300
1301 try:
1301 try:
1302 f = self.opener(self.indexfile)
1302 f = self.opener(self.indexfile)
1303 f.seek(0, 2)
1303 f.seek(0, 2)
1304 actual = f.tell()
1304 actual = f.tell()
1305 s = self._io.size
1305 s = self._io.size
1306 i = max(0, actual / s)
1306 i = max(0, actual / s)
1307 di = actual - (i * s)
1307 di = actual - (i * s)
1308 if self._inline:
1308 if self._inline:
1309 databytes = 0
1309 databytes = 0
1310 for r in xrange(self.count()):
1310 for r in xrange(self.count()):
1311 databytes += max(0, self.length(r))
1311 databytes += max(0, self.length(r))
1312 dd = 0
1312 dd = 0
1313 di = actual - self.count() * s - databytes
1313 di = actual - self.count() * s - databytes
1314 except IOError, inst:
1314 except IOError, inst:
1315 if inst.errno != errno.ENOENT:
1315 if inst.errno != errno.ENOENT:
1316 raise
1316 raise
1317 di = 0
1317 di = 0
1318
1318
1319 return (dd, di)
1319 return (dd, di)
@@ -1,120 +1,132 b''
1 hg convert [OPTION]... SOURCE [DEST [MAPFILE]]
1 hg convert [OPTION]... SOURCE [DEST [MAPFILE]]
2
2
3 Convert a foreign SCM repository to a Mercurial one.
3 Convert a foreign SCM repository to a Mercurial one.
4
4
5 Accepted source formats:
5 Accepted source formats:
6 - Mercurial
6 - Mercurial
7 - CVS
7 - CVS
8 - Darcs
8 - Darcs
9 - git
9 - git
10 - Subversion
10 - Subversion
11 - GNU Arch
11 - GNU Arch
12
12
13 Accepted destination formats:
13 Accepted destination formats:
14 - Mercurial
14 - Mercurial
15 - Subversion (history on branches is not preserved)
15 - Subversion (history on branches is not preserved)
16
16
17 If no revision is given, all revisions will be converted. Otherwise,
17 If no revision is given, all revisions will be converted. Otherwise,
18 convert will only import up to the named revision (given in a format
18 convert will only import up to the named revision (given in a format
19 understood by the source).
19 understood by the source).
20
20
21 If no destination directory name is specified, it defaults to the
21 If no destination directory name is specified, it defaults to the
22 basename of the source with '-hg' appended. If the destination
22 basename of the source with '-hg' appended. If the destination
23 repository doesn't exist, it will be created.
23 repository doesn't exist, it will be created.
24
24
25 If <MAPFILE> isn't given, it will be put in a default location
25 If <MAPFILE> isn't given, it will be put in a default location
26 (<dest>/.hg/shamap by default). The <MAPFILE> is a simple text
26 (<dest>/.hg/shamap by default). The <MAPFILE> is a simple text
27 file that maps each source commit ID to the destination ID for
27 file that maps each source commit ID to the destination ID for
28 that revision, like so:
28 that revision, like so:
29 <source ID> <destination ID>
29 <source ID> <destination ID>
30
30
31 If the file doesn't exist, it's automatically created. It's updated
31 If the file doesn't exist, it's automatically created. It's updated
32 on each commit copied, so convert-repo can be interrupted and can
32 on each commit copied, so convert-repo can be interrupted and can
33 be run repeatedly to copy new commits.
33 be run repeatedly to copy new commits.
34
34
35 The [username mapping] file is a simple text file that maps each source
35 The [username mapping] file is a simple text file that maps each source
36 commit author to a destination commit author. It is handy for source SCMs
36 commit author to a destination commit author. It is handy for source SCMs
37 that use unix logins to identify authors (eg: CVS). One line per author
37 that use unix logins to identify authors (eg: CVS). One line per author
38 mapping and the line format is:
38 mapping and the line format is:
39 srcauthor=whatever string you want
39 srcauthor=whatever string you want
40
40
41 The filemap is a file that allows filtering and remapping of files
41 The filemap is a file that allows filtering and remapping of files
42 and directories. Comment lines start with '#'. Each line can
42 and directories. Comment lines start with '#'. Each line can
43 contain one of the following directives:
43 contain one of the following directives:
44
44
45 include path/to/file
45 include path/to/file
46
46
47 exclude path/to/file
47 exclude path/to/file
48
48
49 rename from/file to/file
49 rename from/file to/file
50
50
51 The 'include' directive causes a file, or all files under a
51 The 'include' directive causes a file, or all files under a
52 directory, to be included in the destination repository, and the
52 directory, to be included in the destination repository, and the
53 exclusion of all other files and dirs not explicitely included.
53 exclusion of all other files and dirs not explicitely included.
54 The 'exclude' directive causes files or directories to be omitted.
54 The 'exclude' directive causes files or directories to be omitted.
55 The 'rename' directive renames a file or directory. To rename from a
55 The 'rename' directive renames a file or directory. To rename from a
56 subdirectory into the root of the repository, use '.' as the path to
56 subdirectory into the root of the repository, use '.' as the path to
57 rename to.
57 rename to.
58
58
59 The splicemap is a file that allows insertion of synthetic
60 history, letting you specify the parents of a revision. This is
61 useful if you want to e.g. give a Subversion merge two parents, or
62 graft two disconnected series of history together. Each entry
63 contains a key, followed by a space, followed by one or two
64 values, separated by spaces. The key is the revision ID in the
65 source revision control system whose parents should be modified
66 (same format as a key in .hg/shamap). The values are the revision
67 IDs (in either the source or destination revision control system)
68 that should be used as the new parents for that node.
69
59 Back end options:
70 Back end options:
60
71
61 --config convert.hg.clonebranches=False (boolean)
72 --config convert.hg.clonebranches=False (boolean)
62 hg target: XXX not documented
73 hg target: XXX not documented
63 --config convert.hg.saverev=True (boolean)
74 --config convert.hg.saverev=True (boolean)
64 hg source: allow target to preserve source revision ID
75 hg source: allow target to preserve source revision ID
65 --config convert.hg.tagsbranch=default (branch name)
76 --config convert.hg.tagsbranch=default (branch name)
66 hg target: XXX not documented
77 hg target: XXX not documented
67 --config convert.hg.usebranchnames=True (boolean)
78 --config convert.hg.usebranchnames=True (boolean)
68 hg target: preserve branch names
79 hg target: preserve branch names
69
80
70 --config convert.svn.branches=branches (directory name)
81 --config convert.svn.branches=branches (directory name)
71 svn source: specify the directory containing branches
82 svn source: specify the directory containing branches
72 --config convert.svn.tags=tags (directory name)
83 --config convert.svn.tags=tags (directory name)
73 svn source: specify the directory containing tags
84 svn source: specify the directory containing tags
74 --config convert.svn.trunk=trunk (directory name)
85 --config convert.svn.trunk=trunk (directory name)
75 svn source: specify the name of the trunk branch
86 svn source: specify the name of the trunk branch
76
87
77 options:
88 options:
78
89
79 -A --authors username mapping filename
90 -A --authors username mapping filename
80 -d --dest-type destination repository type
91 -d --dest-type destination repository type
81 --filemap remap file names using contents of file
92 --filemap remap file names using contents of file
82 -r --rev import up to target revision REV
93 -r --rev import up to target revision REV
83 -s --source-type source repository type
94 -s --source-type source repository type
95 --splicemap splice synthesized history into place
84 --datesort try to sort changesets by date
96 --datesort try to sort changesets by date
85
97
86 use "hg -v help convert" to show global options
98 use "hg -v help convert" to show global options
87 adding a
99 adding a
88 assuming destination a-hg
100 assuming destination a-hg
89 initializing destination a-hg repository
101 initializing destination a-hg repository
90 scanning source...
102 scanning source...
91 sorting...
103 sorting...
92 converting...
104 converting...
93 4 a
105 4 a
94 3 b
106 3 b
95 2 c
107 2 c
96 1 d
108 1 d
97 0 e
109 0 e
98 pulling from ../a
110 pulling from ../a
99 searching for changes
111 searching for changes
100 no changes found
112 no changes found
101 % should fail
113 % should fail
102 initializing destination bogusfile repository
114 initializing destination bogusfile repository
103 abort: cannot create new bundle repository
115 abort: cannot create new bundle repository
104 % should fail
116 % should fail
105 abort: Permission denied: bogusdir
117 abort: Permission denied: bogusdir
106 % should succeed
118 % should succeed
107 initializing destination bogusdir repository
119 initializing destination bogusdir repository
108 scanning source...
120 scanning source...
109 sorting...
121 sorting...
110 converting...
122 converting...
111 4 a
123 4 a
112 3 b
124 3 b
113 2 c
125 2 c
114 1 d
126 1 d
115 0 e
127 0 e
116 % test pre and post conversion actions
128 % test pre and post conversion actions
117 run hg source pre-conversion action
129 run hg source pre-conversion action
118 run hg sink pre-conversion action
130 run hg sink pre-conversion action
119 run hg sink post-conversion action
131 run hg sink post-conversion action
120 run hg source post-conversion action
132 run hg source post-conversion action
General Comments 0
You need to be logged in to leave comments. Login now