merge with backout
Benoit Boissinot
r3593:9bfb3c04 merge default
@@ -1,3550 +1,3551 @@ mercurial/commands.py
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup")
15 demandload(globals(), "archival cStringIO changegroup")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted = repo.status()[:4]
24 modified, added, removed, deleted = repo.status()[:4]
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
28 def relpath(repo, args):
28 def relpath(repo, args):
29 cwd = repo.getcwd()
29 cwd = repo.getcwd()
30 if cwd:
30 if cwd:
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 return args
32 return args
33
33
34 def logmessage(opts):
34 def logmessage(opts):
35 """ get the log message according to -m and -l option """
35 """ get the log message according to -m and -l option """
36 message = opts['message']
36 message = opts['message']
37 logfile = opts['logfile']
37 logfile = opts['logfile']
38
38
39 if message and logfile:
39 if message and logfile:
40 raise util.Abort(_('options --message and --logfile are mutually '
40 raise util.Abort(_('options --message and --logfile are mutually '
41 'exclusive'))
41 'exclusive'))
42 if not message and logfile:
42 if not message and logfile:
43 try:
43 try:
44 if logfile == '-':
44 if logfile == '-':
45 message = sys.stdin.read()
45 message = sys.stdin.read()
46 else:
46 else:
47 message = open(logfile).read()
47 message = open(logfile).read()
48 except IOError, inst:
48 except IOError, inst:
49 raise util.Abort(_("can't read commit message '%s': %s") %
49 raise util.Abort(_("can't read commit message '%s': %s") %
50 (logfile, inst.strerror))
50 (logfile, inst.strerror))
51 return message
51 return message
52
52
53 def walkchangerevs(ui, repo, pats, change, opts):
53 def walkchangerevs(ui, repo, pats, change, opts):
54 '''Iterate over files and the revs they changed in.
54 '''Iterate over files and the revs they changed in.
55
55
56 Callers most commonly need to iterate backwards over the history
56 Callers most commonly need to iterate backwards over the history
57 it is interested in. Doing so has awful (quadratic-looking)
57 it is interested in. Doing so has awful (quadratic-looking)
58 performance, so we use iterators in a "windowed" way.
58 performance, so we use iterators in a "windowed" way.
59
59
60 We walk a window of revisions in the desired order. Within the
60 We walk a window of revisions in the desired order. Within the
61 window, we first walk forwards to gather data, then in the desired
61 window, we first walk forwards to gather data, then in the desired
62 order (usually backwards) to display it.
62 order (usually backwards) to display it.
63
63
64 This function returns an (iterator, matchfn) tuple. The iterator
64 This function returns an (iterator, matchfn) tuple. The iterator
65 yields 3-tuples. They will be of one of the following forms:
65 yields 3-tuples. They will be of one of the following forms:
66
66
67 "window", incrementing, lastrev: stepping through a window,
67 "window", incrementing, lastrev: stepping through a window,
68 positive if walking forwards through revs, last rev in the
68 positive if walking forwards through revs, last rev in the
69 sequence iterated over - use to reset state for the current window
69 sequence iterated over - use to reset state for the current window
70
70
71 "add", rev, fns: out-of-order traversal of the given file names
71 "add", rev, fns: out-of-order traversal of the given file names
72 fns, which changed during revision rev - use to gather data for
72 fns, which changed during revision rev - use to gather data for
73 possible display
73 possible display
74
74
75 "iter", rev, None: in-order traversal of the revs earlier iterated
75 "iter", rev, None: in-order traversal of the revs earlier iterated
76 over with "add" - use to display data'''
76 over with "add" - use to display data'''
77
77
78 def increasing_windows(start, end, windowsize=8, sizelimit=512):
78 def increasing_windows(start, end, windowsize=8, sizelimit=512):
79 if start < end:
79 if start < end:
80 while start < end:
80 while start < end:
81 yield start, min(windowsize, end-start)
81 yield start, min(windowsize, end-start)
82 start += windowsize
82 start += windowsize
83 if windowsize < sizelimit:
83 if windowsize < sizelimit:
84 windowsize *= 2
84 windowsize *= 2
85 else:
85 else:
86 while start > end:
86 while start > end:
87 yield start, min(windowsize, start-end-1)
87 yield start, min(windowsize, start-end-1)
88 start -= windowsize
88 start -= windowsize
89 if windowsize < sizelimit:
89 if windowsize < sizelimit:
90 windowsize *= 2
90 windowsize *= 2
91
91
92 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
92 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
93 follow = opts.get('follow') or opts.get('follow_first')
93 follow = opts.get('follow') or opts.get('follow_first')
94
94
95 if repo.changelog.count() == 0:
95 if repo.changelog.count() == 0:
96 return [], matchfn
96 return [], matchfn
97
97
98 if follow:
98 if follow:
99 defrange = '%s:0' % repo.changectx().rev()
99 defrange = '%s:0' % repo.changectx().rev()
100 else:
100 else:
101 defrange = 'tip:0'
101 defrange = 'tip:0'
102 revs = cmdutil.revrange(ui, repo, opts['rev'] or [defrange])
102 revs = cmdutil.revrange(ui, repo, opts['rev'] or [defrange])
103 wanted = {}
103 wanted = {}
104 slowpath = anypats
104 slowpath = anypats
105 fncache = {}
105 fncache = {}
106
106
107 if not slowpath and not files:
107 if not slowpath and not files:
108 # No files, no patterns. Display all revs.
108 # No files, no patterns. Display all revs.
109 wanted = dict.fromkeys(revs)
109 wanted = dict.fromkeys(revs)
110 copies = []
110 copies = []
111 if not slowpath:
111 if not slowpath:
112 # Only files, no patterns. Check the history of each file.
112 # Only files, no patterns. Check the history of each file.
113 def filerevgen(filelog, node):
113 def filerevgen(filelog, node):
114 cl_count = repo.changelog.count()
114 cl_count = repo.changelog.count()
115 if node is None:
115 if node is None:
116 last = filelog.count() - 1
116 last = filelog.count() - 1
117 else:
117 else:
118 last = filelog.rev(node)
118 last = filelog.rev(node)
119 for i, window in increasing_windows(last, nullrev):
119 for i, window in increasing_windows(last, nullrev):
120 revs = []
120 revs = []
121 for j in xrange(i - window, i + 1):
121 for j in xrange(i - window, i + 1):
122 n = filelog.node(j)
122 n = filelog.node(j)
123 revs.append((filelog.linkrev(n),
123 revs.append((filelog.linkrev(n),
124 follow and filelog.renamed(n)))
124 follow and filelog.renamed(n)))
125 revs.reverse()
125 revs.reverse()
126 for rev in revs:
126 for rev in revs:
127 # only yield rev for which we have the changelog, it can
127 # only yield rev for which we have the changelog, it can
128 # happen while doing "hg log" during a pull or commit
128 # happen while doing "hg log" during a pull or commit
129 if rev[0] < cl_count:
129 if rev[0] < cl_count:
130 yield rev
130 yield rev
131 def iterfiles():
131 def iterfiles():
132 for filename in files:
132 for filename in files:
133 yield filename, None
133 yield filename, None
134 for filename_node in copies:
134 for filename_node in copies:
135 yield filename_node
135 yield filename_node
136 minrev, maxrev = min(revs), max(revs)
136 minrev, maxrev = min(revs), max(revs)
137 for file_, node in iterfiles():
137 for file_, node in iterfiles():
138 filelog = repo.file(file_)
138 filelog = repo.file(file_)
139 # A zero count may be a directory or deleted file, so
139 # A zero count may be a directory or deleted file, so
140 # try to find matching entries on the slow path.
140 # try to find matching entries on the slow path.
141 if filelog.count() == 0:
141 if filelog.count() == 0:
142 slowpath = True
142 slowpath = True
143 break
143 break
144 for rev, copied in filerevgen(filelog, node):
144 for rev, copied in filerevgen(filelog, node):
145 if rev <= maxrev:
145 if rev <= maxrev:
146 if rev < minrev:
146 if rev < minrev:
147 break
147 break
148 fncache.setdefault(rev, [])
148 fncache.setdefault(rev, [])
149 fncache[rev].append(file_)
149 fncache[rev].append(file_)
150 wanted[rev] = 1
150 wanted[rev] = 1
151 if follow and copied:
151 if follow and copied:
152 copies.append(copied)
152 copies.append(copied)
153 if slowpath:
153 if slowpath:
154 if follow:
154 if follow:
155 raise util.Abort(_('can only follow copies/renames for explicit '
155 raise util.Abort(_('can only follow copies/renames for explicit '
156 'file names'))
156 'file names'))
157
157
158 # The slow path checks files modified in every changeset.
158 # The slow path checks files modified in every changeset.
159 def changerevgen():
159 def changerevgen():
160 for i, window in increasing_windows(repo.changelog.count()-1,
160 for i, window in increasing_windows(repo.changelog.count()-1,
161 nullrev):
161 nullrev):
162 for j in xrange(i - window, i + 1):
162 for j in xrange(i - window, i + 1):
163 yield j, change(j)[3]
163 yield j, change(j)[3]
164
164
165 for rev, changefiles in changerevgen():
165 for rev, changefiles in changerevgen():
166 matches = filter(matchfn, changefiles)
166 matches = filter(matchfn, changefiles)
167 if matches:
167 if matches:
168 fncache[rev] = matches
168 fncache[rev] = matches
169 wanted[rev] = 1
169 wanted[rev] = 1
170
170
171 class followfilter:
171 class followfilter:
172 def __init__(self, onlyfirst=False):
172 def __init__(self, onlyfirst=False):
173 self.startrev = nullrev
173 self.startrev = nullrev
174 self.roots = []
174 self.roots = []
175 self.onlyfirst = onlyfirst
175 self.onlyfirst = onlyfirst
176
176
177 def match(self, rev):
177 def match(self, rev):
178 def realparents(rev):
178 def realparents(rev):
179 if self.onlyfirst:
179 if self.onlyfirst:
180 return repo.changelog.parentrevs(rev)[0:1]
180 return repo.changelog.parentrevs(rev)[0:1]
181 else:
181 else:
182 return filter(lambda x: x != nullrev,
182 return filter(lambda x: x != nullrev,
183 repo.changelog.parentrevs(rev))
183 repo.changelog.parentrevs(rev))
184
184
185 if self.startrev == nullrev:
185 if self.startrev == nullrev:
186 self.startrev = rev
186 self.startrev = rev
187 return True
187 return True
188
188
189 if rev > self.startrev:
189 if rev > self.startrev:
190 # forward: all descendants
190 # forward: all descendants
191 if not self.roots:
191 if not self.roots:
192 self.roots.append(self.startrev)
192 self.roots.append(self.startrev)
193 for parent in realparents(rev):
193 for parent in realparents(rev):
194 if parent in self.roots:
194 if parent in self.roots:
195 self.roots.append(rev)
195 self.roots.append(rev)
196 return True
196 return True
197 else:
197 else:
198 # backwards: all parents
198 # backwards: all parents
199 if not self.roots:
199 if not self.roots:
200 self.roots.extend(realparents(self.startrev))
200 self.roots.extend(realparents(self.startrev))
201 if rev in self.roots:
201 if rev in self.roots:
202 self.roots.remove(rev)
202 self.roots.remove(rev)
203 self.roots.extend(realparents(rev))
203 self.roots.extend(realparents(rev))
204 return True
204 return True
205
205
206 return False
206 return False
207
207
208 # it might be worthwhile to do this in the iterator if the rev range
208 # it might be worthwhile to do this in the iterator if the rev range
209 # is descending and the prune args are all within that range
209 # is descending and the prune args are all within that range
210 for rev in opts.get('prune', ()):
210 for rev in opts.get('prune', ()):
211 rev = repo.changelog.rev(repo.lookup(rev))
211 rev = repo.changelog.rev(repo.lookup(rev))
212 ff = followfilter()
212 ff = followfilter()
213 stop = min(revs[0], revs[-1])
213 stop = min(revs[0], revs[-1])
214 for x in xrange(rev, stop-1, -1):
214 for x in xrange(rev, stop-1, -1):
215 if ff.match(x) and x in wanted:
215 if ff.match(x) and x in wanted:
216 del wanted[x]
216 del wanted[x]
217
217
218 def iterate():
218 def iterate():
219 if follow and not files:
219 if follow and not files:
220 ff = followfilter(onlyfirst=opts.get('follow_first'))
220 ff = followfilter(onlyfirst=opts.get('follow_first'))
221 def want(rev):
221 def want(rev):
222 if ff.match(rev) and rev in wanted:
222 if ff.match(rev) and rev in wanted:
223 return True
223 return True
224 return False
224 return False
225 else:
225 else:
226 def want(rev):
226 def want(rev):
227 return rev in wanted
227 return rev in wanted
228
228
229 for i, window in increasing_windows(0, len(revs)):
229 for i, window in increasing_windows(0, len(revs)):
230 yield 'window', revs[0] < revs[-1], revs[-1]
230 yield 'window', revs[0] < revs[-1], revs[-1]
231 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
231 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
232 srevs = list(nrevs)
232 srevs = list(nrevs)
233 srevs.sort()
233 srevs.sort()
234 for rev in srevs:
234 for rev in srevs:
235 fns = fncache.get(rev)
235 fns = fncache.get(rev)
236 if not fns:
236 if not fns:
237 def fns_generator():
237 def fns_generator():
238 for f in change(rev)[3]:
238 for f in change(rev)[3]:
239 if matchfn(f):
239 if matchfn(f):
240 yield f
240 yield f
241 fns = fns_generator()
241 fns = fns_generator()
242 yield 'add', rev, fns
242 yield 'add', rev, fns
243 for rev in nrevs:
243 for rev in nrevs:
244 yield 'iter', rev, None
244 yield 'iter', rev, None
245 return iterate(), matchfn
245 return iterate(), matchfn
246
246
247 def write_bundle(cg, filename=None, compress=True):
247 def write_bundle(cg, filename=None, compress=True):
248 """Write a bundle file and return its filename.
248 """Write a bundle file and return its filename.
249
249
250 Existing files will not be overwritten.
250 Existing files will not be overwritten.
251 If no filename is specified, a temporary file is created.
251 If no filename is specified, a temporary file is created.
252 bz2 compression can be turned off.
252 bz2 compression can be turned off.
253 The bundle file will be deleted in case of errors.
253 The bundle file will be deleted in case of errors.
254 """
254 """
255 class nocompress(object):
255 class nocompress(object):
256 def compress(self, x):
256 def compress(self, x):
257 return x
257 return x
258 def flush(self):
258 def flush(self):
259 return ""
259 return ""
260
260
261 fh = None
261 fh = None
262 cleanup = None
262 cleanup = None
263 try:
263 try:
264 if filename:
264 if filename:
265 if os.path.exists(filename):
265 if os.path.exists(filename):
266 raise util.Abort(_("file '%s' already exists") % filename)
266 raise util.Abort(_("file '%s' already exists") % filename)
267 fh = open(filename, "wb")
267 fh = open(filename, "wb")
268 else:
268 else:
269 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
269 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
270 fh = os.fdopen(fd, "wb")
270 fh = os.fdopen(fd, "wb")
271 cleanup = filename
271 cleanup = filename
272
272
273 if compress:
273 if compress:
274 fh.write("HG10")
274 fh.write("HG10")
275 z = bz2.BZ2Compressor(9)
275 z = bz2.BZ2Compressor(9)
276 else:
276 else:
277 fh.write("HG10UN")
277 fh.write("HG10UN")
278 z = nocompress()
278 z = nocompress()
279 # parse the changegroup data, otherwise we will block
279 # parse the changegroup data, otherwise we will block
280 # in case of sshrepo because we don't know the end of the stream
280 # in case of sshrepo because we don't know the end of the stream
281
281
282 # an empty chunkiter is the end of the changegroup
282 # an empty chunkiter is the end of the changegroup
283 empty = False
283 empty = False
284 while not empty:
284 while not empty:
285 empty = True
285 empty = True
286 for chunk in changegroup.chunkiter(cg):
286 for chunk in changegroup.chunkiter(cg):
287 empty = False
287 empty = False
288 fh.write(z.compress(changegroup.genchunk(chunk)))
288 fh.write(z.compress(changegroup.genchunk(chunk)))
289 fh.write(z.compress(changegroup.closechunk()))
289 fh.write(z.compress(changegroup.closechunk()))
290 fh.write(z.flush())
290 fh.write(z.flush())
291 cleanup = None
291 cleanup = None
292 return filename
292 return filename
293 finally:
293 finally:
294 if fh is not None:
294 if fh is not None:
295 fh.close()
295 fh.close()
296 if cleanup is not None:
296 if cleanup is not None:
297 os.unlink(cleanup)
297 os.unlink(cleanup)
298
298
299 class changeset_printer(object):
299 class changeset_printer(object):
300 '''show changeset information when templating not requested.'''
300 '''show changeset information when templating not requested.'''
301
301
302 def __init__(self, ui, repo):
302 def __init__(self, ui, repo):
303 self.ui = ui
303 self.ui = ui
304 self.repo = repo
304 self.repo = repo
305
305
306 def show(self, rev=0, changenode=None, brinfo=None, copies=None):
306 def show(self, rev=0, changenode=None, brinfo=None, copies=None):
307 '''show a single changeset or file revision'''
307 '''show a single changeset or file revision'''
308 log = self.repo.changelog
308 log = self.repo.changelog
309 if changenode is None:
309 if changenode is None:
310 changenode = log.node(rev)
310 changenode = log.node(rev)
311 elif not rev:
311 elif not rev:
312 rev = log.rev(changenode)
312 rev = log.rev(changenode)
313
313
314 if self.ui.quiet:
314 if self.ui.quiet:
315 self.ui.write("%d:%s\n" % (rev, short(changenode)))
315 self.ui.write("%d:%s\n" % (rev, short(changenode)))
316 return
316 return
317
317
318 changes = log.read(changenode)
318 changes = log.read(changenode)
319 date = util.datestr(changes[2])
319 date = util.datestr(changes[2])
320 extra = changes[5]
320 extra = changes[5]
321 branch = extra.get("branch")
321 branch = extra.get("branch")
322
322
323 hexfunc = self.ui.debugflag and hex or short
323 hexfunc = self.ui.debugflag and hex or short
324
324
325 parents = log.parentrevs(rev)
325 parents = log.parentrevs(rev)
326 if not self.ui.debugflag:
326 if not self.ui.debugflag:
327 parents = [p for p in parents if p != nullrev]
327 parents = [p for p in parents if p != nullrev]
328 if len(parents) == 1 and parents[0] == rev-1:
328 if len(parents) == 1 and parents[0] == rev-1:
329 parents = []
329 parents = []
330 parents = [(p, hexfunc(log.node(p))) for p in parents]
330 parents = [(p, hexfunc(log.node(p))) for p in parents]
331
331
332
332
333 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
333 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
334
334
335 if branch:
335 if branch:
336 self.ui.write(_("branch: %s\n") % branch)
336 self.ui.write(_("branch: %s\n") % branch)
337 for tag in self.repo.nodetags(changenode):
337 for tag in self.repo.nodetags(changenode):
338 self.ui.write(_("tag: %s\n") % tag)
338 self.ui.write(_("tag: %s\n") % tag)
339 for parent in parents:
339 for parent in parents:
340 self.ui.write(_("parent: %d:%s\n") % parent)
340 self.ui.write(_("parent: %d:%s\n") % parent)
341
341
342 if brinfo and changenode in brinfo:
342 if brinfo and changenode in brinfo:
343 br = brinfo[changenode]
343 br = brinfo[changenode]
344 self.ui.write(_("branch: %s\n") % " ".join(br))
344 self.ui.write(_("branch: %s\n") % " ".join(br))
345
345
346 if self.ui.debugflag:
346 if self.ui.debugflag:
347 self.ui.write(_("manifest: %d:%s\n") %
347 self.ui.write(_("manifest: %d:%s\n") %
348 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
348 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
349 self.ui.write(_("user: %s\n") % changes[1])
349 self.ui.write(_("user: %s\n") % changes[1])
350 self.ui.write(_("date: %s\n") % date)
350 self.ui.write(_("date: %s\n") % date)
351
351
352 if self.ui.debugflag:
352 if self.ui.debugflag:
353 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
353 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
354 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
354 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
355 files):
355 files):
356 if value:
356 if value:
357 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
357 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
358 elif changes[3] and self.ui.verbose:
358 elif changes[3] and self.ui.verbose:
359 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
359 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
360 if copies and self.ui.verbose:
360 if copies and self.ui.verbose:
361 copies = ['%s (%s)' % c for c in copies]
361 copies = ['%s (%s)' % c for c in copies]
362 self.ui.write(_("copies: %s\n") % ' '.join(copies))
362 self.ui.write(_("copies: %s\n") % ' '.join(copies))
363
363
364 if extra and self.ui.debugflag:
364 if extra and self.ui.debugflag:
365 extraitems = extra.items()
365 extraitems = extra.items()
366 extraitems.sort()
366 extraitems.sort()
367 for key, value in extraitems:
367 for key, value in extraitems:
368 self.ui.write(_("extra: %s=%s\n")
368 self.ui.write(_("extra: %s=%s\n")
369 % (key, value.encode('string_escape')))
369 % (key, value.encode('string_escape')))
370
370
371 description = changes[4].strip()
371 description = changes[4].strip()
372 if description:
372 if description:
373 if self.ui.verbose:
373 if self.ui.verbose:
374 self.ui.write(_("description:\n"))
374 self.ui.write(_("description:\n"))
375 self.ui.write(description)
375 self.ui.write(description)
376 self.ui.write("\n\n")
376 self.ui.write("\n\n")
377 else:
377 else:
378 self.ui.write(_("summary: %s\n") %
378 self.ui.write(_("summary: %s\n") %
379 description.splitlines()[0])
379 description.splitlines()[0])
380 self.ui.write("\n")
380 self.ui.write("\n")
381
381
382 def show_changeset(ui, repo, opts):
382 def show_changeset(ui, repo, opts):
383 """show one changeset using template or regular display.
383 """show one changeset using template or regular display.
384
384
385 Display format will be the first non-empty hit of:
385 Display format will be the first non-empty hit of:
386 1. option 'template'
386 1. option 'template'
387 2. option 'style'
387 2. option 'style'
388 3. [ui] setting 'logtemplate'
388 3. [ui] setting 'logtemplate'
389 4. [ui] setting 'style'
389 4. [ui] setting 'style'
390 If all of these values are either the unset or the empty string,
390 If all of these values are either the unset or the empty string,
391 regular display via changeset_printer() is done.
391 regular display via changeset_printer() is done.
392 """
392 """
393 # options
393 # options
394 tmpl = opts.get('template')
394 tmpl = opts.get('template')
395 mapfile = None
395 mapfile = None
396 if tmpl:
396 if tmpl:
397 tmpl = templater.parsestring(tmpl, quoted=False)
397 tmpl = templater.parsestring(tmpl, quoted=False)
398 else:
398 else:
399 mapfile = opts.get('style')
399 mapfile = opts.get('style')
400 # ui settings
400 # ui settings
401 if not mapfile:
401 if not mapfile:
402 tmpl = ui.config('ui', 'logtemplate')
402 tmpl = ui.config('ui', 'logtemplate')
403 if tmpl:
403 if tmpl:
404 tmpl = templater.parsestring(tmpl)
404 tmpl = templater.parsestring(tmpl)
405 else:
405 else:
406 mapfile = ui.config('ui', 'style')
406 mapfile = ui.config('ui', 'style')
407
407
408 if tmpl or mapfile:
408 if tmpl or mapfile:
409 if mapfile:
409 if mapfile:
410 if not os.path.split(mapfile)[0]:
410 if not os.path.split(mapfile)[0]:
411 mapname = (templater.templatepath('map-cmdline.' + mapfile)
411 mapname = (templater.templatepath('map-cmdline.' + mapfile)
412 or templater.templatepath(mapfile))
412 or templater.templatepath(mapfile))
413 if mapname: mapfile = mapname
413 if mapname: mapfile = mapname
414 try:
414 try:
415 t = templater.changeset_templater(ui, repo, mapfile)
415 t = templater.changeset_templater(ui, repo, mapfile)
416 except SyntaxError, inst:
416 except SyntaxError, inst:
417 raise util.Abort(inst.args[0])
417 raise util.Abort(inst.args[0])
418 if tmpl: t.use_template(tmpl)
418 if tmpl: t.use_template(tmpl)
419 return t
419 return t
420 return changeset_printer(ui, repo)
420 return changeset_printer(ui, repo)
421
421
422 def setremoteconfig(ui, opts):
422 def setremoteconfig(ui, opts):
423 "copy remote options to ui tree"
423 "copy remote options to ui tree"
424 if opts.get('ssh'):
424 if opts.get('ssh'):
425 ui.setconfig("ui", "ssh", opts['ssh'])
425 ui.setconfig("ui", "ssh", opts['ssh'])
426 if opts.get('remotecmd'):
426 if opts.get('remotecmd'):
427 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
427 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
428
428
429 def show_version(ui):
429 def show_version(ui):
430 """output version and copyright information"""
430 """output version and copyright information"""
431 ui.write(_("Mercurial Distributed SCM (version %s)\n")
431 ui.write(_("Mercurial Distributed SCM (version %s)\n")
432 % version.get_version())
432 % version.get_version())
433 ui.status(_(
433 ui.status(_(
434 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
434 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
435 "This is free software; see the source for copying conditions. "
435 "This is free software; see the source for copying conditions. "
436 "There is NO\nwarranty; "
436 "There is NO\nwarranty; "
437 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
437 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
438 ))
438 ))
439
439
440 def help_(ui, name=None, with_version=False):
440 def help_(ui, name=None, with_version=False):
441 """show help for a command, extension, or list of commands
441 """show help for a command, extension, or list of commands
442
442
443 With no arguments, print a list of commands and short help.
443 With no arguments, print a list of commands and short help.
444
444
445 Given a command name, print help for that command.
445 Given a command name, print help for that command.
446
446
447 Given an extension name, print help for that extension, and the
447 Given an extension name, print help for that extension, and the
448 commands it provides."""
448 commands it provides."""
449 option_lists = []
449 option_lists = []
450
450
451 def helpcmd(name):
451 def helpcmd(name):
452 if with_version:
452 if with_version:
453 show_version(ui)
453 show_version(ui)
454 ui.write('\n')
454 ui.write('\n')
455 aliases, i = findcmd(ui, name)
455 aliases, i = findcmd(ui, name)
456 # synopsis
456 # synopsis
457 ui.write("%s\n\n" % i[2])
457 ui.write("%s\n\n" % i[2])
458
458
459 # description
459 # description
460 doc = i[0].__doc__
460 doc = i[0].__doc__
461 if not doc:
461 if not doc:
462 doc = _("(No help text available)")
462 doc = _("(No help text available)")
463 if ui.quiet:
463 if ui.quiet:
464 doc = doc.splitlines(0)[0]
464 doc = doc.splitlines(0)[0]
465 ui.write("%s\n" % doc.rstrip())
465 ui.write("%s\n" % doc.rstrip())
466
466
467 if not ui.quiet:
467 if not ui.quiet:
468 # aliases
468 # aliases
469 if len(aliases) > 1:
469 if len(aliases) > 1:
470 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
470 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
471
471
472 # options
472 # options
473 if i[1]:
473 if i[1]:
474 option_lists.append(("options", i[1]))
474 option_lists.append(("options", i[1]))
475
475
476 def helplist(select=None):
476 def helplist(select=None):
477 h = {}
477 h = {}
478 cmds = {}
478 cmds = {}
479 for c, e in table.items():
479 for c, e in table.items():
480 f = c.split("|", 1)[0]
480 f = c.split("|", 1)[0]
481 if select and not select(f):
481 if select and not select(f):
482 continue
482 continue
483 if name == "shortlist" and not f.startswith("^"):
483 if name == "shortlist" and not f.startswith("^"):
484 continue
484 continue
485 f = f.lstrip("^")
485 f = f.lstrip("^")
486 if not ui.debugflag and f.startswith("debug"):
486 if not ui.debugflag and f.startswith("debug"):
487 continue
487 continue
488 doc = e[0].__doc__
488 doc = e[0].__doc__
489 if not doc:
489 if not doc:
490 doc = _("(No help text available)")
490 doc = _("(No help text available)")
491 h[f] = doc.splitlines(0)[0].rstrip()
491 h[f] = doc.splitlines(0)[0].rstrip()
492 cmds[f] = c.lstrip("^")
492 cmds[f] = c.lstrip("^")
493
493
494 fns = h.keys()
494 fns = h.keys()
495 fns.sort()
495 fns.sort()
496 m = max(map(len, fns))
496 m = max(map(len, fns))
497 for f in fns:
497 for f in fns:
498 if ui.verbose:
498 if ui.verbose:
499 commands = cmds[f].replace("|",", ")
499 commands = cmds[f].replace("|",", ")
500 ui.write(" %s:\n %s\n"%(commands, h[f]))
500 ui.write(" %s:\n %s\n"%(commands, h[f]))
501 else:
501 else:
502 ui.write(' %-*s %s\n' % (m, f, h[f]))
502 ui.write(' %-*s %s\n' % (m, f, h[f]))
503
503
504 def helpext(name):
504 def helpext(name):
505 try:
505 try:
506 mod = findext(name)
506 mod = findext(name)
507 except KeyError:
507 except KeyError:
508 raise UnknownCommand(name)
508 raise UnknownCommand(name)
509
509
510 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
510 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
511 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
511 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
512 for d in doc[1:]:
512 for d in doc[1:]:
513 ui.write(d, '\n')
513 ui.write(d, '\n')
514
514
515 ui.status('\n')
515 ui.status('\n')
516 if ui.verbose:
516 if ui.verbose:
517 ui.status(_('list of commands:\n\n'))
517 ui.status(_('list of commands:\n\n'))
518 else:
518 else:
519 ui.status(_('list of commands (use "hg help -v %s" '
519 ui.status(_('list of commands (use "hg help -v %s" '
520 'to show aliases and global options):\n\n') % name)
520 'to show aliases and global options):\n\n') % name)
521
521
522 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
522 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
523 helplist(modcmds.has_key)
523 helplist(modcmds.has_key)
524
524
525 if name and name != 'shortlist':
525 if name and name != 'shortlist':
526 try:
526 try:
527 helpcmd(name)
527 helpcmd(name)
528 except UnknownCommand:
528 except UnknownCommand:
529 helpext(name)
529 helpext(name)
530
530
531 else:
531 else:
532 # program name
532 # program name
533 if ui.verbose or with_version:
533 if ui.verbose or with_version:
534 show_version(ui)
534 show_version(ui)
535 else:
535 else:
536 ui.status(_("Mercurial Distributed SCM\n"))
536 ui.status(_("Mercurial Distributed SCM\n"))
537 ui.status('\n')
537 ui.status('\n')
538
538
539 # list of commands
539 # list of commands
540 if name == "shortlist":
540 if name == "shortlist":
541 ui.status(_('basic commands (use "hg help" '
541 ui.status(_('basic commands (use "hg help" '
542 'for the full list or option "-v" for details):\n\n'))
542 'for the full list or option "-v" for details):\n\n'))
543 elif ui.verbose:
543 elif ui.verbose:
544 ui.status(_('list of commands:\n\n'))
544 ui.status(_('list of commands:\n\n'))
545 else:
545 else:
546 ui.status(_('list of commands (use "hg help -v" '
546 ui.status(_('list of commands (use "hg help -v" '
547 'to show aliases and global options):\n\n'))
547 'to show aliases and global options):\n\n'))
548
548
549 helplist()
549 helplist()
550
550
551 # global options
551 # global options
552 if ui.verbose:
552 if ui.verbose:
553 option_lists.append(("global options", globalopts))
553 option_lists.append(("global options", globalopts))
554
554
555 # list all option lists
555 # list all option lists
556 opt_output = []
556 opt_output = []
557 for title, options in option_lists:
557 for title, options in option_lists:
558 opt_output.append(("\n%s:\n" % title, None))
558 opt_output.append(("\n%s:\n" % title, None))
559 for shortopt, longopt, default, desc in options:
559 for shortopt, longopt, default, desc in options:
560 if "DEPRECATED" in desc and not ui.verbose: continue
560 if "DEPRECATED" in desc and not ui.verbose: continue
561 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
561 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
562 longopt and " --%s" % longopt),
562 longopt and " --%s" % longopt),
563 "%s%s" % (desc,
563 "%s%s" % (desc,
564 default
564 default
565 and _(" (default: %s)") % default
565 and _(" (default: %s)") % default
566 or "")))
566 or "")))
567
567
568 if opt_output:
568 if opt_output:
569 opts_len = max([len(line[0]) for line in opt_output if line[1]])
569 opts_len = max([len(line[0]) for line in opt_output if line[1]])
570 for first, second in opt_output:
570 for first, second in opt_output:
571 if second:
571 if second:
572 ui.write(" %-*s %s\n" % (opts_len, first, second))
572 ui.write(" %-*s %s\n" % (opts_len, first, second))
573 else:
573 else:
574 ui.write("%s\n" % first)
574 ui.write("%s\n" % first)
575
575
576 # Commands start here, listed alphabetically
576 # Commands start here, listed alphabetically
577
577
578 def add(ui, repo, *pats, **opts):
578 def add(ui, repo, *pats, **opts):
579 """add the specified files on the next commit
579 """add the specified files on the next commit
580
580
581 Schedule files to be version controlled and added to the repository.
581 Schedule files to be version controlled and added to the repository.
582
582
583 The files will be added to the repository at the next commit.
583 The files will be added to the repository at the next commit.
584
584
585 If no names are given, add all files in the repository.
585 If no names are given, add all files in the repository.
586 """
586 """
587
587
588 names = []
588 names = []
589 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
589 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
590 if exact:
590 if exact:
591 if ui.verbose:
591 if ui.verbose:
592 ui.status(_('adding %s\n') % rel)
592 ui.status(_('adding %s\n') % rel)
593 names.append(abs)
593 names.append(abs)
594 elif repo.dirstate.state(abs) == '?':
594 elif repo.dirstate.state(abs) == '?':
595 ui.status(_('adding %s\n') % rel)
595 ui.status(_('adding %s\n') % rel)
596 names.append(abs)
596 names.append(abs)
597 if not opts.get('dry_run'):
597 if not opts.get('dry_run'):
598 repo.add(names)
598 repo.add(names)
599
599
600 def addremove(ui, repo, *pats, **opts):
600 def addremove(ui, repo, *pats, **opts):
601 """add all new files, delete all missing files
601 """add all new files, delete all missing files
602
602
603 Add all new files and remove all missing files from the repository.
603 Add all new files and remove all missing files from the repository.
604
604
605 New files are ignored if they match any of the patterns in .hgignore. As
605 New files are ignored if they match any of the patterns in .hgignore. As
606 with add, these changes take effect at the next commit.
606 with add, these changes take effect at the next commit.
607
607
608 Use the -s option to detect renamed files. With a parameter > 0,
608 Use the -s option to detect renamed files. With a parameter > 0,
609 this compares every removed file with every added file and records
609 this compares every removed file with every added file and records
610 those similar enough as renames. This option takes a percentage
610 those similar enough as renames. This option takes a percentage
611 between 0 (disabled) and 100 (files must be identical) as its
611 between 0 (disabled) and 100 (files must be identical) as its
612 parameter. Detecting renamed files this way can be expensive.
612 parameter. Detecting renamed files this way can be expensive.
613 """
613 """
614 sim = float(opts.get('similarity') or 0)
614 sim = float(opts.get('similarity') or 0)
615 if sim < 0 or sim > 100:
615 if sim < 0 or sim > 100:
616 raise util.Abort(_('similarity must be between 0 and 100'))
616 raise util.Abort(_('similarity must be between 0 and 100'))
617 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
617 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
618
618
619 def annotate(ui, repo, *pats, **opts):
619 def annotate(ui, repo, *pats, **opts):
620 """show changeset information per file line
620 """show changeset information per file line
621
621
622 List changes in files, showing the revision id responsible for each line
622 List changes in files, showing the revision id responsible for each line
623
623
624 This command is useful to discover who did a change or when a change took
624 This command is useful to discover who did a change or when a change took
625 place.
625 place.
626
626
627 Without the -a option, annotate will avoid processing files it
627 Without the -a option, annotate will avoid processing files it
628 detects as binary. With -a, annotate will generate an annotation
628 detects as binary. With -a, annotate will generate an annotation
629 anyway, probably with undesirable results.
629 anyway, probably with undesirable results.
630 """
630 """
631 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
631 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
632
632
633 if not pats:
633 if not pats:
634 raise util.Abort(_('at least one file name or pattern required'))
634 raise util.Abort(_('at least one file name or pattern required'))
635
635
636 opmap = [['user', lambda x: ui.shortuser(x.user())],
636 opmap = [['user', lambda x: ui.shortuser(x.user())],
637 ['number', lambda x: str(x.rev())],
637 ['number', lambda x: str(x.rev())],
638 ['changeset', lambda x: short(x.node())],
638 ['changeset', lambda x: short(x.node())],
639 ['date', getdate], ['follow', lambda x: x.path()]]
639 ['date', getdate], ['follow', lambda x: x.path()]]
640 if (not opts['user'] and not opts['changeset'] and not opts['date']
640 if (not opts['user'] and not opts['changeset'] and not opts['date']
641 and not opts['follow']):
641 and not opts['follow']):
642 opts['number'] = 1
642 opts['number'] = 1
643
643
644 ctx = repo.changectx(opts['rev'])
644 ctx = repo.changectx(opts['rev'])
645
645
646 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
646 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
647 node=ctx.node()):
647 node=ctx.node()):
648 fctx = ctx.filectx(abs)
648 fctx = ctx.filectx(abs)
649 if not opts['text'] and util.binary(fctx.data()):
649 if not opts['text'] and util.binary(fctx.data()):
650 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
650 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
651 continue
651 continue
652
652
653 lines = fctx.annotate(follow=opts.get('follow'))
653 lines = fctx.annotate(follow=opts.get('follow'))
654 pieces = []
654 pieces = []
655
655
656 for o, f in opmap:
656 for o, f in opmap:
657 if opts[o]:
657 if opts[o]:
658 l = [f(n) for n, dummy in lines]
658 l = [f(n) for n, dummy in lines]
659 if l:
659 if l:
660 m = max(map(len, l))
660 m = max(map(len, l))
661 pieces.append(["%*s" % (m, x) for x in l])
661 pieces.append(["%*s" % (m, x) for x in l])
662
662
663 if pieces:
663 if pieces:
664 for p, l in zip(zip(*pieces), lines):
664 for p, l in zip(zip(*pieces), lines):
665 ui.write("%s: %s" % (" ".join(p), l[1]))
665 ui.write("%s: %s" % (" ".join(p), l[1]))
666
666
667 def archive(ui, repo, dest, **opts):
667 def archive(ui, repo, dest, **opts):
668 '''create unversioned archive of a repository revision
668 '''create unversioned archive of a repository revision
669
669
670 By default, the revision used is the parent of the working
670 By default, the revision used is the parent of the working
671 directory; use "-r" to specify a different revision.
671 directory; use "-r" to specify a different revision.
672
672
673 To specify the type of archive to create, use "-t". Valid
673 To specify the type of archive to create, use "-t". Valid
674 types are:
674 types are:
675
675
676 "files" (default): a directory full of files
676 "files" (default): a directory full of files
677 "tar": tar archive, uncompressed
677 "tar": tar archive, uncompressed
678 "tbz2": tar archive, compressed using bzip2
678 "tbz2": tar archive, compressed using bzip2
679 "tgz": tar archive, compressed using gzip
679 "tgz": tar archive, compressed using gzip
680 "uzip": zip archive, uncompressed
680 "uzip": zip archive, uncompressed
681 "zip": zip archive, compressed using deflate
681 "zip": zip archive, compressed using deflate
682
682
683 The exact name of the destination archive or directory is given
683 The exact name of the destination archive or directory is given
684 using a format string; see "hg help export" for details.
684 using a format string; see "hg help export" for details.
685
685
686 Each member added to an archive file has a directory prefix
686 Each member added to an archive file has a directory prefix
687 prepended. Use "-p" to specify a format string for the prefix.
687 prepended. Use "-p" to specify a format string for the prefix.
688 The default is the basename of the archive, with suffixes removed.
688 The default is the basename of the archive, with suffixes removed.
689 '''
689 '''
690
690
691 node = repo.changectx(opts['rev']).node()
691 node = repo.changectx(opts['rev']).node()
692 dest = cmdutil.make_filename(repo, dest, node)
692 dest = cmdutil.make_filename(repo, dest, node)
693 if os.path.realpath(dest) == repo.root:
693 if os.path.realpath(dest) == repo.root:
694 raise util.Abort(_('repository root cannot be destination'))
694 raise util.Abort(_('repository root cannot be destination'))
695 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
695 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
696 kind = opts.get('type') or 'files'
696 kind = opts.get('type') or 'files'
697 prefix = opts['prefix']
697 prefix = opts['prefix']
698 if dest == '-':
698 if dest == '-':
699 if kind == 'files':
699 if kind == 'files':
700 raise util.Abort(_('cannot archive plain files to stdout'))
700 raise util.Abort(_('cannot archive plain files to stdout'))
701 dest = sys.stdout
701 dest = sys.stdout
702 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
702 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
703 prefix = cmdutil.make_filename(repo, prefix, node)
703 prefix = cmdutil.make_filename(repo, prefix, node)
704 archival.archive(repo, dest, node, kind, not opts['no_decode'],
704 archival.archive(repo, dest, node, kind, not opts['no_decode'],
705 matchfn, prefix)
705 matchfn, prefix)
706
706
707 def backout(ui, repo, rev, **opts):
707 def backout(ui, repo, rev, **opts):
708 '''reverse effect of earlier changeset
708 '''reverse effect of earlier changeset
709
709
710 Commit the backed out changes as a new changeset. The new
710 Commit the backed out changes as a new changeset. The new
711 changeset is a child of the backed out changeset.
711 changeset is a child of the backed out changeset.
712
712
713 If you back out a changeset other than the tip, a new head is
713 If you back out a changeset other than the tip, a new head is
714 created. This head is the parent of the working directory. If
714 created. This head is the parent of the working directory. If
715 you back out an old changeset, your working directory will appear
715 you back out an old changeset, your working directory will appear
716 old after the backout. You should merge the backout changeset
716 old after the backout. You should merge the backout changeset
717 with another head.
717 with another head.
718
718
719 The --merge option remembers the parent of the working directory
719 The --merge option remembers the parent of the working directory
720 before starting the backout, then merges the new head with that
720 before starting the backout, then merges the new head with that
721 changeset afterwards. This saves you from doing the merge by
721 changeset afterwards. This saves you from doing the merge by
722 hand. The result of this merge is not committed, as for a normal
722 hand. The result of this merge is not committed, as for a normal
723 merge.'''
723 merge.'''
724
724
725 bail_if_changed(repo)
725 bail_if_changed(repo)
726 op1, op2 = repo.dirstate.parents()
726 op1, op2 = repo.dirstate.parents()
727 if op2 != nullid:
727 if op2 != nullid:
728 raise util.Abort(_('outstanding uncommitted merge'))
728 raise util.Abort(_('outstanding uncommitted merge'))
729 node = repo.lookup(rev)
729 node = repo.lookup(rev)
730 p1, p2 = repo.changelog.parents(node)
730 p1, p2 = repo.changelog.parents(node)
731 if p1 == nullid:
731 if p1 == nullid:
732 raise util.Abort(_('cannot back out a change with no parents'))
732 raise util.Abort(_('cannot back out a change with no parents'))
733 if p2 != nullid:
733 if p2 != nullid:
734 if not opts['parent']:
734 if not opts['parent']:
735 raise util.Abort(_('cannot back out a merge changeset without '
735 raise util.Abort(_('cannot back out a merge changeset without '
736 '--parent'))
736 '--parent'))
737 p = repo.lookup(opts['parent'])
737 p = repo.lookup(opts['parent'])
738 if p not in (p1, p2):
738 if p not in (p1, p2):
739 raise util.Abort(_('%s is not a parent of %s' %
739 raise util.Abort(_('%s is not a parent of %s' %
740 (short(p), short(node))))
740 (short(p), short(node))))
741 parent = p
741 parent = p
742 else:
742 else:
743 if opts['parent']:
743 if opts['parent']:
744 raise util.Abort(_('cannot use --parent on non-merge changeset'))
744 raise util.Abort(_('cannot use --parent on non-merge changeset'))
745 parent = p1
745 parent = p1
746 hg.clean(repo, node, show_stats=False)
746 hg.clean(repo, node, show_stats=False)
747 revert_opts = opts.copy()
747 revert_opts = opts.copy()
748 revert_opts['all'] = True
748 revert_opts['all'] = True
749 revert_opts['rev'] = hex(parent)
749 revert_opts['rev'] = hex(parent)
750 revert(ui, repo, **revert_opts)
750 revert(ui, repo, **revert_opts)
751 commit_opts = opts.copy()
751 commit_opts = opts.copy()
752 commit_opts['addremove'] = False
752 commit_opts['addremove'] = False
753 if not commit_opts['message'] and not commit_opts['logfile']:
753 if not commit_opts['message'] and not commit_opts['logfile']:
754 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
754 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
755 commit_opts['force_editor'] = True
755 commit_opts['force_editor'] = True
756 commit(ui, repo, **commit_opts)
756 commit(ui, repo, **commit_opts)
757 def nice(node):
757 def nice(node):
758 return '%d:%s' % (repo.changelog.rev(node), short(node))
758 return '%d:%s' % (repo.changelog.rev(node), short(node))
759 ui.status(_('changeset %s backs out changeset %s\n') %
759 ui.status(_('changeset %s backs out changeset %s\n') %
760 (nice(repo.changelog.tip()), nice(node)))
760 (nice(repo.changelog.tip()), nice(node)))
761 if op1 != node:
761 if op1 != node:
762 if opts['merge']:
762 if opts['merge']:
763 ui.status(_('merging with changeset %s\n') % nice(op1))
763 ui.status(_('merging with changeset %s\n') % nice(op1))
764 n = _lookup(repo, hex(op1))
764 n = _lookup(repo, hex(op1))
765 hg.merge(repo, n)
765 hg.merge(repo, n)
766 else:
766 else:
767 ui.status(_('the backout changeset is a new head - '
767 ui.status(_('the backout changeset is a new head - '
768 'do not forget to merge\n'))
768 'do not forget to merge\n'))
769 ui.status(_('(use "backout --merge" '
769 ui.status(_('(use "backout --merge" '
770 'if you want to auto-merge)\n'))
770 'if you want to auto-merge)\n'))
771
771
772 def branch(ui, repo, label=None):
772 def branch(ui, repo, label=None):
773 """set or show the current branch name
773 """set or show the current branch name
774
774
775 With <name>, set the current branch name. Otherwise, show the
775 With <name>, set the current branch name. Otherwise, show the
776 current branch name.
776 current branch name.
777 """
777 """
778
778
779 if label is not None:
779 if label is not None:
780 repo.opener("branch", "w").write(label)
780 repo.opener("branch", "w").write(label)
781 else:
781 else:
782 b = repo.workingctx().branch()
782 b = repo.workingctx().branch()
783 if b:
783 if b:
784 ui.write("%s\n" % b)
784 ui.write("%s\n" % b)
785
785
786 def branches(ui, repo):
786 def branches(ui, repo):
787 """list repository named branches
787 """list repository named branches
788
788
789 List the repository's named branches.
789 List the repository's named branches.
790 """
790 """
791 b = repo.branchtags()
791 b = repo.branchtags()
792 l = [(-repo.changelog.rev(n), n, t) for t,n in b.items()]
792 l = [(-repo.changelog.rev(n), n, t) for t,n in b.items()]
793 l.sort()
793 l.sort()
794 for r, n, t in l:
794 for r, n, t in l:
795 hexfunc = ui.debugflag and hex or short
795 hexfunc = ui.debugflag and hex or short
796 if ui.quiet:
796 if ui.quiet:
797 ui.write("%s\n" % t)
797 ui.write("%s\n" % t)
798 else:
798 else:
799 ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))
799 ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))
800
800
801 def bundle(ui, repo, fname, dest=None, **opts):
801 def bundle(ui, repo, fname, dest=None, **opts):
802 """create a changegroup file
802 """create a changegroup file
803
803
804 Generate a compressed changegroup file collecting changesets not
804 Generate a compressed changegroup file collecting changesets not
805 found in the other repository.
805 found in the other repository.
806
806
807 If no destination repository is specified the destination is assumed
807 If no destination repository is specified the destination is assumed
808 to have all the nodes specified by one or more --base parameters.
808 to have all the nodes specified by one or more --base parameters.
809
809
810 The bundle file can then be transferred using conventional means and
810 The bundle file can then be transferred using conventional means and
811 applied to another repository with the unbundle or pull command.
811 applied to another repository with the unbundle or pull command.
812 This is useful when direct push and pull are not available or when
812 This is useful when direct push and pull are not available or when
813 exporting an entire repository is undesirable.
813 exporting an entire repository is undesirable.
814
814
815 Applying bundles preserves all changeset contents including
815 Applying bundles preserves all changeset contents including
816 permissions, copy/rename information, and revision history.
816 permissions, copy/rename information, and revision history.
817 """
817 """
818 revs = opts.get('rev') or None
818 revs = opts.get('rev') or None
819 if revs:
819 if revs:
820 revs = [repo.lookup(rev) for rev in revs]
820 revs = [repo.lookup(rev) for rev in revs]
821 base = opts.get('base')
821 base = opts.get('base')
822 if base:
822 if base:
823 if dest:
823 if dest:
824 raise util.Abort(_("--base is incompatible with specifiying "
824 raise util.Abort(_("--base is incompatible with specifiying "
825 "a destination"))
825 "a destination"))
826 base = [repo.lookup(rev) for rev in base]
826 base = [repo.lookup(rev) for rev in base]
827 # create the right base
827 # create the right base
828 # XXX: nodesbetween / changegroup* should be "fixed" instead
828 # XXX: nodesbetween / changegroup* should be "fixed" instead
829 o = []
829 o = []
830 has_set = sets.Set(base)
830 has_set = sets.Set(base)
831 for n in base:
831 for n in base:
832 has_set.update(repo.changelog.reachable(n))
832 has_set.update(repo.changelog.reachable(n))
833 if revs:
833 if revs:
834 visit = list(revs)
834 visit = list(revs)
835 else:
835 else:
836 visit = repo.changelog.heads()
836 visit = repo.changelog.heads()
837 seen = sets.Set(visit)
837 seen = sets.Set(visit)
838 while visit:
838 while visit:
839 n = visit.pop(0)
839 n = visit.pop(0)
840 parents = [p for p in repo.changelog.parents(n)
840 parents = [p for p in repo.changelog.parents(n)
841 if p != nullid and p not in has_set]
841 if p != nullid and p not in has_set]
842 if len(parents) == 0:
842 if len(parents) == 0:
843 o.insert(0, n)
843 o.insert(0, n)
844 else:
844 else:
845 for p in parents:
845 for p in parents:
846 if p not in seen:
846 if p not in seen:
847 seen.add(p)
847 seen.add(p)
848 visit.append(p)
848 visit.append(p)
849 else:
849 else:
850 setremoteconfig(ui, opts)
850 setremoteconfig(ui, opts)
851 dest = ui.expandpath(dest or 'default-push', dest or 'default')
851 dest = ui.expandpath(dest or 'default-push', dest or 'default')
852 other = hg.repository(ui, dest)
852 other = hg.repository(ui, dest)
853 o = repo.findoutgoing(other, force=opts['force'])
853 o = repo.findoutgoing(other, force=opts['force'])
854
854
855 if revs:
855 if revs:
856 cg = repo.changegroupsubset(o, revs, 'bundle')
856 cg = repo.changegroupsubset(o, revs, 'bundle')
857 else:
857 else:
858 cg = repo.changegroup(o, 'bundle')
858 cg = repo.changegroup(o, 'bundle')
859 write_bundle(cg, fname)
859 write_bundle(cg, fname)
860
860
861 def cat(ui, repo, file1, *pats, **opts):
861 def cat(ui, repo, file1, *pats, **opts):
862 """output the latest or given revisions of files
862 """output the latest or given revisions of files
863
863
864 Print the specified files as they were at the given revision.
864 Print the specified files as they were at the given revision.
865 If no revision is given then working dir parent is used, or tip
865 If no revision is given then working dir parent is used, or tip
866 if no revision is checked out.
866 if no revision is checked out.
867
867
868 Output may be to a file, in which case the name of the file is
868 Output may be to a file, in which case the name of the file is
869 given using a format string. The formatting rules are the same as
869 given using a format string. The formatting rules are the same as
870 for the export command, with the following additions:
870 for the export command, with the following additions:
871
871
872 %s basename of file being printed
872 %s basename of file being printed
873 %d dirname of file being printed, or '.' if in repo root
873 %d dirname of file being printed, or '.' if in repo root
874 %p root-relative path name of file being printed
874 %p root-relative path name of file being printed
875 """
875 """
876 ctx = repo.changectx(opts['rev'])
876 ctx = repo.changectx(opts['rev'])
877 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
877 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
878 ctx.node()):
878 ctx.node()):
879 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
879 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
880 fp.write(ctx.filectx(abs).data())
880 fp.write(ctx.filectx(abs).data())
881
881
882 def clone(ui, source, dest=None, **opts):
882 def clone(ui, source, dest=None, **opts):
883 """make a copy of an existing repository
883 """make a copy of an existing repository
884
884
885 Create a copy of an existing repository in a new directory.
885 Create a copy of an existing repository in a new directory.
886
886
887 If no destination directory name is specified, it defaults to the
887 If no destination directory name is specified, it defaults to the
888 basename of the source.
888 basename of the source.
889
889
890 The location of the source is added to the new repository's
890 The location of the source is added to the new repository's
891 .hg/hgrc file, as the default to be used for future pulls.
891 .hg/hgrc file, as the default to be used for future pulls.
892
892
893 For efficiency, hardlinks are used for cloning whenever the source
893 For efficiency, hardlinks are used for cloning whenever the source
894 and destination are on the same filesystem (note this applies only
894 and destination are on the same filesystem (note this applies only
895 to the repository data, not to the checked out files). Some
895 to the repository data, not to the checked out files). Some
896 filesystems, such as AFS, implement hardlinking incorrectly, but
896 filesystems, such as AFS, implement hardlinking incorrectly, but
897 do not report errors. In these cases, use the --pull option to
897 do not report errors. In these cases, use the --pull option to
898 avoid hardlinking.
898 avoid hardlinking.
899
899
900 You can safely clone repositories and checked out files using full
900 You can safely clone repositories and checked out files using full
901 hardlinks with
901 hardlinks with
902
902
903 $ cp -al REPO REPOCLONE
903 $ cp -al REPO REPOCLONE
904
904
905 which is the fastest way to clone. However, the operation is not
905 which is the fastest way to clone. However, the operation is not
906 atomic (making sure REPO is not modified during the operation is
906 atomic (making sure REPO is not modified during the operation is
907 up to you) and you have to make sure your editor breaks hardlinks
907 up to you) and you have to make sure your editor breaks hardlinks
908 (Emacs and most Linux kernel tools do so).
909
909
910 If you use the -r option to clone up to a specific revision, no
910 If you use the -r option to clone up to a specific revision, no
911 subsequent revisions will be present in the cloned repository.
911 subsequent revisions will be present in the cloned repository.
912 This option implies --pull, even on local repositories.
912 This option implies --pull, even on local repositories.
913
913
914 See pull for valid source format details.
914 See pull for valid source format details.
915
915
916 It is possible to specify an ssh:// URL as the destination, but no
917 .hg/hgrc or working directory will be created on the remote side.
918 Look at the help text for the pull command for important details
919 about ssh:// URLs.
920 """
920 setremoteconfig(ui, opts)
921 setremoteconfig(ui, opts)
921 hg.clone(ui, ui.expandpath(source), dest,
922 hg.clone(ui, ui.expandpath(source), dest,
922 pull=opts['pull'],
923 pull=opts['pull'],
923 stream=opts['uncompressed'],
924 stream=opts['uncompressed'],
924 rev=opts['rev'],
925 rev=opts['rev'],
925 update=not opts['noupdate'])
926 update=not opts['noupdate'])
926
927
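The hardlink discussion in the clone docstring boils down to recreating the directory tree while os.link()ing every file, which is what "cp -al" does. A rough stand-alone sketch of that idea, assuming a POSIX filesystem that supports hardlinks; hg.clone itself adds locking, repository awareness and copy fallbacks that this ignores:

import os

def hardlink_tree(src, dst):
    # Roughly what "cp -al REPO REPOCLONE" does: recreate src's directory
    # layout under dst and hardlink every file.  Not atomic -- the caller must
    # make sure src is not modified while this runs.
    src = src.rstrip(os.sep)
    for root, dirs, files in os.walk(src):
        target = dst + root[len(src):]
        if not os.path.isdir(target):
            os.makedirs(target)
        for name in files:
            os.link(os.path.join(root, name), os.path.join(target, name))
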
927 def commit(ui, repo, *pats, **opts):
928 def commit(ui, repo, *pats, **opts):
928 """commit the specified files or all outstanding changes
929 """commit the specified files or all outstanding changes
929
930
930 Commit changes to the given files into the repository.
931 Commit changes to the given files into the repository.
931
932
932 If a list of files is omitted, all changes reported by "hg status"
933 If a list of files is omitted, all changes reported by "hg status"
933 will be committed.
934 will be committed.
934
935
936 If no commit message is specified, the editor configured in your hgrc
937 or in the EDITOR environment variable is started so you can enter a message.
937 """
938 """
938 message = logmessage(opts)
939 message = logmessage(opts)
939
940
940 if opts['addremove']:
941 if opts['addremove']:
941 cmdutil.addremove(repo, pats, opts)
942 cmdutil.addremove(repo, pats, opts)
942 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
943 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
943 if pats:
944 if pats:
944 modified, added, removed = repo.status(files=fns, match=match)[:3]
945 modified, added, removed = repo.status(files=fns, match=match)[:3]
945 files = modified + added + removed
946 files = modified + added + removed
946 else:
947 else:
947 files = []
948 files = []
948 try:
949 try:
949 repo.commit(files, message, opts['user'], opts['date'], match,
950 repo.commit(files, message, opts['user'], opts['date'], match,
950 force_editor=opts.get('force_editor'))
951 force_editor=opts.get('force_editor'))
951 except ValueError, inst:
952 except ValueError, inst:
952 raise util.Abort(str(inst))
953 raise util.Abort(str(inst))
953
954
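The commit docstring above says that without -m or -l the configured editor is launched. A tiny sketch of the fallback order it describes; treating 'vi' as the final default is an assumption made for this illustration, not something taken from this file:

import os

def pick_editor(hgrc_editor=None):
    # hgrc_editor stands in for an editor configured in the hgrc; otherwise
    # fall back to $EDITOR, then to a plain default.
    return hgrc_editor or os.environ.get('EDITOR') or 'vi'

# pick_editor(None) honours $EDITOR; pick_editor('emacs -nw') always wins.
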
954 def docopy(ui, repo, pats, opts, wlock):
955 def docopy(ui, repo, pats, opts, wlock):
955 # called with the repo lock held
956 # called with the repo lock held
956 cwd = repo.getcwd()
957 cwd = repo.getcwd()
957 errors = 0
958 errors = 0
958 copied = []
959 copied = []
959 targets = {}
960 targets = {}
960
961
961 def okaytocopy(abs, rel, exact):
962 def okaytocopy(abs, rel, exact):
962 reasons = {'?': _('is not managed'),
963 reasons = {'?': _('is not managed'),
963 'a': _('has been marked for add'),
964 'a': _('has been marked for add'),
964 'r': _('has been marked for remove')}
965 'r': _('has been marked for remove')}
965 state = repo.dirstate.state(abs)
966 state = repo.dirstate.state(abs)
966 reason = reasons.get(state)
967 reason = reasons.get(state)
967 if reason:
968 if reason:
968 if state == 'a':
969 if state == 'a':
969 origsrc = repo.dirstate.copied(abs)
970 origsrc = repo.dirstate.copied(abs)
970 if origsrc is not None:
971 if origsrc is not None:
971 return origsrc
972 return origsrc
972 if exact:
973 if exact:
973 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
974 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
974 else:
975 else:
975 return abs
976 return abs
976
977
977 def copy(origsrc, abssrc, relsrc, target, exact):
978 def copy(origsrc, abssrc, relsrc, target, exact):
978 abstarget = util.canonpath(repo.root, cwd, target)
979 abstarget = util.canonpath(repo.root, cwd, target)
979 reltarget = util.pathto(cwd, abstarget)
980 reltarget = util.pathto(cwd, abstarget)
980 prevsrc = targets.get(abstarget)
981 prevsrc = targets.get(abstarget)
981 if prevsrc is not None:
982 if prevsrc is not None:
982 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
983 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
983 (reltarget, abssrc, prevsrc))
984 (reltarget, abssrc, prevsrc))
984 return
985 return
985 if (not opts['after'] and os.path.exists(reltarget) or
986 if (not opts['after'] and os.path.exists(reltarget) or
986 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
987 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
987 if not opts['force']:
988 if not opts['force']:
988 ui.warn(_('%s: not overwriting - file exists\n') %
989 ui.warn(_('%s: not overwriting - file exists\n') %
989 reltarget)
990 reltarget)
990 return
991 return
991 if not opts['after'] and not opts.get('dry_run'):
992 if not opts['after'] and not opts.get('dry_run'):
992 os.unlink(reltarget)
993 os.unlink(reltarget)
993 if opts['after']:
994 if opts['after']:
994 if not os.path.exists(reltarget):
995 if not os.path.exists(reltarget):
995 return
996 return
996 else:
997 else:
997 targetdir = os.path.dirname(reltarget) or '.'
998 targetdir = os.path.dirname(reltarget) or '.'
998 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
999 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
999 os.makedirs(targetdir)
1000 os.makedirs(targetdir)
1000 try:
1001 try:
1001 restore = repo.dirstate.state(abstarget) == 'r'
1002 restore = repo.dirstate.state(abstarget) == 'r'
1002 if restore and not opts.get('dry_run'):
1003 if restore and not opts.get('dry_run'):
1003 repo.undelete([abstarget], wlock)
1004 repo.undelete([abstarget], wlock)
1004 try:
1005 try:
1005 if not opts.get('dry_run'):
1006 if not opts.get('dry_run'):
1006 shutil.copyfile(relsrc, reltarget)
1007 shutil.copyfile(relsrc, reltarget)
1007 shutil.copymode(relsrc, reltarget)
1008 shutil.copymode(relsrc, reltarget)
1008 restore = False
1009 restore = False
1009 finally:
1010 finally:
1010 if restore:
1011 if restore:
1011 repo.remove([abstarget], wlock)
1012 repo.remove([abstarget], wlock)
1012 except shutil.Error, inst:
1013 except shutil.Error, inst:
1013 raise util.Abort(str(inst))
1014 raise util.Abort(str(inst))
1014 except IOError, inst:
1015 except IOError, inst:
1015 if inst.errno == errno.ENOENT:
1016 if inst.errno == errno.ENOENT:
1016 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1017 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1017 else:
1018 else:
1018 ui.warn(_('%s: cannot copy - %s\n') %
1019 ui.warn(_('%s: cannot copy - %s\n') %
1019 (relsrc, inst.strerror))
1020 (relsrc, inst.strerror))
1020 errors += 1
1021 errors += 1
1021 return
1022 return
1022 if ui.verbose or not exact:
1023 if ui.verbose or not exact:
1023 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1024 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1024 targets[abstarget] = abssrc
1025 targets[abstarget] = abssrc
1025 if abstarget != origsrc and not opts.get('dry_run'):
1026 if abstarget != origsrc and not opts.get('dry_run'):
1026 repo.copy(origsrc, abstarget, wlock)
1027 repo.copy(origsrc, abstarget, wlock)
1027 copied.append((abssrc, relsrc, exact))
1028 copied.append((abssrc, relsrc, exact))
1028
1029
1029 def targetpathfn(pat, dest, srcs):
1030 def targetpathfn(pat, dest, srcs):
1030 if os.path.isdir(pat):
1031 if os.path.isdir(pat):
1031 abspfx = util.canonpath(repo.root, cwd, pat)
1032 abspfx = util.canonpath(repo.root, cwd, pat)
1032 if destdirexists:
1033 if destdirexists:
1033 striplen = len(os.path.split(abspfx)[0])
1034 striplen = len(os.path.split(abspfx)[0])
1034 else:
1035 else:
1035 striplen = len(abspfx)
1036 striplen = len(abspfx)
1036 if striplen:
1037 if striplen:
1037 striplen += len(os.sep)
1038 striplen += len(os.sep)
1038 res = lambda p: os.path.join(dest, p[striplen:])
1039 res = lambda p: os.path.join(dest, p[striplen:])
1039 elif destdirexists:
1040 elif destdirexists:
1040 res = lambda p: os.path.join(dest, os.path.basename(p))
1041 res = lambda p: os.path.join(dest, os.path.basename(p))
1041 else:
1042 else:
1042 res = lambda p: dest
1043 res = lambda p: dest
1043 return res
1044 return res
1044
1045
1045 def targetpathafterfn(pat, dest, srcs):
1046 def targetpathafterfn(pat, dest, srcs):
1046 if util.patkind(pat, None)[0]:
1047 if util.patkind(pat, None)[0]:
1047 # a mercurial pattern
1048 # a mercurial pattern
1048 res = lambda p: os.path.join(dest, os.path.basename(p))
1049 res = lambda p: os.path.join(dest, os.path.basename(p))
1049 else:
1050 else:
1050 abspfx = util.canonpath(repo.root, cwd, pat)
1051 abspfx = util.canonpath(repo.root, cwd, pat)
1051 if len(abspfx) < len(srcs[0][0]):
1052 if len(abspfx) < len(srcs[0][0]):
1052 # A directory. Either the target path contains the last
1053 # A directory. Either the target path contains the last
1053 # component of the source path or it does not.
1054 # component of the source path or it does not.
1054 def evalpath(striplen):
1055 def evalpath(striplen):
1055 score = 0
1056 score = 0
1056 for s in srcs:
1057 for s in srcs:
1057 t = os.path.join(dest, s[0][striplen:])
1058 t = os.path.join(dest, s[0][striplen:])
1058 if os.path.exists(t):
1059 if os.path.exists(t):
1059 score += 1
1060 score += 1
1060 return score
1061 return score
1061
1062
1062 striplen = len(abspfx)
1063 striplen = len(abspfx)
1063 if striplen:
1064 if striplen:
1064 striplen += len(os.sep)
1065 striplen += len(os.sep)
1065 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1066 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1066 score = evalpath(striplen)
1067 score = evalpath(striplen)
1067 striplen1 = len(os.path.split(abspfx)[0])
1068 striplen1 = len(os.path.split(abspfx)[0])
1068 if striplen1:
1069 if striplen1:
1069 striplen1 += len(os.sep)
1070 striplen1 += len(os.sep)
1070 if evalpath(striplen1) > score:
1071 if evalpath(striplen1) > score:
1071 striplen = striplen1
1072 striplen = striplen1
1072 res = lambda p: os.path.join(dest, p[striplen:])
1073 res = lambda p: os.path.join(dest, p[striplen:])
1073 else:
1074 else:
1074 # a file
1075 # a file
1075 if destdirexists:
1076 if destdirexists:
1076 res = lambda p: os.path.join(dest, os.path.basename(p))
1077 res = lambda p: os.path.join(dest, os.path.basename(p))
1077 else:
1078 else:
1078 res = lambda p: dest
1079 res = lambda p: dest
1079 return res
1080 return res
1080
1081
1081
1082
1082 pats = list(pats)
1083 pats = list(pats)
1083 if not pats:
1084 if not pats:
1084 raise util.Abort(_('no source or destination specified'))
1085 raise util.Abort(_('no source or destination specified'))
1085 if len(pats) == 1:
1086 if len(pats) == 1:
1086 raise util.Abort(_('no destination specified'))
1087 raise util.Abort(_('no destination specified'))
1087 dest = pats.pop()
1088 dest = pats.pop()
1088 destdirexists = os.path.isdir(dest)
1089 destdirexists = os.path.isdir(dest)
1089 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1090 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1090 raise util.Abort(_('with multiple sources, destination must be an '
1091 raise util.Abort(_('with multiple sources, destination must be an '
1091 'existing directory'))
1092 'existing directory'))
1092 if opts['after']:
1093 if opts['after']:
1093 tfn = targetpathafterfn
1094 tfn = targetpathafterfn
1094 else:
1095 else:
1095 tfn = targetpathfn
1096 tfn = targetpathfn
1096 copylist = []
1097 copylist = []
1097 for pat in pats:
1098 for pat in pats:
1098 srcs = []
1099 srcs = []
1099 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1100 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1100 origsrc = okaytocopy(abssrc, relsrc, exact)
1101 origsrc = okaytocopy(abssrc, relsrc, exact)
1101 if origsrc:
1102 if origsrc:
1102 srcs.append((origsrc, abssrc, relsrc, exact))
1103 srcs.append((origsrc, abssrc, relsrc, exact))
1103 if not srcs:
1104 if not srcs:
1104 continue
1105 continue
1105 copylist.append((tfn(pat, dest, srcs), srcs))
1106 copylist.append((tfn(pat, dest, srcs), srcs))
1106 if not copylist:
1107 if not copylist:
1107 raise util.Abort(_('no files to copy'))
1108 raise util.Abort(_('no files to copy'))
1108
1109
1109 for targetpath, srcs in copylist:
1110 for targetpath, srcs in copylist:
1110 for origsrc, abssrc, relsrc, exact in srcs:
1111 for origsrc, abssrc, relsrc, exact in srcs:
1111 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1112 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1112
1113
1113 if errors:
1114 if errors:
1114 ui.warn(_('(consider using --after)\n'))
1115 ui.warn(_('(consider using --after)\n'))
1115 return errors, copied
1116 return errors, copied
1116
1117
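targetpathfn above decides how much of each source path to strip when copying a directory: if the destination directory already exists, the last component of the source directory is kept; otherwise only the part below the source directory survives. The same arithmetic with plain strings (hypothetical helper, POSIX-style paths assumed):

import os

def copy_target(src_dir, dest, path, destdirexists):
    # path is a file somewhere under src_dir; compute where it would land.
    if destdirexists:
        # keep the last component of src_dir: 'a/b/f' copied into an existing
        # 'dest' becomes 'dest/b/f'
        striplen = len(os.path.split(src_dir)[0])
    else:
        # 'dest' will be created as a copy of src_dir itself: 'a/b/f' -> 'dest/f'
        striplen = len(src_dir)
    if striplen:
        striplen += len(os.sep)
    return os.path.join(dest, path[striplen:])

# copy_target('a/b', 'dest', 'a/b/f', True)  -> 'dest/b/f'
# copy_target('a/b', 'dest', 'a/b/f', False) -> 'dest/f'
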
1117 def copy(ui, repo, *pats, **opts):
1118 def copy(ui, repo, *pats, **opts):
1118 """mark files as copied for the next commit
1119 """mark files as copied for the next commit
1119
1120
1120 Mark dest as having copies of source files. If dest is a
1121 Mark dest as having copies of source files. If dest is a
1121 directory, copies are put in that directory. If dest is a file,
1122 directory, copies are put in that directory. If dest is a file,
1122 there can only be one source.
1123 there can only be one source.
1123
1124
1124 By default, this command copies the contents of files as they
1125 By default, this command copies the contents of files as they
1125 stand in the working directory. If invoked with --after, the
1126 stand in the working directory. If invoked with --after, the
1126 operation is recorded, but no copying is performed.
1127 operation is recorded, but no copying is performed.
1127
1128
1128 This command takes effect in the next commit.
1129 This command takes effect in the next commit.
1129
1130
1130 NOTE: This command should be treated as experimental. While it
1131 NOTE: This command should be treated as experimental. While it
1131 should properly record copied files, this information is not yet
1132 should properly record copied files, this information is not yet
1132 fully used by merge, nor fully reported by log.
1133 fully used by merge, nor fully reported by log.
1133 """
1134 """
1134 wlock = repo.wlock(0)
1135 wlock = repo.wlock(0)
1135 errs, copied = docopy(ui, repo, pats, opts, wlock)
1136 errs, copied = docopy(ui, repo, pats, opts, wlock)
1136 return errs
1137 return errs
1137
1138
1138 def debugancestor(ui, index, rev1, rev2):
1139 def debugancestor(ui, index, rev1, rev2):
1139 """find the ancestor revision of two revisions in a given index"""
1140 """find the ancestor revision of two revisions in a given index"""
1140 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1141 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1141 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1142 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1142 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1143 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1143
1144
1144 def debugcomplete(ui, cmd='', **opts):
1145 def debugcomplete(ui, cmd='', **opts):
1145 """returns the completion list associated with the given command"""
1146 """returns the completion list associated with the given command"""
1146
1147
1147 if opts['options']:
1148 if opts['options']:
1148 options = []
1149 options = []
1149 otables = [globalopts]
1150 otables = [globalopts]
1150 if cmd:
1151 if cmd:
1151 aliases, entry = findcmd(ui, cmd)
1152 aliases, entry = findcmd(ui, cmd)
1152 otables.append(entry[1])
1153 otables.append(entry[1])
1153 for t in otables:
1154 for t in otables:
1154 for o in t:
1155 for o in t:
1155 if o[0]:
1156 if o[0]:
1156 options.append('-%s' % o[0])
1157 options.append('-%s' % o[0])
1157 options.append('--%s' % o[1])
1158 options.append('--%s' % o[1])
1158 ui.write("%s\n" % "\n".join(options))
1159 ui.write("%s\n" % "\n".join(options))
1159 return
1160 return
1160
1161
1161 clist = findpossible(ui, cmd).keys()
1162 clist = findpossible(ui, cmd).keys()
1162 clist.sort()
1163 clist.sort()
1163 ui.write("%s\n" % "\n".join(clist))
1164 ui.write("%s\n" % "\n".join(clist))
1164
1165
1165 def debugrebuildstate(ui, repo, rev=None):
1166 def debugrebuildstate(ui, repo, rev=None):
1166 """rebuild the dirstate as it would look like for the given revision"""
1167 """rebuild the dirstate as it would look like for the given revision"""
1167 if not rev:
1168 if not rev:
1168 rev = repo.changelog.tip()
1169 rev = repo.changelog.tip()
1169 else:
1170 else:
1170 rev = repo.lookup(rev)
1171 rev = repo.lookup(rev)
1171 change = repo.changelog.read(rev)
1172 change = repo.changelog.read(rev)
1172 n = change[0]
1173 n = change[0]
1173 files = repo.manifest.read(n)
1174 files = repo.manifest.read(n)
1174 wlock = repo.wlock()
1175 wlock = repo.wlock()
1175 repo.dirstate.rebuild(rev, files)
1176 repo.dirstate.rebuild(rev, files)
1176
1177
1177 def debugcheckstate(ui, repo):
1178 def debugcheckstate(ui, repo):
1178 """validate the correctness of the current dirstate"""
1179 """validate the correctness of the current dirstate"""
1179 parent1, parent2 = repo.dirstate.parents()
1180 parent1, parent2 = repo.dirstate.parents()
1180 repo.dirstate.read()
1181 repo.dirstate.read()
1181 dc = repo.dirstate.map
1182 dc = repo.dirstate.map
1182 keys = dc.keys()
1183 keys = dc.keys()
1183 keys.sort()
1184 keys.sort()
1184 m1n = repo.changelog.read(parent1)[0]
1185 m1n = repo.changelog.read(parent1)[0]
1185 m2n = repo.changelog.read(parent2)[0]
1186 m2n = repo.changelog.read(parent2)[0]
1186 m1 = repo.manifest.read(m1n)
1187 m1 = repo.manifest.read(m1n)
1187 m2 = repo.manifest.read(m2n)
1188 m2 = repo.manifest.read(m2n)
1188 errors = 0
1189 errors = 0
1189 for f in dc:
1190 for f in dc:
1190 state = repo.dirstate.state(f)
1191 state = repo.dirstate.state(f)
1191 if state in "nr" and f not in m1:
1192 if state in "nr" and f not in m1:
1192 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1193 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1193 errors += 1
1194 errors += 1
1194 if state in "a" and f in m1:
1195 if state in "a" and f in m1:
1195 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1196 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1196 errors += 1
1197 errors += 1
1197 if state in "m" and f not in m1 and f not in m2:
1198 if state in "m" and f not in m1 and f not in m2:
1198 ui.warn(_("%s in state %s, but not in either manifest\n") %
1199 ui.warn(_("%s in state %s, but not in either manifest\n") %
1199 (f, state))
1200 (f, state))
1200 errors += 1
1201 errors += 1
1201 for f in m1:
1202 for f in m1:
1202 state = repo.dirstate.state(f)
1203 state = repo.dirstate.state(f)
1203 if state not in "nrm":
1204 if state not in "nrm":
1204 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1205 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1205 errors += 1
1206 errors += 1
1206 if errors:
1207 if errors:
1207 error = _(".hg/dirstate inconsistent with current parent's manifest")
1208 error = _(".hg/dirstate inconsistent with current parent's manifest")
1208 raise util.Abort(error)
1209 raise util.Abort(error)
1209
1210
1210 def showconfig(ui, repo, *values, **opts):
1211 def showconfig(ui, repo, *values, **opts):
1211 """show combined config settings from all hgrc files
1212 """show combined config settings from all hgrc files
1212
1213
1213 With no args, print names and values of all config items.
1214 With no args, print names and values of all config items.
1214
1215
1215 With one arg of the form section.name, print just the value of
1216 With one arg of the form section.name, print just the value of
1216 that config item.
1217 that config item.
1217
1218
1218 With multiple args, print names and values of all config items
1219 With multiple args, print names and values of all config items
1219 with matching section names."""
1220 with matching section names."""
1220
1221
1221 untrusted = bool(opts.get('untrusted'))
1222 untrusted = bool(opts.get('untrusted'))
1222 if values:
1223 if values:
1223 if len([v for v in values if '.' in v]) > 1:
1224 if len([v for v in values if '.' in v]) > 1:
1224 raise util.Abort(_('only one config item permitted'))
1225 raise util.Abort(_('only one config item permitted'))
1225 for section, name, value in ui.walkconfig(untrusted=untrusted):
1226 for section, name, value in ui.walkconfig(untrusted=untrusted):
1226 sectname = section + '.' + name
1227 sectname = section + '.' + name
1227 if values:
1228 if values:
1228 for v in values:
1229 for v in values:
1229 if v == section:
1230 if v == section:
1230 ui.write('%s=%s\n' % (sectname, value))
1231 ui.write('%s=%s\n' % (sectname, value))
1231 elif v == sectname:
1232 elif v == sectname:
1232 ui.write(value, '\n')
1233 ui.write(value, '\n')
1233 else:
1234 else:
1234 ui.write('%s=%s\n' % (sectname, value))
1235 ui.write('%s=%s\n' % (sectname, value))
1235
1236
1236 def debugsetparents(ui, repo, rev1, rev2=None):
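showconfig above accepts either a bare section name or a full section.name argument. A stand-alone sketch of that filtering over (section, name, value) triples; in the command itself the triples come from ui.walkconfig:

def filter_config(items, args):
    # items: iterable of (section, name, value); args: e.g. ['ui', 'paths.default']
    for section, name, value in items:
        sectname = section + '.' + name
        if not args:
            yield sectname, value
        elif section in args or sectname in args:
            yield sectname, value

# list(filter_config([('ui', 'username', 'me'), ('paths', 'default', 'http://x')],
#                    ['ui'])) == [('ui.username', 'me')]
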
1237 def debugsetparents(ui, repo, rev1, rev2=None):
1237 """manually set the parents of the current working directory
1238 """manually set the parents of the current working directory
1238
1239
1239 This is useful for writing repository conversion tools, but should
1240 This is useful for writing repository conversion tools, but should
1240 be used with care.
1241 be used with care.
1241 """
1242 """
1242
1243
1243 if not rev2:
1244 if not rev2:
1244 rev2 = hex(nullid)
1245 rev2 = hex(nullid)
1245
1246
1246 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1247 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1247
1248
1248 def debugstate(ui, repo):
1249 def debugstate(ui, repo):
1249 """show the contents of the current dirstate"""
1250 """show the contents of the current dirstate"""
1250 repo.dirstate.read()
1251 repo.dirstate.read()
1251 dc = repo.dirstate.map
1252 dc = repo.dirstate.map
1252 keys = dc.keys()
1253 keys = dc.keys()
1253 keys.sort()
1254 keys.sort()
1254 for file_ in keys:
1255 for file_ in keys:
1255 ui.write("%c %3o %10d %s %s\n"
1256 ui.write("%c %3o %10d %s %s\n"
1256 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1257 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1257 time.strftime("%x %X",
1258 time.strftime("%x %X",
1258 time.localtime(dc[file_][3])), file_))
1259 time.localtime(dc[file_][3])), file_))
1259 for f in repo.dirstate.copies():
1260 for f in repo.dirstate.copies():
1260 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1261 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1261
1262
1262 def debugdata(ui, file_, rev):
1263 def debugdata(ui, file_, rev):
1263 """dump the contents of an data file revision"""
1264 """dump the contents of an data file revision"""
1264 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1265 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1265 file_[:-2] + ".i", file_, 0)
1266 file_[:-2] + ".i", file_, 0)
1266 try:
1267 try:
1267 ui.write(r.revision(r.lookup(rev)))
1268 ui.write(r.revision(r.lookup(rev)))
1268 except KeyError:
1269 except KeyError:
1269 raise util.Abort(_('invalid revision identifier %s') % rev)
1270 raise util.Abort(_('invalid revision identifier %s') % rev)
1270
1271
1271 def debugindex(ui, file_):
1272 def debugindex(ui, file_):
1272 """dump the contents of an index file"""
1273 """dump the contents of an index file"""
1273 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1274 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1274 ui.write(" rev offset length base linkrev" +
1275 ui.write(" rev offset length base linkrev" +
1275 " nodeid p1 p2\n")
1276 " nodeid p1 p2\n")
1276 for i in xrange(r.count()):
1277 for i in xrange(r.count()):
1277 node = r.node(i)
1278 node = r.node(i)
1278 pp = r.parents(node)
1279 pp = r.parents(node)
1279 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1280 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1280 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1281 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1281 short(node), short(pp[0]), short(pp[1])))
1282 short(node), short(pp[0]), short(pp[1])))
1282
1283
1283 def debugindexdot(ui, file_):
1284 def debugindexdot(ui, file_):
1284 """dump an index DAG as a .dot file"""
1285 """dump an index DAG as a .dot file"""
1285 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1286 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1286 ui.write("digraph G {\n")
1287 ui.write("digraph G {\n")
1287 for i in xrange(r.count()):
1288 for i in xrange(r.count()):
1288 node = r.node(i)
1289 node = r.node(i)
1289 pp = r.parents(node)
1290 pp = r.parents(node)
1290 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1291 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1291 if pp[1] != nullid:
1292 if pp[1] != nullid:
1292 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1293 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1293 ui.write("}\n")
1294 ui.write("}\n")
1294
1295
1295 def debugrename(ui, repo, file, rev=None):
1296 def debugrename(ui, repo, file, rev=None):
1296 """dump rename information"""
1297 """dump rename information"""
1297 r = repo.file(relpath(repo, [file])[0])
1298 r = repo.file(relpath(repo, [file])[0])
1298 if rev:
1299 if rev:
1299 try:
1300 try:
1300 # assume all revision numbers are for changesets
1301 # assume all revision numbers are for changesets
1301 n = repo.lookup(rev)
1302 n = repo.lookup(rev)
1302 change = repo.changelog.read(n)
1303 change = repo.changelog.read(n)
1303 m = repo.manifest.read(change[0])
1304 m = repo.manifest.read(change[0])
1304 n = m[relpath(repo, [file])[0]]
1305 n = m[relpath(repo, [file])[0]]
1305 except (hg.RepoError, KeyError):
1306 except (hg.RepoError, KeyError):
1306 n = r.lookup(rev)
1307 n = r.lookup(rev)
1307 else:
1308 else:
1308 n = r.tip()
1309 n = r.tip()
1309 m = r.renamed(n)
1310 m = r.renamed(n)
1310 if m:
1311 if m:
1311 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1312 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1312 else:
1313 else:
1313 ui.write(_("not renamed\n"))
1314 ui.write(_("not renamed\n"))
1314
1315
1315 def debugwalk(ui, repo, *pats, **opts):
1316 def debugwalk(ui, repo, *pats, **opts):
1316 """show how files match on given patterns"""
1317 """show how files match on given patterns"""
1317 items = list(cmdutil.walk(repo, pats, opts))
1318 items = list(cmdutil.walk(repo, pats, opts))
1318 if not items:
1319 if not items:
1319 return
1320 return
1320 fmt = '%%s %%-%ds %%-%ds %%s' % (
1321 fmt = '%%s %%-%ds %%-%ds %%s' % (
1321 max([len(abs) for (src, abs, rel, exact) in items]),
1322 max([len(abs) for (src, abs, rel, exact) in items]),
1322 max([len(rel) for (src, abs, rel, exact) in items]))
1323 max([len(rel) for (src, abs, rel, exact) in items]))
1323 for src, abs, rel, exact in items:
1324 for src, abs, rel, exact in items:
1324 line = fmt % (src, abs, rel, exact and 'exact' or '')
1325 line = fmt % (src, abs, rel, exact and 'exact' or '')
1325 ui.write("%s\n" % line.rstrip())
1326 ui.write("%s\n" % line.rstrip())
1326
1327
1327 def diff(ui, repo, *pats, **opts):
1328 def diff(ui, repo, *pats, **opts):
1328 """diff repository (or selected files)
1329 """diff repository (or selected files)
1329
1330
1330 Show differences between revisions for the specified files.
1331 Show differences between revisions for the specified files.
1331
1332
1332 Differences between files are shown using the unified diff format.
1333 Differences between files are shown using the unified diff format.
1333
1334
1334 When two revision arguments are given, then changes are shown
1335 When two revision arguments are given, then changes are shown
1335 between those revisions. If only one revision is specified then
1336 between those revisions. If only one revision is specified then
1336 that revision is compared to the working directory, and, when no
1337 that revision is compared to the working directory, and, when no
1337 revisions are specified, the working directory files are compared
1338 revisions are specified, the working directory files are compared
1338 to its parent.
1339 to its parent.
1339
1340
1340 Without the -a option, diff will avoid generating diffs of files
1341 Without the -a option, diff will avoid generating diffs of files
1341 it detects as binary. With -a, diff will generate a diff anyway,
1342 it detects as binary. With -a, diff will generate a diff anyway,
1342 probably with undesirable results.
1343 probably with undesirable results.
1343 """
1344 """
1344 node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])
1345 node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])
1345
1346
1346 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1347 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1347
1348
1348 patch.diff(repo, node1, node2, fns, match=matchfn,
1349 patch.diff(repo, node1, node2, fns, match=matchfn,
1349 opts=patch.diffopts(ui, opts))
1350 opts=patch.diffopts(ui, opts))
1350
1351
1351 def export(ui, repo, *changesets, **opts):
1352 def export(ui, repo, *changesets, **opts):
1352 """dump the header and diffs for one or more changesets
1353 """dump the header and diffs for one or more changesets
1353
1354
1354 Print the changeset header and diffs for one or more revisions.
1355 Print the changeset header and diffs for one or more revisions.
1355
1356
1356 The information shown in the changeset header is: author,
1357 The information shown in the changeset header is: author,
1357 changeset hash, parent and commit comment.
1358 changeset hash, parent and commit comment.
1358
1359
1359 Output may be to a file, in which case the name of the file is
1360 Output may be to a file, in which case the name of the file is
1360 given using a format string. The formatting rules are as follows:
1361 given using a format string. The formatting rules are as follows:
1361
1362
1362 %% literal "%" character
1363 %% literal "%" character
1363 %H changeset hash (40 bytes of hexadecimal)
1364 %H changeset hash (40 bytes of hexadecimal)
1364 %N number of patches being generated
1365 %N number of patches being generated
1365 %R changeset revision number
1366 %R changeset revision number
1366 %b basename of the exporting repository
1367 %b basename of the exporting repository
1367 %h short-form changeset hash (12 bytes of hexadecimal)
1368 %h short-form changeset hash (12 bytes of hexadecimal)
1368 %n zero-padded sequence number, starting at 1
1369 %n zero-padded sequence number, starting at 1
1369 %r zero-padded changeset revision number
1370 %r zero-padded changeset revision number
1370
1371
1371 Without the -a option, export will avoid generating diffs of files
1372 Without the -a option, export will avoid generating diffs of files
1372 it detects as binary. With -a, export will generate a diff anyway,
1373 it detects as binary. With -a, export will generate a diff anyway,
1373 probably with undesirable results.
1374 probably with undesirable results.
1374
1375
1376 With the --switch-parent option, the diff will be against the second
1377 parent. This can be useful for reviewing a merge.
1377 """
1378 """
1378 if not changesets:
1379 if not changesets:
1379 raise util.Abort(_("export requires at least one changeset"))
1380 raise util.Abort(_("export requires at least one changeset"))
1380 revs = cmdutil.revrange(ui, repo, changesets)
1381 revs = cmdutil.revrange(ui, repo, changesets)
1381 if len(revs) > 1:
1382 if len(revs) > 1:
1382 ui.note(_('exporting patches:\n'))
1383 ui.note(_('exporting patches:\n'))
1383 else:
1384 else:
1384 ui.note(_('exporting patch:\n'))
1385 ui.note(_('exporting patch:\n'))
1385 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1386 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1386 switch_parent=opts['switch_parent'],
1387 switch_parent=opts['switch_parent'],
1387 opts=patch.diffopts(ui, opts))
1388 opts=patch.diffopts(ui, opts))
1388
1389
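The export docstring lists the %-escapes understood by --output. A compact sketch of expanding such a template for one patch in a run; the zero-padding width chosen for %n and %r is an assumption made for this illustration, and the real expansion is performed inside patch.export:

import re

def expand_export_name(pattern, repo_basename, node_hex, rev, seqno, total):
    width = len(str(total))
    escapes = {
        '%%': '%',                      # literal "%"
        '%H': node_hex,                 # full changeset hash
        '%h': node_hex[:12],            # short-form hash
        '%N': str(total),               # number of patches being generated
        '%R': str(rev),                 # changeset revision number
        '%b': repo_basename,            # basename of the exporting repository
        '%n': str(seqno).zfill(width),  # zero-padded sequence number, from 1
        '%r': str(rev).zfill(width),    # zero-padded revision number
    }
    return re.sub(r'%[%HhNRbnr]', lambda m: escapes[m.group(0)], pattern)

# expand_export_name('%b-%n-of-%N.patch', 'hg', '0' * 40, 3593, 1, 2)
# == 'hg-1-of-2.patch'
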
1389 def grep(ui, repo, pattern, *pats, **opts):
1390 def grep(ui, repo, pattern, *pats, **opts):
1390 """search for a pattern in specified files and revisions
1391 """search for a pattern in specified files and revisions
1391
1392
1392 Search revisions of files for a regular expression.
1393 Search revisions of files for a regular expression.
1393
1394
1395 This command behaves differently from Unix grep. It only accepts
1395 Python/Perl regexps. It searches repository history, not the
1396 Python/Perl regexps. It searches repository history, not the
1396 working directory. It always prints the revision number in which
1397 working directory. It always prints the revision number in which
1397 a match appears.
1398 a match appears.
1398
1399
1399 By default, grep only prints output for the first revision of a
1400 By default, grep only prints output for the first revision of a
1400 file in which it finds a match. To get it to print every revision
1401 file in which it finds a match. To get it to print every revision
1401 that contains a change in match status ("-" for a match that
1402 that contains a change in match status ("-" for a match that
1402 becomes a non-match, or "+" for a non-match that becomes a match),
1403 becomes a non-match, or "+" for a non-match that becomes a match),
1403 use the --all flag.
1404 use the --all flag.
1404 """
1405 """
1405 reflags = 0
1406 reflags = 0
1406 if opts['ignore_case']:
1407 if opts['ignore_case']:
1407 reflags |= re.I
1408 reflags |= re.I
1408 regexp = re.compile(pattern, reflags)
1409 regexp = re.compile(pattern, reflags)
1409 sep, eol = ':', '\n'
1410 sep, eol = ':', '\n'
1410 if opts['print0']:
1411 if opts['print0']:
1411 sep = eol = '\0'
1412 sep = eol = '\0'
1412
1413
1413 fcache = {}
1414 fcache = {}
1414 def getfile(fn):
1415 def getfile(fn):
1415 if fn not in fcache:
1416 if fn not in fcache:
1416 fcache[fn] = repo.file(fn)
1417 fcache[fn] = repo.file(fn)
1417 return fcache[fn]
1418 return fcache[fn]
1418
1419
1419 def matchlines(body):
1420 def matchlines(body):
1420 begin = 0
1421 begin = 0
1421 linenum = 0
1422 linenum = 0
1422 while True:
1423 while True:
1423 match = regexp.search(body, begin)
1424 match = regexp.search(body, begin)
1424 if not match:
1425 if not match:
1425 break
1426 break
1426 mstart, mend = match.span()
1427 mstart, mend = match.span()
1427 linenum += body.count('\n', begin, mstart) + 1
1428 linenum += body.count('\n', begin, mstart) + 1
1428 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1429 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1429 lend = body.find('\n', mend)
1430 lend = body.find('\n', mend)
1430 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1431 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1431 begin = lend + 1
1432 begin = lend + 1
1432
1433
1433 class linestate(object):
1434 class linestate(object):
1434 def __init__(self, line, linenum, colstart, colend):
1435 def __init__(self, line, linenum, colstart, colend):
1435 self.line = line
1436 self.line = line
1436 self.linenum = linenum
1437 self.linenum = linenum
1437 self.colstart = colstart
1438 self.colstart = colstart
1438 self.colend = colend
1439 self.colend = colend
1439
1440
1440 def __eq__(self, other):
1441 def __eq__(self, other):
1441 return self.line == other.line
1442 return self.line == other.line
1442
1443
1443 matches = {}
1444 matches = {}
1444 copies = {}
1445 copies = {}
1445 def grepbody(fn, rev, body):
1446 def grepbody(fn, rev, body):
1446 matches[rev].setdefault(fn, [])
1447 matches[rev].setdefault(fn, [])
1447 m = matches[rev][fn]
1448 m = matches[rev][fn]
1448 for lnum, cstart, cend, line in matchlines(body):
1449 for lnum, cstart, cend, line in matchlines(body):
1449 s = linestate(line, lnum, cstart, cend)
1450 s = linestate(line, lnum, cstart, cend)
1450 m.append(s)
1451 m.append(s)
1451
1452
1452 def difflinestates(a, b):
1453 def difflinestates(a, b):
1453 sm = difflib.SequenceMatcher(None, a, b)
1454 sm = difflib.SequenceMatcher(None, a, b)
1454 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1455 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1455 if tag == 'insert':
1456 if tag == 'insert':
1456 for i in xrange(blo, bhi):
1457 for i in xrange(blo, bhi):
1457 yield ('+', b[i])
1458 yield ('+', b[i])
1458 elif tag == 'delete':
1459 elif tag == 'delete':
1459 for i in xrange(alo, ahi):
1460 for i in xrange(alo, ahi):
1460 yield ('-', a[i])
1461 yield ('-', a[i])
1461 elif tag == 'replace':
1462 elif tag == 'replace':
1462 for i in xrange(alo, ahi):
1463 for i in xrange(alo, ahi):
1463 yield ('-', a[i])
1464 yield ('-', a[i])
1464 for i in xrange(blo, bhi):
1465 for i in xrange(blo, bhi):
1465 yield ('+', b[i])
1466 yield ('+', b[i])
1466
1467
1467 prev = {}
1468 prev = {}
1468 def display(fn, rev, states, prevstates):
1469 def display(fn, rev, states, prevstates):
1469 counts = {'-': 0, '+': 0}
1470 counts = {'-': 0, '+': 0}
1470 filerevmatches = {}
1471 filerevmatches = {}
1471 if incrementing or not opts['all']:
1472 if incrementing or not opts['all']:
1472 a, b, r = prevstates, states, rev
1473 a, b, r = prevstates, states, rev
1473 else:
1474 else:
1474 a, b, r = states, prevstates, prev.get(fn, -1)
1475 a, b, r = states, prevstates, prev.get(fn, -1)
1475 for change, l in difflinestates(a, b):
1476 for change, l in difflinestates(a, b):
1476 cols = [fn, str(r)]
1477 cols = [fn, str(r)]
1477 if opts['line_number']:
1478 if opts['line_number']:
1478 cols.append(str(l.linenum))
1479 cols.append(str(l.linenum))
1479 if opts['all']:
1480 if opts['all']:
1480 cols.append(change)
1481 cols.append(change)
1481 if opts['user']:
1482 if opts['user']:
1482 cols.append(ui.shortuser(getchange(r)[1]))
1483 cols.append(ui.shortuser(getchange(r)[1]))
1483 if opts['files_with_matches']:
1484 if opts['files_with_matches']:
1484 c = (fn, r)
1485 c = (fn, r)
1485 if c in filerevmatches:
1486 if c in filerevmatches:
1486 continue
1487 continue
1487 filerevmatches[c] = 1
1488 filerevmatches[c] = 1
1488 else:
1489 else:
1489 cols.append(l.line)
1490 cols.append(l.line)
1490 ui.write(sep.join(cols), eol)
1491 ui.write(sep.join(cols), eol)
1491 counts[change] += 1
1492 counts[change] += 1
1492 return counts['+'], counts['-']
1493 return counts['+'], counts['-']
1493
1494
1494 fstate = {}
1495 fstate = {}
1495 skip = {}
1496 skip = {}
1496 getchange = util.cachefunc(lambda r:repo.changectx(r).changeset())
1497 getchange = util.cachefunc(lambda r:repo.changectx(r).changeset())
1497 changeiter, matchfn = walkchangerevs(ui, repo, pats, getchange, opts)
1498 changeiter, matchfn = walkchangerevs(ui, repo, pats, getchange, opts)
1498 count = 0
1499 count = 0
1499 incrementing = False
1500 incrementing = False
1500 follow = opts.get('follow')
1501 follow = opts.get('follow')
1501 for st, rev, fns in changeiter:
1502 for st, rev, fns in changeiter:
1502 if st == 'window':
1503 if st == 'window':
1503 incrementing = rev
1504 incrementing = rev
1504 matches.clear()
1505 matches.clear()
1505 elif st == 'add':
1506 elif st == 'add':
1506 mf = repo.changectx(rev).manifest()
1507 mf = repo.changectx(rev).manifest()
1507 matches[rev] = {}
1508 matches[rev] = {}
1508 for fn in fns:
1509 for fn in fns:
1509 if fn in skip:
1510 if fn in skip:
1510 continue
1511 continue
1511 fstate.setdefault(fn, {})
1512 fstate.setdefault(fn, {})
1512 try:
1513 try:
1513 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1514 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1514 if follow:
1515 if follow:
1515 copied = getfile(fn).renamed(mf[fn])
1516 copied = getfile(fn).renamed(mf[fn])
1516 if copied:
1517 if copied:
1517 copies.setdefault(rev, {})[fn] = copied[0]
1518 copies.setdefault(rev, {})[fn] = copied[0]
1518 except KeyError:
1519 except KeyError:
1519 pass
1520 pass
1520 elif st == 'iter':
1521 elif st == 'iter':
1521 states = matches[rev].items()
1522 states = matches[rev].items()
1522 states.sort()
1523 states.sort()
1523 for fn, m in states:
1524 for fn, m in states:
1524 copy = copies.get(rev, {}).get(fn)
1525 copy = copies.get(rev, {}).get(fn)
1525 if fn in skip:
1526 if fn in skip:
1526 if copy:
1527 if copy:
1527 skip[copy] = True
1528 skip[copy] = True
1528 continue
1529 continue
1529 if incrementing or not opts['all'] or fstate[fn]:
1530 if incrementing or not opts['all'] or fstate[fn]:
1530 pos, neg = display(fn, rev, m, fstate[fn])
1531 pos, neg = display(fn, rev, m, fstate[fn])
1531 count += pos + neg
1532 count += pos + neg
1532 if pos and not opts['all']:
1533 if pos and not opts['all']:
1533 skip[fn] = True
1534 skip[fn] = True
1534 if copy:
1535 if copy:
1535 skip[copy] = True
1536 skip[copy] = True
1536 fstate[fn] = m
1537 fstate[fn] = m
1537 if copy:
1538 if copy:
1538 fstate[copy] = m
1539 fstate[copy] = m
1539 prev[fn] = rev
1540 prev[fn] = rev
1540
1541
1541 if not incrementing:
1542 if not incrementing:
1542 fstate = fstate.items()
1543 fstate = fstate.items()
1543 fstate.sort()
1544 fstate.sort()
1544 for fn, state in fstate:
1545 for fn, state in fstate:
1545 if fn in skip:
1546 if fn in skip:
1546 continue
1547 continue
1547 if fn not in copies.get(prev[fn], {}):
1548 if fn not in copies.get(prev[fn], {}):
1548 display(fn, rev, {}, state)
1549 display(fn, rev, {}, state)
1549 return (count == 0 and 1) or 0
1550 return (count == 0 and 1) or 0
1550
1551
1551 def heads(ui, repo, **opts):
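difflinestates above converts SequenceMatcher opcodes into the "+" and "-" markers that --all prints when a line starts or stops matching. The same generator, lifted out so it can be exercised on plain lists of lines:

import difflib

def diffline_states(a, b):
    # Yield ('-', line) for lines that stop matching and ('+', line) for lines
    # that start matching between two snapshots of the matched lines.
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag == 'insert':
            for line in b[blo:bhi]:
                yield '+', line
        elif tag == 'delete':
            for line in a[alo:ahi]:
                yield '-', line
        elif tag == 'replace':
            for line in a[alo:ahi]:
                yield '-', line
            for line in b[blo:bhi]:
                yield '+', line

# list(diffline_states(['foo(1)'], ['foo(1)', 'foo(2)'])) == [('+', 'foo(2)')]
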
1552 def heads(ui, repo, **opts):
1552 """show current repository heads
1553 """show current repository heads
1553
1554
1554 Show all repository head changesets.
1555 Show all repository head changesets.
1555
1556
1556 Repository "heads" are changesets that don't have children
1557 Repository "heads" are changesets that don't have children
1557 changesets. They are where development generally takes place and
1558 changesets. They are where development generally takes place and
1558 are the usual targets for update and merge operations.
1559 are the usual targets for update and merge operations.
1559 """
1560 """
1560 if opts['rev']:
1561 if opts['rev']:
1561 heads = repo.heads(repo.lookup(opts['rev']))
1562 heads = repo.heads(repo.lookup(opts['rev']))
1562 else:
1563 else:
1563 heads = repo.heads()
1564 heads = repo.heads()
1564 br = None
1565 br = None
1565 if opts['branches']:
1566 if opts['branches']:
1566 ui.warn(_("the --branches option is deprecated, "
1567 ui.warn(_("the --branches option is deprecated, "
1567 "please use 'hg branches' instead\n"))
1568 "please use 'hg branches' instead\n"))
1568 br = repo.branchlookup(heads)
1569 br = repo.branchlookup(heads)
1569 displayer = show_changeset(ui, repo, opts)
1570 displayer = show_changeset(ui, repo, opts)
1570 for n in heads:
1571 for n in heads:
1571 displayer.show(changenode=n, brinfo=br)
1572 displayer.show(changenode=n, brinfo=br)
1572
1573
1573 def identify(ui, repo):
1574 def identify(ui, repo):
1574 """print information about the working copy
1575 """print information about the working copy
1575
1576
1576 Print a short summary of the current state of the repo.
1577 Print a short summary of the current state of the repo.
1577
1578
1578 This summary identifies the repository state using one or two parent
1579 This summary identifies the repository state using one or two parent
1579 hash identifiers, followed by a "+" if there are uncommitted changes
1580 hash identifiers, followed by a "+" if there are uncommitted changes
1580 in the working directory, followed by a list of tags for this revision.
1581 in the working directory, followed by a list of tags for this revision.
1581 """
1582 """
1582 parents = [p for p in repo.dirstate.parents() if p != nullid]
1583 parents = [p for p in repo.dirstate.parents() if p != nullid]
1583 if not parents:
1584 if not parents:
1584 ui.write(_("unknown\n"))
1585 ui.write(_("unknown\n"))
1585 return
1586 return
1586
1587
1587 hexfunc = ui.debugflag and hex or short
1588 hexfunc = ui.debugflag and hex or short
1588 modified, added, removed, deleted = repo.status()[:4]
1589 modified, added, removed, deleted = repo.status()[:4]
1589 output = ["%s%s" %
1590 output = ["%s%s" %
1590 ('+'.join([hexfunc(parent) for parent in parents]),
1591 ('+'.join([hexfunc(parent) for parent in parents]),
1591 (modified or added or removed or deleted) and "+" or "")]
1592 (modified or added or removed or deleted) and "+" or "")]
1592
1593
1593 if not ui.quiet:
1594 if not ui.quiet:
1594
1595
1595 branch = repo.workingctx().branch()
1596 branch = repo.workingctx().branch()
1596 if branch:
1597 if branch:
1597 output.append("(%s)" % branch)
1598 output.append("(%s)" % branch)
1598
1599
1599 # multiple tags for a single parent separated by '/'
1600 # multiple tags for a single parent separated by '/'
1600 parenttags = ['/'.join(tags)
1601 parenttags = ['/'.join(tags)
1601 for tags in map(repo.nodetags, parents) if tags]
1602 for tags in map(repo.nodetags, parents) if tags]
1602 # tags for multiple parents separated by ' + '
1603 # tags for multiple parents separated by ' + '
1603 if parenttags:
1604 if parenttags:
1604 output.append(' + '.join(parenttags))
1605 output.append(' + '.join(parenttags))
1605
1606
1606 ui.write("%s\n" % ' '.join(output))
1607 ui.write("%s\n" % ' '.join(output))
1607
1608
1608 def import_(ui, repo, patch1, *patches, **opts):
1609 def import_(ui, repo, patch1, *patches, **opts):
1609 """import an ordered set of patches
1610 """import an ordered set of patches
1610
1611
1611 Import a list of patches and commit them individually.
1612 Import a list of patches and commit them individually.
1612
1613
1613 If there are outstanding changes in the working directory, import
1614 If there are outstanding changes in the working directory, import
1614 will abort unless given the -f flag.
1615 will abort unless given the -f flag.
1615
1616
1616 You can import a patch straight from a mail message. Even patches
1617 You can import a patch straight from a mail message. Even patches
1617 as attachments work (body part must be type text/plain or
1618 as attachments work (body part must be type text/plain or
1618 text/x-patch to be used). From and Subject headers of email
1619 text/x-patch to be used). From and Subject headers of email
1619 message are used as default committer and commit message. All
1620 message are used as default committer and commit message. All
1620 text/plain body parts before first diff are added to commit
1621 text/plain body parts before first diff are added to commit
1621 message.
1622 message.
1622
1623
1624 If the imported patch was generated by hg export, the user and description
1625 from the patch override values from the message headers and body. Values
1626 given on the command line with -m and -u override these.
1626
1627
1627 To read a patch from standard input, use patch name "-".
1628 To read a patch from standard input, use patch name "-".
1628 """
1629 """
1629 patches = (patch1,) + patches
1630 patches = (patch1,) + patches
1630
1631
1631 if not opts['force']:
1632 if not opts['force']:
1632 bail_if_changed(repo)
1633 bail_if_changed(repo)
1633
1634
1634 d = opts["base"]
1635 d = opts["base"]
1635 strip = opts["strip"]
1636 strip = opts["strip"]
1636
1637
1637 wlock = repo.wlock()
1638 wlock = repo.wlock()
1638 lock = repo.lock()
1639 lock = repo.lock()
1639
1640
1640 for p in patches:
1641 for p in patches:
1641 pf = os.path.join(d, p)
1642 pf = os.path.join(d, p)
1642
1643
1643 if pf == '-':
1644 if pf == '-':
1644 ui.status(_("applying patch from stdin\n"))
1645 ui.status(_("applying patch from stdin\n"))
1645 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1646 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1646 else:
1647 else:
1647 ui.status(_("applying %s\n") % p)
1648 ui.status(_("applying %s\n") % p)
1648 tmpname, message, user, date = patch.extract(ui, file(pf))
1649 tmpname, message, user, date = patch.extract(ui, file(pf))
1649
1650
1650 if tmpname is None:
1651 if tmpname is None:
1651 raise util.Abort(_('no diffs found'))
1652 raise util.Abort(_('no diffs found'))
1652
1653
1653 try:
1654 try:
1654 if opts['message']:
1655 if opts['message']:
1655 # pickup the cmdline msg
1656 # pickup the cmdline msg
1656 message = opts['message']
1657 message = opts['message']
1657 elif message:
1658 elif message:
1658 # pickup the patch msg
1659 # pickup the patch msg
1659 message = message.strip()
1660 message = message.strip()
1660 else:
1661 else:
1661 # launch the editor
1662 # launch the editor
1662 message = None
1663 message = None
1663 ui.debug(_('message:\n%s\n') % message)
1664 ui.debug(_('message:\n%s\n') % message)
1664
1665
1665 files = {}
1666 files = {}
1666 try:
1667 try:
1667 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1668 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1668 files=files)
1669 files=files)
1669 finally:
1670 finally:
1670 files = patch.updatedir(ui, repo, files, wlock=wlock)
1671 files = patch.updatedir(ui, repo, files, wlock=wlock)
1671 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1672 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1672 finally:
1673 finally:
1673 os.unlink(tmpname)
1674 os.unlink(tmpname)
1674
1675
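The import docstring explains that a patch can be read straight from a mail message, with the From and Subject headers supplying the committer and commit message. A rough sketch of pulling those pieces out with the standard library email module; patch.extract does the real job and also handles dates, encodings and attachment bodies that this ignores:

import email

def patch_from_mail(text):
    # Return (user, subject, patch_text) from a simple mail message.
    msg = email.message_from_string(text)
    user = msg.get('From', '')
    subject = msg.get('Subject', '')
    body = msg.get_payload(decode=False)
    if isinstance(body, list):          # multipart: keep the text-ish parts
        body = ''.join(p.get_payload() for p in body
                       if p.get_content_type() in ('text/plain', 'text/x-patch'))
    return user, subject, body
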
1675 def incoming(ui, repo, source="default", **opts):
1676 def incoming(ui, repo, source="default", **opts):
1676 """show new changesets found in source
1677 """show new changesets found in source
1677
1678
1678 Show new changesets found in the specified path/URL or the default
1679 Show new changesets found in the specified path/URL or the default
1679 pull location. These are the changesets that would be pulled if a pull
1680 pull location. These are the changesets that would be pulled if a pull
1680 was requested.
1681 was requested.
1681
1682
1683 For a remote repository, using --bundle avoids downloading the changesets
1684 twice if incoming is followed by a pull.
1684
1685
1685 See pull for valid source format details.
1686 See pull for valid source format details.
1686 """
1687 """
1687 source = ui.expandpath(source)
1688 source = ui.expandpath(source)
1688 setremoteconfig(ui, opts)
1689 setremoteconfig(ui, opts)
1689
1690
1690 other = hg.repository(ui, source)
1691 other = hg.repository(ui, source)
1691 incoming = repo.findincoming(other, force=opts["force"])
1692 incoming = repo.findincoming(other, force=opts["force"])
1692 if not incoming:
1693 if not incoming:
1693 ui.status(_("no changes found\n"))
1694 ui.status(_("no changes found\n"))
1694 return
1695 return
1695
1696
1696 cleanup = None
1697 cleanup = None
1697 try:
1698 try:
1698 fname = opts["bundle"]
1699 fname = opts["bundle"]
1699 if fname or not other.local():
1700 if fname or not other.local():
1700 # create a bundle (uncompressed if other repo is not local)
1701 # create a bundle (uncompressed if other repo is not local)
1701 cg = other.changegroup(incoming, "incoming")
1702 cg = other.changegroup(incoming, "incoming")
1702 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1703 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1703 # keep written bundle?
1704 # keep written bundle?
1704 if opts["bundle"]:
1705 if opts["bundle"]:
1705 cleanup = None
1706 cleanup = None
1706 if not other.local():
1707 if not other.local():
1707 # use the created uncompressed bundlerepo
1708 # use the created uncompressed bundlerepo
1708 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1709 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1709
1710
1710 revs = None
1711 revs = None
1711 if opts['rev']:
1712 if opts['rev']:
1712 revs = [other.lookup(rev) for rev in opts['rev']]
1713 revs = [other.lookup(rev) for rev in opts['rev']]
1713 o = other.changelog.nodesbetween(incoming, revs)[0]
1714 o = other.changelog.nodesbetween(incoming, revs)[0]
1714 if opts['newest_first']:
1715 if opts['newest_first']:
1715 o.reverse()
1716 o.reverse()
1716 displayer = show_changeset(ui, other, opts)
1717 displayer = show_changeset(ui, other, opts)
1717 for n in o:
1718 for n in o:
1718 parents = [p for p in other.changelog.parents(n) if p != nullid]
1719 parents = [p for p in other.changelog.parents(n) if p != nullid]
1719 if opts['no_merges'] and len(parents) == 2:
1720 if opts['no_merges'] and len(parents) == 2:
1720 continue
1721 continue
1721 displayer.show(changenode=n)
1722 displayer.show(changenode=n)
1722 if opts['patch']:
1723 if opts['patch']:
1723 prev = (parents and parents[0]) or nullid
1724 prev = (parents and parents[0]) or nullid
1724 patch.diff(other, prev, n, fp=repo.ui)
1725 patch.diff(other, prev, n, fp=repo.ui)
1725 ui.write("\n")
1726 ui.write("\n")
1726 finally:
1727 finally:
1727 if hasattr(other, 'close'):
1728 if hasattr(other, 'close'):
1728 other.close()
1729 other.close()
1729 if cleanup:
1730 if cleanup:
1730 os.unlink(cleanup)
1731 os.unlink(cleanup)
1731
1732
1732 def init(ui, dest=".", **opts):
1733 def init(ui, dest=".", **opts):
1733 """create a new repository in the given directory
1734 """create a new repository in the given directory
1734
1735
1735 Initialize a new repository in the given directory. If the given
1736 Initialize a new repository in the given directory. If the given
1736 directory does not exist, it is created.
1737 directory does not exist, it is created.
1737
1738
1738 If no directory is given, the current directory is used.
1739 If no directory is given, the current directory is used.
1739
1740
1740 It is possible to specify an ssh:// URL as the destination.
1741 It is possible to specify an ssh:// URL as the destination.
1741 Look at the help text for the pull command for important details
1742 Look at the help text for the pull command for important details
1742 about ssh:// URLs.
1743 about ssh:// URLs.
1743 """
1744 """
1744 setremoteconfig(ui, opts)
1745 setremoteconfig(ui, opts)
1745 hg.repository(ui, dest, create=1)
1746 hg.repository(ui, dest, create=1)
1746
1747
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             head='(?:.*/|)'):
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)

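# locate's --print0 handling above boils down to choosing the record
# terminator: NUL for "xargs -0", newline otherwise.  A hedged,
# self-contained sketch of the same idiom (function name and sample data
# are illustrative only):
def _example_print0_join(names, print0=False):
    # mirrors "end = opts['print0'] and '\0' or '\n'"
    end = print0 and '\0' or '\n'
    return ''.join(name + end for name in names)

# e.g. _example_print0_join(['a file', 'b'], print0=True) keeps the embedded
# space intact so a downstream "xargs -0" sees exactly two names.
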
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and the full commit message are shown.
    """
    class dui(object):
        # Implement and delegate some ui protocol. Save hunks of
        # output for later display in the desired order.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}
            self.header = {}
            self.quiet = ui.quiet
            self.verbose = ui.verbose
            self.debugflag = ui.debugflag
        def bump(self, rev):
            self.rev = rev
            self.hunk[rev] = []
            self.header[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].extend(args)
        def write_header(self, *args):
            self.header[self.rev].extend(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            return getattr(self.ui, key)

    getchange = util.cachefunc(lambda r:repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, pats, getchange, opts)

    if opts['branches']:
        ui.warn(_("the --branches option is deprecated, "
                  "please use 'hg branches' instead\n"))

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(ui, repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}
    ncache = {}
    dcache = []
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    displayer = show_changeset(ui, repo, opts)
    for st, rev, fns in changeiter:
        if st == 'window':
            du = dui(ui)
            displayer.ui = du
        elif st == 'add':
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if opts['keyword']:
                changes = getchange(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            br = None
            if opts['branches']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            copies = []
            if opts.get('copies') and rev:
                mf = getchange(rev)[0]
                for fn in getchange(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, brinfo=br, copies=copies)
            if opts['patch']:
                if parents:
                    prev = parents[0]
                else:
                    prev = nullrev
                prev = repo.changelog.node(prev)
                patch.diff(repo, prev, changenode, match=matchfn, fp=du)
                du.write("\n\n")
        elif st == 'iter':
            if count == limit: break
            if du.header[rev]:
                ui.write_header(*du.header[rev])
            if du.hunk[rev]:
                count += 1
                ui.write(*du.hunk[rev])

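# The nested dui class above buffers output per revision and falls back to
# the real ui object for anything it does not override (via __getattr__).
# A stripped-down, self-contained sketch of that buffering/delegation
# pattern (class and attribute names are illustrative, not Mercurial API):
class _ExampleBufferedWriter(object):
    def __init__(self, backend):
        self.backend = backend      # the object we delegate to
        self.hunks = {}             # rev -> list of buffered strings
        self.rev = None
    def bump(self, rev):
        # start buffering for a new revision
        self.rev = rev
        self.hunks[rev] = []
    def write(self, *args):
        self.hunks[self.rev].extend(args)
    def flush(self, rev):
        # replay the buffered hunks in the desired order
        for chunk in self.hunks.pop(rev, []):
            self.backend.write(chunk)
    def __getattr__(self, key):
        # anything not defined here is served by the real backend
        return getattr(self.backend, key)
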
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the tip is used.
    """
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            n = change[0]
        except hg.RepoError:
            n = repo.manifest.lookup(rev)
    else:
        n = repo.manifest.tip()
    m = repo.manifest.read(n)
    files = m.keys()
    files.sort()

    for f in files:
        ui.write("%40s %3s %s\n" % (hex(m[f]),
                                    m.execf(f) and "755" or "644", f))

def merge(ui, repo, node=None, force=None, branch=None):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if node or branch:
        node = _lookup(repo, node, branch)
    else:
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == heads[0] and heads[-1] or heads[0]
    return hg.merge(repo, node, force=force)

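# merge() above picks its default target like this: abort when there are
# more than two heads or only one, and otherwise take whichever head is not
# the current working-directory parent.  A self-contained sketch of that
# selection rule (plain values instead of changelog nodes; not Mercurial API):
def _example_pick_other_head(heads, parent):
    if len(heads) > 2:
        raise ValueError('%d heads - an explicit revision is required'
                         % len(heads))
    if len(heads) == 1:
        raise ValueError('nothing to merge - update instead')
    if parent not in heads:
        raise ValueError('working dir is not at a head')
    # same idiom as "parent == heads[0] and heads[-1] or heads[0]"
    return parent == heads[0] and heads[-1] or heads[0]

# e.g. _example_pick_other_head(['a', 'b'], 'a') == 'b'
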
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]

    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)
        if opts['patch']:
            prev = (parents and parents[0]) or nullid
            patch.diff(repo, prev, n)
            ui.write("\n")

def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    # legacy
    if file_ and not rev:
        try:
            rev = repo.lookup(file_)
            file_ = None
        except hg.RepoError:
            pass
        else:
            ui.warn(_("'hg parent REV' is deprecated, "
                      "please use 'hg parents -r REV' instead\n"))

    if rev:
        if file_:
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        p = [cp.node() for cp in ctx.parents()]
    else:
        p = repo.dirstate.parents()

    br = None
    if branches is not None:
        ui.warn(_("the --branches option is deprecated, "
                  "please use 'hg branches' instead\n"))
        br = repo.branchlookup(p)
    displayer = show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n, brinfo=br)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))

def postincoming(ui, repo, modheads, optupdate):
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

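# postincoming() above turns the "modified heads" count returned by a pull
# into a user-facing follow-up action.  The decision table, sketched as a
# self-contained helper (return values are illustrative labels only):
def _example_postincoming_action(modheads, want_update):
    if modheads == 0:
        return 'nothing new'
    if want_update and modheads == 1:
        return 'update to tip'
    if modheads > 1:
        return 'suggest heads/merge'
    return 'suggest update'
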
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path, or one specified with the
      --remotecmd option.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev']:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in opts['rev']]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])

def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0

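# Both push() and outgoing() above resolve their destination with
# ui.expandpath(dest or 'default-push', dest or 'default'): an explicit
# argument wins, then the 'default-push' path, then 'default'.  A hedged
# approximation of that lookup order over a plain dict of configured paths
# (the sample paths below are hypothetical):
def _example_expand_dest(paths, dest=None):
    if dest:
        # an explicit name or URL: return the configured path if the name
        # is known, otherwise treat the argument as a URL/path itself
        return paths.get(dest, dest)
    return paths.get('default-push') or paths.get('default')

# e.g. with paths = {'default': 'http://example.com/hg/repo',
#                    'default-push': 'ssh://example.com//srv/hg/repo'}
# _example_expand_dest(paths) prefers the ssh:// push path.
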
def rawcommit(ui, repo, *flist, **rc):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Low-level commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release; please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = rc['message']
    if not message and rc['logfile']:
        try:
            message = open(rc['logfile']).read()
        except IOError:
            pass
    if not message and not rc['logfile']:
        raise util.Abort(_("missing commit message"))

    files = relpath(repo, list(flist))
    if rc['files']:
        files += open(rc['files']).read().splitlines()

    rc['parent'] = map(repo.lookup, rc['parent'])

    try:
        repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])

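# remove() above walks the matched files and sorts each one into "forget",
# "remove", or "skip with a reason" based on its status.  A compact,
# self-contained sketch of that classification (plain Python sets stand in
# for repo.status() results; not Mercurial API):
def _example_classify_removal(name, modified, added, unknown, deleted,
                              force=False, after=False):
    if after and name not in deleted:
        return 'skip', 'is still present'
    if name in modified and not force:
        return 'skip', 'is modified (use -f to force removal)'
    if name in added:
        return (force and ('forget', None)
                or ('skip', 'has been marked for add (use -f to force removal)'))
    if name in unknown:
        return 'skip', 'is not managed'
    return 'remove', None
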
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record renamed files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs

def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state. If the working directory has two parents, you must
    explicitly specify the revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    node = repo.changectx(opts['rev']).node()
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    shutil.copyfile(rel, bakname)
                    shutil.copymode(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n' % rel))
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n' % rel))
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r

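# revert() above drives its behaviour from a dispatch table: for each file
# state it records the action to take when the file is or is not present in
# the target manifest, plus whether to make a .orig backup.  The pattern in
# isolation (states and actions reduced to strings; purely illustrative):
_EXAMPLE_REVERT_TABLE = {
    # state:       (if in target, if not in target, backup-hit, backup-miss)
    'modified':    ('revert',     'remove',          True,       True),
    'added':       ('revert',     'forget',          True,       False),
    'removed':     ('undelete',   None,              False,      False),
    'deleted':     ('revert',     'remove',          False,      False),
    'unknown':     ('add',        None,              True,       False),
    'target_only': ('add',        None,              False,      False),
}

def _example_revert_action(state, in_target):
    hit, miss, backup_hit, backup_miss = _EXAMPLE_REVERT_TABLE[state]
    if in_target:
        return hit, backup_hit
    return miss, backup_miss
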
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = hgweb.server.create_server(ui, repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: %s') % inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

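# The --daemon handling above uses a small pipe handshake: the parent
# re-spawns itself with --daemon-pipefds, then blocks on the read end until
# the child writes one byte to signal "server is up" before detaching from
# the terminal.  A self-contained sketch of that handshake using fork()
# instead of spawnvp (POSIX only; illustrative, not a drop-in replacement):
def _example_daemon_handshake(start_server):
    import os, sys
    rfd, wfd = os.pipe()
    pid = os.fork()
    if pid:
        # parent: wait until the child says it is ready, then exit
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)
    # child: get the server listening, then release the parent
    os.close(rfd)
    start_server()
    os.write(wfd, b'y')
    os.close(wfd)
    # redirect stdio to /dev/null so the daemon is fully detached
    sys.stdout.flush()
    sys.stderr.flush()
    fd = os.open(os.devnull, os.O_RDWR)
    for target in (0, 1, 2):
        if fd != target:
            os.dup2(fd, target)
    if fd not in (0, 1, 2):
        os.close(fd)
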
2572 def status(ui, repo, *pats, **opts):
2573 def status(ui, repo, *pats, **opts):
2573 """show changed files in the working directory
2574 """show changed files in the working directory
2574
2575
2575 Show status of files in the repository. If names are given, only
2576 Show status of files in the repository. If names are given, only
2576 files that match are shown. Files that are clean or ignored, are
2577 files that match are shown. Files that are clean or ignored, are
2577 not listed unless -c (clean), -i (ignored) or -A is given.
2578 not listed unless -c (clean), -i (ignored) or -A is given.
2578
2579
2579 If one revision is given, it is used as the base revision.
2580 If one revision is given, it is used as the base revision.
2580 If two revisions are given, the difference between them is shown.
2581 If two revisions are given, the difference between them is shown.
2581
2582
2582 The codes used to show the status of files are:
2583 The codes used to show the status of files are:
2583 M = modified
2584 M = modified
2584 A = added
2585 A = added
2585 R = removed
2586 R = removed
2586 C = clean
2587 C = clean
2587 ! = deleted, but still tracked
2588 ! = deleted, but still tracked
2588 ? = not tracked
2589 ? = not tracked
2589 I = ignored (not shown by default)
2590 I = ignored (not shown by default)
2590 = the previously added file was copied from here
2591 = the previously added file was copied from here
2591 """
2592 """
2592
2593
2593 all = opts['all']
2594 all = opts['all']
2594 node1, node2 = cmdutil.revpair(ui, repo, opts.get('rev'))
2595 node1, node2 = cmdutil.revpair(ui, repo, opts.get('rev'))
2595
2596
2596 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2597 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2597 cwd = (pats and repo.getcwd()) or ''
2598 cwd = (pats and repo.getcwd()) or ''
2598 modified, added, removed, deleted, unknown, ignored, clean = [
2599 modified, added, removed, deleted, unknown, ignored, clean = [
2599 [util.pathto(cwd, x) for x in n]
2600 [util.pathto(cwd, x) for x in n]
2600 for n in repo.status(node1=node1, node2=node2, files=files,
2601 for n in repo.status(node1=node1, node2=node2, files=files,
2601 match=matchfn,
2602 match=matchfn,
2602 list_ignored=all or opts['ignored'],
2603 list_ignored=all or opts['ignored'],
2603 list_clean=all or opts['clean'])]
2604 list_clean=all or opts['clean'])]
2604
2605
2605 changetypes = (('modified', 'M', modified),
2606 changetypes = (('modified', 'M', modified),
2606 ('added', 'A', added),
2607 ('added', 'A', added),
2607 ('removed', 'R', removed),
2608 ('removed', 'R', removed),
2608 ('deleted', '!', deleted),
2609 ('deleted', '!', deleted),
2609 ('unknown', '?', unknown),
2610 ('unknown', '?', unknown),
2610 ('ignored', 'I', ignored))
2611 ('ignored', 'I', ignored))
2611
2612
2612 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2613 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2613
2614
2614 end = opts['print0'] and '\0' or '\n'
2615 end = opts['print0'] and '\0' or '\n'
2615
2616
2616 for opt, char, changes in ([ct for ct in explicit_changetypes
2617 for opt, char, changes in ([ct for ct in explicit_changetypes
2617 if all or opts[ct[0]]]
2618 if all or opts[ct[0]]]
2618 or changetypes):
2619 or changetypes):
2619 if opts['no_status']:
2620 if opts['no_status']:
2620 format = "%%s%s" % end
2621 format = "%%s%s" % end
2621 else:
2622 else:
2622 format = "%s %%s%s" % (char, end)
2623 format = "%s %%s%s" % (char, end)
2623
2624
2624 for f in changes:
2625 for f in changes:
2625 ui.write(format % f)
2626 ui.write(format % f)
2626 if ((all or opts.get('copies')) and not opts.get('no_status')):
2627 if ((all or opts.get('copies')) and not opts.get('no_status')):
2627 copied = repo.dirstate.copied(f)
2628 copied = repo.dirstate.copied(f)
2628 if copied:
2629 if copied:
2629 ui.write(' %s%s' % (copied, end))
2630 ui.write(' %s%s' % (copied, end))
2630
2631
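# Illustrative sketch (not part of commands.py): how the loop above turns the
# status codes into output lines.  With -n/--no-status the letter column is
# dropped, and with -0/--print0 the separator becomes NUL for use with xargs.
def _demo_format_status(char, files, no_status=False, print0=False):
    """Return the text hg status would emit for one change type."""
    end = print0 and '\0' or '\n'
    if no_status:
        format = "%%s%s" % end
    else:
        format = "%s %%s%s" % (char, end)
    return ''.join([format % f for f in files])

# e.g. _demo_format_status('M', ['a.txt', 'b.txt']) == 'M a.txt\nM b.txt\n'
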
2631 def tag(ui, repo, name, rev_=None, **opts):
2632 def tag(ui, repo, name, rev_=None, **opts):
2632 """add a tag for the current tip or a given revision
2633 """add a tag for the current tip or a given revision
2633
2634
2634 Name a particular revision using <name>.
2635 Name a particular revision using <name>.
2635
2636
2636 Tags are used to name particular revisions of the repository and are
2637 Tags are used to name particular revisions of the repository and are
2637 very useful to compare different revisions, to go back to significant
2638 very useful to compare different revisions, to go back to significant
2638 earlier versions, or to mark branch points as releases, etc.
2639 earlier versions, or to mark branch points as releases, etc.
2639
2640
2640 If no revision is given, the parent of the working directory is used.
2641 If no revision is given, the parent of the working directory is used.
2641
2642
2642 To facilitate version control, distribution, and merging of tags,
2643 To facilitate version control, distribution, and merging of tags,
2643 they are stored as a file named ".hgtags" which is managed
2644 they are stored as a file named ".hgtags" which is managed
2644 similarly to other project files and can be hand-edited if
2645 similarly to other project files and can be hand-edited if
2645 necessary. The file '.hg/localtags' is used for local tags (not
2646 necessary. The file '.hg/localtags' is used for local tags (not
2646 shared among repositories).
2647 shared among repositories).
2647 """
2648 """
2648 if name in ['tip', '.']:
2649 if name in ['tip', '.']:
2649 raise util.Abort(_("the name '%s' is reserved") % name)
2650 raise util.Abort(_("the name '%s' is reserved") % name)
2650 if rev_ is not None:
2651 if rev_ is not None:
2651 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2652 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2652 "please use 'hg tag [-r REV] NAME' instead\n"))
2653 "please use 'hg tag [-r REV] NAME' instead\n"))
2653 if opts['rev']:
2654 if opts['rev']:
2654 raise util.Abort(_("use only one form to specify the revision"))
2655 raise util.Abort(_("use only one form to specify the revision"))
2655 if opts['rev']:
2656 if opts['rev']:
2656 rev_ = opts['rev']
2657 rev_ = opts['rev']
2657 if not rev_ and repo.dirstate.parents()[1] != nullid:
2658 if not rev_ and repo.dirstate.parents()[1] != nullid:
2658 raise util.Abort(_('uncommitted merge - please provide a '
2659 raise util.Abort(_('uncommitted merge - please provide a '
2659 'specific revision'))
2660 'specific revision'))
2660 r = repo.changectx(rev_).node()
2661 r = repo.changectx(rev_).node()
2661
2662
2662 message = opts['message']
2663 message = opts['message']
2663 if not message:
2664 if not message:
2664 message = _('Added tag %s for changeset %s') % (name, short(r))
2665 message = _('Added tag %s for changeset %s') % (name, short(r))
2665
2666
2666 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2667 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2667
2668
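# Illustrative sketch (not part of commands.py): the default commit message
# built above embeds the short form of the tagged node, i.e. the first 12 hex
# digits of its 40-character hash.  Hypothetical standalone equivalent:
def _demo_default_tag_message(name, hexnode):
    """Build the default tag message from a name and a 40-char hex node."""
    return 'Added tag %s for changeset %s' % (name, hexnode[:12])

# e.g. _demo_default_tag_message('v1.0', '9bfb3c04' + 32 * '0')
#      == 'Added tag v1.0 for changeset 9bfb3c040000'
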
2668 def tags(ui, repo):
2669 def tags(ui, repo):
2669 """list repository tags
2670 """list repository tags
2670
2671
2671 List the repository tags.
2672 List the repository tags.
2672
2673
2673 This lists both regular and local tags.
2674 This lists both regular and local tags.
2674 """
2675 """
2675
2676
2676 l = repo.tagslist()
2677 l = repo.tagslist()
2677 l.reverse()
2678 l.reverse()
2678 hexfunc = ui.debugflag and hex or short
2679 hexfunc = ui.debugflag and hex or short
2679 for t, n in l:
2680 for t, n in l:
2680 try:
2681 try:
2681 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2682 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2682 except KeyError:
2683 except KeyError:
2683 r = " ?:?"
2684 r = " ?:?"
2684 if ui.quiet:
2685 if ui.quiet:
2685 ui.write("%s\n" % t)
2686 ui.write("%s\n" % t)
2686 else:
2687 else:
2687 ui.write("%-30s %s\n" % (t, r))
2688 ui.write("%-30s %s\n" % (t, r))
2688
2689
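# Illustrative sketch (not part of commands.py): the listing above pads each
# tag name to 30 columns and appends "rev:shorthash" (or " ?:?" when the node
# is missing from the changelog); --quiet prints only the names.  Hypothetical
# standalone formatter for one line:
def _demo_format_tag_line(tag, rev, shorthash, quiet=False):
    """Return one output line of 'hg tags' for the given tag."""
    if quiet:
        return "%s\n" % tag
    r = "%5d:%s" % (rev, shorthash)
    return "%-30s %s\n" % (tag, r)

# e.g. _demo_format_tag_line('tip', 3593, '9bfb3c04e2e4') yields the tag name
# padded to 30 columns followed by ' 3593:9bfb3c04e2e4\n'.
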
2689 def tip(ui, repo, **opts):
2690 def tip(ui, repo, **opts):
2690 """show the tip revision
2691 """show the tip revision
2691
2692
2692 Show the tip revision.
2693 Show the tip revision.
2693 """
2694 """
2694 n = repo.changelog.tip()
2695 n = repo.changelog.tip()
2695 br = None
2696 br = None
2696 if opts['branches']:
2697 if opts['branches']:
2697 ui.warn(_("the --branches option is deprecated, "
2698 ui.warn(_("the --branches option is deprecated, "
2698 "please use 'hg branches' instead\n"))
2699 "please use 'hg branches' instead\n"))
2699 br = repo.branchlookup([n])
2700 br = repo.branchlookup([n])
2700 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2701 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2701 if opts['patch']:
2702 if opts['patch']:
2702 patch.diff(repo, repo.changelog.parents(n)[0], n)
2703 patch.diff(repo, repo.changelog.parents(n)[0], n)
2703
2704
2704 def unbundle(ui, repo, fname, **opts):
2705 def unbundle(ui, repo, fname, **opts):
2705 """apply a changegroup file
2706 """apply a changegroup file
2706
2707
2707 Apply a compressed changegroup file generated by the bundle
2708 Apply a compressed changegroup file generated by the bundle
2708 command.
2709 command.
2709 """
2710 """
2710 f = urllib.urlopen(fname)
2711 f = urllib.urlopen(fname)
2711
2712
2712 header = f.read(6)
2713 header = f.read(6)
2713 if not header.startswith("HG"):
2714 if not header.startswith("HG"):
2714 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2715 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2715 elif not header.startswith("HG10"):
2716 elif not header.startswith("HG10"):
2716 raise util.Abort(_("%s: unknown bundle version") % fname)
2717 raise util.Abort(_("%s: unknown bundle version") % fname)
2717 elif header == "HG10BZ":
2718 elif header == "HG10BZ":
2718 def generator(f):
2719 def generator(f):
2719 zd = bz2.BZ2Decompressor()
2720 zd = bz2.BZ2Decompressor()
2720 zd.decompress("BZ")
2721 zd.decompress("BZ")
2721 for chunk in f:
2722 for chunk in f:
2722 yield zd.decompress(chunk)
2723 yield zd.decompress(chunk)
2723 elif header == "HG10UN":
2724 elif header == "HG10UN":
2724 def generator(f):
2725 def generator(f):
2725 for chunk in f:
2726 for chunk in f:
2726 yield chunk
2727 yield chunk
2727 else:
2728 else:
2728 raise util.Abort(_("%s: unknown bundle compression type")
2729 raise util.Abort(_("%s: unknown bundle compression type")
2729 % fname)
2730 % fname)
2730 gen = generator(util.filechunkiter(f, 4096))
2731 gen = generator(util.filechunkiter(f, 4096))
2731 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2732 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2732 'bundle:' + fname)
2733 'bundle:' + fname)
2733 return postincoming(ui, repo, modheads, opts['update'])
2734 return postincoming(ui, repo, modheads, opts['update'])
2734
2735
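# Illustrative sketch (not part of commands.py): the 6-byte header read by
# unbundle() encodes both the bundle format ("HG10") and its compression
# ("BZ" for bzip2, "UN" for none); anything else is rejected.  Standalone
# classifier making the same checks (hypothetical helper, covering only the
# two compressions handled above):
def _demo_classify_bundle(header):
    """Map a 6-byte bundle header to a compression name or raise ValueError."""
    if not header.startswith("HG"):
        raise ValueError("not a Mercurial bundle file")
    if not header.startswith("HG10"):
        raise ValueError("unknown bundle version")
    if header == "HG10BZ":
        return "bzip2"
    if header == "HG10UN":
        return "none"
    raise ValueError("unknown bundle compression type")

# e.g. _demo_classify_bundle("HG10BZ") == 'bzip2'
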
2735 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2736 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2736 branch=None):
2737 branch=None):
2737 """update or merge working directory
2738 """update or merge working directory
2738
2739
2739 Update the working directory to the specified revision.
2740 Update the working directory to the specified revision.
2740
2741
2741 If there are no outstanding changes in the working directory and
2742 If there are no outstanding changes in the working directory and
2742 there is a linear relationship between the current version and the
2743 there is a linear relationship between the current version and the
2743 requested version, the result is the requested version.
2744 requested version, the result is the requested version.
2744
2745
2745 To merge the working directory with another revision, use the
2746 To merge the working directory with another revision, use the
2746 merge command.
2747 merge command.
2747
2748
2748 By default, update will refuse to run if doing so would require
2749 By default, update will refuse to run if doing so would require
2749 merging or discarding local changes.
2750 merging or discarding local changes.
2750 """
2751 """
2751 node = _lookup(repo, node, branch)
2752 node = _lookup(repo, node, branch)
2752 if clean:
2753 if clean:
2753 return hg.clean(repo, node)
2754 return hg.clean(repo, node)
2754 else:
2755 else:
2755 return hg.update(repo, node)
2756 return hg.update(repo, node)
2756
2757
2757 def _lookup(repo, node, branch=None):
2758 def _lookup(repo, node, branch=None):
2758 if branch:
2759 if branch:
2759 repo.ui.warn(_("the --branch option is deprecated, "
2760 repo.ui.warn(_("the --branch option is deprecated, "
2760 "please use 'hg branch' instead\n"))
2761 "please use 'hg branch' instead\n"))
2761 br = repo.branchlookup(branch=branch)
2762 br = repo.branchlookup(branch=branch)
2762 found = []
2763 found = []
2763 for x in br:
2764 for x in br:
2764 if branch in br[x]:
2765 if branch in br[x]:
2765 found.append(x)
2766 found.append(x)
2766 if len(found) > 1:
2767 if len(found) > 1:
2767 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2768 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2768 for x in found:
2769 for x in found:
2769 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2770 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2770 raise util.Abort("")
2771 raise util.Abort("")
2771 if len(found) == 1:
2772 if len(found) == 1:
2772 node = found[0]
2773 node = found[0]
2773 repo.ui.warn(_("Using head %s for branch %s\n")
2774 repo.ui.warn(_("Using head %s for branch %s\n")
2774 % (short(node), branch))
2775 % (short(node), branch))
2775 else:
2776 else:
2776 raise util.Abort(_("branch %s not found") % branch)
2777 raise util.Abort(_("branch %s not found") % branch)
2777 else:
2778 else:
2778 node = node and repo.lookup(node) or repo.changelog.tip()
2779 node = node and repo.lookup(node) or repo.changelog.tip()
2779 return node
2780 return node
2780
2781
2781 def verify(ui, repo):
2782 def verify(ui, repo):
2782 """verify the integrity of the repository
2783 """verify the integrity of the repository
2783
2784
2784 Verify the integrity of the current repository.
2785 Verify the integrity of the current repository.
2785
2786
2786 This will perform an extensive check of the repository's
2787 This will perform an extensive check of the repository's
2787 integrity, validating the hashes and checksums of each entry in
2788 integrity, validating the hashes and checksums of each entry in
2788 the changelog, manifest, and tracked files, as well as the
2789 the changelog, manifest, and tracked files, as well as the
2789 integrity of their crosslinks and indices.
2790 integrity of their crosslinks and indices.
2790 """
2791 """
2791 return hg.verify(repo)
2792 return hg.verify(repo)
2792
2793
2793 # Command options and aliases are listed here, alphabetically
2794 # Command options and aliases are listed here, alphabetically
2794
2795
2795 globalopts = [
2796 globalopts = [
2796 ('R', 'repository', '',
2797 ('R', 'repository', '',
2797 _('repository root directory or symbolic path name')),
2798 _('repository root directory or symbolic path name')),
2798 ('', 'cwd', '', _('change working directory')),
2799 ('', 'cwd', '', _('change working directory')),
2799 ('y', 'noninteractive', None,
2800 ('y', 'noninteractive', None,
2800 _('do not prompt, assume \'yes\' for any required answers')),
2801 _('do not prompt, assume \'yes\' for any required answers')),
2801 ('q', 'quiet', None, _('suppress output')),
2802 ('q', 'quiet', None, _('suppress output')),
2802 ('v', 'verbose', None, _('enable additional output')),
2803 ('v', 'verbose', None, _('enable additional output')),
2803 ('', 'config', [], _('set/override config option')),
2804 ('', 'config', [], _('set/override config option')),
2804 ('', 'debug', None, _('enable debugging output')),
2805 ('', 'debug', None, _('enable debugging output')),
2805 ('', 'debugger', None, _('start debugger')),
2806 ('', 'debugger', None, _('start debugger')),
2806 ('', 'lsprof', None, _('print improved command execution profile')),
2807 ('', 'lsprof', None, _('print improved command execution profile')),
2807 ('', 'traceback', None, _('print traceback on exception')),
2808 ('', 'traceback', None, _('print traceback on exception')),
2808 ('', 'time', None, _('time how long the command takes')),
2809 ('', 'time', None, _('time how long the command takes')),
2809 ('', 'profile', None, _('print command execution profile')),
2810 ('', 'profile', None, _('print command execution profile')),
2810 ('', 'version', None, _('output version information and exit')),
2811 ('', 'version', None, _('output version information and exit')),
2811 ('h', 'help', None, _('display help and exit')),
2812 ('h', 'help', None, _('display help and exit')),
2812 ]
2813 ]
2813
2814
2814 dryrunopts = [('n', 'dry-run', None,
2815 dryrunopts = [('n', 'dry-run', None,
2815 _('do not perform actions, just print output'))]
2816 _('do not perform actions, just print output'))]
2816
2817
2817 remoteopts = [
2818 remoteopts = [
2818 ('e', 'ssh', '', _('specify ssh command to use')),
2819 ('e', 'ssh', '', _('specify ssh command to use')),
2819 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2820 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2820 ]
2821 ]
2821
2822
2822 walkopts = [
2823 walkopts = [
2823 ('I', 'include', [], _('include names matching the given patterns')),
2824 ('I', 'include', [], _('include names matching the given patterns')),
2824 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2825 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2825 ]
2826 ]
2826
2827
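# Illustrative sketch (not part of commands.py): every entry in the option
# lists above is a 4-tuple (short flag, long flag, default value, help text),
# and the type of the default (None, '', [], 0) is what tells fancyopts
# whether the flag is a boolean, a string, a repeatable list or an integer.
# Shared lists such as walkopts are simply concatenated into each command's
# option table.  Hypothetical helper that summarizes such a list:
def _demo_describe_options(opts):
    """Return '-s/--long (default ...)' summaries for an option list."""
    out = []
    for short, long_, default, help_ in opts:
        flag = short and "-%s/--%s" % (short, long_) or "--%s" % long_
        out.append("%s (default %r): %s" % (flag, default, help_))
    return out

# e.g. _demo_describe_options(dryrunopts)[0]
#      == "-n/--dry-run (default None): do not perform actions, just print output"
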
2827 table = {
2828 table = {
2828 "^add":
2829 "^add":
2829 (add,
2830 (add,
2830 walkopts + dryrunopts,
2831 walkopts + dryrunopts,
2831 _('hg add [OPTION]... [FILE]...')),
2832 _('hg add [OPTION]... [FILE]...')),
2832 "addremove":
2833 "addremove":
2833 (addremove,
2834 (addremove,
2834 [('s', 'similarity', '',
2835 [('s', 'similarity', '',
2835 _('guess renamed files by similarity (0<=s<=100)')),
2836 _('guess renamed files by similarity (0<=s<=100)')),
2836 ] + walkopts + dryrunopts,
2837 ] + walkopts + dryrunopts,
2837 _('hg addremove [OPTION]... [FILE]...')),
2838 _('hg addremove [OPTION]... [FILE]...')),
2838 "^annotate":
2839 "^annotate":
2839 (annotate,
2840 (annotate,
2840 [('r', 'rev', '', _('annotate the specified revision')),
2841 [('r', 'rev', '', _('annotate the specified revision')),
2841 ('f', 'follow', None, _('follow file copies and renames')),
2842 ('f', 'follow', None, _('follow file copies and renames')),
2842 ('a', 'text', None, _('treat all files as text')),
2843 ('a', 'text', None, _('treat all files as text')),
2843 ('u', 'user', None, _('list the author')),
2844 ('u', 'user', None, _('list the author')),
2844 ('d', 'date', None, _('list the date')),
2845 ('d', 'date', None, _('list the date')),
2845 ('n', 'number', None, _('list the revision number (default)')),
2846 ('n', 'number', None, _('list the revision number (default)')),
2846 ('c', 'changeset', None, _('list the changeset')),
2847 ('c', 'changeset', None, _('list the changeset')),
2847 ] + walkopts,
2848 ] + walkopts,
2848 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2849 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2849 "archive":
2850 "archive":
2850 (archive,
2851 (archive,
2851 [('', 'no-decode', None, _('do not pass files through decoders')),
2852 [('', 'no-decode', None, _('do not pass files through decoders')),
2852 ('p', 'prefix', '', _('directory prefix for files in archive')),
2853 ('p', 'prefix', '', _('directory prefix for files in archive')),
2853 ('r', 'rev', '', _('revision to distribute')),
2854 ('r', 'rev', '', _('revision to distribute')),
2854 ('t', 'type', '', _('type of distribution to create')),
2855 ('t', 'type', '', _('type of distribution to create')),
2855 ] + walkopts,
2856 ] + walkopts,
2856 _('hg archive [OPTION]... DEST')),
2857 _('hg archive [OPTION]... DEST')),
2857 "backout":
2858 "backout":
2858 (backout,
2859 (backout,
2859 [('', 'merge', None,
2860 [('', 'merge', None,
2860 _('merge with old dirstate parent after backout')),
2861 _('merge with old dirstate parent after backout')),
2861 ('m', 'message', '', _('use <text> as commit message')),
2862 ('m', 'message', '', _('use <text> as commit message')),
2862 ('l', 'logfile', '', _('read commit message from <file>')),
2863 ('l', 'logfile', '', _('read commit message from <file>')),
2863 ('d', 'date', '', _('record datecode as commit date')),
2864 ('d', 'date', '', _('record datecode as commit date')),
2864 ('', 'parent', '', _('parent to choose when backing out merge')),
2865 ('', 'parent', '', _('parent to choose when backing out merge')),
2865 ('u', 'user', '', _('record user as committer')),
2866 ('u', 'user', '', _('record user as committer')),
2866 ] + walkopts,
2867 ] + walkopts,
2867 _('hg backout [OPTION]... REV')),
2868 _('hg backout [OPTION]... REV')),
2868 "branch": (branch, [], _('hg branch [NAME]')),
2869 "branch": (branch, [], _('hg branch [NAME]')),
2869 "branches": (branches, [], _('hg branches')),
2870 "branches": (branches, [], _('hg branches')),
2870 "bundle":
2871 "bundle":
2871 (bundle,
2872 (bundle,
2872 [('f', 'force', None,
2873 [('f', 'force', None,
2873 _('run even when remote repository is unrelated')),
2874 _('run even when remote repository is unrelated')),
2874 ('r', 'rev', [],
2875 ('r', 'rev', [],
2875 _('a changeset you would like to bundle')),
2876 _('a changeset you would like to bundle')),
2876 ('', 'base', [],
2877 ('', 'base', [],
2877 _('a base changeset to specify instead of a destination')),
2878 _('a base changeset to specify instead of a destination')),
2878 ] + remoteopts,
2879 ] + remoteopts,
2879 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2880 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2880 "cat":
2881 "cat":
2881 (cat,
2882 (cat,
2882 [('o', 'output', '', _('print output to file with formatted name')),
2883 [('o', 'output', '', _('print output to file with formatted name')),
2883 ('r', 'rev', '', _('print the given revision')),
2884 ('r', 'rev', '', _('print the given revision')),
2884 ] + walkopts,
2885 ] + walkopts,
2885 _('hg cat [OPTION]... FILE...')),
2886 _('hg cat [OPTION]... FILE...')),
2886 "^clone":
2887 "^clone":
2887 (clone,
2888 (clone,
2888 [('U', 'noupdate', None, _('do not update the new working directory')),
2889 [('U', 'noupdate', None, _('do not update the new working directory')),
2889 ('r', 'rev', [],
2890 ('r', 'rev', [],
2890 _('a changeset you would like to have after cloning')),
2891 _('a changeset you would like to have after cloning')),
2891 ('', 'pull', None, _('use pull protocol to copy metadata')),
2892 ('', 'pull', None, _('use pull protocol to copy metadata')),
2892 ('', 'uncompressed', None,
2893 ('', 'uncompressed', None,
2893 _('use uncompressed transfer (fast over LAN)')),
2894 _('use uncompressed transfer (fast over LAN)')),
2894 ] + remoteopts,
2895 ] + remoteopts,
2895 _('hg clone [OPTION]... SOURCE [DEST]')),
2896 _('hg clone [OPTION]... SOURCE [DEST]')),
2896 "^commit|ci":
2897 "^commit|ci":
2897 (commit,
2898 (commit,
2898 [('A', 'addremove', None,
2899 [('A', 'addremove', None,
2899 _('mark new/missing files as added/removed before committing')),
2900 _('mark new/missing files as added/removed before committing')),
2900 ('m', 'message', '', _('use <text> as commit message')),
2901 ('m', 'message', '', _('use <text> as commit message')),
2901 ('l', 'logfile', '', _('read the commit message from <file>')),
2902 ('l', 'logfile', '', _('read the commit message from <file>')),
2902 ('d', 'date', '', _('record datecode as commit date')),
2903 ('d', 'date', '', _('record datecode as commit date')),
2903 ('u', 'user', '', _('record user as committer')),
2904 ('u', 'user', '', _('record user as committer')),
2904 ] + walkopts,
2905 ] + walkopts,
2905 _('hg commit [OPTION]... [FILE]...')),
2906 _('hg commit [OPTION]... [FILE]...')),
2906 "copy|cp":
2907 "copy|cp":
2907 (copy,
2908 (copy,
2908 [('A', 'after', None, _('record a copy that has already occurred')),
2909 [('A', 'after', None, _('record a copy that has already occurred')),
2909 ('f', 'force', None,
2910 ('f', 'force', None,
2910 _('forcibly copy over an existing managed file')),
2911 _('forcibly copy over an existing managed file')),
2911 ] + walkopts + dryrunopts,
2912 ] + walkopts + dryrunopts,
2912 _('hg copy [OPTION]... [SOURCE]... DEST')),
2913 _('hg copy [OPTION]... [SOURCE]... DEST')),
2913 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2914 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2914 "debugcomplete":
2915 "debugcomplete":
2915 (debugcomplete,
2916 (debugcomplete,
2916 [('o', 'options', None, _('show the command options'))],
2917 [('o', 'options', None, _('show the command options'))],
2917 _('debugcomplete [-o] CMD')),
2918 _('debugcomplete [-o] CMD')),
2918 "debugrebuildstate":
2919 "debugrebuildstate":
2919 (debugrebuildstate,
2920 (debugrebuildstate,
2920 [('r', 'rev', '', _('revision to rebuild to'))],
2921 [('r', 'rev', '', _('revision to rebuild to'))],
2921 _('debugrebuildstate [-r REV] [REV]')),
2922 _('debugrebuildstate [-r REV] [REV]')),
2922 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2923 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2923 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2924 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2924 "debugstate": (debugstate, [], _('debugstate')),
2925 "debugstate": (debugstate, [], _('debugstate')),
2925 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2926 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2926 "debugindex": (debugindex, [], _('debugindex FILE')),
2927 "debugindex": (debugindex, [], _('debugindex FILE')),
2927 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2928 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2928 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2929 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2929 "debugwalk":
2930 "debugwalk":
2930 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2931 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2931 "^diff":
2932 "^diff":
2932 (diff,
2933 (diff,
2933 [('r', 'rev', [], _('revision')),
2934 [('r', 'rev', [], _('revision')),
2934 ('a', 'text', None, _('treat all files as text')),
2935 ('a', 'text', None, _('treat all files as text')),
2935 ('p', 'show-function', None,
2936 ('p', 'show-function', None,
2936 _('show which function each change is in')),
2937 _('show which function each change is in')),
2937 ('g', 'git', None, _('use git extended diff format')),
2938 ('g', 'git', None, _('use git extended diff format')),
2938 ('', 'nodates', None, _("don't include dates in diff headers")),
2939 ('', 'nodates', None, _("don't include dates in diff headers")),
2939 ('w', 'ignore-all-space', None,
2940 ('w', 'ignore-all-space', None,
2940 _('ignore white space when comparing lines')),
2941 _('ignore white space when comparing lines')),
2941 ('b', 'ignore-space-change', None,
2942 ('b', 'ignore-space-change', None,
2942 _('ignore changes in the amount of white space')),
2943 _('ignore changes in the amount of white space')),
2943 ('B', 'ignore-blank-lines', None,
2944 ('B', 'ignore-blank-lines', None,
2944 _('ignore changes whose lines are all blank')),
2945 _('ignore changes whose lines are all blank')),
2945 ] + walkopts,
2946 ] + walkopts,
2946 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2947 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2947 "^export":
2948 "^export":
2948 (export,
2949 (export,
2949 [('o', 'output', '', _('print output to file with formatted name')),
2950 [('o', 'output', '', _('print output to file with formatted name')),
2950 ('a', 'text', None, _('treat all files as text')),
2951 ('a', 'text', None, _('treat all files as text')),
2951 ('g', 'git', None, _('use git extended diff format')),
2952 ('g', 'git', None, _('use git extended diff format')),
2952 ('', 'nodates', None, _("don't include dates in diff headers")),
2953 ('', 'nodates', None, _("don't include dates in diff headers")),
2953 ('', 'switch-parent', None, _('diff against the second parent'))],
2954 ('', 'switch-parent', None, _('diff against the second parent'))],
2954 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2955 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2955 "grep":
2956 "grep":
2956 (grep,
2957 (grep,
2957 [('0', 'print0', None, _('end fields with NUL')),
2958 [('0', 'print0', None, _('end fields with NUL')),
2958 ('', 'all', None, _('print all revisions that match')),
2959 ('', 'all', None, _('print all revisions that match')),
2959 ('f', 'follow', None,
2960 ('f', 'follow', None,
2960 _('follow changeset history, or file history across copies and renames')),
2961 _('follow changeset history, or file history across copies and renames')),
2961 ('i', 'ignore-case', None, _('ignore case when matching')),
2962 ('i', 'ignore-case', None, _('ignore case when matching')),
2962 ('l', 'files-with-matches', None,
2963 ('l', 'files-with-matches', None,
2963 _('print only filenames and revs that match')),
2964 _('print only filenames and revs that match')),
2964 ('n', 'line-number', None, _('print matching line numbers')),
2965 ('n', 'line-number', None, _('print matching line numbers')),
2965 ('r', 'rev', [], _('search in given revision range')),
2966 ('r', 'rev', [], _('search in given revision range')),
2966 ('u', 'user', None, _('print user who committed change')),
2967 ('u', 'user', None, _('print user who committed change')),
2967 ] + walkopts,
2968 ] + walkopts,
2968 _('hg grep [OPTION]... PATTERN [FILE]...')),
2969 _('hg grep [OPTION]... PATTERN [FILE]...')),
2969 "heads":
2970 "heads":
2970 (heads,
2971 (heads,
2971 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2972 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2972 ('', 'style', '', _('display using template map file')),
2973 ('', 'style', '', _('display using template map file')),
2973 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2974 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2974 ('', 'template', '', _('display with template'))],
2975 ('', 'template', '', _('display with template'))],
2975 _('hg heads [-r REV]')),
2976 _('hg heads [-r REV]')),
2976 "help": (help_, [], _('hg help [COMMAND]')),
2977 "help": (help_, [], _('hg help [COMMAND]')),
2977 "identify|id": (identify, [], _('hg identify')),
2978 "identify|id": (identify, [], _('hg identify')),
2978 "import|patch":
2979 "import|patch":
2979 (import_,
2980 (import_,
2980 [('p', 'strip', 1,
2981 [('p', 'strip', 1,
2981 _('directory strip option for patch. This has the same\n'
2982 _('directory strip option for patch. This has the same\n'
2982 'meaning as the corresponding patch option')),
2983 'meaning as the corresponding patch option')),
2983 ('m', 'message', '', _('use <text> as commit message')),
2984 ('m', 'message', '', _('use <text> as commit message')),
2984 ('b', 'base', '', _('base path (DEPRECATED)')),
2985 ('b', 'base', '', _('base path (DEPRECATED)')),
2985 ('f', 'force', None,
2986 ('f', 'force', None,
2986 _('skip check for outstanding uncommitted changes'))],
2987 _('skip check for outstanding uncommitted changes'))],
2987 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2988 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2988 "incoming|in": (incoming,
2989 "incoming|in": (incoming,
2989 [('M', 'no-merges', None, _('do not show merges')),
2990 [('M', 'no-merges', None, _('do not show merges')),
2990 ('f', 'force', None,
2991 ('f', 'force', None,
2991 _('run even when remote repository is unrelated')),
2992 _('run even when remote repository is unrelated')),
2992 ('', 'style', '', _('display using template map file')),
2993 ('', 'style', '', _('display using template map file')),
2993 ('n', 'newest-first', None, _('show newest record first')),
2994 ('n', 'newest-first', None, _('show newest record first')),
2994 ('', 'bundle', '', _('file to store the bundles into')),
2995 ('', 'bundle', '', _('file to store the bundles into')),
2995 ('p', 'patch', None, _('show patch')),
2996 ('p', 'patch', None, _('show patch')),
2996 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2997 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2997 ('', 'template', '', _('display with template')),
2998 ('', 'template', '', _('display with template')),
2998 ] + remoteopts,
2999 ] + remoteopts,
2999 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3000 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3000 ' [--bundle FILENAME] [SOURCE]')),
3001 ' [--bundle FILENAME] [SOURCE]')),
3001 "^init":
3002 "^init":
3002 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3003 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3003 "locate":
3004 "locate":
3004 (locate,
3005 (locate,
3005 [('r', 'rev', '', _('search the repository as it stood at rev')),
3006 [('r', 'rev', '', _('search the repository as it stood at rev')),
3006 ('0', 'print0', None,
3007 ('0', 'print0', None,
3007 _('end filenames with NUL, for use with xargs')),
3008 _('end filenames with NUL, for use with xargs')),
3008 ('f', 'fullpath', None,
3009 ('f', 'fullpath', None,
3009 _('print complete paths from the filesystem root')),
3010 _('print complete paths from the filesystem root')),
3010 ] + walkopts,
3011 ] + walkopts,
3011 _('hg locate [OPTION]... [PATTERN]...')),
3012 _('hg locate [OPTION]... [PATTERN]...')),
3012 "^log|history":
3013 "^log|history":
3013 (log,
3014 (log,
3014 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3015 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3015 ('f', 'follow', None,
3016 ('f', 'follow', None,
3016 _('follow changeset history, or file history across copies and renames')),
3017 _('follow changeset history, or file history across copies and renames')),
3017 ('', 'follow-first', None,
3018 ('', 'follow-first', None,
3018 _('only follow the first parent of merge changesets')),
3019 _('only follow the first parent of merge changesets')),
3019 ('C', 'copies', None, _('show copied files')),
3020 ('C', 'copies', None, _('show copied files')),
3020 ('k', 'keyword', [], _('search for a keyword')),
3021 ('k', 'keyword', [], _('search for a keyword')),
3021 ('l', 'limit', '', _('limit number of changes displayed')),
3022 ('l', 'limit', '', _('limit number of changes displayed')),
3022 ('r', 'rev', [], _('show the specified revision or range')),
3023 ('r', 'rev', [], _('show the specified revision or range')),
3023 ('M', 'no-merges', None, _('do not show merges')),
3024 ('M', 'no-merges', None, _('do not show merges')),
3024 ('', 'style', '', _('display using template map file')),
3025 ('', 'style', '', _('display using template map file')),
3025 ('m', 'only-merges', None, _('show only merges')),
3026 ('m', 'only-merges', None, _('show only merges')),
3026 ('p', 'patch', None, _('show patch')),
3027 ('p', 'patch', None, _('show patch')),
3027 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3028 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3028 ('', 'template', '', _('display with template')),
3029 ('', 'template', '', _('display with template')),
3029 ] + walkopts,
3030 ] + walkopts,
3030 _('hg log [OPTION]... [FILE]')),
3031 _('hg log [OPTION]... [FILE]')),
3031 "manifest": (manifest, [], _('hg manifest [REV]')),
3032 "manifest": (manifest, [], _('hg manifest [REV]')),
3032 "merge":
3033 "merge":
3033 (merge,
3034 (merge,
3034 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
3035 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
3035 ('f', 'force', None, _('force a merge with outstanding changes'))],
3036 ('f', 'force', None, _('force a merge with outstanding changes'))],
3036 _('hg merge [-f] [REV]')),
3037 _('hg merge [-f] [REV]')),
3037 "outgoing|out": (outgoing,
3038 "outgoing|out": (outgoing,
3038 [('M', 'no-merges', None, _('do not show merges')),
3039 [('M', 'no-merges', None, _('do not show merges')),
3039 ('f', 'force', None,
3040 ('f', 'force', None,
3040 _('run even when remote repository is unrelated')),
3041 _('run even when remote repository is unrelated')),
3041 ('p', 'patch', None, _('show patch')),
3042 ('p', 'patch', None, _('show patch')),
3042 ('', 'style', '', _('display using template map file')),
3043 ('', 'style', '', _('display using template map file')),
3043 ('r', 'rev', [], _('a specific revision you would like to push')),
3044 ('r', 'rev', [], _('a specific revision you would like to push')),
3044 ('n', 'newest-first', None, _('show newest record first')),
3045 ('n', 'newest-first', None, _('show newest record first')),
3045 ('', 'template', '', _('display with template')),
3046 ('', 'template', '', _('display with template')),
3046 ] + remoteopts,
3047 ] + remoteopts,
3047 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3048 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3048 "^parents":
3049 "^parents":
3049 (parents,
3050 (parents,
3050 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3051 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3051 ('r', 'rev', '', _('show parents from the specified rev')),
3052 ('r', 'rev', '', _('show parents from the specified rev')),
3052 ('', 'style', '', _('display using template map file')),
3053 ('', 'style', '', _('display using template map file')),
3053 ('', 'template', '', _('display with template'))],
3054 ('', 'template', '', _('display with template'))],
3054 _('hg parents [-r REV] [FILE]')),
3055 _('hg parents [-r REV] [FILE]')),
3055 "paths": (paths, [], _('hg paths [NAME]')),
3056 "paths": (paths, [], _('hg paths [NAME]')),
3056 "^pull":
3057 "^pull":
3057 (pull,
3058 (pull,
3058 [('u', 'update', None,
3059 [('u', 'update', None,
3059 _('update to new tip if changesets were pulled')),
3060 _('update to new tip if changesets were pulled')),
3060 ('f', 'force', None,
3061 ('f', 'force', None,
3061 _('run even when remote repository is unrelated')),
3062 _('run even when remote repository is unrelated')),
3062 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3063 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3063 ] + remoteopts,
3064 ] + remoteopts,
3064 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3065 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3065 "^push":
3066 "^push":
3066 (push,
3067 (push,
3067 [('f', 'force', None, _('force push')),
3068 [('f', 'force', None, _('force push')),
3068 ('r', 'rev', [], _('a specific revision you would like to push')),
3069 ('r', 'rev', [], _('a specific revision you would like to push')),
3069 ] + remoteopts,
3070 ] + remoteopts,
3070 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3071 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3071 "debugrawcommit|rawcommit":
3072 "debugrawcommit|rawcommit":
3072 (rawcommit,
3073 (rawcommit,
3073 [('p', 'parent', [], _('parent')),
3074 [('p', 'parent', [], _('parent')),
3074 ('d', 'date', '', _('date code')),
3075 ('d', 'date', '', _('date code')),
3075 ('u', 'user', '', _('user')),
3076 ('u', 'user', '', _('user')),
3076 ('F', 'files', '', _('file list')),
3077 ('F', 'files', '', _('file list')),
3077 ('m', 'message', '', _('commit message')),
3078 ('m', 'message', '', _('commit message')),
3078 ('l', 'logfile', '', _('commit message file'))],
3079 ('l', 'logfile', '', _('commit message file'))],
3079 _('hg debugrawcommit [OPTION]... [FILE]...')),
3080 _('hg debugrawcommit [OPTION]... [FILE]...')),
3080 "recover": (recover, [], _('hg recover')),
3081 "recover": (recover, [], _('hg recover')),
3081 "^remove|rm":
3082 "^remove|rm":
3082 (remove,
3083 (remove,
3083 [('A', 'after', None, _('record remove that has already occurred')),
3084 [('A', 'after', None, _('record remove that has already occurred')),
3084 ('f', 'force', None, _('remove file even if modified')),
3085 ('f', 'force', None, _('remove file even if modified')),
3085 ] + walkopts,
3086 ] + walkopts,
3086 _('hg remove [OPTION]... FILE...')),
3087 _('hg remove [OPTION]... FILE...')),
3087 "rename|mv":
3088 "rename|mv":
3088 (rename,
3089 (rename,
3089 [('A', 'after', None, _('record a rename that has already occurred')),
3090 [('A', 'after', None, _('record a rename that has already occurred')),
3090 ('f', 'force', None,
3091 ('f', 'force', None,
3091 _('forcibly copy over an existing managed file')),
3092 _('forcibly copy over an existing managed file')),
3092 ] + walkopts + dryrunopts,
3093 ] + walkopts + dryrunopts,
3093 _('hg rename [OPTION]... SOURCE... DEST')),
3094 _('hg rename [OPTION]... SOURCE... DEST')),
3094 "^revert":
3095 "^revert":
3095 (revert,
3096 (revert,
3096 [('a', 'all', None, _('revert all changes when no arguments given')),
3097 [('a', 'all', None, _('revert all changes when no arguments given')),
3097 ('r', 'rev', '', _('revision to revert to')),
3098 ('r', 'rev', '', _('revision to revert to')),
3098 ('', 'no-backup', None, _('do not save backup copies of files')),
3099 ('', 'no-backup', None, _('do not save backup copies of files')),
3099 ] + walkopts + dryrunopts,
3100 ] + walkopts + dryrunopts,
3100 _('hg revert [-r REV] [NAME]...')),
3101 _('hg revert [-r REV] [NAME]...')),
3101 "rollback": (rollback, [], _('hg rollback')),
3102 "rollback": (rollback, [], _('hg rollback')),
3102 "root": (root, [], _('hg root')),
3103 "root": (root, [], _('hg root')),
3103 "showconfig|debugconfig":
3104 "showconfig|debugconfig":
3104 (showconfig,
3105 (showconfig,
3105 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3106 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3106 _('showconfig [-u] [NAME]...')),
3107 _('showconfig [-u] [NAME]...')),
3107 "^serve":
3108 "^serve":
3108 (serve,
3109 (serve,
3109 [('A', 'accesslog', '', _('name of access log file to write to')),
3110 [('A', 'accesslog', '', _('name of access log file to write to')),
3110 ('d', 'daemon', None, _('run server in background')),
3111 ('d', 'daemon', None, _('run server in background')),
3111 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3112 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3112 ('E', 'errorlog', '', _('name of error log file to write to')),
3113 ('E', 'errorlog', '', _('name of error log file to write to')),
3113 ('p', 'port', 0, _('port to use (default: 8000)')),
3114 ('p', 'port', 0, _('port to use (default: 8000)')),
3114 ('a', 'address', '', _('address to use')),
3115 ('a', 'address', '', _('address to use')),
3115 ('n', 'name', '',
3116 ('n', 'name', '',
3116 _('name to show in web pages (default: working dir)')),
3117 _('name to show in web pages (default: working dir)')),
3117 ('', 'webdir-conf', '', _('name of the webdir config file'
3118 ('', 'webdir-conf', '', _('name of the webdir config file'
3118 ' (serve more than one repo)')),
3119 ' (serve more than one repo)')),
3119 ('', 'pid-file', '', _('name of file to write process ID to')),
3120 ('', 'pid-file', '', _('name of file to write process ID to')),
3120 ('', 'stdio', None, _('for remote clients')),
3121 ('', 'stdio', None, _('for remote clients')),
3121 ('t', 'templates', '', _('web templates to use')),
3122 ('t', 'templates', '', _('web templates to use')),
3122 ('', 'style', '', _('template style to use')),
3123 ('', 'style', '', _('template style to use')),
3123 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3124 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3124 _('hg serve [OPTION]...')),
3125 _('hg serve [OPTION]...')),
3125 "^status|st":
3126 "^status|st":
3126 (status,
3127 (status,
3127 [('A', 'all', None, _('show status of all files')),
3128 [('A', 'all', None, _('show status of all files')),
3128 ('m', 'modified', None, _('show only modified files')),
3129 ('m', 'modified', None, _('show only modified files')),
3129 ('a', 'added', None, _('show only added files')),
3130 ('a', 'added', None, _('show only added files')),
3130 ('r', 'removed', None, _('show only removed files')),
3131 ('r', 'removed', None, _('show only removed files')),
3131 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3132 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3132 ('c', 'clean', None, _('show only files without changes')),
3133 ('c', 'clean', None, _('show only files without changes')),
3133 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3134 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3134 ('i', 'ignored', None, _('show ignored files')),
3135 ('i', 'ignored', None, _('show ignored files')),
3135 ('n', 'no-status', None, _('hide status prefix')),
3136 ('n', 'no-status', None, _('hide status prefix')),
3136 ('C', 'copies', None, _('show source of copied files')),
3137 ('C', 'copies', None, _('show source of copied files')),
3137 ('0', 'print0', None,
3138 ('0', 'print0', None,
3138 _('end filenames with NUL, for use with xargs')),
3139 _('end filenames with NUL, for use with xargs')),
3139 ('', 'rev', [], _('show difference from revision')),
3140 ('', 'rev', [], _('show difference from revision')),
3140 ] + walkopts,
3141 ] + walkopts,
3141 _('hg status [OPTION]... [FILE]...')),
3142 _('hg status [OPTION]... [FILE]...')),
3142 "tag":
3143 "tag":
3143 (tag,
3144 (tag,
3144 [('l', 'local', None, _('make the tag local')),
3145 [('l', 'local', None, _('make the tag local')),
3145 ('m', 'message', '', _('message for tag commit log entry')),
3146 ('m', 'message', '', _('message for tag commit log entry')),
3146 ('d', 'date', '', _('record datecode as commit date')),
3147 ('d', 'date', '', _('record datecode as commit date')),
3147 ('u', 'user', '', _('record user as committer')),
3148 ('u', 'user', '', _('record user as committer')),
3148 ('r', 'rev', '', _('revision to tag'))],
3149 ('r', 'rev', '', _('revision to tag'))],
3149 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3150 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3150 "tags": (tags, [], _('hg tags')),
3151 "tags": (tags, [], _('hg tags')),
3151 "tip":
3152 "tip":
3152 (tip,
3153 (tip,
3153 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3154 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3154 ('', 'style', '', _('display using template map file')),
3155 ('', 'style', '', _('display using template map file')),
3155 ('p', 'patch', None, _('show patch')),
3156 ('p', 'patch', None, _('show patch')),
3156 ('', 'template', '', _('display with template'))],
3157 ('', 'template', '', _('display with template'))],
3157 _('hg tip [-p]')),
3158 _('hg tip [-p]')),
3158 "unbundle":
3159 "unbundle":
3159 (unbundle,
3160 (unbundle,
3160 [('u', 'update', None,
3161 [('u', 'update', None,
3161 _('update to new tip if changesets were unbundled'))],
3162 _('update to new tip if changesets were unbundled'))],
3162 _('hg unbundle [-u] FILE')),
3163 _('hg unbundle [-u] FILE')),
3163 "^update|up|checkout|co":
3164 "^update|up|checkout|co":
3164 (update,
3165 (update,
3165 [('b', 'branch', '',
3166 [('b', 'branch', '',
3166 _('checkout the head of a specific branch (DEPRECATED)')),
3167 _('checkout the head of a specific branch (DEPRECATED)')),
3167 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3168 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3168 ('C', 'clean', None, _('overwrite locally modified files')),
3169 ('C', 'clean', None, _('overwrite locally modified files')),
3169 ('f', 'force', None, _('force a merge with outstanding changes'))],
3170 ('f', 'force', None, _('force a merge with outstanding changes'))],
3170 _('hg update [-C] [-f] [REV]')),
3171 _('hg update [-C] [-f] [REV]')),
3171 "verify": (verify, [], _('hg verify')),
3172 "verify": (verify, [], _('hg verify')),
3172 "version": (show_version, [], _('hg version')),
3173 "version": (show_version, [], _('hg version')),
3173 }
3174 }
3174
3175
3175 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3176 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3176 " debugindex debugindexdot")
3177 " debugindex debugindexdot")
3177 optionalrepo = ("paths serve showconfig")
3178 optionalrepo = ("paths serve showconfig")
3178
3179
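# Illustrative sketch (not part of commands.py): keys of the table above pack
# two extra pieces of information into the command name -- a leading "^" marks
# a command listed in the short help, and "|" separates aliases, so
# "^commit|ci" exposes both 'commit' and 'ci'.  Standalone parser for that
# convention (hypothetical helper):
def _demo_parse_table_key(key):
    """Return (in_short_list, aliases) for a command table key."""
    return key.startswith("^"), key.lstrip("^").split("|")

# e.g. _demo_parse_table_key("^commit|ci") == (True, ['commit', 'ci'])
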
3179 def findpossible(ui, cmd):
3180 def findpossible(ui, cmd):
3180 """
3181 """
3181 Return cmd -> (aliases, command table entry)
3182 Return cmd -> (aliases, command table entry)
3182 for each matching command.
3183 for each matching command.
3183 Return debug commands (or their aliases) only if no normal command matches.
3184 Return debug commands (or their aliases) only if no normal command matches.
3184 """
3185 """
3185 choice = {}
3186 choice = {}
3186 debugchoice = {}
3187 debugchoice = {}
3187 for e in table.keys():
3188 for e in table.keys():
3188 aliases = e.lstrip("^").split("|")
3189 aliases = e.lstrip("^").split("|")
3189 found = None
3190 found = None
3190 if cmd in aliases:
3191 if cmd in aliases:
3191 found = cmd
3192 found = cmd
3192 elif not ui.config("ui", "strict"):
3193 elif not ui.config("ui", "strict"):
3193 for a in aliases:
3194 for a in aliases:
3194 if a.startswith(cmd):
3195 if a.startswith(cmd):
3195 found = a
3196 found = a
3196 break
3197 break
3197 if found is not None:
3198 if found is not None:
3198 if aliases[0].startswith("debug") or found.startswith("debug"):
3199 if aliases[0].startswith("debug") or found.startswith("debug"):
3199 debugchoice[found] = (aliases, table[e])
3200 debugchoice[found] = (aliases, table[e])
3200 else:
3201 else:
3201 choice[found] = (aliases, table[e])
3202 choice[found] = (aliases, table[e])
3202
3203
3203 if not choice and debugchoice:
3204 if not choice and debugchoice:
3204 choice = debugchoice
3205 choice = debugchoice
3205
3206
3206 return choice
3207 return choice
3207
3208
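# Illustrative sketch (not part of commands.py): findpossible() above accepts
# any prefix of a command or alias unless ui.strict is set, and falls back to
# debug commands only when nothing else matches.  Simplified matcher over a
# plain list of names (hypothetical helper; the real code also keeps the
# matching table entry):
def _demo_match_command(cmd, names, strict=False):
    """Return the list of command names that cmd selects."""
    exact = [n for n in names if n == cmd]
    if exact or strict:
        return exact
    matches = [n for n in names if n.startswith(cmd)]
    normal = [n for n in matches if not n.startswith("debug")]
    return normal or matches

# e.g. _demo_match_command('st', ['status', 'serve']) == ['status']
#      _demo_match_command('co', ['commit', 'copy']) == ['commit', 'copy']  (ambiguous)
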
3208 def findcmd(ui, cmd):
3209 def findcmd(ui, cmd):
3209 """Return (aliases, command table entry) for command string."""
3210 """Return (aliases, command table entry) for command string."""
3210 choice = findpossible(ui, cmd)
3211 choice = findpossible(ui, cmd)
3211
3212
3212 if choice.has_key(cmd):
3213 if choice.has_key(cmd):
3213 return choice[cmd]
3214 return choice[cmd]
3214
3215
3215 if len(choice) > 1:
3216 if len(choice) > 1:
3216 clist = choice.keys()
3217 clist = choice.keys()
3217 clist.sort()
3218 clist.sort()
3218 raise AmbiguousCommand(cmd, clist)
3219 raise AmbiguousCommand(cmd, clist)
3219
3220
3220 if choice:
3221 if choice:
3221 return choice.values()[0]
3222 return choice.values()[0]
3222
3223
3223 raise UnknownCommand(cmd)
3224 raise UnknownCommand(cmd)
3224
3225
3225 def catchterm(*args):
3226 def catchterm(*args):
3226 raise util.SignalInterrupt
3227 raise util.SignalInterrupt
3227
3228
3228 def run():
3229 def run():
3229 sys.exit(dispatch(sys.argv[1:]))
3230 sys.exit(dispatch(sys.argv[1:]))
3230
3231
3231 class ParseError(Exception):
3232 class ParseError(Exception):
3232 """Exception raised on errors in parsing the command line."""
3233 """Exception raised on errors in parsing the command line."""
3233
3234
3234 def parse(ui, args):
3235 def parse(ui, args):
3235 options = {}
3236 options = {}
3236 cmdoptions = {}
3237 cmdoptions = {}
3237
3238
3238 try:
3239 try:
3239 args = fancyopts.fancyopts(args, globalopts, options)
3240 args = fancyopts.fancyopts(args, globalopts, options)
3240 except fancyopts.getopt.GetoptError, inst:
3241 except fancyopts.getopt.GetoptError, inst:
3241 raise ParseError(None, inst)
3242 raise ParseError(None, inst)
3242
3243
3243 if args:
3244 if args:
3244 cmd, args = args[0], args[1:]
3245 cmd, args = args[0], args[1:]
3245 aliases, i = findcmd(ui, cmd)
3246 aliases, i = findcmd(ui, cmd)
3246 cmd = aliases[0]
3247 cmd = aliases[0]
3247 defaults = ui.config("defaults", cmd)
3248 defaults = ui.config("defaults", cmd)
3248 if defaults:
3249 if defaults:
3249 args = shlex.split(defaults) + args
3250 args = shlex.split(defaults) + args
3250 c = list(i[1])
3251 c = list(i[1])
3251 else:
3252 else:
3252 cmd = None
3253 cmd = None
3253 c = []
3254 c = []
3254
3255
3255 # combine global options into local
3256 # combine global options into local
3256 for o in globalopts:
3257 for o in globalopts:
3257 c.append((o[0], o[1], options[o[1]], o[3]))
3258 c.append((o[0], o[1], options[o[1]], o[3]))
3258
3259
3259 try:
3260 try:
3260 args = fancyopts.fancyopts(args, c, cmdoptions)
3261 args = fancyopts.fancyopts(args, c, cmdoptions)
3261 except fancyopts.getopt.GetoptError, inst:
3262 except fancyopts.getopt.GetoptError, inst:
3262 raise ParseError(cmd, inst)
3263 raise ParseError(cmd, inst)
3263
3264
3264 # separate global options back out
3265 # separate global options back out
3265 for o in globalopts:
3266 for o in globalopts:
3266 n = o[1]
3267 n = o[1]
3267 options[n] = cmdoptions[n]
3268 options[n] = cmdoptions[n]
3268 del cmdoptions[n]
3269 del cmdoptions[n]
3269
3270
3270 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3271 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3271
3272
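# Illustrative sketch (not part of commands.py): parse() above consults the
# [defaults] hgrc section and splices its value, split shell-style, in front
# of the remaining arguments; with "[defaults] log = -l 10 -v" configured,
# "hg log myfile" is parsed as if "hg log -l 10 -v myfile" had been typed.
# Hypothetical standalone equivalent of that step:
def _demo_apply_defaults(defaults, args):
    """Prepend shlex-split default arguments, as parse() does."""
    import shlex
    return defaults and shlex.split(defaults) + args or args

# e.g. _demo_apply_defaults('-l 10 -v', ['myfile']) == ['-l', '10', '-v', 'myfile']
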
3272 external = {}
3273 external = {}
3273
3274
3274 def findext(name):
3275 def findext(name):
3275 '''return module with given extension name'''
3276 '''return module with given extension name'''
3276 try:
3277 try:
3277 return sys.modules[external[name]]
3278 return sys.modules[external[name]]
3278 except KeyError:
3279 except KeyError:
3279 for k, v in external.iteritems():
3280 for k, v in external.iteritems():
3280 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3281 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3281 return sys.modules[v]
3282 return sys.modules[v]
3282 raise KeyError(name)
3283 raise KeyError(name)
3283
3284
3284 def load_extensions(ui):
3285 def load_extensions(ui):
3285 added = []
3286 added = []
3286 for ext_name, load_from_name in ui.extensions():
3287 for ext_name, load_from_name in ui.extensions():
3287 if ext_name in external:
3288 if ext_name in external:
3288 continue
3289 continue
3289 try:
3290 try:
3290 if load_from_name:
3291 if load_from_name:
3291 # the module will be loaded in sys.modules
3292 # the module will be loaded in sys.modules
3292 # choose a unique name so that it doesn't
3293 # choose a unique name so that it doesn't
3293 # conflict with other modules
3294 # conflict with other modules
3294 module_name = "hgext_%s" % ext_name.replace('.', '_')
3295 module_name = "hgext_%s" % ext_name.replace('.', '_')
3295 mod = imp.load_source(module_name, load_from_name)
3296 mod = imp.load_source(module_name, load_from_name)
3296 else:
3297 else:
3297 def importh(name):
3298 def importh(name):
3298 mod = __import__(name)
3299 mod = __import__(name)
3299 components = name.split('.')
3300 components = name.split('.')
3300 for comp in components[1:]:
3301 for comp in components[1:]:
3301 mod = getattr(mod, comp)
3302 mod = getattr(mod, comp)
3302 return mod
3303 return mod
3303 try:
3304 try:
3304 mod = importh("hgext.%s" % ext_name)
3305 mod = importh("hgext.%s" % ext_name)
3305 except ImportError:
3306 except ImportError:
3306 mod = importh(ext_name)
3307 mod = importh(ext_name)
3307 external[ext_name] = mod.__name__
3308 external[ext_name] = mod.__name__
3308 added.append((mod, ext_name))
3309 added.append((mod, ext_name))
3309 except (util.SignalInterrupt, KeyboardInterrupt):
3310 except (util.SignalInterrupt, KeyboardInterrupt):
3310 raise
3311 raise
3311 except Exception, inst:
3312 except Exception, inst:
3312 ui.warn(_("*** failed to import extension %s: %s\n") %
3313 ui.warn(_("*** failed to import extension %s: %s\n") %
3313 (ext_name, inst))
3314 (ext_name, inst))
3314 if ui.print_exc():
3315 if ui.print_exc():
3315 return 1
3316 return 1
3316
3317
3317 for mod, name in added:
3318 for mod, name in added:
3318 uisetup = getattr(mod, 'uisetup', None)
3319 uisetup = getattr(mod, 'uisetup', None)
3319 if uisetup:
3320 if uisetup:
3320 uisetup(ui)
3321 uisetup(ui)
3321 cmdtable = getattr(mod, 'cmdtable', {})
3322 cmdtable = getattr(mod, 'cmdtable', {})
3322 for t in cmdtable:
3323 for t in cmdtable:
3323 if t in table:
3324 if t in table:
3324 ui.warn(_("module %s overrides %s\n") % (name, t))
3325 ui.warn(_("module %s overrides %s\n") % (name, t))
3325 table.update(cmdtable)
3326 table.update(cmdtable)
3326
3327
3327 def parseconfig(config):
3328 def parseconfig(config):
3328 """parse the --config options from the command line"""
3329 """parse the --config options from the command line"""
3329 parsed = []
3330 parsed = []
3330 for cfg in config:
3331 for cfg in config:
3331 try:
3332 try:
3332 name, value = cfg.split('=', 1)
3333 name, value = cfg.split('=', 1)
3333 section, name = name.split('.', 1)
3334 section, name = name.split('.', 1)
3334 if not section or not name:
3335 if not section or not name:
3335 raise IndexError
3336 raise IndexError
3336 parsed.append((section, name, value))
3337 parsed.append((section, name, value))
3337 except (IndexError, ValueError):
3338 except (IndexError, ValueError):
3338 raise util.Abort(_('malformed --config option: %s') % cfg)
3339 raise util.Abort(_('malformed --config option: %s') % cfg)
3339 return parsed
3340 return parsed
3340
3341
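# A quick sketch (sample values assumed, not from the original file) of what
# parseconfig() returns for typical --config arguments:
#
#     >>> parseconfig(["ui.username=alice", "extensions.mq="])
#     [('ui', 'username', 'alice'), ('extensions', 'mq', '')]
#
# Anything that does not fit the section.name=value shape raises util.Abort.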
3341 def dispatch(args):
3342 def dispatch(args):
3342 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3343 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3343 num = getattr(signal, name, None)
3344 num = getattr(signal, name, None)
3344 if num: signal.signal(num, catchterm)
3345 if num: signal.signal(num, catchterm)
3345
3346
3346 try:
3347 try:
3347 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3348 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3348 except util.Abort, inst:
3349 except util.Abort, inst:
3349 sys.stderr.write(_("abort: %s\n") % inst)
3350 sys.stderr.write(_("abort: %s\n") % inst)
3350 return -1
3351 return -1
3351
3352
3352 load_extensions(u)
3353 load_extensions(u)
3353 u.addreadhook(load_extensions)
3354 u.addreadhook(load_extensions)
3354
3355
3355 try:
3356 try:
3356 cmd, func, args, options, cmdoptions = parse(u, args)
3357 cmd, func, args, options, cmdoptions = parse(u, args)
3357 if options["time"]:
3358 if options["time"]:
3358 def get_times():
3359 def get_times():
3359 t = os.times()
3360 t = os.times()
3360 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3361 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3361 t = (t[0], t[1], t[2], t[3], time.clock())
3362 t = (t[0], t[1], t[2], t[3], time.clock())
3362 return t
3363 return t
3363 s = get_times()
3364 s = get_times()
3364 def print_time():
3365 def print_time():
3365 t = get_times()
3366 t = get_times()
3366 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3367 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3367 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3368 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3368 atexit.register(print_time)
3369 atexit.register(print_time)
3369
3370
3370 # enter the debugger before command execution
3371 # enter the debugger before command execution
3371 if options['debugger']:
3372 if options['debugger']:
3372 pdb.set_trace()
3373 pdb.set_trace()
3373
3374
3374 try:
3375 try:
3375 if options['cwd']:
3376 if options['cwd']:
3376 try:
3377 try:
3377 os.chdir(options['cwd'])
3378 os.chdir(options['cwd'])
3378 except OSError, inst:
3379 except OSError, inst:
3379 raise util.Abort('%s: %s' %
3380 raise util.Abort('%s: %s' %
3380 (options['cwd'], inst.strerror))
3381 (options['cwd'], inst.strerror))
3381
3382
3382 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3383 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3383 not options["noninteractive"], options["traceback"],
3384 not options["noninteractive"], options["traceback"],
3384 parseconfig(options["config"]))
3385 parseconfig(options["config"]))
3385
3386
3386 path = u.expandpath(options["repository"]) or ""
3387 path = u.expandpath(options["repository"]) or ""
3387 repo = path and hg.repository(u, path=path) or None
3388 repo = path and hg.repository(u, path=path) or None
3388 if repo and not repo.local():
3389 if repo and not repo.local():
3389 raise util.Abort(_("repository '%s' is not local") % path)
3390 raise util.Abort(_("repository '%s' is not local") % path)
3390
3391
3391 if options['help']:
3392 if options['help']:
3392 return help_(u, cmd, options['version'])
3393 return help_(u, cmd, options['version'])
3393 elif options['version']:
3394 elif options['version']:
3394 return show_version(u)
3395 return show_version(u)
3395 elif not cmd:
3396 elif not cmd:
3396 return help_(u, 'shortlist')
3397 return help_(u, 'shortlist')
3397
3398
3398 if cmd not in norepo.split():
3399 if cmd not in norepo.split():
3399 try:
3400 try:
3400 if not repo:
3401 if not repo:
3401 repo = hg.repository(u, path=path)
3402 repo = hg.repository(u, path=path)
3402 u = repo.ui
3403 u = repo.ui
3403 for name in external.itervalues():
3404 for name in external.itervalues():
3404 mod = sys.modules[name]
3405 mod = sys.modules[name]
3405 if hasattr(mod, 'reposetup'):
3406 if hasattr(mod, 'reposetup'):
3406 mod.reposetup(u, repo)
3407 mod.reposetup(u, repo)
3407 hg.repo_setup_hooks.append(mod.reposetup)
3408 hg.repo_setup_hooks.append(mod.reposetup)
3408 except hg.RepoError:
3409 except hg.RepoError:
3409 if cmd not in optionalrepo.split():
3410 if cmd not in optionalrepo.split():
3410 raise
3411 raise
3411 d = lambda: func(u, repo, *args, **cmdoptions)
3412 d = lambda: func(u, repo, *args, **cmdoptions)
3412 else:
3413 else:
3413 d = lambda: func(u, *args, **cmdoptions)
3414 d = lambda: func(u, *args, **cmdoptions)
3414
3415
3415 try:
3416 try:
3416 if options['profile']:
3417 if options['profile']:
3417 import hotshot, hotshot.stats
3418 import hotshot, hotshot.stats
3418 prof = hotshot.Profile("hg.prof")
3419 prof = hotshot.Profile("hg.prof")
3419 try:
3420 try:
3420 try:
3421 try:
3421 return prof.runcall(d)
3422 return prof.runcall(d)
3422 except:
3423 except:
3423 try:
3424 try:
3424 u.warn(_('exception raised - generating '
3425 u.warn(_('exception raised - generating '
3425 'profile anyway\n'))
3426 'profile anyway\n'))
3426 except:
3427 except:
3427 pass
3428 pass
3428 raise
3429 raise
3429 finally:
3430 finally:
3430 prof.close()
3431 prof.close()
3431 stats = hotshot.stats.load("hg.prof")
3432 stats = hotshot.stats.load("hg.prof")
3432 stats.strip_dirs()
3433 stats.strip_dirs()
3433 stats.sort_stats('time', 'calls')
3434 stats.sort_stats('time', 'calls')
3434 stats.print_stats(40)
3435 stats.print_stats(40)
3435 elif options['lsprof']:
3436 elif options['lsprof']:
3436 try:
3437 try:
3437 from mercurial import lsprof
3438 from mercurial import lsprof
3438 except ImportError:
3439 except ImportError:
3439 raise util.Abort(_(
3440 raise util.Abort(_(
3440 'lsprof not available - install from '
3441 'lsprof not available - install from '
3441 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3442 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3442 p = lsprof.Profiler()
3443 p = lsprof.Profiler()
3443 p.enable(subcalls=True)
3444 p.enable(subcalls=True)
3444 try:
3445 try:
3445 return d()
3446 return d()
3446 finally:
3447 finally:
3447 p.disable()
3448 p.disable()
3448 stats = lsprof.Stats(p.getstats())
3449 stats = lsprof.Stats(p.getstats())
3449 stats.sort()
3450 stats.sort()
3450 stats.pprint(top=10, file=sys.stderr, climit=5)
3451 stats.pprint(top=10, file=sys.stderr, climit=5)
3451 else:
3452 else:
3452 return d()
3453 return d()
3453 finally:
3454 finally:
3454 u.flush()
3455 u.flush()
3455 except:
3456 except:
3456 # enter the debugger when we hit an exception
3457 # enter the debugger when we hit an exception
3457 if options['debugger']:
3458 if options['debugger']:
3458 pdb.post_mortem(sys.exc_info()[2])
3459 pdb.post_mortem(sys.exc_info()[2])
3459 u.print_exc()
3460 u.print_exc()
3460 raise
3461 raise
3461 except ParseError, inst:
3462 except ParseError, inst:
3462 if inst.args[0]:
3463 if inst.args[0]:
3463 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3464 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3464 help_(u, inst.args[0])
3465 help_(u, inst.args[0])
3465 else:
3466 else:
3466 u.warn(_("hg: %s\n") % inst.args[1])
3467 u.warn(_("hg: %s\n") % inst.args[1])
3467 help_(u, 'shortlist')
3468 help_(u, 'shortlist')
3468 except AmbiguousCommand, inst:
3469 except AmbiguousCommand, inst:
3469 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3470 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3470 (inst.args[0], " ".join(inst.args[1])))
3471 (inst.args[0], " ".join(inst.args[1])))
3471 except UnknownCommand, inst:
3472 except UnknownCommand, inst:
3472 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3473 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3473 help_(u, 'shortlist')
3474 help_(u, 'shortlist')
3474 except hg.RepoError, inst:
3475 except hg.RepoError, inst:
3475 u.warn(_("abort: %s!\n") % inst)
3476 u.warn(_("abort: %s!\n") % inst)
3476 except lock.LockHeld, inst:
3477 except lock.LockHeld, inst:
3477 if inst.errno == errno.ETIMEDOUT:
3478 if inst.errno == errno.ETIMEDOUT:
3478 reason = _('timed out waiting for lock held by %s') % inst.locker
3479 reason = _('timed out waiting for lock held by %s') % inst.locker
3479 else:
3480 else:
3480 reason = _('lock held by %s') % inst.locker
3481 reason = _('lock held by %s') % inst.locker
3481 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3482 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3482 except lock.LockUnavailable, inst:
3483 except lock.LockUnavailable, inst:
3483 u.warn(_("abort: could not lock %s: %s\n") %
3484 u.warn(_("abort: could not lock %s: %s\n") %
3484 (inst.desc or inst.filename, inst.strerror))
3485 (inst.desc or inst.filename, inst.strerror))
3485 except revlog.RevlogError, inst:
3486 except revlog.RevlogError, inst:
3486 u.warn(_("abort: %s!\n") % inst)
3487 u.warn(_("abort: %s!\n") % inst)
3487 except util.SignalInterrupt:
3488 except util.SignalInterrupt:
3488 u.warn(_("killed!\n"))
3489 u.warn(_("killed!\n"))
3489 except KeyboardInterrupt:
3490 except KeyboardInterrupt:
3490 try:
3491 try:
3491 u.warn(_("interrupted!\n"))
3492 u.warn(_("interrupted!\n"))
3492 except IOError, inst:
3493 except IOError, inst:
3493 if inst.errno == errno.EPIPE:
3494 if inst.errno == errno.EPIPE:
3494 if u.debugflag:
3495 if u.debugflag:
3495 u.warn(_("\nbroken pipe\n"))
3496 u.warn(_("\nbroken pipe\n"))
3496 else:
3497 else:
3497 raise
3498 raise
3498 except IOError, inst:
3499 except IOError, inst:
3499 if hasattr(inst, "code"):
3500 if hasattr(inst, "code"):
3500 u.warn(_("abort: %s\n") % inst)
3501 u.warn(_("abort: %s\n") % inst)
3501 elif hasattr(inst, "reason"):
3502 elif hasattr(inst, "reason"):
3502 u.warn(_("abort: error: %s\n") % inst.reason[1])
3503 u.warn(_("abort: error: %s\n") % inst.reason[1])
3503 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3504 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3504 if u.debugflag:
3505 if u.debugflag:
3505 u.warn(_("broken pipe\n"))
3506 u.warn(_("broken pipe\n"))
3506 elif getattr(inst, "strerror", None):
3507 elif getattr(inst, "strerror", None):
3507 if getattr(inst, "filename", None):
3508 if getattr(inst, "filename", None):
3508 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3509 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3509 else:
3510 else:
3510 u.warn(_("abort: %s\n") % inst.strerror)
3511 u.warn(_("abort: %s\n") % inst.strerror)
3511 else:
3512 else:
3512 raise
3513 raise
3513 except OSError, inst:
3514 except OSError, inst:
3514 if getattr(inst, "filename", None):
3515 if getattr(inst, "filename", None):
3515 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3516 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3516 else:
3517 else:
3517 u.warn(_("abort: %s\n") % inst.strerror)
3518 u.warn(_("abort: %s\n") % inst.strerror)
3518 except util.UnexpectedOutput, inst:
3519 except util.UnexpectedOutput, inst:
3519 u.warn(_("abort: %s") % inst[0])
3520 u.warn(_("abort: %s") % inst[0])
3520 if not isinstance(inst[1], basestring):
3521 if not isinstance(inst[1], basestring):
3521 u.warn(" %r\n" % (inst[1],))
3522 u.warn(" %r\n" % (inst[1],))
3522 elif not inst[1]:
3523 elif not inst[1]:
3523 u.warn(_(" empty string\n"))
3524 u.warn(_(" empty string\n"))
3524 else:
3525 else:
3525 u.warn("\n%r%s\n" %
3526 u.warn("\n%r%s\n" %
3526 (inst[1][:400], len(inst[1]) > 400 and '...' or ''))
3527 (inst[1][:400], len(inst[1]) > 400 and '...' or ''))
3527 except util.Abort, inst:
3528 except util.Abort, inst:
3528 u.warn(_("abort: %s\n") % inst)
3529 u.warn(_("abort: %s\n") % inst)
3529 except TypeError, inst:
3530 except TypeError, inst:
3530 # was this an argument error?
3531 # was this an argument error?
3531 tb = traceback.extract_tb(sys.exc_info()[2])
3532 tb = traceback.extract_tb(sys.exc_info()[2])
3532 if len(tb) > 2: # no
3533 if len(tb) > 2: # no
3533 raise
3534 raise
3534 u.debug(inst, "\n")
3535 u.debug(inst, "\n")
3535 u.warn(_("%s: invalid arguments\n") % cmd)
3536 u.warn(_("%s: invalid arguments\n") % cmd)
3536 help_(u, cmd)
3537 help_(u, cmd)
3537 except SystemExit, inst:
3538 except SystemExit, inst:
3538 # Commands shouldn't sys.exit directly, but give a return code.
3539 # Commands shouldn't sys.exit directly, but give a return code.
3539 # Just in case, catch this and pass the exit code to the caller.
3540 # Just in case, catch this and pass the exit code to the caller.
3540 return inst.code
3541 return inst.code
3541 except:
3542 except:
3542 u.warn(_("** unknown exception encountered, details follow\n"))
3543 u.warn(_("** unknown exception encountered, details follow\n"))
3543 u.warn(_("** report bug details to "
3544 u.warn(_("** report bug details to "
3544 "http://www.selenic.com/mercurial/bts\n"))
3545 "http://www.selenic.com/mercurial/bts\n"))
3545 u.warn(_("** or mercurial@selenic.com\n"))
3546 u.warn(_("** or mercurial@selenic.com\n"))
3546 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3547 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3547 % version.get_version())
3548 % version.get_version())
3548 raise
3549 raise
3549
3550
3550 return -1
3551 return -1
@@ -1,418 +1,418 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "errno util os tempfile")
11 demandload(globals(), "errno util os tempfile")
12
12
13 def filemerge(repo, fw, fo, wctx, mctx):
13 def filemerge(repo, fw, fo, wctx, mctx):
14 """perform a 3-way merge in the working directory
14 """perform a 3-way merge in the working directory
15
15
16 fw = filename in the working directory
16 fw = filename in the working directory
17 fo = filename in other parent
17 fo = filename in other parent
18 wctx, mctx = working and merge changecontexts
18 wctx, mctx = working and merge changecontexts
19 """
19 """
20
20
21 def temp(prefix, ctx):
21 def temp(prefix, ctx):
22 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
22 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
23 (fd, name) = tempfile.mkstemp(prefix=pre)
23 (fd, name) = tempfile.mkstemp(prefix=pre)
24 f = os.fdopen(fd, "wb")
24 f = os.fdopen(fd, "wb")
25 repo.wwrite(ctx.path(), ctx.data(), f)
25 repo.wwrite(ctx.path(), ctx.data(), f)
26 f.close()
26 f.close()
27 return name
27 return name
28
28
29 fcm = wctx.filectx(fw)
29 fcm = wctx.filectx(fw)
30 fco = mctx.filectx(fo)
30 fco = mctx.filectx(fo)
31
31
32 if not fco.cmp(fcm.data()): # files identical?
32 if not fco.cmp(fcm.data()): # files identical?
33 return None
33 return None
34
34
35 fca = fcm.ancestor(fco)
35 fca = fcm.ancestor(fco)
36 if not fca:
36 if not fca:
37 fca = repo.filectx(fw, fileid=nullrev)
37 fca = repo.filectx(fw, fileid=nullrev)
38 a = repo.wjoin(fw)
38 a = repo.wjoin(fw)
39 b = temp("base", fca)
39 b = temp("base", fca)
40 c = temp("other", fco)
40 c = temp("other", fco)
41
41
42 if fw != fo:
42 if fw != fo:
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 else:
44 else:
45 repo.ui.status(_("merging %s\n") % fw)
45 repo.ui.status(_("merging %s\n") % fw)
46
46
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48
48
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 or "hgmerge")
50 or "hgmerge")
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 environ={'HG_FILE': fw,
52 environ={'HG_FILE': fw,
53 'HG_MY_NODE': str(wctx.parents()[0]),
53 'HG_MY_NODE': str(wctx.parents()[0]),
54 'HG_OTHER_NODE': str(mctx)})
54 'HG_OTHER_NODE': str(mctx)})
55 if r:
55 if r:
56 repo.ui.warn(_("merging %s failed!\n") % fw)
56 repo.ui.warn(_("merging %s failed!\n") % fw)
57
57
58 os.unlink(b)
58 os.unlink(b)
59 os.unlink(c)
59 os.unlink(c)
60 return r
60 return r
61
61
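# Illustration only (file names are made up): for fw = "a.txt" merged with
# fo = "b.txt", the util.system() call above amounts to roughly
#
#     HG_FILE=a.txt HG_MY_NODE=<local parent> HG_OTHER_NODE=<other ctx> \
#         $HGMERGE "<repo>/a.txt" "/tmp/a.txt~base.XXXXXX" "/tmp/b.txt~other.XXXXXX"
#
# i.e. local file, ancestor copy, other copy.  The tool is taken from $HGMERGE,
# then ui.merge, then the "hgmerge" script, and a non-zero exit status is
# reported as a failed file merge.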
62 def checkunknown(wctx, mctx):
62 def checkunknown(wctx, mctx):
63 "check for collisions between unknown files and files in mctx"
63 "check for collisions between unknown files and files in mctx"
64 man = mctx.manifest()
64 man = mctx.manifest()
65 for f in wctx.unknown():
65 for f in wctx.unknown():
66 if f in man:
66 if f in man:
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 raise util.Abort(_("'%s' already exists in the working"
68 raise util.Abort(_("'%s' already exists in the working"
69 " dir and differs from remote") % f)
69 " dir and differs from remote") % f)
70
70
71 def forgetremoved(wctx, mctx):
71 def forgetremoved(wctx, mctx):
72 """
72 """
73 Forget removed files
73 Forget removed files
74
74
75 If we're jumping between revisions (as opposed to merging), and if
75 If we're jumping between revisions (as opposed to merging), and if
76 neither the working directory nor the target rev has the file,
76 neither the working directory nor the target rev has the file,
77 then we need to remove it from the dirstate, to prevent the
77 then we need to remove it from the dirstate, to prevent the
78 dirstate from listing the file when it is no longer in the
78 dirstate from listing the file when it is no longer in the
79 manifest.
79 manifest.
80 """
80 """
81
81
82 action = []
82 action = []
83 man = mctx.manifest()
83 man = mctx.manifest()
84 for f in wctx.deleted() + wctx.removed():
84 for f in wctx.deleted() + wctx.removed():
85 if f not in man:
85 if f not in man:
86 action.append((f, "f"))
86 action.append((f, "f"))
87
87
88 return action
88 return action
89
89
90 def nonoverlap(d1, d2, d3):
90 def nonoverlap(d1, d2, d3):
91 "Return list of elements in d1 not in d2 or d3"
91 "Return list of elements in d1 not in d2 or d3"
92
92
93 l = []
93 l = []
94 for d in d1:
94 for d in d1:
95 if d not in d3 and d not in d2:
95 if d not in d3 and d not in d2:
96 l.append(d)
96 l.append(d)
97
97
98 l.sort()
98 l.sort()
99 return l
99 return l
100
100
101 def findold(fctx, limit):
101 def findold(fctx, limit):
102 "find files that path was copied from, back to linkrev limit"
102 "find files that path was copied from, back to linkrev limit"
103
103
104 old = {}
104 old = {}
105 orig = fctx.path()
105 orig = fctx.path()
106 visit = [fctx]
106 visit = [fctx]
107 while visit:
107 while visit:
108 fc = visit.pop()
108 fc = visit.pop()
109 if fc.rev() < limit:
109 if fc.rev() < limit:
110 continue
110 continue
111 if fc.path() != orig and fc.path() not in old:
111 if fc.path() != orig and fc.path() not in old:
112 old[fc.path()] = 1
112 old[fc.path()] = 1
113 visit += fc.parents()
113 visit += fc.parents()
114
114
115 old = old.keys()
115 old = old.keys()
116 old.sort()
116 old.sort()
117 return old
117 return old
118
118
119 def findcopies(repo, m1, m2, ma, limit):
119 def findcopies(repo, m1, m2, ma, limit):
120 """
120 """
121 Find moves and copies between m1 and m2 back to limit linkrev
121 Find moves and copies between m1 and m2 back to limit linkrev
122 """
122 """
123
123
124 if not repo.ui.configbool("merge", "followcopies", True):
124 if not repo.ui.configbool("merge", "followcopies", True):
125 return {}
125 return {}
126
126
127 # avoid silly behavior for update from empty dir
127 # avoid silly behavior for update from empty dir
128 if not m1:
128 if not m1:
129 return {}
129 return {}
130
130
131 dcopies = repo.dirstate.copies()
131 dcopies = repo.dirstate.copies()
132 copy = {}
132 copy = {}
133 match = {}
133 match = {}
134 u1 = nonoverlap(m1, m2, ma)
134 u1 = nonoverlap(m1, m2, ma)
135 u2 = nonoverlap(m2, m1, ma)
135 u2 = nonoverlap(m2, m1, ma)
136 ctx = util.cachefunc(lambda f,n: repo.filectx(f, fileid=n[:20]))
136 ctx = util.cachefunc(lambda f,n: repo.filectx(f, fileid=n[:20]))
137
137
138 def checkpair(c, f2, man):
138 def checkpair(c, f2, man):
139 ''' check if an apparent pair actually matches '''
139 ''' check if an apparent pair actually matches '''
140 c2 = ctx(f2, man[f2])
140 c2 = ctx(f2, man[f2])
141 ca = c.ancestor(c2)
141 ca = c.ancestor(c2)
142 if ca and ca.path() == c.path() or ca.path() == c2.path():
142 if ca and ca.path() == c.path() or ca.path() == c2.path():
143 copy[c.path()] = f2
143 copy[c.path()] = f2
144 copy[f2] = c.path()
144 copy[f2] = c.path()
145
145
146 for f in u1:
146 for f in u1:
147 c = ctx(dcopies.get(f, f), m1[f])
147 c = ctx(dcopies.get(f, f), m1[f])
148 for of in findold(c, limit):
148 for of in findold(c, limit):
149 if of in m2:
149 if of in m2:
150 checkpair(c, of, m2)
150 checkpair(c, of, m2)
151 else:
151 else:
152 match.setdefault(of, []).append(f)
152 match.setdefault(of, []).append(f)
153
153
154 for f in u2:
154 for f in u2:
155 c = ctx(f, m2[f])
155 c = ctx(f, m2[f])
156 for of in findold(c, limit):
156 for of in findold(c, limit):
157 if of in m1:
157 if of in m1:
158 checkpair(c, of, m1)
158 checkpair(c, of, m1)
159 elif of in match:
159 elif of in match:
160 for mf in match[of]:
160 for mf in match[of]:
161 checkpair(c, mf, m1)
161 checkpair(c, mf, m1)
162
162
163 return copy
163 return copy
164
164
165 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
165 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
166 """
166 """
167 Merge p1 and p2 with ancestor ma and generate merge action list
167 Merge p1 and p2 with ancestor ma and generate merge action list
168
168
169 overwrite = whether we clobber working files
169 overwrite = whether we clobber working files
170 partial = function to filter file lists
170 partial = function to filter file lists
171 """
171 """
172
172
173 repo.ui.note(_("resolving manifests\n"))
173 repo.ui.note(_("resolving manifests\n"))
174 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
174 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
175 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
175 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
176
176
177 m1 = p1.manifest()
177 m1 = p1.manifest()
178 m2 = p2.manifest()
178 m2 = p2.manifest()
179 ma = pa.manifest()
179 ma = pa.manifest()
180 backwards = (pa == p2)
180 backwards = (pa == p2)
181 action = []
181 action = []
182 copy = {}
182 copy = {}
183
183
184 def fmerge(f, f2=None, fa=None):
184 def fmerge(f, f2=None, fa=None):
185 """merge executable flags"""
185 """merge executable flags"""
186 if not f2:
186 if not f2:
187 f2 = f
187 f2 = f
188 fa = f
188 fa = f
189 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
189 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
190 return ((a^b) | (a^c)) ^ a
190 return ((a^b) | (a^c)) ^ a
191
191
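# The flag merge above is "the ancestor's exec bit, flipped whenever either
# side flipped it".  Truth table for ((a^b) | (a^c)) ^ a, with a = ancestor,
# b = local, c = other:
#
#     a b c -> flag              a b c -> flag
#     0 0 0 -> 0                 1 1 1 -> 1
#     0 1 0 -> 1  local set      1 0 1 -> 0  local cleared
#     0 0 1 -> 1  other set      1 1 0 -> 0  other cleared
#     0 1 1 -> 1  both set       1 0 0 -> 0  both cleared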
192 def act(msg, m, f, *args):
192 def act(msg, m, f, *args):
193 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
193 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
194 action.append((f, m) + args)
194 action.append((f, m) + args)
195
195
196 if not (backwards or overwrite):
196 if not (backwards or overwrite):
197 copy = findcopies(repo, m1, m2, ma, pa.rev())
197 copy = findcopies(repo, m1, m2, ma, pa.rev())
198
198
199 # Compare manifests
199 # Compare manifests
200 for f, n in m1.iteritems():
200 for f, n in m1.iteritems():
201 if partial and not partial(f):
201 if partial and not partial(f):
202 continue
202 continue
203 if f in m2:
203 if f in m2:
204 # are files different?
204 # are files different?
205 if n != m2[f]:
205 if n != m2[f]:
206 a = ma.get(f, nullid)
206 a = ma.get(f, nullid)
207 # are both different from the ancestor?
207 # are both different from the ancestor?
208 if not overwrite and n != a and m2[f] != a:
208 if not overwrite and n != a and m2[f] != a:
209 act("versions differ", "m", f, f, f, fmerge(f), False)
209 act("versions differ", "m", f, f, f, fmerge(f), False)
210 # are we clobbering?
210 # are we clobbering?
211 # is remote's version newer?
211 # is remote's version newer?
212 # or are we going back in time and clean?
212 # or are we going back in time and clean?
213 elif overwrite or m2[f] != a or (backwards and not n[20:]):
213 elif overwrite or m2[f] != a or (backwards and not n[20:]):
214 act("remote is newer", "g", f, m2.execf(f))
214 act("remote is newer", "g", f, m2.execf(f))
215 # local is newer, not overwrite, check mode bits
215 # local is newer, not overwrite, check mode bits
216 elif fmerge(f) != m1.execf(f):
216 elif fmerge(f) != m1.execf(f):
217 act("update permissions", "e", f, m2.execf(f))
217 act("update permissions", "e", f, m2.execf(f))
218 # contents same, check mode bits
218 # contents same, check mode bits
219 elif m1.execf(f) != m2.execf(f):
219 elif m1.execf(f) != m2.execf(f):
220 if overwrite or fmerge(f) != m1.execf(f):
220 if overwrite or fmerge(f) != m1.execf(f):
221 act("update permissions", "e", f, m2.execf(f))
221 act("update permissions", "e", f, m2.execf(f))
222 elif f in copy:
222 elif f in copy:
223 f2 = copy[f]
223 f2 = copy[f]
224 if f in ma: # case 3,20 A/B/A
224 if f in ma: # case 3,20 A/B/A
225 act("remote moved", "m", f, f2, f2, fmerge(f, f2, f), True)
225 act("remote moved", "m", f, f2, f2, fmerge(f, f2, f), True)
226 else:
226 else:
227 if f2 in m1: # case 2 A,B/B/B
227 if f2 in m1: # case 2 A,B/B/B
228 act("local copied", "m",
228 act("local copied", "m",
229 f, f2, f, fmerge(f, f2, f2), False)
229 f, f2, f, fmerge(f, f2, f2), False)
230 else: # case 4,21 A/B/B
230 else: # case 4,21 A/B/B
231 act("local moved", "m",
231 act("local moved", "m",
232 f, f2, f, fmerge(f, f2, f2), False)
232 f, f2, f, fmerge(f, f2, f2), False)
233 elif f in ma:
233 elif f in ma:
234 if n != ma[f] and not overwrite:
234 if n != ma[f] and not overwrite:
235 if repo.ui.prompt(
235 if repo.ui.prompt(
236 (_(" local changed %s which remote deleted\n") % f) +
236 (_(" local changed %s which remote deleted\n") % f) +
237 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
237 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
238 act("prompt delete", "r", f)
238 act("prompt delete", "r", f)
239 else:
239 else:
240 act("other deleted", "r", f)
240 act("other deleted", "r", f)
241 else:
241 else:
242 # file is created on branch or in working directory
242 # file is created on branch or in working directory
243 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
243 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
244 act("remote deleted", "r", f)
244 act("remote deleted", "r", f)
245
245
246 for f, n in m2.iteritems():
246 for f, n in m2.iteritems():
247 if partial and not partial(f):
247 if partial and not partial(f):
248 continue
248 continue
249 if f in m1:
249 if f in m1:
250 continue
250 continue
251 if f in copy:
251 if f in copy:
252 f2 = copy[f]
252 f2 = copy[f]
253 if f2 not in m2: # already seen
253 if f2 not in m2: # already seen
254 continue
254 continue
255 # rename case 1, A/A,B/A
255 # rename case 1, A/A,B/A
256 act("remote copied", "m", f2, f, f, fmerge(f2, f, f2), False)
256 act("remote copied", "m", f2, f, f, fmerge(f2, f, f2), False)
257 elif f in ma:
257 elif f in ma:
258 if overwrite or backwards:
258 if overwrite or backwards:
259 act("recreating", "g", f, m2.execf(f))
259 act("recreating", "g", f, m2.execf(f))
260 elif n != ma[f]:
260 elif n != ma[f]:
261 if repo.ui.prompt(
261 if repo.ui.prompt(
262 (_("remote changed %s which local deleted\n") % f) +
262 (_("remote changed %s which local deleted\n") % f) +
263 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
263 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
264 act("prompt recreating", "g", f, m2.execf(f))
264 act("prompt recreating", "g", f, m2.execf(f))
265 else:
265 else:
266 act("remote created", "g", f, m2.execf(f))
266 act("remote created", "g", f, m2.execf(f))
267
267
268 return action
268 return action
269
269
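# The action list built above drives applyupdates() and recordupdates() below.
# For illustration (hypothetical file names), it might look like:
#
#     [("README",  "g", False),                      # get remote copy, no exec bit
#      ("bin/run", "e", True),                       # only fix the exec bit
#      ("old.c",   "r"),                             # remove
#      ("a.c",     "m", "b.c", "b.c", False, True)]  # merge a.c with b.c into b.c,
#                                                    # then drop a.c (move)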
270 def applyupdates(repo, action, wctx, mctx):
270 def applyupdates(repo, action, wctx, mctx):
271 "apply the merge action list to the working directory"
271 "apply the merge action list to the working directory"
272
272
273 updated, merged, removed, unresolved = 0, 0, 0, 0
273 updated, merged, removed, unresolved = 0, 0, 0, 0
274 action.sort()
274 action.sort()
275 for a in action:
275 for a in action:
276 f, m = a[:2]
276 f, m = a[:2]
277 if f[0] == "/":
277 if f[0] == "/":
278 continue
278 continue
279 if m == "r": # remove
279 if m == "r": # remove
280 repo.ui.note(_("removing %s\n") % f)
280 repo.ui.note(_("removing %s\n") % f)
281 util.audit_path(f)
281 util.audit_path(f)
282 try:
282 try:
283 util.unlink(repo.wjoin(f))
283 util.unlink(repo.wjoin(f))
284 except OSError, inst:
284 except OSError, inst:
285 if inst.errno != errno.ENOENT:
285 if inst.errno != errno.ENOENT:
286 repo.ui.warn(_("update failed to remove %s: %s!\n") %
286 repo.ui.warn(_("update failed to remove %s: %s!\n") %
287 (f, inst.strerror))
287 (f, inst.strerror))
288 removed +=1
288 removed +=1
289 elif m == "m": # merge
289 elif m == "m": # merge
290 f2, fd, flag, move = a[2:]
290 f2, fd, flag, move = a[2:]
291 r = filemerge(repo, f, f2, wctx, mctx)
291 r = filemerge(repo, f, f2, wctx, mctx)
292 if r > 0:
292 if r > 0:
293 unresolved += 1
293 unresolved += 1
294 else:
294 else:
295 if r is None:
295 if r is None:
296 updated += 1
296 updated += 1
297 else:
297 else:
298 merged += 1
298 merged += 1
299 if f != fd:
299 if f != fd:
300 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
300 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
301 repo.wwrite(fd, repo.wread(f))
301 repo.wwrite(fd, repo.wread(f))
302 if move:
302 if move:
303 repo.ui.debug(_("removing %s\n") % f)
303 repo.ui.debug(_("removing %s\n") % f)
304 os.unlink(repo.wjoin(f))
304 os.unlink(repo.wjoin(f))
305 util.set_exec(repo.wjoin(fd), flag)
305 util.set_exec(repo.wjoin(fd), flag)
306 elif m == "g": # get
306 elif m == "g": # get
307 flag = a[2]
307 flag = a[2]
308 repo.ui.note(_("getting %s\n") % f)
308 repo.ui.note(_("getting %s\n") % f)
309 t = mctx.filectx(f).data()
309 t = mctx.filectx(f).data()
310 repo.wwrite(f, t)
310 repo.wwrite(f, t)
311 util.set_exec(repo.wjoin(f), flag)
311 util.set_exec(repo.wjoin(f), flag)
312 updated += 1
312 updated += 1
313 elif m == "e": # exec
313 elif m == "e": # exec
314 flag = a[2]
314 flag = a[2]
315 util.set_exec(repo.wjoin(f), flag)
315 util.set_exec(repo.wjoin(f), flag)
316
316
317 return updated, merged, removed, unresolved
317 return updated, merged, removed, unresolved
318
318
319 def recordupdates(repo, action, branchmerge):
319 def recordupdates(repo, action, branchmerge):
320 "record merge actions to the dirstate"
320 "record merge actions to the dirstate"
321
321
322 for a in action:
322 for a in action:
323 f, m = a[:2]
323 f, m = a[:2]
324 if m == "r": # remove
324 if m == "r": # remove
325 if branchmerge:
325 if branchmerge:
326 repo.dirstate.update([f], 'r')
326 repo.dirstate.update([f], 'r')
327 else:
327 else:
328 repo.dirstate.forget([f])
328 repo.dirstate.forget([f])
329 elif m == "f": # forget
329 elif m == "f": # forget
330 repo.dirstate.forget([f])
330 repo.dirstate.forget([f])
331 elif m == "g": # get
331 elif m == "g": # get
332 if branchmerge:
332 if branchmerge:
333 repo.dirstate.update([f], 'n', st_mtime=-1)
333 repo.dirstate.update([f], 'n', st_mtime=-1)
334 else:
334 else:
335 repo.dirstate.update([f], 'n')
335 repo.dirstate.update([f], 'n')
336 elif m == "m": # merge
336 elif m == "m": # merge
337 f2, fd, flag, move = a[2:]
337 f2, fd, flag, move = a[2:]
338 if branchmerge:
338 if branchmerge:
339 # We've done a branch merge, mark this file as merged
339 # We've done a branch merge, mark this file as merged
340 # so that we properly record the merger later
340 # so that we properly record the merger later
341 repo.dirstate.update([fd], 'm')
341 repo.dirstate.update([fd], 'm')
342 if f != f2: # copy/rename
342 if f != f2: # copy/rename
343 if move:
343 if move:
344 repo.dirstate.update([f], 'r')
344 repo.dirstate.update([f], 'r')
345 if f != fd:
345 if f != fd:
346 repo.dirstate.copy(f, fd)
346 repo.dirstate.copy(f, fd)
347 else:
347 else:
348 repo.dirstate.copy(f2, fd)
348 repo.dirstate.copy(f2, fd)
349 else:
349 else:
350 # We've update-merged a locally modified file, so
350 # We've update-merged a locally modified file, so
351 # we set the dirstate to emulate a normal checkout
351 # we set the dirstate to emulate a normal checkout
352 # of that file some time in the past. Thus our
352 # of that file some time in the past. Thus our
353 # merge will appear as a normal local file
353 # merge will appear as a normal local file
354 # modification.
354 # modification.
355 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
355 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
356 if move:
356 if move:
357 repo.dirstate.forget([f])
357 repo.dirstate.forget([f])
358
358
359 def update(repo, node, branchmerge, force, partial, wlock):
359 def update(repo, node, branchmerge, force, partial, wlock):
360 """
360 """
361 Perform a merge between the working directory and the given node
361 Perform a merge between the working directory and the given node
362
362
363 branchmerge = whether to merge between branches
363 branchmerge = whether to merge between branches
364 force = whether to force branch merging or file overwriting
364 force = whether to force branch merging or file overwriting
365 partial = a function to filter file lists (dirstate not updated)
365 partial = a function to filter file lists (dirstate not updated)
366 wlock = working dir lock, if already held
366 wlock = working dir lock, if already held
367 """
367 """
368
368
369 if not wlock:
369 if not wlock:
370 wlock = repo.wlock()
370 wlock = repo.wlock()
371
371
372 overwrite = force and not branchmerge
372 overwrite = force and not branchmerge
373 forcemerge = force and branchmerge
373 forcemerge = force and branchmerge
374 wc = repo.workingctx()
374 wc = repo.workingctx()
375 pl = wc.parents()
375 pl = wc.parents()
376 p1, p2 = pl[0], repo.changectx(node)
376 p1, p2 = pl[0], repo.changectx(node)
377 pa = p1.ancestor(p2)
377 pa = p1.ancestor(p2)
378 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
378 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
379
379
380 ### check phase
380 ### check phase
381 if not overwrite and len(pl) > 1:
381 if not overwrite and len(pl) > 1:
382 raise util.Abort(_("outstanding uncommitted merges"))
382 raise util.Abort(_("outstanding uncommitted merges"))
383 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
383 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
384 if branchmerge:
384 if branchmerge:
385 raise util.Abort(_("there is nothing to merge, just use "
385 raise util.Abort(_("there is nothing to merge, just use "
386 "'hg update' or look at 'hg heads'"))
386 "'hg update' or look at 'hg heads'"))
387 elif not (overwrite or branchmerge):
387 elif not (overwrite or branchmerge):
388 raise util.Abort(_("update spans branches, use 'hg merge' "
388 raise util.Abort(_("update spans branches, use 'hg merge' "
389 "or 'hg update -C' to lose changes"))
389 "or 'hg update -C' to lose changes"))
390 if branchmerge and not forcemerge:
390 if branchmerge and not forcemerge:
391 if wc.modified() or wc.added() or wc.removed():
391 if wc.files():
392 raise util.Abort(_("outstanding uncommitted changes"))
392 raise util.Abort(_("outstanding uncommitted changes"))
393
393
394 ### calculate phase
394 ### calculate phase
395 action = []
395 action = []
396 if not force:
396 if not force:
397 checkunknown(wc, p2)
397 checkunknown(wc, p2)
398 if not branchmerge:
398 if not branchmerge:
399 action += forgetremoved(wc, p2)
399 action += forgetremoved(wc, p2)
400 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
400 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
401
401
402 ### apply phase
402 ### apply phase
403 if not branchmerge: # just jump to the new rev
403 if not branchmerge: # just jump to the new rev
404 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
404 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
405 if not partial:
405 if not partial:
406 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
406 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
407
407
408 stats = applyupdates(repo, action, wc, p2)
408 stats = applyupdates(repo, action, wc, p2)
409
409
410 if not partial:
410 if not partial:
411 recordupdates(repo, action, branchmerge)
411 recordupdates(repo, action, branchmerge)
412 repo.dirstate.setparents(fp1, fp2)
412 repo.dirstate.setparents(fp1, fp2)
413 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
413 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
414 if not branchmerge:
414 if not branchmerge:
415 repo.opener("branch", "w").write(p2.branch() + "\n")
415 repo.opener("branch", "w").write(p2.branch() + "\n")
416
416
417 return stats
417 return stats
418
418
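# For reference (deduced from the code above, not stated in the original):
# a plain linear update is branchmerge=False, force=False; an "hg update -C"
# style overwrite is branchmerge=False, force=True (so overwrite is set); and
# a branch merge is branchmerge=True, where force only skips the
# "outstanding uncommitted changes" check.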
@@ -1,1278 +1,1284 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii changegroup errno ancestor mdiff os")
16 demandload(globals(), "binascii changegroup errno ancestor mdiff os")
17 demandload(globals(), "sha struct util zlib")
17 demandload(globals(), "sha struct util zlib")
18
18
19 # revlog version strings
19 # revlog version strings
20 REVLOGV0 = 0
20 REVLOGV0 = 0
21 REVLOGNG = 1
21 REVLOGNG = 1
22
22
23 # revlog flags
23 # revlog flags
24 REVLOGNGINLINEDATA = (1 << 16)
24 REVLOGNGINLINEDATA = (1 << 16)
25 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
25 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
26
26
27 REVLOG_DEFAULT_FORMAT = REVLOGNG
27 REVLOG_DEFAULT_FORMAT = REVLOGNG
28 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
28 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
29
29
30 def flagstr(flag):
30 def flagstr(flag):
31 if flag == "inline":
31 if flag == "inline":
32 return REVLOGNGINLINEDATA
32 return REVLOGNGINLINEDATA
33 raise RevlogError(_("unknown revlog flag %s" % flag))
33 raise RevlogError(_("unknown revlog flag %s" % flag))
34
34
35 def hash(text, p1, p2):
35 def hash(text, p1, p2):
36 """generate a hash from the given text and its parent hashes
36 """generate a hash from the given text and its parent hashes
37
37
38 This hash combines both the current file contents and its history
38 This hash combines both the current file contents and its history
39 in a manner that makes it easy to distinguish nodes with the same
39 in a manner that makes it easy to distinguish nodes with the same
40 content in the revision graph.
40 content in the revision graph.
41 """
41 """
42 l = [p1, p2]
42 l = [p1, p2]
43 l.sort()
43 l.sort()
44 s = sha.new(l[0])
44 s = sha.new(l[0])
45 s.update(l[1])
45 s.update(l[1])
46 s.update(text)
46 s.update(text)
47 return s.digest()
47 return s.digest()
48
48
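# Usage sketch (not in the original file): a root revision has two null
# parents, so its nodeid is simply sha1(nullid + nullid + text), and the
# parent sort above makes the parent order irrelevant:
#
#     node0 = hash("hello\n", nullid, nullid)                 # 20-byte nodeid
#     assert hash("hello\n", node0, nullid) == hash("hello\n", nullid, node0)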
49 def compress(text):
49 def compress(text):
50 """ generate a possibly-compressed representation of text """
50 """ generate a possibly-compressed representation of text """
51 if not text: return ("", text)
51 if not text: return ("", text)
52 if len(text) < 44:
52 if len(text) < 44:
53 if text[0] == '\0': return ("", text)
53 if text[0] == '\0': return ("", text)
54 return ('u', text)
54 return ('u', text)
55 bin = zlib.compress(text)
55 bin = zlib.compress(text)
56 if len(bin) > len(text):
56 if len(bin) > len(text):
57 if text[0] == '\0': return ("", text)
57 if text[0] == '\0': return ("", text)
58 return ('u', text)
58 return ('u', text)
59 return ("", bin)
59 return ("", bin)
60
60
61 def decompress(bin):
61 def decompress(bin):
62 """ decompress the given input """
62 """ decompress the given input """
63 if not bin: return bin
63 if not bin: return bin
64 t = bin[0]
64 t = bin[0]
65 if t == '\0': return bin
65 if t == '\0': return bin
66 if t == 'x': return zlib.decompress(bin)
66 if t == 'x': return zlib.decompress(bin)
67 if t == 'u': return bin[1:]
67 if t == 'u': return bin[1:]
68 raise RevlogError(_("unknown compression type %r") % t)
68 raise RevlogError(_("unknown compression type %r") % t)
69
69
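# Round-trip sketch (not part of the original file): zlib output starts with
# 'x', literal chunks get a 'u' prefix, and an empty prefix means the data can
# be used as-is.
def _compress_demo():
    text = "some revision text " * 10
    hdr, data = compress(text)                 # compresses well -> ("", zlib data)
    assert decompress(hdr + data) == text
    assert compress("tiny") == ('u', 'tiny')   # under 44 bytes: stored literally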
70 indexformatv0 = ">4l20s20s20s"
70 indexformatv0 = ">4l20s20s20s"
71 v0shaoffset = 56
71 v0shaoffset = 56
72 # index ng:
72 # index ng:
73 # 6 bytes offset
73 # 6 bytes offset
74 # 2 bytes flags
74 # 2 bytes flags
75 # 4 bytes compressed length
75 # 4 bytes compressed length
76 # 4 bytes uncompressed length
76 # 4 bytes uncompressed length
77 # 4 bytes: base rev
77 # 4 bytes: base rev
78 # 4 bytes link rev
78 # 4 bytes link rev
79 # 4 bytes parent 1 rev
79 # 4 bytes parent 1 rev
80 # 4 bytes parent 2 rev
80 # 4 bytes parent 2 rev
81 # 32 bytes: nodeid
81 # 32 bytes: nodeid
82 indexformatng = ">Qiiiiii20s12x"
82 indexformatng = ">Qiiiiii20s12x"
83 ngshaoffset = 32
83 ngshaoffset = 32
84 versionformat = ">i"
84 versionformat = ">i"
85
85
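# Layout sketch (field values are invented): one RevlogNG index record packs
# into 64 bytes with the indexformatng string above.
def _indexng_demo():
    offset, flags = 0, 0
    entry = struct.pack(indexformatng,
                        (offset << 16) | flags,  # 6 bytes offset + 2 bytes flags
                        11, 23,                  # compressed / uncompressed length
                        0, 0,                    # base rev, link rev
                        -1, -1,                  # parent revs (nullrev)
                        "\0" * 20)               # nodeid
    assert len(entry) == struct.calcsize(indexformatng) == 64
    return struct.unpack(indexformatng, entry)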
86 class lazyparser(object):
86 class lazyparser(object):
87 """
87 """
88 this class avoids the need to parse the entirety of large indices
88 this class avoids the need to parse the entirety of large indices
89 """
89 """
90
90
91 # lazyparser is not safe to use on windows if win32 extensions not
91 # lazyparser is not safe to use on windows if win32 extensions not
92 # available. it keeps the file handle open, which makes it impossible
92 # available. it keeps the file handle open, which makes it impossible
93 # to break hardlinks on local cloned repos.
93 # to break hardlinks on local cloned repos.
94 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
94 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
95 hasattr(util, 'win32api'))
95 hasattr(util, 'win32api'))
96
96
97 def __init__(self, dataf, size, indexformat, shaoffset):
97 def __init__(self, dataf, size, indexformat, shaoffset):
98 self.dataf = dataf
98 self.dataf = dataf
99 self.format = indexformat
99 self.format = indexformat
100 self.s = struct.calcsize(indexformat)
100 self.s = struct.calcsize(indexformat)
101 self.indexformat = indexformat
101 self.indexformat = indexformat
102 self.datasize = size
102 self.datasize = size
103 self.l = size/self.s
103 self.l = size/self.s
104 self.index = [None] * self.l
104 self.index = [None] * self.l
105 self.map = {nullid: nullrev}
105 self.map = {nullid: nullrev}
106 self.allmap = 0
106 self.allmap = 0
107 self.all = 0
107 self.all = 0
108 self.mapfind_count = 0
108 self.mapfind_count = 0
109 self.shaoffset = shaoffset
109 self.shaoffset = shaoffset
110
110
111 def loadmap(self):
111 def loadmap(self):
112 """
112 """
113 during a commit, we need to make sure the rev being added is
113 during a commit, we need to make sure the rev being added is
114 not a duplicate. This requires loading the entire index,
114 not a duplicate. This requires loading the entire index,
115 which is fairly slow. loadmap can load up just the node map,
115 which is fairly slow. loadmap can load up just the node map,
116 which takes much less time.
116 which takes much less time.
117 """
117 """
118 if self.allmap: return
118 if self.allmap: return
119 end = self.datasize
119 end = self.datasize
120 self.allmap = 1
120 self.allmap = 1
121 cur = 0
121 cur = 0
122 count = 0
122 count = 0
123 blocksize = self.s * 256
123 blocksize = self.s * 256
124 self.dataf.seek(0)
124 self.dataf.seek(0)
125 while cur < end:
125 while cur < end:
126 data = self.dataf.read(blocksize)
126 data = self.dataf.read(blocksize)
127 off = 0
127 off = 0
128 for x in xrange(256):
128 for x in xrange(256):
129 n = data[off + self.shaoffset:off + self.shaoffset + 20]
129 n = data[off + self.shaoffset:off + self.shaoffset + 20]
130 self.map[n] = count
130 self.map[n] = count
131 count += 1
131 count += 1
132 if count >= self.l:
132 if count >= self.l:
133 break
133 break
134 off += self.s
134 off += self.s
135 cur += blocksize
135 cur += blocksize
136
136
137 def loadblock(self, blockstart, blocksize, data=None):
137 def loadblock(self, blockstart, blocksize, data=None):
138 if self.all: return
138 if self.all: return
139 if data is None:
139 if data is None:
140 self.dataf.seek(blockstart)
140 self.dataf.seek(blockstart)
141 if blockstart + blocksize > self.datasize:
141 if blockstart + blocksize > self.datasize:
142 # the revlog may have grown since we've started running,
142 # the revlog may have grown since we've started running,
143 # but we don't have space in self.index for more entries.
143 # but we don't have space in self.index for more entries.
144 # limit blocksize so that we don't get too much data.
144 # limit blocksize so that we don't get too much data.
145 blocksize = max(self.datasize - blockstart, 0)
145 blocksize = max(self.datasize - blockstart, 0)
146 data = self.dataf.read(blocksize)
146 data = self.dataf.read(blocksize)
147 lend = len(data) / self.s
147 lend = len(data) / self.s
148 i = blockstart / self.s
148 i = blockstart / self.s
149 off = 0
149 off = 0
150 for x in xrange(lend):
150 for x in xrange(lend):
151 if self.index[i + x] == None:
151 if self.index[i + x] == None:
152 b = data[off : off + self.s]
152 b = data[off : off + self.s]
153 self.index[i + x] = b
153 self.index[i + x] = b
154 n = b[self.shaoffset:self.shaoffset + 20]
154 n = b[self.shaoffset:self.shaoffset + 20]
155 self.map[n] = i + x
155 self.map[n] = i + x
156 off += self.s
156 off += self.s
157
157
158 def findnode(self, node):
158 def findnode(self, node):
159 """search backwards through the index file for a specific node"""
159 """search backwards through the index file for a specific node"""
160 if self.allmap: return None
160 if self.allmap: return None
161
161
162 # hg log will cause many many searches for the manifest
162 # hg log will cause many many searches for the manifest
163 # nodes. After we get called a few times, just load the whole
163 # nodes. After we get called a few times, just load the whole
164 # thing.
164 # thing.
165 if self.mapfind_count > 8:
165 if self.mapfind_count > 8:
166 self.loadmap()
166 self.loadmap()
167 if node in self.map:
167 if node in self.map:
168 return node
168 return node
169 return None
169 return None
170 self.mapfind_count += 1
170 self.mapfind_count += 1
171 last = self.l - 1
171 last = self.l - 1
172 while self.index[last] != None:
172 while self.index[last] != None:
173 if last == 0:
173 if last == 0:
174 self.all = 1
174 self.all = 1
175 self.allmap = 1
175 self.allmap = 1
176 return None
176 return None
177 last -= 1
177 last -= 1
178 end = (last + 1) * self.s
178 end = (last + 1) * self.s
179 blocksize = self.s * 256
179 blocksize = self.s * 256
180 while end >= 0:
180 while end >= 0:
181 start = max(end - blocksize, 0)
181 start = max(end - blocksize, 0)
182 self.dataf.seek(start)
182 self.dataf.seek(start)
183 data = self.dataf.read(end - start)
183 data = self.dataf.read(end - start)
184 findend = end - start
184 findend = end - start
185 while True:
185 while True:
186 # we're searching backwards, so we have to make sure
186 # we're searching backwards, so we have to make sure
187 # we don't find a changeset where this node is a parent
187 # we don't find a changeset where this node is a parent
188 off = data.rfind(node, 0, findend)
188 off = data.rfind(node, 0, findend)
189 findend = off
189 findend = off
190 if off >= 0:
190 if off >= 0:
191 i = off / self.s
191 i = off / self.s
192 off = i * self.s
192 off = i * self.s
193 n = data[off + self.shaoffset:off + self.shaoffset + 20]
193 n = data[off + self.shaoffset:off + self.shaoffset + 20]
194 if n == node:
194 if n == node:
195 self.map[n] = i + start / self.s
195 self.map[n] = i + start / self.s
196 return node
196 return node
197 else:
197 else:
198 break
198 break
199 end -= blocksize
199 end -= blocksize
200 return None
200 return None
201
201
202 def loadindex(self, i=None, end=None):
202 def loadindex(self, i=None, end=None):
203 if self.all: return
203 if self.all: return
204 all = False
204 all = False
205 if i == None:
205 if i == None:
206 blockstart = 0
206 blockstart = 0
207 blocksize = (512 / self.s) * self.s
207 blocksize = (512 / self.s) * self.s
208 end = self.datasize
208 end = self.datasize
209 all = True
209 all = True
210 else:
210 else:
211 if end:
211 if end:
212 blockstart = i * self.s
212 blockstart = i * self.s
213 end = end * self.s
213 end = end * self.s
214 blocksize = end - blockstart
214 blocksize = end - blockstart
215 else:
215 else:
216 blockstart = (i & ~(32)) * self.s
216 blockstart = (i & ~(32)) * self.s
217 blocksize = self.s * 64
217 blocksize = self.s * 64
218 end = blockstart + blocksize
218 end = blockstart + blocksize
219 while blockstart < end:
219 while blockstart < end:
220 self.loadblock(blockstart, blocksize)
220 self.loadblock(blockstart, blocksize)
221 blockstart += blocksize
221 blockstart += blocksize
222 if all: self.all = True
222 if all: self.all = True
223
223
224 class lazyindex(object):
224 class lazyindex(object):
225 """a lazy version of the index array"""
225 """a lazy version of the index array"""
226 def __init__(self, parser):
226 def __init__(self, parser):
227 self.p = parser
227 self.p = parser
228 def __len__(self):
228 def __len__(self):
229 return len(self.p.index)
229 return len(self.p.index)
230 def load(self, pos):
230 def load(self, pos):
231 if pos < 0:
231 if pos < 0:
232 pos += len(self.p.index)
232 pos += len(self.p.index)
233 self.p.loadindex(pos)
233 self.p.loadindex(pos)
234 return self.p.index[pos]
234 return self.p.index[pos]
235 def __getitem__(self, pos):
235 def __getitem__(self, pos):
236 ret = self.p.index[pos] or self.load(pos)
236 ret = self.p.index[pos] or self.load(pos)
237 if isinstance(ret, str):
237 if isinstance(ret, str):
238 ret = struct.unpack(self.p.indexformat, ret)
238 ret = struct.unpack(self.p.indexformat, ret)
239 return ret
239 return ret
240 def __setitem__(self, pos, item):
240 def __setitem__(self, pos, item):
241 self.p.index[pos] = item
241 self.p.index[pos] = item
242 def __delitem__(self, pos):
242 def __delitem__(self, pos):
243 del self.p.index[pos]
243 del self.p.index[pos]
244 def append(self, e):
244 def append(self, e):
245 self.p.index.append(e)
245 self.p.index.append(e)
246
246
247 class lazymap(object):
247 class lazymap(object):
248 """a lazy version of the node map"""
248 """a lazy version of the node map"""
249 def __init__(self, parser):
249 def __init__(self, parser):
250 self.p = parser
250 self.p = parser
251 def load(self, key):
251 def load(self, key):
252 n = self.p.findnode(key)
252 n = self.p.findnode(key)
253 if n == None:
253 if n == None:
254 raise KeyError(key)
254 raise KeyError(key)
255 def __contains__(self, key):
255 def __contains__(self, key):
256 if key in self.p.map:
256 if key in self.p.map:
257 return True
257 return True
258 self.p.loadmap()
258 self.p.loadmap()
259 return key in self.p.map
259 return key in self.p.map
260 def __iter__(self):
260 def __iter__(self):
261 yield nullid
261 yield nullid
262 for i in xrange(self.p.l):
262 for i in xrange(self.p.l):
263 ret = self.p.index[i]
263 ret = self.p.index[i]
264 if not ret:
264 if not ret:
265 self.p.loadindex(i)
265 self.p.loadindex(i)
266 ret = self.p.index[i]
266 ret = self.p.index[i]
267 if isinstance(ret, str):
267 if isinstance(ret, str):
268 ret = struct.unpack(self.p.indexformat, ret)
268 ret = struct.unpack(self.p.indexformat, ret)
269 yield ret[-1]
269 yield ret[-1]
270 def __getitem__(self, key):
270 def __getitem__(self, key):
271 try:
271 try:
272 return self.p.map[key]
272 return self.p.map[key]
273 except KeyError:
273 except KeyError:
274 try:
274 try:
275 self.load(key)
275 self.load(key)
276 return self.p.map[key]
276 return self.p.map[key]
277 except KeyError:
277 except KeyError:
278 raise KeyError("node " + hex(key))
278 raise KeyError("node " + hex(key))
279 def __setitem__(self, key, val):
279 def __setitem__(self, key, val):
280 self.p.map[key] = val
280 self.p.map[key] = val
281 def __delitem__(self, key):
281 def __delitem__(self, key):
282 del self.p.map[key]
282 del self.p.map[key]
283
283
284 class RevlogError(Exception): pass
284 class RevlogError(Exception): pass
285
285
286 class revlog(object):
286 class revlog(object):
287 """
287 """
288 the underlying revision storage object
288 the underlying revision storage object
289
289
290 A revlog consists of two parts, an index and the revision data.
290 A revlog consists of two parts, an index and the revision data.
291
291
292 The index is a file with a fixed record size containing
292 The index is a file with a fixed record size containing
293 information on each revision, including its nodeid (hash), the
293 information on each revision, including its nodeid (hash), the
294 nodeids of its parents, the position and offset of its data within
294 nodeids of its parents, the position and offset of its data within
295 the data file, and the revision it's based on. Finally, each entry
295 the data file, and the revision it's based on. Finally, each entry
296 contains a linkrev entry that can serve as a pointer to external
296 contains a linkrev entry that can serve as a pointer to external
297 data.
297 data.
298
298
299 The revision data itself is a linear collection of data chunks.
299 The revision data itself is a linear collection of data chunks.
300 Each chunk represents a revision and is usually represented as a
300 Each chunk represents a revision and is usually represented as a
301 delta against the previous chunk. To bound lookup time, runs of
301 delta against the previous chunk. To bound lookup time, runs of
302 deltas are limited to about 2 times the length of the original
302 deltas are limited to about 2 times the length of the original
303 version data. This makes retrieval of a version proportional to
303 version data. This makes retrieval of a version proportional to
304 its size, or O(1) relative to the number of revisions.
304 its size, or O(1) relative to the number of revisions.
305
305
306 Both pieces of the revlog are written to in an append-only
306 Both pieces of the revlog are written to in an append-only
307 fashion, which means we never need to rewrite a file to insert or
307 fashion, which means we never need to rewrite a file to insert or
308 remove data, and can use some simple techniques to avoid the need
308 remove data, and can use some simple techniques to avoid the need
309 for locking while reading.
309 for locking while reading.
310 """
310 """
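# Worked example (numbers invented): if a revision's full text is about
# 1000 bytes, the run of deltas that has to be replayed to rebuild it is
# kept under roughly 2000 bytes; once a chain would grow past that bound,
# the next revision is stored as a full snapshot instead.  That is the
# "2 times the length" rule from the docstring above, and it is what keeps
# reconstruction proportional to the size of the revision itself.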
311 def __init__(self, opener, indexfile, datafile,
311 def __init__(self, opener, indexfile, datafile,
312 defversion=REVLOG_DEFAULT_VERSION):
312 defversion=REVLOG_DEFAULT_VERSION):
313 """
313 """
314 create a revlog object
314 create a revlog object
315
315
316 opener is a function that abstracts the file opening operation
316 opener is a function that abstracts the file opening operation
317 and can be used to implement COW semantics or the like.
317 and can be used to implement COW semantics or the like.
318 """
318 """
319 self.indexfile = indexfile
319 self.indexfile = indexfile
320 self.datafile = datafile
320 self.datafile = datafile
321 self.opener = opener
321 self.opener = opener
322
322
323 self.indexstat = None
323 self.indexstat = None
324 self.cache = None
324 self.cache = None
325 self.chunkcache = None
325 self.chunkcache = None
326 self.defversion = defversion
326 self.defversion = defversion
327 self.load()
327 self.load()
328
328
329 def load(self):
329 def load(self):
330 v = self.defversion
330 v = self.defversion
331 try:
331 try:
332 f = self.opener(self.indexfile)
332 f = self.opener(self.indexfile)
333 i = f.read(4)
333 i = f.read(4)
334 f.seek(0)
334 f.seek(0)
335 except IOError, inst:
335 except IOError, inst:
336 if inst.errno != errno.ENOENT:
336 if inst.errno != errno.ENOENT:
337 raise
337 raise
338 i = ""
338 i = ""
339 else:
339 else:
340 try:
340 try:
341 st = util.fstat(f)
341 st = util.fstat(f)
342 except AttributeError, inst:
342 except AttributeError, inst:
343 st = None
343 st = None
344 else:
344 else:
345 oldst = self.indexstat
345 oldst = self.indexstat
346 if (oldst and st.st_dev == oldst.st_dev
346 if (oldst and st.st_dev == oldst.st_dev
347 and st.st_ino == oldst.st_ino
347 and st.st_ino == oldst.st_ino
348 and st.st_mtime == oldst.st_mtime
348 and st.st_mtime == oldst.st_mtime
349 and st.st_ctime == oldst.st_ctime):
349 and st.st_ctime == oldst.st_ctime):
350 return
350 return
351 self.indexstat = st
351 self.indexstat = st
352 if len(i) > 0:
352 if len(i) > 0:
353 v = struct.unpack(versionformat, i)[0]
353 v = struct.unpack(versionformat, i)[0]
354 flags = v & ~0xFFFF
354 flags = v & ~0xFFFF
355 fmt = v & 0xFFFF
355 fmt = v & 0xFFFF
356 if fmt == REVLOGV0:
356 if fmt == REVLOGV0:
357 if flags:
357 if flags:
358 raise RevlogError(_("index %s invalid flags %x for format v0" %
358 raise RevlogError(_("index %s invalid flags %x for format v0" %
359 (self.indexfile, flags)))
359 (self.indexfile, flags)))
360 elif fmt == REVLOGNG:
360 elif fmt == REVLOGNG:
361 if flags & ~REVLOGNGINLINEDATA:
361 if flags & ~REVLOGNGINLINEDATA:
362 raise RevlogError(_("index %s invalid flags %x for revlogng" %
362 raise RevlogError(_("index %s invalid flags %x for revlogng" %
363 (self.indexfile, flags)))
363 (self.indexfile, flags)))
364 else:
364 else:
365 raise RevlogError(_("index %s invalid format %d" %
365 raise RevlogError(_("index %s invalid format %d" %
366 (self.indexfile, fmt)))
366 (self.indexfile, fmt)))
367 self.version = v
367 self.version = v
368 if v == REVLOGV0:
368 if v == REVLOGV0:
369 self.indexformat = indexformatv0
369 self.indexformat = indexformatv0
370 shaoffset = v0shaoffset
370 shaoffset = v0shaoffset
371 else:
371 else:
372 self.indexformat = indexformatng
372 self.indexformat = indexformatng
373 shaoffset = ngshaoffset
373 shaoffset = ngshaoffset
374
374
375 if i:
375 if i:
376 if (lazyparser.safe_to_use and not self.inlinedata() and
376 if (lazyparser.safe_to_use and not self.inlinedata() and
377 st and st.st_size > 10000):
377 st and st.st_size > 10000):
378 # big index, let's parse it on demand
378 # big index, let's parse it on demand
379 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
379 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
380 self.index = lazyindex(parser)
380 self.index = lazyindex(parser)
381 self.nodemap = lazymap(parser)
381 self.nodemap = lazymap(parser)
382 else:
382 else:
383 self.parseindex(f, st)
383 self.parseindex(f, st)
384 if self.version != REVLOGV0:
384 if self.version != REVLOGV0:
385 e = list(self.index[0])
385 e = list(self.index[0])
386 type = self.ngtype(e[0])
386 type = self.ngtype(e[0])
387 e[0] = self.offset_type(0, type)
387 e[0] = self.offset_type(0, type)
388 self.index[0] = e
388 self.index[0] = e
389 else:
389 else:
390 self.nodemap = {nullid: nullrev}
390 self.nodemap = {nullid: nullrev}
391 self.index = []
391 self.index = []
392
392
393
393
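The version word handled in load() can be sketched on its own; this assumes the big-endian unsigned-int layout of revlog's versionformat, with the format number in the low 16 bits and feature flags above them:

import struct

REVLOGV0 = 0
REVLOGNG = 1
REVLOGNGINLINEDATA = 1 << 16   # assumed to match the module-level constant

def split_version(first_four_bytes):
    v = struct.unpack(">I", first_four_bytes)[0]
    fmt = v & 0xFFFF           # on-disk format (v0 or revlogng)
    flags = v & ~0xFFFF        # feature flags, e.g. inline data
    return fmt, flags

fmt, flags = split_version(struct.pack(">I", REVLOGNG | REVLOGNGINLINEDATA))
assert fmt == REVLOGNG and flags == REVLOGNGINLINEDATA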
394 def parseindex(self, fp, st):
394 def parseindex(self, fp, st):
395 s = struct.calcsize(self.indexformat)
395 s = struct.calcsize(self.indexformat)
396 self.index = []
396 self.index = []
397 self.nodemap = {nullid: nullrev}
397 self.nodemap = {nullid: nullrev}
398 inline = self.inlinedata()
398 inline = self.inlinedata()
399 n = 0
399 n = 0
400 leftover = None
400 leftover = None
401 while True:
401 while True:
402 if st:
402 if st:
403 data = fp.read(65536)
403 data = fp.read(65536)
404 else:
404 else:
405 # hack for httprangereader, it doesn't do partial reads well
405 # hack for httprangereader, it doesn't do partial reads well
406 data = fp.read()
406 data = fp.read()
407 if not data:
407 if not data:
408 break
408 break
409 if n == 0 and self.inlinedata():
409 if n == 0 and self.inlinedata():
410 # cache the first chunk
410 # cache the first chunk
411 self.chunkcache = (0, data)
411 self.chunkcache = (0, data)
412 if leftover:
412 if leftover:
413 data = leftover + data
413 data = leftover + data
414 leftover = None
414 leftover = None
415 off = 0
415 off = 0
416 l = len(data)
416 l = len(data)
417 while off < l:
417 while off < l:
418 if l - off < s:
418 if l - off < s:
419 leftover = data[off:]
419 leftover = data[off:]
420 break
420 break
421 cur = data[off:off + s]
421 cur = data[off:off + s]
422 off += s
422 off += s
423 e = struct.unpack(self.indexformat, cur)
423 e = struct.unpack(self.indexformat, cur)
424 self.index.append(e)
424 self.index.append(e)
425 self.nodemap[e[-1]] = n
425 self.nodemap[e[-1]] = n
426 n += 1
426 n += 1
427 if inline:
427 if inline:
428 off += e[1]
428 off += e[1]
429 if off > l:
429 if off > l:
430 # some things don't seek well, just read it
430 # some things don't seek well, just read it
431 fp.read(off - l)
431 fp.read(off - l)
432 if not st:
432 if not st:
433 break
433 break
434
434
435
435
436 def ngoffset(self, q):
436 def ngoffset(self, q):
437 if q & 0xFFFF:
437 if q & 0xFFFF:
438 raise RevlogError(_('%s: incompatible revision flag %x') %
438 raise RevlogError(_('%s: incompatible revision flag %x') %
439 (self.indexfile, q))
439 (self.indexfile, q))
440 return long(q >> 16)
440 return long(q >> 16)
441
441
442 def ngtype(self, q):
442 def ngtype(self, q):
443 return int(q & 0xFFFF)
443 return int(q & 0xFFFF)
444
444
445 def offset_type(self, offset, type):
445 def offset_type(self, offset, type):
446 return long(long(offset) << 16 | type)
446 return long(long(offset) << 16 | type)
447
447
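A short sketch of the packing that offset_type(), ngoffset() and ngtype() perform above: revlogng keeps the data offset in the upper bits of the first index field and a 16-bit type/flag value in the lower bits.

def pack_offset_type(offset, type):
    return (offset << 16) | type     # offset_type()

def unpack_offset(q):
    return q >> 16                   # ngoffset(), after checking q & 0xFFFF

def unpack_type(q):
    return q & 0xFFFF                # ngtype()

q = pack_offset_type(12345, 0)
assert unpack_offset(q) == 12345 and unpack_type(q) == 0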
448 def loadindex(self, start, end):
448 def loadindex(self, start, end):
449 """load a block of indexes all at once from the lazy parser"""
449 """load a block of indexes all at once from the lazy parser"""
450 if isinstance(self.index, lazyindex):
450 if isinstance(self.index, lazyindex):
451 self.index.p.loadindex(start, end)
451 self.index.p.loadindex(start, end)
452
452
453 def loadindexmap(self):
453 def loadindexmap(self):
454 """loads both the map and the index from the lazy parser"""
454 """loads both the map and the index from the lazy parser"""
455 if isinstance(self.index, lazyindex):
455 if isinstance(self.index, lazyindex):
456 p = self.index.p
456 p = self.index.p
457 p.loadindex()
457 p.loadindex()
458 self.nodemap = p.map
458 self.nodemap = p.map
459
459
460 def loadmap(self):
460 def loadmap(self):
461 """loads the map from the lazy parser"""
461 """loads the map from the lazy parser"""
462 if isinstance(self.nodemap, lazymap):
462 if isinstance(self.nodemap, lazymap):
463 self.nodemap.p.loadmap()
463 self.nodemap.p.loadmap()
464 self.nodemap = self.nodemap.p.map
464 self.nodemap = self.nodemap.p.map
465
465
466 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
466 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
467 def tip(self): return self.node(len(self.index) - 1)
467 def tip(self): return self.node(len(self.index) - 1)
468 def count(self): return len(self.index)
468 def count(self): return len(self.index)
469 def node(self, rev):
469 def node(self, rev):
470 return (rev < 0) and nullid or self.index[rev][-1]
470 return rev == nullrev and nullid or self.index[rev][-1]
471 def rev(self, node):
471 def rev(self, node):
472 try:
472 try:
473 return self.nodemap[node]
473 return self.nodemap[node]
474 except KeyError:
474 except KeyError:
475 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
475 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
476 def linkrev(self, node):
476 def linkrev(self, node):
477 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
477 return (node == nullid) and nullrev or self.index[self.rev(node)][-4]
478 def parents(self, node):
478 def parents(self, node):
479 if node == nullid: return (nullid, nullid)
479 if node == nullid: return (nullid, nullid)
480 r = self.rev(node)
480 r = self.rev(node)
481 d = self.index[r][-3:-1]
481 d = self.index[r][-3:-1]
482 if self.version == REVLOGV0:
482 if self.version == REVLOGV0:
483 return d
483 return d
484 return (self.node(d[0]), self.node(d[1]))
484 return (self.node(d[0]), self.node(d[1]))
485 def parentrevs(self, rev):
485 def parentrevs(self, rev):
486 if rev == nullrev:
486 if rev == nullrev:
487 return (nullrev, nullrev)
487 return (nullrev, nullrev)
488 d = self.index[rev][-3:-1]
488 d = self.index[rev][-3:-1]
489 if self.version == REVLOGV0:
489 if self.version == REVLOGV0:
490 return (self.rev(d[0]), self.rev(d[1]))
490 return (self.rev(d[0]), self.rev(d[1]))
491 return d
491 return d
492 def start(self, rev):
492 def start(self, rev):
493 if rev < 0:
493 if rev == nullrev:
494 return nullrev
494 return 0
495 if self.version != REVLOGV0:
495 if self.version != REVLOGV0:
496 return self.ngoffset(self.index[rev][0])
496 return self.ngoffset(self.index[rev][0])
497 return self.index[rev][0]
497 return self.index[rev][0]
498
498
499 def end(self, rev): return self.start(rev) + self.length(rev)
499 def end(self, rev): return self.start(rev) + self.length(rev)
500
500
501 def size(self, rev):
501 def size(self, rev):
502 """return the length of the uncompressed text for a given revision"""
502 """return the length of the uncompressed text for a given revision"""
503 if rev == nullrev:
504 return 0
503 l = -1
505 l = -1
504 if self.version != REVLOGV0:
506 if self.version != REVLOGV0:
505 l = self.index[rev][2]
507 l = self.index[rev][2]
506 if l >= 0:
508 if l >= 0:
507 return l
509 return l
508
510
509 t = self.revision(self.node(rev))
511 t = self.revision(self.node(rev))
510 return len(t)
512 return len(t)
511
513
512 # alternate implementation: the advantage of this code is that it
514 # alternate implementation: the advantage of this code is that it
513 # will be faster for a single revision, but the results are not
515 # will be faster for a single revision, but the results are not
514 # cached, so finding the size of every revision will be slower.
516 # cached, so finding the size of every revision will be slower.
515 """
517 """
516 if self.cache and self.cache[1] == rev:
518 if self.cache and self.cache[1] == rev:
517 return len(self.cache[2])
519 return len(self.cache[2])
518
520
519 base = self.base(rev)
521 base = self.base(rev)
520 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
522 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
521 base = self.cache[1]
523 base = self.cache[1]
522 text = self.cache[2]
524 text = self.cache[2]
523 else:
525 else:
524 text = self.revision(self.node(base))
526 text = self.revision(self.node(base))
525
527
526 l = len(text)
528 l = len(text)
527 for x in xrange(base + 1, rev + 1):
529 for x in xrange(base + 1, rev + 1):
528 l = mdiff.patchedsize(l, self.chunk(x))
530 l = mdiff.patchedsize(l, self.chunk(x))
529 return l
531 return l
530 """
532 """
531
533
532 def length(self, rev):
534 def length(self, rev):
533 if rev < 0:
535 if rev == nullrev:
534 return 0
536 return 0
535 else:
537 else:
536 return self.index[rev][1]
538 return self.index[rev][1]
537 def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
539 def base(self, rev):
540 if (rev == nullrev):
541 return nullrev
542 else:
543 return self.index[rev][-5]
538
544
539 def reachable(self, rev, stop=None):
545 def reachable(self, rev, stop=None):
540 reachable = {}
546 reachable = {}
541 visit = [rev]
547 visit = [rev]
542 reachable[rev] = 1
548 reachable[rev] = 1
543 if stop:
549 if stop:
544 stopn = self.rev(stop)
550 stopn = self.rev(stop)
545 else:
551 else:
546 stopn = 0
552 stopn = 0
547 while visit:
553 while visit:
548 n = visit.pop(0)
554 n = visit.pop(0)
549 if n == stop:
555 if n == stop:
550 continue
556 continue
551 if n == nullid:
557 if n == nullid:
552 continue
558 continue
553 for p in self.parents(n):
559 for p in self.parents(n):
554 if self.rev(p) < stopn:
560 if self.rev(p) < stopn:
555 continue
561 continue
556 if p not in reachable:
562 if p not in reachable:
557 reachable[p] = 1
563 reachable[p] = 1
558 visit.append(p)
564 visit.append(p)
559 return reachable
565 return reachable
560
566
561 def nodesbetween(self, roots=None, heads=None):
567 def nodesbetween(self, roots=None, heads=None):
562 """Return a tuple containing three elements. Elements 1 and 2 contain
568 """Return a tuple containing three elements. Elements 1 and 2 contain
563 a final list of bases and heads after all the unreachable ones have been
569 a final list of bases and heads after all the unreachable ones have been
564 pruned. Element 0 contains a topologically sorted list of all
570 pruned. Element 0 contains a topologically sorted list of all
565
571
566 nodes that satisfy these constraints:
572 nodes that satisfy these constraints:
567 1. All nodes must be descended from a node in roots (the nodes on
573 1. All nodes must be descended from a node in roots (the nodes on
568 roots are considered descended from themselves).
574 roots are considered descended from themselves).
569 2. All nodes must also be ancestors of a node in heads (the nodes in
575 2. All nodes must also be ancestors of a node in heads (the nodes in
570 heads are considered to be their own ancestors).
576 heads are considered to be their own ancestors).
571
577
572 If roots is unspecified, nullid is assumed as the only root.
578 If roots is unspecified, nullid is assumed as the only root.
573 If heads is unspecified, it is taken to be the output of the
579 If heads is unspecified, it is taken to be the output of the
574 heads method (i.e. a list of all nodes in the repository that
580 heads method (i.e. a list of all nodes in the repository that
575 have no children)."""
581 have no children)."""
576 nonodes = ([], [], [])
582 nonodes = ([], [], [])
577 if roots is not None:
583 if roots is not None:
578 roots = list(roots)
584 roots = list(roots)
579 if not roots:
585 if not roots:
580 return nonodes
586 return nonodes
581 lowestrev = min([self.rev(n) for n in roots])
587 lowestrev = min([self.rev(n) for n in roots])
582 else:
588 else:
583 roots = [nullid] # Everybody's a descendent of nullid
589 roots = [nullid] # Everybody's a descendent of nullid
584 lowestrev = nullrev
590 lowestrev = nullrev
585 if (lowestrev == nullrev) and (heads is None):
591 if (lowestrev == nullrev) and (heads is None):
586 # We want _all_ the nodes!
592 # We want _all_ the nodes!
587 return ([self.node(r) for r in xrange(0, self.count())],
593 return ([self.node(r) for r in xrange(0, self.count())],
588 [nullid], list(self.heads()))
594 [nullid], list(self.heads()))
589 if heads is None:
595 if heads is None:
590 # All nodes are ancestors, so the latest ancestor is the last
596 # All nodes are ancestors, so the latest ancestor is the last
591 # node.
597 # node.
592 highestrev = self.count() - 1
598 highestrev = self.count() - 1
593 # Set ancestors to None to signal that every node is an ancestor.
599 # Set ancestors to None to signal that every node is an ancestor.
594 ancestors = None
600 ancestors = None
595 # Set heads to an empty dictionary for later discovery of heads
601 # Set heads to an empty dictionary for later discovery of heads
596 heads = {}
602 heads = {}
597 else:
603 else:
598 heads = list(heads)
604 heads = list(heads)
599 if not heads:
605 if not heads:
600 return nonodes
606 return nonodes
601 ancestors = {}
607 ancestors = {}
602 # Turn heads into a dictionary so we can remove 'fake' heads.
608 # Turn heads into a dictionary so we can remove 'fake' heads.
603 # Also, later we will be using it to filter out the heads we can't
609 # Also, later we will be using it to filter out the heads we can't
604 # find from roots.
610 # find from roots.
605 heads = dict.fromkeys(heads, 0)
611 heads = dict.fromkeys(heads, 0)
606 # Start at the top and keep marking parents until we're done.
612 # Start at the top and keep marking parents until we're done.
607 nodestotag = heads.keys()
613 nodestotag = heads.keys()
608 # Remember where the top was so we can use it as a limit later.
614 # Remember where the top was so we can use it as a limit later.
609 highestrev = max([self.rev(n) for n in nodestotag])
615 highestrev = max([self.rev(n) for n in nodestotag])
610 while nodestotag:
616 while nodestotag:
611 # grab a node to tag
617 # grab a node to tag
612 n = nodestotag.pop()
618 n = nodestotag.pop()
613 # Never tag nullid
619 # Never tag nullid
614 if n == nullid:
620 if n == nullid:
615 continue
621 continue
616 # A node's revision number represents its place in a
622 # A node's revision number represents its place in a
617 # topologically sorted list of nodes.
623 # topologically sorted list of nodes.
618 r = self.rev(n)
624 r = self.rev(n)
619 if r >= lowestrev:
625 if r >= lowestrev:
620 if n not in ancestors:
626 if n not in ancestors:
621 # If we are possibly a descendent of one of the roots
627 # If we are possibly a descendent of one of the roots
622 # and we haven't already been marked as an ancestor
628 # and we haven't already been marked as an ancestor
623 ancestors[n] = 1 # Mark as ancestor
629 ancestors[n] = 1 # Mark as ancestor
624 # Add non-nullid parents to list of nodes to tag.
630 # Add non-nullid parents to list of nodes to tag.
625 nodestotag.extend([p for p in self.parents(n) if
631 nodestotag.extend([p for p in self.parents(n) if
626 p != nullid])
632 p != nullid])
627 elif n in heads: # We've seen it before, is it a fake head?
633 elif n in heads: # We've seen it before, is it a fake head?
628 # So it is, real heads should not be the ancestors of
634 # So it is, real heads should not be the ancestors of
629 # any other heads.
635 # any other heads.
630 heads.pop(n)
636 heads.pop(n)
631 if not ancestors:
637 if not ancestors:
632 return nonodes
638 return nonodes
633 # Now that we have our set of ancestors, we want to remove any
639 # Now that we have our set of ancestors, we want to remove any
634 # roots that are not ancestors.
640 # roots that are not ancestors.
635
641
636 # If one of the roots was nullid, everything is included anyway.
642 # If one of the roots was nullid, everything is included anyway.
637 if lowestrev > nullrev:
643 if lowestrev > nullrev:
638 # But, since we weren't, let's recompute the lowest rev to not
644 # But, since we weren't, let's recompute the lowest rev to not
639 # include roots that aren't ancestors.
645 # include roots that aren't ancestors.
640
646
641 # Filter out roots that aren't ancestors of heads
647 # Filter out roots that aren't ancestors of heads
642 roots = [n for n in roots if n in ancestors]
648 roots = [n for n in roots if n in ancestors]
643 # Recompute the lowest revision
649 # Recompute the lowest revision
644 if roots:
650 if roots:
645 lowestrev = min([self.rev(n) for n in roots])
651 lowestrev = min([self.rev(n) for n in roots])
646 else:
652 else:
647 # No more roots? Return empty list
653 # No more roots? Return empty list
648 return nonodes
654 return nonodes
649 else:
655 else:
650 # We are descending from nullid, and don't need to care about
656 # We are descending from nullid, and don't need to care about
651 # any other roots.
657 # any other roots.
652 lowestrev = nullrev
658 lowestrev = nullrev
653 roots = [nullid]
659 roots = [nullid]
654 # Transform our roots list into a 'set' (i.e. a dictionary where the
660 # Transform our roots list into a 'set' (i.e. a dictionary where the
655 # values don't matter).
661 # values don't matter).
656 descendents = dict.fromkeys(roots, 1)
662 descendents = dict.fromkeys(roots, 1)
657 # Also, keep the original roots so we can filter out roots that aren't
663 # Also, keep the original roots so we can filter out roots that aren't
658 # 'real' roots (i.e. are descended from other roots).
664 # 'real' roots (i.e. are descended from other roots).
659 roots = descendents.copy()
665 roots = descendents.copy()
660 # Our topologically sorted list of output nodes.
666 # Our topologically sorted list of output nodes.
661 orderedout = []
667 orderedout = []
662 # Don't start at nullid since we don't want nullid in our output list,
668 # Don't start at nullid since we don't want nullid in our output list,
663 # and if nullid shows up in descendents, empty parents will look like
669 # and if nullid shows up in descendents, empty parents will look like
664 # they're descendents.
670 # they're descendents.
665 for r in xrange(max(lowestrev, 0), highestrev + 1):
671 for r in xrange(max(lowestrev, 0), highestrev + 1):
666 n = self.node(r)
672 n = self.node(r)
667 isdescendent = False
673 isdescendent = False
668 if lowestrev == nullrev: # Everybody is a descendent of nullid
674 if lowestrev == nullrev: # Everybody is a descendent of nullid
669 isdescendent = True
675 isdescendent = True
670 elif n in descendents:
676 elif n in descendents:
671 # n is already a descendent
677 # n is already a descendent
672 isdescendent = True
678 isdescendent = True
673 # This check only needs to be done here because all the roots
679 # This check only needs to be done here because all the roots
674 # will start being marked as descendents before the loop.
680 # will start being marked as descendents before the loop.
675 if n in roots:
681 if n in roots:
676 # If n was a root, check if it's a 'real' root.
682 # If n was a root, check if it's a 'real' root.
677 p = tuple(self.parents(n))
683 p = tuple(self.parents(n))
678 # If any of its parents are descendents, it's not a root.
684 # If any of its parents are descendents, it's not a root.
679 if (p[0] in descendents) or (p[1] in descendents):
685 if (p[0] in descendents) or (p[1] in descendents):
680 roots.pop(n)
686 roots.pop(n)
681 else:
687 else:
682 p = tuple(self.parents(n))
688 p = tuple(self.parents(n))
683 # A node is a descendent if either of its parents are
689 # A node is a descendent if either of its parents are
684 # descendents. (We seeded the dependents list with the roots
690 # descendents. (We seeded the dependents list with the roots
685 # up there, remember?)
691 # up there, remember?)
686 if (p[0] in descendents) or (p[1] in descendents):
692 if (p[0] in descendents) or (p[1] in descendents):
687 descendents[n] = 1
693 descendents[n] = 1
688 isdescendent = True
694 isdescendent = True
689 if isdescendent and ((ancestors is None) or (n in ancestors)):
695 if isdescendent and ((ancestors is None) or (n in ancestors)):
690 # Only include nodes that are both descendents and ancestors.
696 # Only include nodes that are both descendents and ancestors.
691 orderedout.append(n)
697 orderedout.append(n)
692 if (ancestors is not None) and (n in heads):
698 if (ancestors is not None) and (n in heads):
693 # We're trying to figure out which heads are reachable
699 # We're trying to figure out which heads are reachable
694 # from roots.
700 # from roots.
695 # Mark this head as having been reached
701 # Mark this head as having been reached
696 heads[n] = 1
702 heads[n] = 1
697 elif ancestors is None:
703 elif ancestors is None:
698 # Otherwise, we're trying to discover the heads.
704 # Otherwise, we're trying to discover the heads.
699 # Assume this is a head because if it isn't, the next step
705 # Assume this is a head because if it isn't, the next step
700 # will eventually remove it.
706 # will eventually remove it.
701 heads[n] = 1
707 heads[n] = 1
702 # But, obviously its parents aren't.
708 # But, obviously its parents aren't.
703 for p in self.parents(n):
709 for p in self.parents(n):
704 heads.pop(p, None)
710 heads.pop(p, None)
705 heads = [n for n in heads.iterkeys() if heads[n] != 0]
711 heads = [n for n in heads.iterkeys() if heads[n] != 0]
706 roots = roots.keys()
712 roots = roots.keys()
707 assert orderedout
713 assert orderedout
708 assert roots
714 assert roots
709 assert heads
715 assert heads
710 return (orderedout, roots, heads)
716 return (orderedout, roots, heads)
711
717
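For intuition, the contract nodesbetween() implements can be restated on a toy DAG (a sketch only; it ignores nullid, lazy indexes and the fake-head filtering above): keep the nodes that are both descendants of some root and ancestors of some head, in revision order.

parents = {0: [], 1: [0], 2: [1], 3: [1]}    # 0 <- 1 <- 2, and 3 branches off 1

def toy_nodesbetween(roots, heads):
    descendents = set(roots)
    for r in sorted(parents):                # revs already come topologically sorted
        if any(p in descendents for p in parents[r]):
            descendents.add(r)
    ancestors, stack = set(), list(heads)
    while stack:
        n = stack.pop()
        if n not in ancestors:
            ancestors.add(n)
            stack.extend(parents[n])
    return sorted(descendents & ancestors)

assert toy_nodesbetween(roots=[1], heads=[2]) == [1, 2]   # 3 is not an ancestor of 2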
712 def heads(self, start=None):
718 def heads(self, start=None):
713 """return the list of all nodes that have no children
719 """return the list of all nodes that have no children
714
720
715 if start is specified, only heads that are descendants of
721 if start is specified, only heads that are descendants of
716 start will be returned
722 start will be returned
717
723
718 """
724 """
719 if start is None:
725 if start is None:
720 start = nullid
726 start = nullid
721 startrev = self.rev(start)
727 startrev = self.rev(start)
722 reachable = {startrev: 1}
728 reachable = {startrev: 1}
723 heads = {startrev: 1}
729 heads = {startrev: 1}
724
730
725 parentrevs = self.parentrevs
731 parentrevs = self.parentrevs
726 for r in xrange(startrev + 1, self.count()):
732 for r in xrange(startrev + 1, self.count()):
727 for p in parentrevs(r):
733 for p in parentrevs(r):
728 if p in reachable:
734 if p in reachable:
729 reachable[r] = 1
735 reachable[r] = 1
730 heads[r] = 1
736 heads[r] = 1
731 if p in heads:
737 if p in heads:
732 del heads[p]
738 del heads[p]
733 return [self.node(r) for r in heads]
739 return [self.node(r) for r in heads]
734
740
735 def children(self, node):
741 def children(self, node):
736 """find the children of a given node"""
742 """find the children of a given node"""
737 c = []
743 c = []
738 p = self.rev(node)
744 p = self.rev(node)
739 for r in range(p + 1, self.count()):
745 for r in range(p + 1, self.count()):
740 for pr in self.parentrevs(r):
746 for pr in self.parentrevs(r):
741 if pr == p:
747 if pr == p:
742 c.append(self.node(r))
748 c.append(self.node(r))
743 return c
749 return c
744
750
745 def _match(self, id):
751 def _match(self, id):
746 if isinstance(id, (long, int)):
752 if isinstance(id, (long, int)):
747 # rev
753 # rev
748 return self.node(id)
754 return self.node(id)
749 if len(id) == 20:
755 if len(id) == 20:
750 # possibly a binary node
756 # possibly a binary node
751 # odds of a binary node being all hex in ASCII are 1 in 10**25
757 # odds of a binary node being all hex in ASCII are 1 in 10**25
752 try:
758 try:
753 node = id
759 node = id
754 r = self.rev(node) # quick search the index
760 r = self.rev(node) # quick search the index
755 return node
761 return node
756 except RevlogError:
762 except RevlogError:
757 pass # may be partial hex id
763 pass # may be partial hex id
758 try:
764 try:
759 # str(rev)
765 # str(rev)
760 rev = int(id)
766 rev = int(id)
761 if str(rev) != id: raise ValueError
767 if str(rev) != id: raise ValueError
762 if rev < 0: rev = self.count() + rev
768 if rev < 0: rev = self.count() + rev
763 if rev < 0 or rev >= self.count(): raise ValueError
769 if rev < 0 or rev >= self.count(): raise ValueError
764 return self.node(rev)
770 return self.node(rev)
765 except (ValueError, OverflowError):
771 except (ValueError, OverflowError):
766 pass
772 pass
767 if len(id) == 40:
773 if len(id) == 40:
768 try:
774 try:
769 # a full hex nodeid?
775 # a full hex nodeid?
770 node = bin(id)
776 node = bin(id)
771 r = self.rev(node)
777 r = self.rev(node)
772 return node
778 return node
773 except TypeError:
779 except TypeError:
774 pass
780 pass
775
781
776 def _partialmatch(self, id):
782 def _partialmatch(self, id):
777 if len(id) < 40:
783 if len(id) < 40:
778 try:
784 try:
779 # hex(node)[:...]
785 # hex(node)[:...]
780 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
786 bin_id = bin(id[:len(id) & ~1]) # grab an even number of digits
781 node = None
787 node = None
782 for n in self.nodemap:
788 for n in self.nodemap:
783 if n.startswith(bin_id) and hex(n).startswith(id):
789 if n.startswith(bin_id) and hex(n).startswith(id):
784 if node is not None:
790 if node is not None:
785 raise RevlogError(_("Ambiguous identifier"))
791 raise RevlogError(_("Ambiguous identifier"))
786 node = n
792 node = n
787 if node is not None:
793 if node is not None:
788 return node
794 return node
789 except TypeError:
795 except TypeError:
790 pass
796 pass
791
797
792 def lookup(self, id):
798 def lookup(self, id):
793 """locate a node based on:
799 """locate a node based on:
794 - revision number or str(revision number)
800 - revision number or str(revision number)
795 - nodeid or subset of hex nodeid
801 - nodeid or subset of hex nodeid
796 """
802 """
797
803
798 n = self._match(id)
804 n = self._match(id)
799 if n is not None:
805 if n is not None:
800 return n
806 return n
801 n = self._partialmatch(id)
807 n = self._partialmatch(id)
802 if n:
808 if n:
803 return n
809 return n
804
810
805 raise RevlogError(_("No match found"))
811 raise RevlogError(_("No match found"))
806
812
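Assuming an already-open revlog instance rl (e.g. a filelog or the manifest), the identifier forms accepted by lookup() can be exercised as below; hexlify stands in for node.hex:

from binascii import hexlify

def show_lookup_forms(rl):
    tipnode = rl.lookup(rl.count() - 1)                 # by revision number
    assert rl.lookup(str(rl.count() - 1)) == tipnode    # by str(revision number)
    assert rl.lookup(hexlify(tipnode)) == tipnode       # by full 40-char hex nodeid
    return rl.lookup(hexlify(tipnode)[:12])             # unambiguous hex prefix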
807 def cmp(self, node, text):
813 def cmp(self, node, text):
808 """compare text with a given file revision"""
814 """compare text with a given file revision"""
809 p1, p2 = self.parents(node)
815 p1, p2 = self.parents(node)
810 return hash(text, p1, p2) != node
816 return hash(text, p1, p2) != node
811
817
812 def makenode(self, node, text):
818 def makenode(self, node, text):
813 """calculate a file nodeid for text, descended or possibly
819 """calculate a file nodeid for text, descended or possibly
814 unchanged from node"""
820 unchanged from node"""
815
821
816 if self.cmp(node, text):
822 if self.cmp(node, text):
817 return hash(text, node, nullid)
823 return hash(text, node, nullid)
818 return node
824 return node
819
825
820 def diff(self, a, b):
826 def diff(self, a, b):
821 """return a delta between two revisions"""
827 """return a delta between two revisions"""
822 return mdiff.textdiff(a, b)
828 return mdiff.textdiff(a, b)
823
829
824 def patches(self, t, pl):
830 def patches(self, t, pl):
825 """apply a list of patches to a string"""
831 """apply a list of patches to a string"""
826 return mdiff.patches(t, pl)
832 return mdiff.patches(t, pl)
827
833
828 def chunk(self, rev, df=None, cachelen=4096):
834 def chunk(self, rev, df=None, cachelen=4096):
829 start, length = self.start(rev), self.length(rev)
835 start, length = self.start(rev), self.length(rev)
830 inline = self.inlinedata()
836 inline = self.inlinedata()
831 if inline:
837 if inline:
832 start += (rev + 1) * struct.calcsize(self.indexformat)
838 start += (rev + 1) * struct.calcsize(self.indexformat)
833 end = start + length
839 end = start + length
834 def loadcache(df):
840 def loadcache(df):
835 cache_length = max(cachelen, length) # 4k
841 cache_length = max(cachelen, length) # 4k
836 if not df:
842 if not df:
837 if inline:
843 if inline:
838 df = self.opener(self.indexfile)
844 df = self.opener(self.indexfile)
839 else:
845 else:
840 df = self.opener(self.datafile)
846 df = self.opener(self.datafile)
841 df.seek(start)
847 df.seek(start)
842 self.chunkcache = (start, df.read(cache_length))
848 self.chunkcache = (start, df.read(cache_length))
843
849
844 if not self.chunkcache:
850 if not self.chunkcache:
845 loadcache(df)
851 loadcache(df)
846
852
847 cache_start = self.chunkcache[0]
853 cache_start = self.chunkcache[0]
848 cache_end = cache_start + len(self.chunkcache[1])
854 cache_end = cache_start + len(self.chunkcache[1])
849 if start >= cache_start and end <= cache_end:
855 if start >= cache_start and end <= cache_end:
850 # it is cached
856 # it is cached
851 offset = start - cache_start
857 offset = start - cache_start
852 else:
858 else:
853 loadcache(df)
859 loadcache(df)
854 offset = 0
860 offset = 0
855
861
856 #def checkchunk():
862 #def checkchunk():
857 # df = self.opener(self.datafile)
863 # df = self.opener(self.datafile)
858 # df.seek(start)
864 # df.seek(start)
859 # return df.read(length)
865 # return df.read(length)
860 #assert s == checkchunk()
866 #assert s == checkchunk()
861 return decompress(self.chunkcache[1][offset:offset + length])
867 return decompress(self.chunkcache[1][offset:offset + length])
862
868
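The inline-data offset adjustment at the top of chunk() amounts to the following arithmetic (the 64-byte entry size is an illustrative assumption for a revlogng index entry):

def inline_physical_start(logical_start, rev, entry_size):
    # with inline data, each revision's data sits right after its index
    # entry, so rev's data is shifted by (rev + 1) full index entries
    return logical_start + (rev + 1) * entry_size

assert inline_physical_start(0, 0, 64) == 64
assert inline_physical_start(100, 2, 64) == 100 + 3 * 64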
863 def delta(self, node):
869 def delta(self, node):
864 """return or calculate a delta between a node and its predecessor"""
870 """return or calculate a delta between a node and its predecessor"""
865 r = self.rev(node)
871 r = self.rev(node)
866 return self.revdiff(r - 1, r)
872 return self.revdiff(r - 1, r)
867
873
868 def revdiff(self, rev1, rev2):
874 def revdiff(self, rev1, rev2):
869 """return or calculate a delta between two revisions"""
875 """return or calculate a delta between two revisions"""
870 b1 = self.base(rev1)
876 b1 = self.base(rev1)
871 b2 = self.base(rev2)
877 b2 = self.base(rev2)
872 if b1 == b2 and rev1 + 1 == rev2:
878 if b1 == b2 and rev1 + 1 == rev2:
873 return self.chunk(rev2)
879 return self.chunk(rev2)
874 else:
880 else:
875 return self.diff(self.revision(self.node(rev1)),
881 return self.diff(self.revision(self.node(rev1)),
876 self.revision(self.node(rev2)))
882 self.revision(self.node(rev2)))
877
883
878 def revision(self, node):
884 def revision(self, node):
879 """return an uncompressed revision of a given"""
885 """return an uncompressed revision of a given"""
880 if node == nullid: return ""
886 if node == nullid: return ""
881 if self.cache and self.cache[0] == node: return self.cache[2]
887 if self.cache and self.cache[0] == node: return self.cache[2]
882
888
883 # look up what we need to read
889 # look up what we need to read
884 text = None
890 text = None
885 rev = self.rev(node)
891 rev = self.rev(node)
886 base = self.base(rev)
892 base = self.base(rev)
887
893
888 if self.inlinedata():
894 if self.inlinedata():
889 # we probably have the whole chunk cached
895 # we probably have the whole chunk cached
890 df = None
896 df = None
891 else:
897 else:
892 df = self.opener(self.datafile)
898 df = self.opener(self.datafile)
893
899
894 # do we have useful data cached?
900 # do we have useful data cached?
895 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
901 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
896 base = self.cache[1]
902 base = self.cache[1]
897 text = self.cache[2]
903 text = self.cache[2]
898 self.loadindex(base, rev + 1)
904 self.loadindex(base, rev + 1)
899 else:
905 else:
900 self.loadindex(base, rev + 1)
906 self.loadindex(base, rev + 1)
901 text = self.chunk(base, df=df)
907 text = self.chunk(base, df=df)
902
908
903 bins = []
909 bins = []
904 for r in xrange(base + 1, rev + 1):
910 for r in xrange(base + 1, rev + 1):
905 bins.append(self.chunk(r, df=df))
911 bins.append(self.chunk(r, df=df))
906
912
907 text = self.patches(text, bins)
913 text = self.patches(text, bins)
908
914
909 p1, p2 = self.parents(node)
915 p1, p2 = self.parents(node)
910 if node != hash(text, p1, p2):
916 if node != hash(text, p1, p2):
911 raise RevlogError(_("integrity check failed on %s:%d")
917 raise RevlogError(_("integrity check failed on %s:%d")
912 % (self.datafile, rev))
918 % (self.datafile, rev))
913
919
914 self.cache = (node, rev, text)
920 self.cache = (node, rev, text)
915 return text
921 return text
916
922
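The integrity check at the end of revision() relies on the nodeid committing to both the text and its (sorted) parents. A sketch, assuming the module-level hash() is SHA-1 over the sorted parent ids followed by the text (hashlib stands in for the older sha module):

import hashlib

def node_hash(text, p1, p2):
    a, b = sorted([p1, p2])          # parent order must not change the id
    s = hashlib.sha1(a)
    s.update(b)
    s.update(text)
    return s.digest()

nullid = "\0" * 20
node = node_hash("file contents\n", nullid, nullid)
assert node_hash("file contents\n", nullid, nullid) == node      # reproducible
assert node_hash("tampered contents\n", nullid, nullid) != node  # corruption detected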
917 def checkinlinesize(self, tr, fp=None):
923 def checkinlinesize(self, tr, fp=None):
918 if not self.inlinedata():
924 if not self.inlinedata():
919 return
925 return
920 if not fp:
926 if not fp:
921 fp = self.opener(self.indexfile, 'r')
927 fp = self.opener(self.indexfile, 'r')
922 fp.seek(0, 2)
928 fp.seek(0, 2)
923 size = fp.tell()
929 size = fp.tell()
924 if size < 131072:
930 if size < 131072:
925 return
931 return
926 trinfo = tr.find(self.indexfile)
932 trinfo = tr.find(self.indexfile)
927 if trinfo == None:
933 if trinfo == None:
928 raise RevlogError(_("%s not found in the transaction" %
934 raise RevlogError(_("%s not found in the transaction" %
929 self.indexfile))
935 self.indexfile))
930
936
931 trindex = trinfo[2]
937 trindex = trinfo[2]
932 dataoff = self.start(trindex)
938 dataoff = self.start(trindex)
933
939
934 tr.add(self.datafile, dataoff)
940 tr.add(self.datafile, dataoff)
935 df = self.opener(self.datafile, 'w')
941 df = self.opener(self.datafile, 'w')
936 calc = struct.calcsize(self.indexformat)
942 calc = struct.calcsize(self.indexformat)
937 for r in xrange(self.count()):
943 for r in xrange(self.count()):
938 start = self.start(r) + (r + 1) * calc
944 start = self.start(r) + (r + 1) * calc
939 length = self.length(r)
945 length = self.length(r)
940 fp.seek(start)
946 fp.seek(start)
941 d = fp.read(length)
947 d = fp.read(length)
942 df.write(d)
948 df.write(d)
943 fp.close()
949 fp.close()
944 df.close()
950 df.close()
945 fp = self.opener(self.indexfile, 'w', atomictemp=True)
951 fp = self.opener(self.indexfile, 'w', atomictemp=True)
946 self.version &= ~(REVLOGNGINLINEDATA)
952 self.version &= ~(REVLOGNGINLINEDATA)
947 if self.count():
953 if self.count():
948 x = self.index[0]
954 x = self.index[0]
949 e = struct.pack(self.indexformat, *x)[4:]
955 e = struct.pack(self.indexformat, *x)[4:]
950 l = struct.pack(versionformat, self.version)
956 l = struct.pack(versionformat, self.version)
951 fp.write(l)
957 fp.write(l)
952 fp.write(e)
958 fp.write(e)
953
959
954 for i in xrange(1, self.count()):
960 for i in xrange(1, self.count()):
955 x = self.index[i]
961 x = self.index[i]
956 e = struct.pack(self.indexformat, *x)
962 e = struct.pack(self.indexformat, *x)
957 fp.write(e)
963 fp.write(e)
958
964
959 # if we don't call rename, the temp file will never replace the
965 # if we don't call rename, the temp file will never replace the
960 # real index
966 # real index
961 fp.rename()
967 fp.rename()
962
968
963 tr.replace(self.indexfile, trindex * calc)
969 tr.replace(self.indexfile, trindex * calc)
964 self.chunkcache = None
970 self.chunkcache = None
965
971
966 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
972 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
967 """add a revision to the log
973 """add a revision to the log
968
974
969 text - the revision data to add
975 text - the revision data to add
970 transaction - the transaction object used for rollback
976 transaction - the transaction object used for rollback
971 link - the linkrev data to add
977 link - the linkrev data to add
972 p1, p2 - the parent nodeids of the revision
978 p1, p2 - the parent nodeids of the revision
973 d - an optional precomputed delta
979 d - an optional precomputed delta
974 """
980 """
975 if not self.inlinedata():
981 if not self.inlinedata():
976 dfh = self.opener(self.datafile, "a")
982 dfh = self.opener(self.datafile, "a")
977 else:
983 else:
978 dfh = None
984 dfh = None
979 ifh = self.opener(self.indexfile, "a+")
985 ifh = self.opener(self.indexfile, "a+")
980 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
986 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
981
987
982 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
988 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
983 if text is None: text = ""
989 if text is None: text = ""
984 if p1 is None: p1 = self.tip()
990 if p1 is None: p1 = self.tip()
985 if p2 is None: p2 = nullid
991 if p2 is None: p2 = nullid
986
992
987 node = hash(text, p1, p2)
993 node = hash(text, p1, p2)
988
994
989 if node in self.nodemap:
995 if node in self.nodemap:
990 return node
996 return node
991
997
992 n = self.count()
998 n = self.count()
993 t = n - 1
999 t = n - 1
994
1000
995 if n:
1001 if n:
996 base = self.base(t)
1002 base = self.base(t)
997 start = self.start(base)
1003 start = self.start(base)
998 end = self.end(t)
1004 end = self.end(t)
999 if not d:
1005 if not d:
1000 prev = self.revision(self.tip())
1006 prev = self.revision(self.tip())
1001 d = self.diff(prev, text)
1007 d = self.diff(prev, text)
1002 data = compress(d)
1008 data = compress(d)
1003 l = len(data[1]) + len(data[0])
1009 l = len(data[1]) + len(data[0])
1004 dist = end - start + l
1010 dist = end - start + l
1005
1011
1006 # full versions are inserted when the needed deltas
1012 # full versions are inserted when the needed deltas
1007 # become comparable to the uncompressed text
1013 # become comparable to the uncompressed text
1008 if not n or dist > len(text) * 2:
1014 if not n or dist > len(text) * 2:
1009 data = compress(text)
1015 data = compress(text)
1010 l = len(data[1]) + len(data[0])
1016 l = len(data[1]) + len(data[0])
1011 base = n
1017 base = n
1012 else:
1018 else:
1013 base = self.base(t)
1019 base = self.base(t)
1014
1020
1015 offset = 0
1021 offset = 0
1016 if t >= 0:
1022 if t >= 0:
1017 offset = self.end(t)
1023 offset = self.end(t)
1018
1024
1019 if self.version == REVLOGV0:
1025 if self.version == REVLOGV0:
1020 e = (offset, l, base, link, p1, p2, node)
1026 e = (offset, l, base, link, p1, p2, node)
1021 else:
1027 else:
1022 e = (self.offset_type(offset, 0), l, len(text),
1028 e = (self.offset_type(offset, 0), l, len(text),
1023 base, link, self.rev(p1), self.rev(p2), node)
1029 base, link, self.rev(p1), self.rev(p2), node)
1024
1030
1025 self.index.append(e)
1031 self.index.append(e)
1026 self.nodemap[node] = n
1032 self.nodemap[node] = n
1027 entry = struct.pack(self.indexformat, *e)
1033 entry = struct.pack(self.indexformat, *e)
1028
1034
1029 if not self.inlinedata():
1035 if not self.inlinedata():
1030 transaction.add(self.datafile, offset)
1036 transaction.add(self.datafile, offset)
1031 transaction.add(self.indexfile, n * len(entry))
1037 transaction.add(self.indexfile, n * len(entry))
1032 if data[0]:
1038 if data[0]:
1033 dfh.write(data[0])
1039 dfh.write(data[0])
1034 dfh.write(data[1])
1040 dfh.write(data[1])
1035 dfh.flush()
1041 dfh.flush()
1036 else:
1042 else:
1037 ifh.seek(0, 2)
1043 ifh.seek(0, 2)
1038 transaction.add(self.indexfile, ifh.tell(), self.count() - 1)
1044 transaction.add(self.indexfile, ifh.tell(), self.count() - 1)
1039
1045
1040 if len(self.index) == 1 and self.version != REVLOGV0:
1046 if len(self.index) == 1 and self.version != REVLOGV0:
1041 l = struct.pack(versionformat, self.version)
1047 l = struct.pack(versionformat, self.version)
1042 ifh.write(l)
1048 ifh.write(l)
1043 entry = entry[4:]
1049 entry = entry[4:]
1044
1050
1045 ifh.write(entry)
1051 ifh.write(entry)
1046
1052
1047 if self.inlinedata():
1053 if self.inlinedata():
1048 ifh.write(data[0])
1054 ifh.write(data[0])
1049 ifh.write(data[1])
1055 ifh.write(data[1])
1050 self.checkinlinesize(transaction, ifh)
1056 self.checkinlinesize(transaction, ifh)
1051
1057
1052 self.cache = (node, n, text)
1058 self.cache = (node, n, text)
1053 return node
1059 return node
1054
1060
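The "full version" heuristic used in _addrevision() above boils down to a single comparison; a sketch:

def needs_full_version(dist, textlen, n):
    # start a new delta chain when the revlog is empty or when the chain
    # (base start .. new end) has grown past twice the uncompressed text
    return (not n) or dist > textlen * 2

assert needs_full_version(dist=0, textlen=100, n=0)         # first revision
assert not needs_full_version(dist=150, textlen=100, n=5)   # keep appending deltas
assert needs_full_version(dist=250, textlen=100, n=5)       # store a full text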
1055 def ancestor(self, a, b):
1061 def ancestor(self, a, b):
1056 """calculate the least common ancestor of nodes a and b"""
1062 """calculate the least common ancestor of nodes a and b"""
1057
1063
1058 def parents(rev):
1064 def parents(rev):
1059 return [p for p in self.parentrevs(rev) if p != nullrev]
1065 return [p for p in self.parentrevs(rev) if p != nullrev]
1060
1066
1061 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1067 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1062 if c is None:
1068 if c is None:
1063 return nullid
1069 return nullid
1064
1070
1065 return self.node(c)
1071 return self.node(c)
1066
1072
1067 def group(self, nodelist, lookup, infocollect=None):
1073 def group(self, nodelist, lookup, infocollect=None):
1068 """calculate a delta group
1074 """calculate a delta group
1069
1075
1070 Given a list of changeset revs, return a set of deltas and
1076 Given a list of changeset revs, return a set of deltas and
1071 metadata corresponding to nodes. The first delta is
1077 metadata corresponding to nodes. The first delta is
1072 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1078 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1073 have this parent as it has all history before these
1079 have this parent as it has all history before these
1074 changesets. The parent used is the first parent (parent[0]).
1080 changesets. The parent used is the first parent (parent[0]).
1075 """
1081 """
1076 revs = [self.rev(n) for n in nodelist]
1082 revs = [self.rev(n) for n in nodelist]
1077
1083
1078 # if we don't have any revisions touched by these changesets, bail
1084 # if we don't have any revisions touched by these changesets, bail
1079 if not revs:
1085 if not revs:
1080 yield changegroup.closechunk()
1086 yield changegroup.closechunk()
1081 return
1087 return
1082
1088
1083 # add the parent of the first rev
1089 # add the parent of the first rev
1084 p = self.parents(self.node(revs[0]))[0]
1090 p = self.parents(self.node(revs[0]))[0]
1085 revs.insert(0, self.rev(p))
1091 revs.insert(0, self.rev(p))
1086
1092
1087 # build deltas
1093 # build deltas
1088 for d in xrange(0, len(revs) - 1):
1094 for d in xrange(0, len(revs) - 1):
1089 a, b = revs[d], revs[d + 1]
1095 a, b = revs[d], revs[d + 1]
1090 nb = self.node(b)
1096 nb = self.node(b)
1091
1097
1092 if infocollect is not None:
1098 if infocollect is not None:
1093 infocollect(nb)
1099 infocollect(nb)
1094
1100
1095 d = self.revdiff(a, b)
1101 d = self.revdiff(a, b)
1096 p = self.parents(nb)
1102 p = self.parents(nb)
1097 meta = nb + p[0] + p[1] + lookup(nb)
1103 meta = nb + p[0] + p[1] + lookup(nb)
1098 yield changegroup.genchunk("%s%s" % (meta, d))
1104 yield changegroup.genchunk("%s%s" % (meta, d))
1099
1105
1100 yield changegroup.closechunk()
1106 yield changegroup.closechunk()
1101
1107
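Each chunk yielded by group() and consumed by addgroup() below carries an 80-byte header of four 20-byte binary nodeids (node, p1, p2, link/changeset node) followed by the delta; a sketch of splitting one:

import struct

def split_chunk(chunk):
    node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
    return node, p1, p2, cs, chunk[80:]     # nodeids + the raw delta

fake = "N" * 20 + "A" * 20 + "B" * 20 + "C" * 20 + "delta-bytes"
node, p1, p2, cs, delta = split_chunk(fake)
assert node == "N" * 20 and cs == "C" * 20 and delta == "delta-bytes"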
1102 def addgroup(self, revs, linkmapper, transaction, unique=0):
1108 def addgroup(self, revs, linkmapper, transaction, unique=0):
1103 """
1109 """
1104 add a delta group
1110 add a delta group
1105
1111
1106 Given a set of deltas, add them to the revision log. The
1112 Given a set of deltas, add them to the revision log. The
1107 first delta is against its parent, which should be in our
1113 first delta is against its parent, which should be in our
1108 log; the rest are against the previous delta.
1114 log; the rest are against the previous delta.
1109 """
1115 """
1110
1116
1111 # track the base of the current delta log
1117 # track the base of the current delta log
1112 r = self.count()
1118 r = self.count()
1113 t = r - 1
1119 t = r - 1
1114 node = None
1120 node = None
1115
1121
1116 base = prev = nullrev
1122 base = prev = nullrev
1117 start = end = textlen = 0
1123 start = end = textlen = 0
1118 if r:
1124 if r:
1119 end = self.end(t)
1125 end = self.end(t)
1120
1126
1121 ifh = self.opener(self.indexfile, "a+")
1127 ifh = self.opener(self.indexfile, "a+")
1122 ifh.seek(0, 2)
1128 ifh.seek(0, 2)
1123 transaction.add(self.indexfile, ifh.tell(), self.count())
1129 transaction.add(self.indexfile, ifh.tell(), self.count())
1124 if self.inlinedata():
1130 if self.inlinedata():
1125 dfh = None
1131 dfh = None
1126 else:
1132 else:
1127 transaction.add(self.datafile, end)
1133 transaction.add(self.datafile, end)
1128 dfh = self.opener(self.datafile, "a")
1134 dfh = self.opener(self.datafile, "a")
1129
1135
1130 # loop through our set of deltas
1136 # loop through our set of deltas
1131 chain = None
1137 chain = None
1132 for chunk in revs:
1138 for chunk in revs:
1133 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1139 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1134 link = linkmapper(cs)
1140 link = linkmapper(cs)
1135 if node in self.nodemap:
1141 if node in self.nodemap:
1136 # this can happen if two branches make the same change
1142 # this can happen if two branches make the same change
1137 # if unique:
1143 # if unique:
1138 # raise RevlogError(_("already have %s") % hex(node[:4]))
1144 # raise RevlogError(_("already have %s") % hex(node[:4]))
1139 chain = node
1145 chain = node
1140 continue
1146 continue
1141 delta = chunk[80:]
1147 delta = chunk[80:]
1142
1148
1143 for p in (p1, p2):
1149 for p in (p1, p2):
1144 if not p in self.nodemap:
1150 if not p in self.nodemap:
1145 raise RevlogError(_("unknown parent %s") % short(p))
1151 raise RevlogError(_("unknown parent %s") % short(p))
1146
1152
1147 if not chain:
1153 if not chain:
1148 # retrieve the parent revision of the delta chain
1154 # retrieve the parent revision of the delta chain
1149 chain = p1
1155 chain = p1
1150 if not chain in self.nodemap:
1156 if not chain in self.nodemap:
1151 raise RevlogError(_("unknown base %s") % short(chain[:4]))
1157 raise RevlogError(_("unknown base %s") % short(chain[:4]))
1152
1158
1153 # full versions are inserted when the needed deltas become
1159 # full versions are inserted when the needed deltas become
1154 # comparable to the uncompressed text or when the previous
1160 # comparable to the uncompressed text or when the previous
1155 # version is not the one we have a delta against. We use
1161 # version is not the one we have a delta against. We use
1156 # the size of the previous full rev as a proxy for the
1162 # the size of the previous full rev as a proxy for the
1157 # current size.
1163 # current size.
1158
1164
1159 if chain == prev:
1165 if chain == prev:
1160 tempd = compress(delta)
1166 tempd = compress(delta)
1161 cdelta = tempd[0] + tempd[1]
1167 cdelta = tempd[0] + tempd[1]
1162 textlen = mdiff.patchedsize(textlen, delta)
1168 textlen = mdiff.patchedsize(textlen, delta)
1163
1169
1164 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1170 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1165 # flush our writes here so we can read it in revision
1171 # flush our writes here so we can read it in revision
1166 if dfh:
1172 if dfh:
1167 dfh.flush()
1173 dfh.flush()
1168 ifh.flush()
1174 ifh.flush()
1169 text = self.revision(chain)
1175 text = self.revision(chain)
1170 text = self.patches(text, [delta])
1176 text = self.patches(text, [delta])
1171 chk = self._addrevision(text, transaction, link, p1, p2, None,
1177 chk = self._addrevision(text, transaction, link, p1, p2, None,
1172 ifh, dfh)
1178 ifh, dfh)
1173 if not dfh and not self.inlinedata():
1179 if not dfh and not self.inlinedata():
1174 # addrevision switched from inline to conventional
1180 # addrevision switched from inline to conventional
1175 # reopen the index
1181 # reopen the index
1176 dfh = self.opener(self.datafile, "a")
1182 dfh = self.opener(self.datafile, "a")
1177 ifh = self.opener(self.indexfile, "a")
1183 ifh = self.opener(self.indexfile, "a")
1178 if chk != node:
1184 if chk != node:
1179 raise RevlogError(_("consistency error adding group"))
1185 raise RevlogError(_("consistency error adding group"))
1180 textlen = len(text)
1186 textlen = len(text)
1181 else:
1187 else:
1182 if self.version == REVLOGV0:
1188 if self.version == REVLOGV0:
1183 e = (end, len(cdelta), base, link, p1, p2, node)
1189 e = (end, len(cdelta), base, link, p1, p2, node)
1184 else:
1190 else:
1185 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1191 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1186 link, self.rev(p1), self.rev(p2), node)
1192 link, self.rev(p1), self.rev(p2), node)
1187 self.index.append(e)
1193 self.index.append(e)
1188 self.nodemap[node] = r
1194 self.nodemap[node] = r
1189 if self.inlinedata():
1195 if self.inlinedata():
1190 ifh.write(struct.pack(self.indexformat, *e))
1196 ifh.write(struct.pack(self.indexformat, *e))
1191 ifh.write(cdelta)
1197 ifh.write(cdelta)
1192 self.checkinlinesize(transaction, ifh)
1198 self.checkinlinesize(transaction, ifh)
1193 if not self.inlinedata():
1199 if not self.inlinedata():
1194 dfh = self.opener(self.datafile, "a")
1200 dfh = self.opener(self.datafile, "a")
1195 ifh = self.opener(self.indexfile, "a")
1201 ifh = self.opener(self.indexfile, "a")
1196 else:
1202 else:
1197 dfh.write(cdelta)
1203 dfh.write(cdelta)
1198 ifh.write(struct.pack(self.indexformat, *e))
1204 ifh.write(struct.pack(self.indexformat, *e))
1199
1205
1200 t, r, chain, prev = r, r + 1, node, node
1206 t, r, chain, prev = r, r + 1, node, node
1201 base = self.base(t)
1207 base = self.base(t)
1202 start = self.start(base)
1208 start = self.start(base)
1203 end = self.end(t)
1209 end = self.end(t)
1204
1210
1205 return node
1211 return node
1206
1212
1207 def strip(self, rev, minlink):
1213 def strip(self, rev, minlink):
1208 if self.count() == 0 or rev >= self.count():
1214 if self.count() == 0 or rev >= self.count():
1209 return
1215 return
1210
1216
1211 if isinstance(self.index, lazyindex):
1217 if isinstance(self.index, lazyindex):
1212 self.loadindexmap()
1218 self.loadindexmap()
1213
1219
1214 # When stripping away a revision, we need to make sure it
1220 # When stripping away a revision, we need to make sure it
1215 # does not actually belong to an older changeset.
1221 # does not actually belong to an older changeset.
1216 # The minlink parameter defines the oldest revision
1222 # The minlink parameter defines the oldest revision
1217 # we're allowed to strip away.
1223 # we're allowed to strip away.
1218 while minlink > self.index[rev][-4]:
1224 while minlink > self.index[rev][-4]:
1219 rev += 1
1225 rev += 1
1220 if rev >= self.count():
1226 if rev >= self.count():
1221 return
1227 return
1222
1228
1223 # first truncate the files on disk
1229 # first truncate the files on disk
1224 end = self.start(rev)
1230 end = self.start(rev)
1225 if not self.inlinedata():
1231 if not self.inlinedata():
1226 df = self.opener(self.datafile, "a")
1232 df = self.opener(self.datafile, "a")
1227 df.truncate(end)
1233 df.truncate(end)
1228 end = rev * struct.calcsize(self.indexformat)
1234 end = rev * struct.calcsize(self.indexformat)
1229 else:
1235 else:
1230 end += rev * struct.calcsize(self.indexformat)
1236 end += rev * struct.calcsize(self.indexformat)
1231
1237
1232 indexf = self.opener(self.indexfile, "a")
1238 indexf = self.opener(self.indexfile, "a")
1233 indexf.truncate(end)
1239 indexf.truncate(end)
1234
1240
1235 # then reset internal state in memory to forget those revisions
1241 # then reset internal state in memory to forget those revisions
1236 self.cache = None
1242 self.cache = None
1237 self.chunkcache = None
1243 self.chunkcache = None
1238 for x in xrange(rev, self.count()):
1244 for x in xrange(rev, self.count()):
1239 del self.nodemap[self.node(x)]
1245 del self.nodemap[self.node(x)]
1240
1246
1241 del self.index[rev:]
1247 del self.index[rev:]
1242
1248
1243 def checksize(self):
1249 def checksize(self):
1244 expected = 0
1250 expected = 0
1245 if self.count():
1251 if self.count():
1246 expected = self.end(self.count() - 1)
1252 expected = self.end(self.count() - 1)
1247
1253
1248 try:
1254 try:
1249 f = self.opener(self.datafile)
1255 f = self.opener(self.datafile)
1250 f.seek(0, 2)
1256 f.seek(0, 2)
1251 actual = f.tell()
1257 actual = f.tell()
1252 dd = actual - expected
1258 dd = actual - expected
1253 except IOError, inst:
1259 except IOError, inst:
1254 if inst.errno != errno.ENOENT:
1260 if inst.errno != errno.ENOENT:
1255 raise
1261 raise
1256 dd = 0
1262 dd = 0
1257
1263
1258 try:
1264 try:
1259 f = self.opener(self.indexfile)
1265 f = self.opener(self.indexfile)
1260 f.seek(0, 2)
1266 f.seek(0, 2)
1261 actual = f.tell()
1267 actual = f.tell()
1262 s = struct.calcsize(self.indexformat)
1268 s = struct.calcsize(self.indexformat)
1263 i = actual / s
1269 i = actual / s
1264 di = actual - (i * s)
1270 di = actual - (i * s)
1265 if self.inlinedata():
1271 if self.inlinedata():
1266 databytes = 0
1272 databytes = 0
1267 for r in xrange(self.count()):
1273 for r in xrange(self.count()):
1268 databytes += self.length(r)
1274 databytes += self.length(r)
1269 dd = 0
1275 dd = 0
1270 di = actual - self.count() * s - databytes
1276 di = actual - self.count() * s - databytes
1271 except IOError, inst:
1277 except IOError, inst:
1272 if inst.errno != errno.ENOENT:
1278 if inst.errno != errno.ENOENT:
1273 raise
1279 raise
1274 di = 0
1280 di = 0
1275
1281
1276 return (dd, di)
1282 return (dd, di)
1277
1283
1278
1284