Stop erroring out pull -r and clone -r if repository isn't local.
Eric Hopper
r3448:6ca49c5f default
@@ -1,3503 +1,3505 @@
# commands.py - command processing for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
demandload(globals(), "fnmatch difflib patch random signal tempfile time")
demandload(globals(), "traceback errno socket version struct atexit sets bz2")
demandload(globals(), "archival cStringIO changegroup")
demandload(globals(), "cmdutil hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    they are interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
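        # generate (start, length) window tuples, doubling the window length
        # each time up to sizelimit; walks forward when start < end and
        # backward otherwise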
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if repo.changelog.count() == 0:
        return [], False, matchfn

    if follow:
        defrange = '%s:0' % repo.changectx().rev()
    else:
        defrange = 'tip:0'
    revs = map(int, cmdutil.revrange(ui, repo, opts['rev'] or [defrange]))
    wanted = {}
    slowpath = anypats
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    class followfilter:
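        # filter revisions to those on the same line of descent as the first
        # revision passed to match(); works whether the walk is ascending or
        # descending, and can restrict itself to first parents only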
        def __init__(self, onlyfirst=False):
            self.startrev = -1
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))

            if self.startrev == -1:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in range(rev, stop-1, -1):
            if ff.match(x) and wanted.has_key(x):
                del wanted[x]

    def iterate():
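        # yield the 'window'/'add'/'iter' 3-tuples described in the docstring,
        # walking the selected revisions in increasingly large windows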
        if follow and not files:
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn

def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
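        # pass-through "compressor" used when bz2 compression is turned off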
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists") % filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        if compress:
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
        fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)

def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None, copies=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        branch = changes[5].get("branch")

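        # show full hashes with --debug, abbreviated hashes otherwise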
        hexfunc = self.ui.debugflag and hex or short

        parents = [(log.rev(p), hexfunc(p)) for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        if branch:
            self.ui.status(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))
        if copies:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.note(_("copies: %s\n") % ' '.join(copies))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")

def show_changeset(ui, repo, opts):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    tmpl = opts.get('template')
    mapfile = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        mapfile = opts.get('style')
        # ui settings
        if not mapfile:
            tmpl = ui.config('ui', 'logtemplate')
            if tmpl:
                tmpl = templater.parsestring(tmpl)
            else:
                mapfile = ui.config('ui', 'style')

    if tmpl or mapfile:
        if mapfile:
            if not os.path.split(mapfile)[0]:
                mapname = (templater.templatepath('map-cmdline.' + mapfile)
                           or templater.templatepath(mapfile))
                if mapname: mapfile = mapname
        try:
            t = templater.changeset_templater(ui, repo, mapfile)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo)

def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def helpcmd(name):
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helpext(name):
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        try:
            helpcmd(name)
        except UnknownCommand:
            helpext(name)

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    sim = float(opts.get('similarity') or 0)
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)

def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s' %
                               (short(p), short(node))))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            n = _lookup(repo, hex(op1))
            hg.merge(repo, n)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets
    not found in the other repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by --base.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
794 "a destination"))
794 "a destination"))
795 base = [repo.lookup(rev) for rev in base]
795 base = [repo.lookup(rev) for rev in base]
796 # create the right base
796 # create the right base
797 # XXX: nodesbetween / changegroup* should be "fixed" instead
797 # XXX: nodesbetween / changegroup* should be "fixed" instead
798 o = []
798 o = []
799 has_set = sets.Set(base)
799 has_set = sets.Set(base)
800 for n in base:
800 for n in base:
801 has_set.update(repo.changelog.reachable(n))
801 has_set.update(repo.changelog.reachable(n))
802 if revs:
802 if revs:
803 visit = list(revs)
803 visit = list(revs)
804 else:
804 else:
805 visit = repo.changelog.heads()
805 visit = repo.changelog.heads()
806 while visit:
806 while visit:
807 n = visit.pop(0)
807 n = visit.pop(0)
808 parents = [p for p in repo.changelog.parents(n)
808 parents = [p for p in repo.changelog.parents(n)
809 if p != nullid and p not in has_set]
809 if p != nullid and p not in has_set]
810 if len(parents) == 0:
810 if len(parents) == 0:
811 o.insert(0, n)
811 o.insert(0, n)
812 else:
812 else:
813 visit.extend(parents)
813 visit.extend(parents)
814 else:
814 else:
815 setremoteconfig(ui, opts)
815 setremoteconfig(ui, opts)
816 dest = ui.expandpath(dest or 'default-push', dest or 'default')
816 dest = ui.expandpath(dest or 'default-push', dest or 'default')
817 other = hg.repository(ui, dest)
817 other = hg.repository(ui, dest)
818 o = repo.findoutgoing(other, force=opts['force'])
818 o = repo.findoutgoing(other, force=opts['force'])
819
819
820 if revs:
820 if revs:
821 cg = repo.changegroupsubset(o, revs, 'bundle')
821 cg = repo.changegroupsubset(o, revs, 'bundle')
822 else:
822 else:
823 cg = repo.changegroup(o, 'bundle')
823 cg = repo.changegroup(o, 'bundle')
824 write_bundle(cg, fname)
824 write_bundle(cg, fname)
825
825
826 def cat(ui, repo, file1, *pats, **opts):
826 def cat(ui, repo, file1, *pats, **opts):
827 """output the latest or given revisions of files
827 """output the latest or given revisions of files
828
828
829 Print the specified files as they were at the given revision.
829 Print the specified files as they were at the given revision.
830 If no revision is given then working dir parent is used, or tip
830 If no revision is given then working dir parent is used, or tip
831 if no revision is checked out.
831 if no revision is checked out.
832
832
833 Output may be to a file, in which case the name of the file is
833 Output may be to a file, in which case the name of the file is
834 given using a format string. The formatting rules are the same as
834 given using a format string. The formatting rules are the same as
835 for the export command, with the following additions:
835 for the export command, with the following additions:
836
836
837 %s basename of file being printed
837 %s basename of file being printed
838 %d dirname of file being printed, or '.' if in repo root
838 %d dirname of file being printed, or '.' if in repo root
839 %p root-relative path name of file being printed
839 %p root-relative path name of file being printed
840 """
840 """
841 ctx = repo.changectx(opts['rev'])
841 ctx = repo.changectx(opts['rev'])
842 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
842 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
843 ctx.node()):
843 ctx.node()):
844 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
844 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
845 fp.write(ctx.filectx(abs).data())
845 fp.write(ctx.filectx(abs).data())
846
846
847 def clone(ui, source, dest=None, **opts):
847 def clone(ui, source, dest=None, **opts):
848 """make a copy of an existing repository
848 """make a copy of an existing repository
849
849
850 Create a copy of an existing repository in a new directory.
850 Create a copy of an existing repository in a new directory.
851
851
852 If no destination directory name is specified, it defaults to the
852 If no destination directory name is specified, it defaults to the
853 basename of the source.
853 basename of the source.
854
854
855 The location of the source is added to the new repository's
855 The location of the source is added to the new repository's
856 .hg/hgrc file, as the default to be used for future pulls.
856 .hg/hgrc file, as the default to be used for future pulls.
857
857
858 For efficiency, hardlinks are used for cloning whenever the source
858 For efficiency, hardlinks are used for cloning whenever the source
859 and destination are on the same filesystem (note this applies only
859 and destination are on the same filesystem (note this applies only
860 to the repository data, not to the checked out files). Some
860 to the repository data, not to the checked out files). Some
861 filesystems, such as AFS, implement hardlinking incorrectly, but
861 filesystems, such as AFS, implement hardlinking incorrectly, but
862 do not report errors. In these cases, use the --pull option to
862 do not report errors. In these cases, use the --pull option to
863 avoid hardlinking.
863 avoid hardlinking.
864
864
865 You can safely clone repositories and checked out files using full
865 You can safely clone repositories and checked out files using full
866 hardlinks with
866 hardlinks with
867
867
868 $ cp -al REPO REPOCLONE
868 $ cp -al REPO REPOCLONE
869
869
870 which is the fastest way to clone. However, the operation is not
870 which is the fastest way to clone. However, the operation is not
871 atomic (making sure REPO is not modified during the operation is
871 atomic (making sure REPO is not modified during the operation is
872 up to you) and you have to make sure your editor breaks hardlinks
872 up to you) and you have to make sure your editor breaks hardlinks
873 (Emacs and most Linux kernel tools do so).
873 (Emacs and most Linux kernel tools do so).
874
874
875 If you use the -r option to clone up to a specific revision, no
875 If you use the -r option to clone up to a specific revision, no
876 subsequent revisions will be present in the cloned repository.
876 subsequent revisions will be present in the cloned repository.
877 This option implies --pull, even on local repositories.
877 This option implies --pull, even on local repositories.
878
878
879 See pull for valid source format details.
879 See pull for valid source format details.
880
880
881 It is possible to specify an ssh:// URL as the destination, but no
881 It is possible to specify an ssh:// URL as the destination, but no
882 .hg/hgrc will be created on the remote side. Look at the help text
882 .hg/hgrc will be created on the remote side. Look at the help text
883 for the pull command for important details about ssh:// URLs.
883 for the pull command for important details about ssh:// URLs.
884 """
884 """
885 setremoteconfig(ui, opts)
885 setremoteconfig(ui, opts)
886 hg.clone(ui, ui.expandpath(source), dest,
886 hg.clone(ui, ui.expandpath(source), dest,
887 pull=opts['pull'],
887 pull=opts['pull'],
888 stream=opts['uncompressed'],
888 stream=opts['uncompressed'],
889 rev=opts['rev'],
889 rev=opts['rev'],
890 update=not opts['noupdate'])
890 update=not opts['noupdate'])
891
891
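# A tiny decision helper restating the -r / --pull interaction described above:
# partial clones cannot reuse the source's on-disk files, so they always pull.
# The function is hypothetical; the real logic lives inside hg.clone().
def clone_uses_hardlinks(source_is_local, pull_requested, revs):
    """Return True when a clone may copy/hardlink store files directly (sketch)."""
    if pull_requested or revs:
        return False           # -r REV or --pull forces a pull-style clone
    return source_is_local     # remote sources are pulled or streamed, not linked

# clone_uses_hardlinks(True, False, [])      -> True
# clone_uses_hardlinks(True, False, ['1.0']) -> False  (-r implies --pull)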
892 def commit(ui, repo, *pats, **opts):
892 def commit(ui, repo, *pats, **opts):
893 """commit the specified files or all outstanding changes
893 """commit the specified files or all outstanding changes
894
894
895 Commit changes to the given files into the repository.
895 Commit changes to the given files into the repository.
896
896
897 If a list of files is omitted, all changes reported by "hg status"
897 If a list of files is omitted, all changes reported by "hg status"
898 will be committed.
898 will be committed.
899
899
900 If no commit message is specified, the editor configured in your hgrc
900 If no commit message is specified, the editor configured in your hgrc
901 or in the EDITOR environment variable is started so you can enter a message.
901 or in the EDITOR environment variable is started so you can enter a message.
902 """
902 """
903 message = logmessage(opts)
903 message = logmessage(opts)
904
904
905 if opts['addremove']:
905 if opts['addremove']:
906 cmdutil.addremove(repo, pats, opts)
906 cmdutil.addremove(repo, pats, opts)
907 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
907 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
908 if pats:
908 if pats:
909 modified, added, removed = repo.status(files=fns, match=match)[:3]
909 modified, added, removed = repo.status(files=fns, match=match)[:3]
910 files = modified + added + removed
910 files = modified + added + removed
911 else:
911 else:
912 files = []
912 files = []
913 try:
913 try:
914 repo.commit(files, message, opts['user'], opts['date'], match,
914 repo.commit(files, message, opts['user'], opts['date'], match,
915 force_editor=opts.get('force_editor'))
915 force_editor=opts.get('force_editor'))
916 except ValueError, inst:
916 except ValueError, inst:
917 raise util.Abort(str(inst))
917 raise util.Abort(str(inst))
918
918
919 def docopy(ui, repo, pats, opts, wlock):
919 def docopy(ui, repo, pats, opts, wlock):
920 # called with the repo lock held
920 # called with the repo lock held
921 cwd = repo.getcwd()
921 cwd = repo.getcwd()
922 errors = 0
922 errors = 0
923 copied = []
923 copied = []
924 targets = {}
924 targets = {}
925
925
926 def okaytocopy(abs, rel, exact):
926 def okaytocopy(abs, rel, exact):
927 reasons = {'?': _('is not managed'),
927 reasons = {'?': _('is not managed'),
928 'a': _('has been marked for add'),
928 'a': _('has been marked for add'),
929 'r': _('has been marked for remove')}
929 'r': _('has been marked for remove')}
930 state = repo.dirstate.state(abs)
930 state = repo.dirstate.state(abs)
931 reason = reasons.get(state)
931 reason = reasons.get(state)
932 if reason:
932 if reason:
933 if state == 'a':
933 if state == 'a':
934 origsrc = repo.dirstate.copied(abs)
934 origsrc = repo.dirstate.copied(abs)
935 if origsrc is not None:
935 if origsrc is not None:
936 return origsrc
936 return origsrc
937 if exact:
937 if exact:
938 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
938 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
939 else:
939 else:
940 return abs
940 return abs
941
941
942 def copy(origsrc, abssrc, relsrc, target, exact):
942 def copy(origsrc, abssrc, relsrc, target, exact):
943 abstarget = util.canonpath(repo.root, cwd, target)
943 abstarget = util.canonpath(repo.root, cwd, target)
944 reltarget = util.pathto(cwd, abstarget)
944 reltarget = util.pathto(cwd, abstarget)
945 prevsrc = targets.get(abstarget)
945 prevsrc = targets.get(abstarget)
946 if prevsrc is not None:
946 if prevsrc is not None:
947 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
947 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
948 (reltarget, abssrc, prevsrc))
948 (reltarget, abssrc, prevsrc))
949 return
949 return
950 if (not opts['after'] and os.path.exists(reltarget) or
950 if (not opts['after'] and os.path.exists(reltarget) or
951 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
951 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
952 if not opts['force']:
952 if not opts['force']:
953 ui.warn(_('%s: not overwriting - file exists\n') %
953 ui.warn(_('%s: not overwriting - file exists\n') %
954 reltarget)
954 reltarget)
955 return
955 return
956 if not opts['after'] and not opts.get('dry_run'):
956 if not opts['after'] and not opts.get('dry_run'):
957 os.unlink(reltarget)
957 os.unlink(reltarget)
958 if opts['after']:
958 if opts['after']:
959 if not os.path.exists(reltarget):
959 if not os.path.exists(reltarget):
960 return
960 return
961 else:
961 else:
962 targetdir = os.path.dirname(reltarget) or '.'
962 targetdir = os.path.dirname(reltarget) or '.'
963 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
963 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
964 os.makedirs(targetdir)
964 os.makedirs(targetdir)
965 try:
965 try:
966 restore = repo.dirstate.state(abstarget) == 'r'
966 restore = repo.dirstate.state(abstarget) == 'r'
967 if restore and not opts.get('dry_run'):
967 if restore and not opts.get('dry_run'):
968 repo.undelete([abstarget], wlock)
968 repo.undelete([abstarget], wlock)
969 try:
969 try:
970 if not opts.get('dry_run'):
970 if not opts.get('dry_run'):
971 shutil.copyfile(relsrc, reltarget)
971 shutil.copyfile(relsrc, reltarget)
972 shutil.copymode(relsrc, reltarget)
972 shutil.copymode(relsrc, reltarget)
973 restore = False
973 restore = False
974 finally:
974 finally:
975 if restore:
975 if restore:
976 repo.remove([abstarget], wlock)
976 repo.remove([abstarget], wlock)
977 except shutil.Error, inst:
977 except shutil.Error, inst:
978 raise util.Abort(str(inst))
978 raise util.Abort(str(inst))
979 except IOError, inst:
979 except IOError, inst:
980 if inst.errno == errno.ENOENT:
980 if inst.errno == errno.ENOENT:
981 ui.warn(_('%s: deleted in working copy\n') % relsrc)
981 ui.warn(_('%s: deleted in working copy\n') % relsrc)
982 else:
982 else:
983 ui.warn(_('%s: cannot copy - %s\n') %
983 ui.warn(_('%s: cannot copy - %s\n') %
984 (relsrc, inst.strerror))
984 (relsrc, inst.strerror))
985 errors += 1
985 errors += 1
986 return
986 return
987 if ui.verbose or not exact:
987 if ui.verbose or not exact:
988 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
988 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
989 targets[abstarget] = abssrc
989 targets[abstarget] = abssrc
990 if abstarget != origsrc and not opts.get('dry_run'):
990 if abstarget != origsrc and not opts.get('dry_run'):
991 repo.copy(origsrc, abstarget, wlock)
991 repo.copy(origsrc, abstarget, wlock)
992 copied.append((abssrc, relsrc, exact))
992 copied.append((abssrc, relsrc, exact))
993
993
994 def targetpathfn(pat, dest, srcs):
994 def targetpathfn(pat, dest, srcs):
995 if os.path.isdir(pat):
995 if os.path.isdir(pat):
996 abspfx = util.canonpath(repo.root, cwd, pat)
996 abspfx = util.canonpath(repo.root, cwd, pat)
997 if destdirexists:
997 if destdirexists:
998 striplen = len(os.path.split(abspfx)[0])
998 striplen = len(os.path.split(abspfx)[0])
999 else:
999 else:
1000 striplen = len(abspfx)
1000 striplen = len(abspfx)
1001 if striplen:
1001 if striplen:
1002 striplen += len(os.sep)
1002 striplen += len(os.sep)
1003 res = lambda p: os.path.join(dest, p[striplen:])
1003 res = lambda p: os.path.join(dest, p[striplen:])
1004 elif destdirexists:
1004 elif destdirexists:
1005 res = lambda p: os.path.join(dest, os.path.basename(p))
1005 res = lambda p: os.path.join(dest, os.path.basename(p))
1006 else:
1006 else:
1007 res = lambda p: dest
1007 res = lambda p: dest
1008 return res
1008 return res
1009
1009
1010 def targetpathafterfn(pat, dest, srcs):
1010 def targetpathafterfn(pat, dest, srcs):
1011 if util.patkind(pat, None)[0]:
1011 if util.patkind(pat, None)[0]:
1012 # a mercurial pattern
1012 # a mercurial pattern
1013 res = lambda p: os.path.join(dest, os.path.basename(p))
1013 res = lambda p: os.path.join(dest, os.path.basename(p))
1014 else:
1014 else:
1015 abspfx = util.canonpath(repo.root, cwd, pat)
1015 abspfx = util.canonpath(repo.root, cwd, pat)
1016 if len(abspfx) < len(srcs[0][0]):
1016 if len(abspfx) < len(srcs[0][0]):
1017 # A directory. Either the target path contains the last
1017 # A directory. Either the target path contains the last
1018 # component of the source path or it does not.
1018 # component of the source path or it does not.
1019 def evalpath(striplen):
1019 def evalpath(striplen):
1020 score = 0
1020 score = 0
1021 for s in srcs:
1021 for s in srcs:
1022 t = os.path.join(dest, s[0][striplen:])
1022 t = os.path.join(dest, s[0][striplen:])
1023 if os.path.exists(t):
1023 if os.path.exists(t):
1024 score += 1
1024 score += 1
1025 return score
1025 return score
1026
1026
1027 striplen = len(abspfx)
1027 striplen = len(abspfx)
1028 if striplen:
1028 if striplen:
1029 striplen += len(os.sep)
1029 striplen += len(os.sep)
1030 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1030 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1031 score = evalpath(striplen)
1031 score = evalpath(striplen)
1032 striplen1 = len(os.path.split(abspfx)[0])
1032 striplen1 = len(os.path.split(abspfx)[0])
1033 if striplen1:
1033 if striplen1:
1034 striplen1 += len(os.sep)
1034 striplen1 += len(os.sep)
1035 if evalpath(striplen1) > score:
1035 if evalpath(striplen1) > score:
1036 striplen = striplen1
1036 striplen = striplen1
1037 res = lambda p: os.path.join(dest, p[striplen:])
1037 res = lambda p: os.path.join(dest, p[striplen:])
1038 else:
1038 else:
1039 # a file
1039 # a file
1040 if destdirexists:
1040 if destdirexists:
1041 res = lambda p: os.path.join(dest, os.path.basename(p))
1041 res = lambda p: os.path.join(dest, os.path.basename(p))
1042 else:
1042 else:
1043 res = lambda p: dest
1043 res = lambda p: dest
1044 return res
1044 return res
1045
1045
1046
1046
1047 pats = list(pats)
1047 pats = list(pats)
1048 if not pats:
1048 if not pats:
1049 raise util.Abort(_('no source or destination specified'))
1049 raise util.Abort(_('no source or destination specified'))
1050 if len(pats) == 1:
1050 if len(pats) == 1:
1051 raise util.Abort(_('no destination specified'))
1051 raise util.Abort(_('no destination specified'))
1052 dest = pats.pop()
1052 dest = pats.pop()
1053 destdirexists = os.path.isdir(dest)
1053 destdirexists = os.path.isdir(dest)
1054 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1054 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1055 raise util.Abort(_('with multiple sources, destination must be an '
1055 raise util.Abort(_('with multiple sources, destination must be an '
1056 'existing directory'))
1056 'existing directory'))
1057 if opts['after']:
1057 if opts['after']:
1058 tfn = targetpathafterfn
1058 tfn = targetpathafterfn
1059 else:
1059 else:
1060 tfn = targetpathfn
1060 tfn = targetpathfn
1061 copylist = []
1061 copylist = []
1062 for pat in pats:
1062 for pat in pats:
1063 srcs = []
1063 srcs = []
1064 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1064 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1065 origsrc = okaytocopy(abssrc, relsrc, exact)
1065 origsrc = okaytocopy(abssrc, relsrc, exact)
1066 if origsrc:
1066 if origsrc:
1067 srcs.append((origsrc, abssrc, relsrc, exact))
1067 srcs.append((origsrc, abssrc, relsrc, exact))
1068 if not srcs:
1068 if not srcs:
1069 continue
1069 continue
1070 copylist.append((tfn(pat, dest, srcs), srcs))
1070 copylist.append((tfn(pat, dest, srcs), srcs))
1071 if not copylist:
1071 if not copylist:
1072 raise util.Abort(_('no files to copy'))
1072 raise util.Abort(_('no files to copy'))
1073
1073
1074 for targetpath, srcs in copylist:
1074 for targetpath, srcs in copylist:
1075 for origsrc, abssrc, relsrc, exact in srcs:
1075 for origsrc, abssrc, relsrc, exact in srcs:
1076 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1076 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1077
1077
1078 if errors:
1078 if errors:
1079 ui.warn(_('(consider using --after)\n'))
1079 ui.warn(_('(consider using --after)\n'))
1080 return errors, copied
1080 return errors, copied
1081
1081
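# A stand-alone model of the prefix-stripping done by targetpathfn() above,
# assuming plain directory/file strings instead of repository patterns; the
# helper name is hypothetical.
import os

def model_targetpath(dest, destdirexists, srcdir, srcfile):
    """Return where srcdir/srcfile would land, mirroring the 'hg cp DIR DEST' rules."""
    if destdirexists:
        # copying into an existing directory keeps the last source component
        striplen = len(os.path.split(srcdir)[0])
    else:
        # copying to a new name drops the whole source directory prefix
        striplen = len(srcdir)
    if striplen:
        striplen += len(os.sep)
    return os.path.join(dest, os.path.join(srcdir, srcfile)[striplen:])

# model_targetpath('out', True,  'docs', 'a.txt') -> 'out/docs/a.txt'
# model_targetpath('out', False, 'docs', 'a.txt') -> 'out/a.txt'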
1082 def copy(ui, repo, *pats, **opts):
1082 def copy(ui, repo, *pats, **opts):
1083 """mark files as copied for the next commit
1083 """mark files as copied for the next commit
1084
1084
1085 Mark dest as having copies of source files. If dest is a
1085 Mark dest as having copies of source files. If dest is a
1086 directory, copies are put in that directory. If dest is a file,
1086 directory, copies are put in that directory. If dest is a file,
1087 there can only be one source.
1087 there can only be one source.
1088
1088
1089 By default, this command copies the contents of files as they
1089 By default, this command copies the contents of files as they
1090 stand in the working directory. If invoked with --after, the
1090 stand in the working directory. If invoked with --after, the
1091 operation is recorded, but no copying is performed.
1091 operation is recorded, but no copying is performed.
1092
1092
1093 This command takes effect in the next commit.
1093 This command takes effect in the next commit.
1094
1094
1095 NOTE: This command should be treated as experimental. While it
1095 NOTE: This command should be treated as experimental. While it
1096 should properly record copied files, this information is not yet
1096 should properly record copied files, this information is not yet
1097 fully used by merge, nor fully reported by log.
1097 fully used by merge, nor fully reported by log.
1098 """
1098 """
1099 wlock = repo.wlock(0)
1099 wlock = repo.wlock(0)
1100 errs, copied = docopy(ui, repo, pats, opts, wlock)
1100 errs, copied = docopy(ui, repo, pats, opts, wlock)
1101 return errs
1101 return errs
1102
1102
1103 def debugancestor(ui, index, rev1, rev2):
1103 def debugancestor(ui, index, rev1, rev2):
1104 """find the ancestor revision of two revisions in a given index"""
1104 """find the ancestor revision of two revisions in a given index"""
1105 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1105 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1106 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1106 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1107 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1107 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1108
1108
1109 def debugcomplete(ui, cmd='', **opts):
1109 def debugcomplete(ui, cmd='', **opts):
1110 """returns the completion list associated with the given command"""
1110 """returns the completion list associated with the given command"""
1111
1111
1112 if opts['options']:
1112 if opts['options']:
1113 options = []
1113 options = []
1114 otables = [globalopts]
1114 otables = [globalopts]
1115 if cmd:
1115 if cmd:
1116 aliases, entry = findcmd(ui, cmd)
1116 aliases, entry = findcmd(ui, cmd)
1117 otables.append(entry[1])
1117 otables.append(entry[1])
1118 for t in otables:
1118 for t in otables:
1119 for o in t:
1119 for o in t:
1120 if o[0]:
1120 if o[0]:
1121 options.append('-%s' % o[0])
1121 options.append('-%s' % o[0])
1122 options.append('--%s' % o[1])
1122 options.append('--%s' % o[1])
1123 ui.write("%s\n" % "\n".join(options))
1123 ui.write("%s\n" % "\n".join(options))
1124 return
1124 return
1125
1125
1126 clist = findpossible(ui, cmd).keys()
1126 clist = findpossible(ui, cmd).keys()
1127 clist.sort()
1127 clist.sort()
1128 ui.write("%s\n" % "\n".join(clist))
1128 ui.write("%s\n" % "\n".join(clist))
1129
1129
1130 def debugrebuildstate(ui, repo, rev=None):
1130 def debugrebuildstate(ui, repo, rev=None):
1131 """rebuild the dirstate as it would look like for the given revision"""
1131 """rebuild the dirstate as it would look like for the given revision"""
1132 if not rev:
1132 if not rev:
1133 rev = repo.changelog.tip()
1133 rev = repo.changelog.tip()
1134 else:
1134 else:
1135 rev = repo.lookup(rev)
1135 rev = repo.lookup(rev)
1136 change = repo.changelog.read(rev)
1136 change = repo.changelog.read(rev)
1137 n = change[0]
1137 n = change[0]
1138 files = repo.manifest.read(n)
1138 files = repo.manifest.read(n)
1139 wlock = repo.wlock()
1139 wlock = repo.wlock()
1140 repo.dirstate.rebuild(rev, files)
1140 repo.dirstate.rebuild(rev, files)
1141
1141
1142 def debugcheckstate(ui, repo):
1142 def debugcheckstate(ui, repo):
1143 """validate the correctness of the current dirstate"""
1143 """validate the correctness of the current dirstate"""
1144 parent1, parent2 = repo.dirstate.parents()
1144 parent1, parent2 = repo.dirstate.parents()
1145 repo.dirstate.read()
1145 repo.dirstate.read()
1146 dc = repo.dirstate.map
1146 dc = repo.dirstate.map
1147 keys = dc.keys()
1147 keys = dc.keys()
1148 keys.sort()
1148 keys.sort()
1149 m1n = repo.changelog.read(parent1)[0]
1149 m1n = repo.changelog.read(parent1)[0]
1150 m2n = repo.changelog.read(parent2)[0]
1150 m2n = repo.changelog.read(parent2)[0]
1151 m1 = repo.manifest.read(m1n)
1151 m1 = repo.manifest.read(m1n)
1152 m2 = repo.manifest.read(m2n)
1152 m2 = repo.manifest.read(m2n)
1153 errors = 0
1153 errors = 0
1154 for f in dc:
1154 for f in dc:
1155 state = repo.dirstate.state(f)
1155 state = repo.dirstate.state(f)
1156 if state in "nr" and f not in m1:
1156 if state in "nr" and f not in m1:
1157 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1157 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1158 errors += 1
1158 errors += 1
1159 if state in "a" and f in m1:
1159 if state in "a" and f in m1:
1160 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1160 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1161 errors += 1
1161 errors += 1
1162 if state in "m" and f not in m1 and f not in m2:
1162 if state in "m" and f not in m1 and f not in m2:
1163 ui.warn(_("%s in state %s, but not in either manifest\n") %
1163 ui.warn(_("%s in state %s, but not in either manifest\n") %
1164 (f, state))
1164 (f, state))
1165 errors += 1
1165 errors += 1
1166 for f in m1:
1166 for f in m1:
1167 state = repo.dirstate.state(f)
1167 state = repo.dirstate.state(f)
1168 if state not in "nrm":
1168 if state not in "nrm":
1169 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1169 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1170 errors += 1
1170 errors += 1
1171 if errors:
1171 if errors:
1172 error = _(".hg/dirstate inconsistent with current parent's manifest")
1172 error = _(".hg/dirstate inconsistent with current parent's manifest")
1173 raise util.Abort(error)
1173 raise util.Abort(error)
1174
1174
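# The consistency rules applied above, restated as a small predicate: 'n'
# (normal) and 'r' (removed) entries must exist in the first parent manifest,
# 'a' (added) must not, and 'm' (merged) must be in at least one parent.
# Hypothetical helper; debugcheckstate itself reports each violation separately.
def dirstate_entry_ok(state, in_m1, in_m2):
    if state in 'nr':
        return in_m1
    if state == 'a':
        return not in_m1
    if state == 'm':
        return in_m1 or in_m2
    return True  # other states are not checked by this sketch

# dirstate_entry_ok('a', in_m1=True, in_m2=False) -> False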
1175 def showconfig(ui, repo, *values):
1175 def showconfig(ui, repo, *values):
1176 """show combined config settings from all hgrc files
1176 """show combined config settings from all hgrc files
1177
1177
1178 With no args, print names and values of all config items.
1178 With no args, print names and values of all config items.
1179
1179
1180 With one arg of the form section.name, print just the value of
1180 With one arg of the form section.name, print just the value of
1181 that config item.
1181 that config item.
1182
1182
1183 With multiple args, print names and values of all config items
1183 With multiple args, print names and values of all config items
1184 with matching section names."""
1184 with matching section names."""
1185
1185
1186 if values:
1186 if values:
1187 if len([v for v in values if '.' in v]) > 1:
1187 if len([v for v in values if '.' in v]) > 1:
1188 raise util.Abort(_('only one config item permitted'))
1188 raise util.Abort(_('only one config item permitted'))
1189 for section, name, value in ui.walkconfig():
1189 for section, name, value in ui.walkconfig():
1190 sectname = section + '.' + name
1190 sectname = section + '.' + name
1191 if values:
1191 if values:
1192 for v in values:
1192 for v in values:
1193 if v == section:
1193 if v == section:
1194 ui.write('%s=%s\n' % (sectname, value))
1194 ui.write('%s=%s\n' % (sectname, value))
1195 elif v == sectname:
1195 elif v == sectname:
1196 ui.write(value, '\n')
1196 ui.write(value, '\n')
1197 else:
1197 else:
1198 ui.write('%s=%s\n' % (sectname, value))
1198 ui.write('%s=%s\n' % (sectname, value))
1199
1199
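# A stand-alone model of the matching rule described in the showconfig
# docstring: an argument naming a section prints every item in it, an argument
# of the form section.name prints only that value.  The dict-based interface is
# hypothetical; the command itself iterates ui.walkconfig().
def select_config(items, args):
    out = []
    for sectname, value in sorted(items.items()):
        section = sectname.split('.', 1)[0]
        if not args or section in args:
            out.append('%s=%s' % (sectname, value))
        elif sectname in args:
            out.append(value)
    return out

# select_config({'ui.username': 'eve', 'paths.default': '/repo'}, ['ui'])
#   -> ['ui.username=eve']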
1200 def debugsetparents(ui, repo, rev1, rev2=None):
1200 def debugsetparents(ui, repo, rev1, rev2=None):
1201 """manually set the parents of the current working directory
1201 """manually set the parents of the current working directory
1202
1202
1203 This is useful for writing repository conversion tools, but should
1203 This is useful for writing repository conversion tools, but should
1204 be used with care.
1204 be used with care.
1205 """
1205 """
1206
1206
1207 if not rev2:
1207 if not rev2:
1208 rev2 = hex(nullid)
1208 rev2 = hex(nullid)
1209
1209
1210 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1210 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1211
1211
1212 def debugstate(ui, repo):
1212 def debugstate(ui, repo):
1213 """show the contents of the current dirstate"""
1213 """show the contents of the current dirstate"""
1214 repo.dirstate.read()
1214 repo.dirstate.read()
1215 dc = repo.dirstate.map
1215 dc = repo.dirstate.map
1216 keys = dc.keys()
1216 keys = dc.keys()
1217 keys.sort()
1217 keys.sort()
1218 for file_ in keys:
1218 for file_ in keys:
1219 ui.write("%c %3o %10d %s %s\n"
1219 ui.write("%c %3o %10d %s %s\n"
1220 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1220 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1221 time.strftime("%x %X",
1221 time.strftime("%x %X",
1222 time.localtime(dc[file_][3])), file_))
1222 time.localtime(dc[file_][3])), file_))
1223 for f in repo.dirstate.copies():
1223 for f in repo.dirstate.copies():
1224 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1224 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1225
1225
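# One debugstate line per dirstate entry: state, permission bits (octal), size,
# modification time, filename.  Formatting a made-up entry the same way the
# loop above does; the helper name is hypothetical.
import time

def format_dirstate_entry(state, mode, size, mtime, filename):
    return "%c %3o %10d %s %s" % (state, mode & 0777, size,
                                  time.strftime("%x %X", time.localtime(mtime)),
                                  filename)

# format_dirstate_entry('n', 0100644, 1024, 0, 'README')
#   -> roughly 'n 644       1024 01/01/70 00:00:00 README' (time is locale-dependent)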
1226 def debugdata(ui, file_, rev):
1226 def debugdata(ui, file_, rev):
1227 """dump the contents of an data file revision"""
1227 """dump the contents of an data file revision"""
1228 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1228 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1229 file_[:-2] + ".i", file_, 0)
1229 file_[:-2] + ".i", file_, 0)
1230 try:
1230 try:
1231 ui.write(r.revision(r.lookup(rev)))
1231 ui.write(r.revision(r.lookup(rev)))
1232 except KeyError:
1232 except KeyError:
1233 raise util.Abort(_('invalid revision identifier %s') % rev)
1233 raise util.Abort(_('invalid revision identifier %s') % rev)
1234
1234
1235 def debugindex(ui, file_):
1235 def debugindex(ui, file_):
1236 """dump the contents of an index file"""
1236 """dump the contents of an index file"""
1237 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1237 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1238 ui.write(" rev offset length base linkrev" +
1238 ui.write(" rev offset length base linkrev" +
1239 " nodeid p1 p2\n")
1239 " nodeid p1 p2\n")
1240 for i in range(r.count()):
1240 for i in range(r.count()):
1241 node = r.node(i)
1241 node = r.node(i)
1242 pp = r.parents(node)
1242 pp = r.parents(node)
1243 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1243 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1244 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1244 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1245 short(node), short(pp[0]), short(pp[1])))
1245 short(node), short(pp[0]), short(pp[1])))
1246
1246
1247 def debugindexdot(ui, file_):
1247 def debugindexdot(ui, file_):
1248 """dump an index DAG as a .dot file"""
1248 """dump an index DAG as a .dot file"""
1249 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1249 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1250 ui.write("digraph G {\n")
1250 ui.write("digraph G {\n")
1251 for i in range(r.count()):
1251 for i in range(r.count()):
1252 node = r.node(i)
1252 node = r.node(i)
1253 pp = r.parents(node)
1253 pp = r.parents(node)
1254 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1254 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1255 if pp[1] != nullid:
1255 if pp[1] != nullid:
1256 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1256 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1257 ui.write("}\n")
1257 ui.write("}\n")
1258
1258
1259 def debugrename(ui, repo, file, rev=None):
1259 def debugrename(ui, repo, file, rev=None):
1260 """dump rename information"""
1260 """dump rename information"""
1261 r = repo.file(relpath(repo, [file])[0])
1261 r = repo.file(relpath(repo, [file])[0])
1262 if rev:
1262 if rev:
1263 try:
1263 try:
1264 # assume all revision numbers are for changesets
1264 # assume all revision numbers are for changesets
1265 n = repo.lookup(rev)
1265 n = repo.lookup(rev)
1266 change = repo.changelog.read(n)
1266 change = repo.changelog.read(n)
1267 m = repo.manifest.read(change[0])
1267 m = repo.manifest.read(change[0])
1268 n = m[relpath(repo, [file])[0]]
1268 n = m[relpath(repo, [file])[0]]
1269 except (hg.RepoError, KeyError):
1269 except (hg.RepoError, KeyError):
1270 n = r.lookup(rev)
1270 n = r.lookup(rev)
1271 else:
1271 else:
1272 n = r.tip()
1272 n = r.tip()
1273 m = r.renamed(n)
1273 m = r.renamed(n)
1274 if m:
1274 if m:
1275 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1275 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1276 else:
1276 else:
1277 ui.write(_("not renamed\n"))
1277 ui.write(_("not renamed\n"))
1278
1278
1279 def debugwalk(ui, repo, *pats, **opts):
1279 def debugwalk(ui, repo, *pats, **opts):
1280 """show how files match on given patterns"""
1280 """show how files match on given patterns"""
1281 items = list(cmdutil.walk(repo, pats, opts))
1281 items = list(cmdutil.walk(repo, pats, opts))
1282 if not items:
1282 if not items:
1283 return
1283 return
1284 fmt = '%%s %%-%ds %%-%ds %%s' % (
1284 fmt = '%%s %%-%ds %%-%ds %%s' % (
1285 max([len(abs) for (src, abs, rel, exact) in items]),
1285 max([len(abs) for (src, abs, rel, exact) in items]),
1286 max([len(rel) for (src, abs, rel, exact) in items]))
1286 max([len(rel) for (src, abs, rel, exact) in items]))
1287 for src, abs, rel, exact in items:
1287 for src, abs, rel, exact in items:
1288 line = fmt % (src, abs, rel, exact and 'exact' or '')
1288 line = fmt % (src, abs, rel, exact and 'exact' or '')
1289 ui.write("%s\n" % line.rstrip())
1289 ui.write("%s\n" % line.rstrip())
1290
1290
1291 def diff(ui, repo, *pats, **opts):
1291 def diff(ui, repo, *pats, **opts):
1292 """diff repository (or selected files)
1292 """diff repository (or selected files)
1293
1293
1294 Show differences between revisions for the specified files.
1294 Show differences between revisions for the specified files.
1295
1295
1296 Differences between files are shown using the unified diff format.
1296 Differences between files are shown using the unified diff format.
1297
1297
1298 When two revision arguments are given, then changes are shown
1298 When two revision arguments are given, then changes are shown
1299 between those revisions. If only one revision is specified then
1299 between those revisions. If only one revision is specified then
1300 that revision is compared to the working directory, and, when no
1300 that revision is compared to the working directory, and, when no
1301 revisions are specified, the working directory files are compared
1301 revisions are specified, the working directory files are compared
1302 to its parent.
1302 to its parent.
1303
1303
1304 Without the -a option, diff will avoid generating diffs of files
1304 Without the -a option, diff will avoid generating diffs of files
1305 it detects as binary. With -a, diff will generate a diff anyway,
1305 it detects as binary. With -a, diff will generate a diff anyway,
1306 probably with undesirable results.
1306 probably with undesirable results.
1307 """
1307 """
1308 node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])
1308 node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])
1309
1309
1310 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1310 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1311
1311
1312 patch.diff(repo, node1, node2, fns, match=matchfn,
1312 patch.diff(repo, node1, node2, fns, match=matchfn,
1313 opts=patch.diffopts(ui, opts))
1313 opts=patch.diffopts(ui, opts))
1314
1314
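# The zero/one/two revision rule from the diff docstring as a tiny helper.
# cmdutil.revpair() does the real parsing; this name and return convention
# (None meaning "working directory" or "its parent") are only illustrative.
def diff_endpoints(revs):
    if not revs:
        return None, None        # working directory against its parent
    if len(revs) == 1:
        return revs[0], None     # the given revision against the working directory
    return revs[0], revs[1]      # changes between the two given revisions

# diff_endpoints(['1.0']) -> ('1.0', None)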
1315 def export(ui, repo, *changesets, **opts):
1315 def export(ui, repo, *changesets, **opts):
1316 """dump the header and diffs for one or more changesets
1316 """dump the header and diffs for one or more changesets
1317
1317
1318 Print the changeset header and diffs for one or more revisions.
1318 Print the changeset header and diffs for one or more revisions.
1319
1319
1320 The information shown in the changeset header is: author,
1320 The information shown in the changeset header is: author,
1321 changeset hash, parent and commit comment.
1321 changeset hash, parent and commit comment.
1322
1322
1323 Output may be to a file, in which case the name of the file is
1323 Output may be to a file, in which case the name of the file is
1324 given using a format string. The formatting rules are as follows:
1324 given using a format string. The formatting rules are as follows:
1325
1325
1326 %% literal "%" character
1326 %% literal "%" character
1327 %H changeset hash (40 bytes of hexadecimal)
1327 %H changeset hash (40 bytes of hexadecimal)
1328 %N number of patches being generated
1328 %N number of patches being generated
1329 %R changeset revision number
1329 %R changeset revision number
1330 %b basename of the exporting repository
1330 %b basename of the exporting repository
1331 %h short-form changeset hash (12 bytes of hexadecimal)
1331 %h short-form changeset hash (12 bytes of hexadecimal)
1332 %n zero-padded sequence number, starting at 1
1332 %n zero-padded sequence number, starting at 1
1333 %r zero-padded changeset revision number
1333 %r zero-padded changeset revision number
1334
1334
1335 Without the -a option, export will avoid generating diffs of files
1335 Without the -a option, export will avoid generating diffs of files
1336 it detects as binary. With -a, export will generate a diff anyway,
1336 it detects as binary. With -a, export will generate a diff anyway,
1337 probably with undesirable results.
1337 probably with undesirable results.
1338
1338
1339 With the --switch-parent option, the diff will be against the second
1339 With the --switch-parent option, the diff will be against the second
1340 parent. This can be useful for reviewing a merge.
1340 parent. This can be useful for reviewing a merge.
1341 """
1341 """
1342 if not changesets:
1342 if not changesets:
1343 raise util.Abort(_("export requires at least one changeset"))
1343 raise util.Abort(_("export requires at least one changeset"))
1344 revs = list(cmdutil.revrange(ui, repo, changesets))
1344 revs = list(cmdutil.revrange(ui, repo, changesets))
1345 if len(revs) > 1:
1345 if len(revs) > 1:
1346 ui.note(_('exporting patches:\n'))
1346 ui.note(_('exporting patches:\n'))
1347 else:
1347 else:
1348 ui.note(_('exporting patch:\n'))
1348 ui.note(_('exporting patch:\n'))
1349 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1349 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1350 switch_parent=opts['switch_parent'],
1350 switch_parent=opts['switch_parent'],
1351 opts=patch.diffopts(ui, opts))
1351 opts=patch.diffopts(ui, opts))
1352
1352
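# One plausible reading of the %n key described above ("zero-padded sequence
# number, starting at 1"): pad to the width of the total patch count.  The
# exact width chosen by patch.export() is not shown here, so treat this only
# as an illustration of the padding idea.
def zero_padded(seq, total):
    return str(seq).zfill(len(str(total)))

# zero_padded(3, 12) -> '03'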
1353 def forget(ui, repo, *pats, **opts):
1353 def forget(ui, repo, *pats, **opts):
1354 """don't add the specified files on the next commit (DEPRECATED)
1354 """don't add the specified files on the next commit (DEPRECATED)
1355
1355
1356 (DEPRECATED)
1356 (DEPRECATED)
1357 Undo an 'hg add' scheduled for the next commit.
1357 Undo an 'hg add' scheduled for the next commit.
1358
1358
1359 This command is now deprecated and will be removed in a future
1359 This command is now deprecated and will be removed in a future
1360 release. Please use revert instead.
1360 release. Please use revert instead.
1361 """
1361 """
1362 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1362 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1363 forget = []
1363 forget = []
1364 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1364 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1365 if repo.dirstate.state(abs) == 'a':
1365 if repo.dirstate.state(abs) == 'a':
1366 forget.append(abs)
1366 forget.append(abs)
1367 if ui.verbose or not exact:
1367 if ui.verbose or not exact:
1368 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1368 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1369 repo.forget(forget)
1369 repo.forget(forget)
1370
1370
1371 def grep(ui, repo, pattern, *pats, **opts):
1371 def grep(ui, repo, pattern, *pats, **opts):
1372 """search for a pattern in specified files and revisions
1372 """search for a pattern in specified files and revisions
1373
1373
1374 Search revisions of files for a regular expression.
1374 Search revisions of files for a regular expression.
1375
1375
1376 This command behaves differently from Unix grep. It only accepts
1376 This command behaves differently from Unix grep. It only accepts
1377 Python/Perl regexps. It searches repository history, not the
1377 Python/Perl regexps. It searches repository history, not the
1378 working directory. It always prints the revision number in which
1378 working directory. It always prints the revision number in which
1379 a match appears.
1379 a match appears.
1380
1380
1381 By default, grep only prints output for the first revision of a
1381 By default, grep only prints output for the first revision of a
1382 file in which it finds a match. To get it to print every revision
1382 file in which it finds a match. To get it to print every revision
1383 that contains a change in match status ("-" for a match that
1383 that contains a change in match status ("-" for a match that
1384 becomes a non-match, or "+" for a non-match that becomes a match),
1384 becomes a non-match, or "+" for a non-match that becomes a match),
1385 use the --all flag.
1385 use the --all flag.
1386 """
1386 """
1387 reflags = 0
1387 reflags = 0
1388 if opts['ignore_case']:
1388 if opts['ignore_case']:
1389 reflags |= re.I
1389 reflags |= re.I
1390 regexp = re.compile(pattern, reflags)
1390 regexp = re.compile(pattern, reflags)
1391 sep, eol = ':', '\n'
1391 sep, eol = ':', '\n'
1392 if opts['print0']:
1392 if opts['print0']:
1393 sep = eol = '\0'
1393 sep = eol = '\0'
1394
1394
1395 fcache = {}
1395 fcache = {}
1396 def getfile(fn):
1396 def getfile(fn):
1397 if fn not in fcache:
1397 if fn not in fcache:
1398 fcache[fn] = repo.file(fn)
1398 fcache[fn] = repo.file(fn)
1399 return fcache[fn]
1399 return fcache[fn]
1400
1400
1401 def matchlines(body):
1401 def matchlines(body):
1402 begin = 0
1402 begin = 0
1403 linenum = 0
1403 linenum = 0
1404 while True:
1404 while True:
1405 match = regexp.search(body, begin)
1405 match = regexp.search(body, begin)
1406 if not match:
1406 if not match:
1407 break
1407 break
1408 mstart, mend = match.span()
1408 mstart, mend = match.span()
1409 linenum += body.count('\n', begin, mstart) + 1
1409 linenum += body.count('\n', begin, mstart) + 1
1410 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1410 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1411 lend = body.find('\n', mend)
1411 lend = body.find('\n', mend)
1412 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1412 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1413 begin = lend + 1
1413 begin = lend + 1
1414
1414
1415 class linestate(object):
1415 class linestate(object):
1416 def __init__(self, line, linenum, colstart, colend):
1416 def __init__(self, line, linenum, colstart, colend):
1417 self.line = line
1417 self.line = line
1418 self.linenum = linenum
1418 self.linenum = linenum
1419 self.colstart = colstart
1419 self.colstart = colstart
1420 self.colend = colend
1420 self.colend = colend
1421
1421
1422 def __eq__(self, other):
1422 def __eq__(self, other):
1423 return self.line == other.line
1423 return self.line == other.line
1424
1424
1425 matches = {}
1425 matches = {}
1426 copies = {}
1426 copies = {}
1427 def grepbody(fn, rev, body):
1427 def grepbody(fn, rev, body):
1428 matches[rev].setdefault(fn, [])
1428 matches[rev].setdefault(fn, [])
1429 m = matches[rev][fn]
1429 m = matches[rev][fn]
1430 for lnum, cstart, cend, line in matchlines(body):
1430 for lnum, cstart, cend, line in matchlines(body):
1431 s = linestate(line, lnum, cstart, cend)
1431 s = linestate(line, lnum, cstart, cend)
1432 m.append(s)
1432 m.append(s)
1433
1433
1434 def difflinestates(a, b):
1434 def difflinestates(a, b):
1435 sm = difflib.SequenceMatcher(None, a, b)
1435 sm = difflib.SequenceMatcher(None, a, b)
1436 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1436 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1437 if tag == 'insert':
1437 if tag == 'insert':
1438 for i in range(blo, bhi):
1438 for i in range(blo, bhi):
1439 yield ('+', b[i])
1439 yield ('+', b[i])
1440 elif tag == 'delete':
1440 elif tag == 'delete':
1441 for i in range(alo, ahi):
1441 for i in range(alo, ahi):
1442 yield ('-', a[i])
1442 yield ('-', a[i])
1443 elif tag == 'replace':
1443 elif tag == 'replace':
1444 for i in range(alo, ahi):
1444 for i in range(alo, ahi):
1445 yield ('-', a[i])
1445 yield ('-', a[i])
1446 for i in range(blo, bhi):
1446 for i in range(blo, bhi):
1447 yield ('+', b[i])
1447 yield ('+', b[i])
1448
1448
1449 prev = {}
1449 prev = {}
1450 ucache = {}
1450 ucache = {}
1451 def display(fn, rev, states, prevstates):
1451 def display(fn, rev, states, prevstates):
1452 counts = {'-': 0, '+': 0}
1452 counts = {'-': 0, '+': 0}
1453 filerevmatches = {}
1453 filerevmatches = {}
1454 if incrementing or not opts['all']:
1454 if incrementing or not opts['all']:
1455 a, b = prevstates, states
1455 a, b = prevstates, states
1456 else:
1456 else:
1457 a, b = states, prevstates
1457 a, b = states, prevstates
1458 for change, l in difflinestates(a, b):
1458 for change, l in difflinestates(a, b):
1459 if incrementing or not opts['all']:
1459 if incrementing or not opts['all']:
1460 r = rev
1460 r = rev
1461 else:
1461 else:
1462 r = prev[fn]
1462 r = prev[fn]
1463 cols = [fn, str(r)]
1463 cols = [fn, str(r)]
1464 if opts['line_number']:
1464 if opts['line_number']:
1465 cols.append(str(l.linenum))
1465 cols.append(str(l.linenum))
1466 if opts['all']:
1466 if opts['all']:
1467 cols.append(change)
1467 cols.append(change)
1468 if opts['user']:
1468 if opts['user']:
1469 cols.append(trimuser(ui, getchange(r)[1], rev,
1469 cols.append(trimuser(ui, getchange(r)[1], rev,
1470 ucache))
1470 ucache))
1471 if opts['files_with_matches']:
1471 if opts['files_with_matches']:
1472 c = (fn, rev)
1472 c = (fn, rev)
1473 if c in filerevmatches:
1473 if c in filerevmatches:
1474 continue
1474 continue
1475 filerevmatches[c] = 1
1475 filerevmatches[c] = 1
1476 else:
1476 else:
1477 cols.append(l.line)
1477 cols.append(l.line)
1478 ui.write(sep.join(cols), eol)
1478 ui.write(sep.join(cols), eol)
1479 counts[change] += 1
1479 counts[change] += 1
1480 return counts['+'], counts['-']
1480 return counts['+'], counts['-']
1481
1481
1482 fstate = {}
1482 fstate = {}
1483 skip = {}
1483 skip = {}
1484 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1484 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1485 count = 0
1485 count = 0
1486 incrementing = False
1486 incrementing = False
1487 follow = opts.get('follow')
1487 follow = opts.get('follow')
1488 for st, rev, fns in changeiter:
1488 for st, rev, fns in changeiter:
1489 if st == 'window':
1489 if st == 'window':
1490 incrementing = rev
1490 incrementing = rev
1491 matches.clear()
1491 matches.clear()
1492 elif st == 'add':
1492 elif st == 'add':
1493 change = repo.changelog.read(repo.lookup(str(rev)))
1493 change = repo.changelog.read(repo.lookup(str(rev)))
1494 mf = repo.manifest.read(change[0])
1494 mf = repo.manifest.read(change[0])
1495 matches[rev] = {}
1495 matches[rev] = {}
1496 for fn in fns:
1496 for fn in fns:
1497 if fn in skip:
1497 if fn in skip:
1498 continue
1498 continue
1499 fstate.setdefault(fn, {})
1499 fstate.setdefault(fn, {})
1500 try:
1500 try:
1501 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1501 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1502 if follow:
1502 if follow:
1503 copied = getfile(fn).renamed(mf[fn])
1503 copied = getfile(fn).renamed(mf[fn])
1504 if copied:
1504 if copied:
1505 copies.setdefault(rev, {})[fn] = copied[0]
1505 copies.setdefault(rev, {})[fn] = copied[0]
1506 except KeyError:
1506 except KeyError:
1507 pass
1507 pass
1508 elif st == 'iter':
1508 elif st == 'iter':
1509 states = matches[rev].items()
1509 states = matches[rev].items()
1510 states.sort()
1510 states.sort()
1511 for fn, m in states:
1511 for fn, m in states:
1512 copy = copies.get(rev, {}).get(fn)
1512 copy = copies.get(rev, {}).get(fn)
1513 if fn in skip:
1513 if fn in skip:
1514 if copy:
1514 if copy:
1515 skip[copy] = True
1515 skip[copy] = True
1516 continue
1516 continue
1517 if incrementing or not opts['all'] or fstate[fn]:
1517 if incrementing or not opts['all'] or fstate[fn]:
1518 pos, neg = display(fn, rev, m, fstate[fn])
1518 pos, neg = display(fn, rev, m, fstate[fn])
1519 count += pos + neg
1519 count += pos + neg
1520 if pos and not opts['all']:
1520 if pos and not opts['all']:
1521 skip[fn] = True
1521 skip[fn] = True
1522 if copy:
1522 if copy:
1523 skip[copy] = True
1523 skip[copy] = True
1524 fstate[fn] = m
1524 fstate[fn] = m
1525 if copy:
1525 if copy:
1526 fstate[copy] = m
1526 fstate[copy] = m
1527 prev[fn] = rev
1527 prev[fn] = rev
1528
1528
1529 if not incrementing:
1529 if not incrementing:
1530 fstate = fstate.items()
1530 fstate = fstate.items()
1531 fstate.sort()
1531 fstate.sort()
1532 for fn, state in fstate:
1532 for fn, state in fstate:
1533 if fn in skip:
1533 if fn in skip:
1534 continue
1534 continue
1535 if fn not in copies.get(prev[fn], {}):
1535 if fn not in copies.get(prev[fn], {}):
1536 display(fn, rev, {}, state)
1536 display(fn, rev, {}, state)
1537 return (count == 0 and 1) or 0
1537 return (count == 0 and 1) or 0
1538
1538
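# difflinestates() above drives the --all output: lines whose match status
# changes between two revisions are emitted with '-' or '+'.  A self-contained
# miniature working on plain lists of matching lines (the function name and
# list-based interface are assumptions):
import difflib

def diff_match_states(old, new):
    sm = difflib.SequenceMatcher(None, old, new)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in old[alo:ahi]:
                yield '-', line
        if tag in ('insert', 'replace'):
            for line in new[blo:bhi]:
                yield '+', line

# list(diff_match_states(['foo = 1'], ['foo = 1', 'foo = 2'])) -> [('+', 'foo = 2')]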
1539 def heads(ui, repo, **opts):
1539 def heads(ui, repo, **opts):
1540 """show current repository heads
1540 """show current repository heads
1541
1541
1542 Show all repository head changesets.
1542 Show all repository head changesets.
1543
1543
1544 Repository "heads" are changesets that don't have child
1544 Repository "heads" are changesets that don't have child
1545 changesets. They are where development generally takes place and
1545 changesets. They are where development generally takes place and
1546 are the usual targets for update and merge operations.
1546 are the usual targets for update and merge operations.
1547 """
1547 """
1548 if opts['rev']:
1548 if opts['rev']:
1549 heads = repo.heads(repo.lookup(opts['rev']))
1549 heads = repo.heads(repo.lookup(opts['rev']))
1550 else:
1550 else:
1551 heads = repo.heads()
1551 heads = repo.heads()
1552 br = None
1552 br = None
1553 if opts['branches']:
1553 if opts['branches']:
1554 br = repo.branchlookup(heads)
1554 br = repo.branchlookup(heads)
1555 displayer = show_changeset(ui, repo, opts)
1555 displayer = show_changeset(ui, repo, opts)
1556 for n in heads:
1556 for n in heads:
1557 displayer.show(changenode=n, brinfo=br)
1557 displayer.show(changenode=n, brinfo=br)
1558
1558
1559 def identify(ui, repo):
1559 def identify(ui, repo):
1560 """print information about the working copy
1560 """print information about the working copy
1561
1561
1562 Print a short summary of the current state of the repo.
1562 Print a short summary of the current state of the repo.
1563
1563
1564 This summary identifies the repository state using one or two parent
1564 This summary identifies the repository state using one or two parent
1565 hash identifiers, followed by a "+" if there are uncommitted changes
1565 hash identifiers, followed by a "+" if there are uncommitted changes
1566 in the working directory, followed by a list of tags for this revision.
1566 in the working directory, followed by a list of tags for this revision.
1567 """
1567 """
1568 parents = [p for p in repo.dirstate.parents() if p != nullid]
1568 parents = [p for p in repo.dirstate.parents() if p != nullid]
1569 if not parents:
1569 if not parents:
1570 ui.write(_("unknown\n"))
1570 ui.write(_("unknown\n"))
1571 return
1571 return
1572
1572
1573 hexfunc = ui.debugflag and hex or short
1573 hexfunc = ui.debugflag and hex or short
1574 modified, added, removed, deleted = repo.status()[:4]
1574 modified, added, removed, deleted = repo.status()[:4]
1575 output = ["%s%s" %
1575 output = ["%s%s" %
1576 ('+'.join([hexfunc(parent) for parent in parents]),
1576 ('+'.join([hexfunc(parent) for parent in parents]),
1577 (modified or added or removed or deleted) and "+" or "")]
1577 (modified or added or removed or deleted) and "+" or "")]
1578
1578
1579 if not ui.quiet:
1579 if not ui.quiet:
1580
1580
1581 branch = repo.workingctx().branch()
1581 branch = repo.workingctx().branch()
1582 if branch:
1582 if branch:
1583 output.append("(%s)" % branch)
1583 output.append("(%s)" % branch)
1584
1584
1585 # multiple tags for a single parent separated by '/'
1585 # multiple tags for a single parent separated by '/'
1586 parenttags = ['/'.join(tags)
1586 parenttags = ['/'.join(tags)
1587 for tags in map(repo.nodetags, parents) if tags]
1587 for tags in map(repo.nodetags, parents) if tags]
1588 # tags for multiple parents separated by ' + '
1588 # tags for multiple parents separated by ' + '
1589 if parenttags:
1589 if parenttags:
1590 output.append(' + '.join(parenttags))
1590 output.append(' + '.join(parenttags))
1591
1591
1592 ui.write("%s\n" % ' '.join(output))
1592 ui.write("%s\n" % ' '.join(output))
1593
1593
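# The identify output described above, rebuilt from plain values: parent hashes
# joined with '+', a trailing '+' for a dirty working directory, then branch
# and tags.  Hypothetical stand-alone helper with made-up example values.
def format_identify(parent_hashes, dirty, branch=None, tags=()):
    out = ['+'.join(parent_hashes) + (dirty and '+' or '')]
    if branch:
        out.append('(%s)' % branch)
    if tags:
        out.append(' + '.join(tags))
    return ' '.join(out)

# format_identify(['a1b2c3d4e5f6'], True, tags=['tip']) -> 'a1b2c3d4e5f6+ tip'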
1594 def import_(ui, repo, patch1, *patches, **opts):
1594 def import_(ui, repo, patch1, *patches, **opts):
1595 """import an ordered set of patches
1595 """import an ordered set of patches
1596
1596
1597 Import a list of patches and commit them individually.
1597 Import a list of patches and commit them individually.
1598
1598
1599 If there are outstanding changes in the working directory, import
1599 If there are outstanding changes in the working directory, import
1600 will abort unless given the -f flag.
1600 will abort unless given the -f flag.
1601
1601
1602 You can import a patch straight from a mail message. Even patches
1602 You can import a patch straight from a mail message. Even patches
1603 as attachments work (the body part must be of type text/plain or
1603 as attachments work (the body part must be of type text/plain or
1604 text/x-patch to be used). The From and Subject headers of the email
1604 text/x-patch to be used). The From and Subject headers of the email
1605 message are used as the default committer and commit message. All
1605 message are used as the default committer and commit message. All
1606 text/plain body parts before the first diff are added to the commit
1606 text/plain body parts before the first diff are added to the commit
1607 message.
1607 message.
1608
1608
1609 If the imported patch was generated by hg export, the user and description
1609 If the imported patch was generated by hg export, the user and description
1610 from the patch override values from the message headers and body. Values
1610 from the patch override values from the message headers and body. Values
1611 given on the command line with -m and -u override these.
1611 given on the command line with -m and -u override these.
1612
1612
1613 To read a patch from standard input, use patch name "-".
1613 To read a patch from standard input, use patch name "-".
1614 """
1614 """
1615 patches = (patch1,) + patches
1615 patches = (patch1,) + patches
1616
1616
1617 if not opts['force']:
1617 if not opts['force']:
1618 bail_if_changed(repo)
1618 bail_if_changed(repo)
1619
1619
1620 d = opts["base"]
1620 d = opts["base"]
1621 strip = opts["strip"]
1621 strip = opts["strip"]
1622
1622
1623 wlock = repo.wlock()
1623 wlock = repo.wlock()
1624 lock = repo.lock()
1624 lock = repo.lock()
1625
1625
1626 for p in patches:
1626 for p in patches:
1627 pf = os.path.join(d, p)
1627 pf = os.path.join(d, p)
1628
1628
1629 if pf == '-':
1629 if pf == '-':
1630 ui.status(_("applying patch from stdin\n"))
1630 ui.status(_("applying patch from stdin\n"))
1631 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1631 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1632 else:
1632 else:
1633 ui.status(_("applying %s\n") % p)
1633 ui.status(_("applying %s\n") % p)
1634 tmpname, message, user, date = patch.extract(ui, file(pf))
1634 tmpname, message, user, date = patch.extract(ui, file(pf))
1635
1635
1636 if tmpname is None:
1636 if tmpname is None:
1637 raise util.Abort(_('no diffs found'))
1637 raise util.Abort(_('no diffs found'))
1638
1638
1639 try:
1639 try:
1640 if opts['message']:
1640 if opts['message']:
1641 # pickup the cmdline msg
1641 # pickup the cmdline msg
1642 message = opts['message']
1642 message = opts['message']
1643 elif message:
1643 elif message:
1644 # pickup the patch msg
1644 # pickup the patch msg
1645 message = message.strip()
1645 message = message.strip()
1646 else:
1646 else:
1647 # launch the editor
1647 # launch the editor
1648 message = None
1648 message = None
1649 ui.debug(_('message:\n%s\n') % message)
1649 ui.debug(_('message:\n%s\n') % message)
1650
1650
1651 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1651 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1652 files = patch.updatedir(ui, repo, files, wlock=wlock)
1652 files = patch.updatedir(ui, repo, files, wlock=wlock)
1653 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1653 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1654 finally:
1654 finally:
1655 os.unlink(tmpname)
1655 os.unlink(tmpname)
1656
1656
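# Commit-message precedence used by import above: -m on the command line wins,
# then the message extracted from the patch or email, otherwise None so that
# the editor is launched.  Hypothetical stand-alone restatement:
def choose_import_message(cmdline_msg, patch_msg):
    if cmdline_msg:
        return cmdline_msg             # -m/--message from the command line
    if patch_msg:
        return patch_msg.strip()       # message found in the patch itself
    return None                        # fall through to the configured editor

# choose_import_message('', 'Fix typo\n') -> 'Fix typo'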
1657 def incoming(ui, repo, source="default", **opts):
1657 def incoming(ui, repo, source="default", **opts):
1658 """show new changesets found in source
1658 """show new changesets found in source
1659
1659
1660 Show new changesets found in the specified path/URL or the default
1660 Show new changesets found in the specified path/URL or the default
1661 pull location. These are the changesets that would be pulled if a pull
1661 pull location. These are the changesets that would be pulled if a pull
1662 was requested.
1662 was requested.
1663
1663
1664 For a remote repository, using --bundle avoids downloading the changesets
1664 For a remote repository, using --bundle avoids downloading the changesets
1665 twice if the incoming run is followed by a pull.
1665 twice if the incoming run is followed by a pull.
1666
1666
1667 See pull for valid source format details.
1667 See pull for valid source format details.
1668 """
1668 """
1669 source = ui.expandpath(source)
1669 source = ui.expandpath(source)
1670 setremoteconfig(ui, opts)
1670 setremoteconfig(ui, opts)
1671
1671
1672 other = hg.repository(ui, source)
1672 other = hg.repository(ui, source)
1673 incoming = repo.findincoming(other, force=opts["force"])
1673 incoming = repo.findincoming(other, force=opts["force"])
1674 if not incoming:
1674 if not incoming:
1675 ui.status(_("no changes found\n"))
1675 ui.status(_("no changes found\n"))
1676 return
1676 return
1677
1677
1678 cleanup = None
1678 cleanup = None
1679 try:
1679 try:
1680 fname = opts["bundle"]
1680 fname = opts["bundle"]
1681 if fname or not other.local():
1681 if fname or not other.local():
1682 # create a bundle (uncompressed if other repo is not local)
1682 # create a bundle (uncompressed if other repo is not local)
1683 cg = other.changegroup(incoming, "incoming")
1683 cg = other.changegroup(incoming, "incoming")
1684 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1684 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1685 # keep written bundle?
1685 # keep written bundle?
1686 if opts["bundle"]:
1686 if opts["bundle"]:
1687 cleanup = None
1687 cleanup = None
1688 if not other.local():
1688 if not other.local():
1689 # use the created uncompressed bundlerepo
1689 # use the created uncompressed bundlerepo
1690 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1690 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1691
1691
1692 revs = None
1692 revs = None
1693 if opts['rev']:
1693 if opts['rev']:
1694 revs = [other.lookup(rev) for rev in opts['rev']]
1694 revs = [other.lookup(rev) for rev in opts['rev']]
1695 o = other.changelog.nodesbetween(incoming, revs)[0]
1695 o = other.changelog.nodesbetween(incoming, revs)[0]
1696 if opts['newest_first']:
1696 if opts['newest_first']:
1697 o.reverse()
1697 o.reverse()
1698 displayer = show_changeset(ui, other, opts)
1698 displayer = show_changeset(ui, other, opts)
1699 for n in o:
1699 for n in o:
1700 parents = [p for p in other.changelog.parents(n) if p != nullid]
1700 parents = [p for p in other.changelog.parents(n) if p != nullid]
1701 if opts['no_merges'] and len(parents) == 2:
1701 if opts['no_merges'] and len(parents) == 2:
1702 continue
1702 continue
1703 displayer.show(changenode=n)
1703 displayer.show(changenode=n)
1704 if opts['patch']:
1704 if opts['patch']:
1705 prev = (parents and parents[0]) or nullid
1705 prev = (parents and parents[0]) or nullid
1706 patch.diff(other, prev, n, fp=repo.ui)
1706 patch.diff(other, prev, n, fp=repo.ui)
1707 ui.write("\n")
1707 ui.write("\n")
1708 finally:
1708 finally:
1709 if hasattr(other, 'close'):
1709 if hasattr(other, 'close'):
1710 other.close()
1710 other.close()
1711 if cleanup:
1711 if cleanup:
1712 os.unlink(cleanup)
1712 os.unlink(cleanup)
1713
1713
1714 def init(ui, dest=".", **opts):
1714 def init(ui, dest=".", **opts):
1715 """create a new repository in the given directory
1715 """create a new repository in the given directory
1716
1716
1717 Initialize a new repository in the given directory. If the given
1717 Initialize a new repository in the given directory. If the given
1718 directory does not exist, it is created.
1718 directory does not exist, it is created.
1719
1719
1720 If no directory is given, the current directory is used.
1720 If no directory is given, the current directory is used.
1721
1721
1722 It is possible to specify an ssh:// URL as the destination.
1722 It is possible to specify an ssh:// URL as the destination.
1723 Look at the help text for the pull command for important details
1723 Look at the help text for the pull command for important details
1724 about ssh:// URLs.
1724 about ssh:// URLs.
1725 """
1725 """
1726 setremoteconfig(ui, opts)
1726 setremoteconfig(ui, opts)
1727 hg.repository(ui, dest, create=1)
1727 hg.repository(ui, dest, create=1)
1728
1728
1729 def locate(ui, repo, *pats, **opts):
1729 def locate(ui, repo, *pats, **opts):
1730 """locate files matching specific patterns
1730 """locate files matching specific patterns
1731
1731
1732 Print all files under Mercurial control whose names match the
1732 Print all files under Mercurial control whose names match the
1733 given patterns.
1733 given patterns.
1734
1734
1735 This command searches the current directory and its
1735 This command searches the current directory and its
1736 subdirectories. To search an entire repository, move to the root
1736 subdirectories. To search an entire repository, move to the root
1737 of the repository.
1737 of the repository.
1738
1738
1739 If no patterns are given to match, this command prints all file
1739 If no patterns are given to match, this command prints all file
1740 names.
1740 names.
1741
1741
1742 If you want to feed the output of this command into the "xargs"
1742 If you want to feed the output of this command into the "xargs"
1743 command, use the "-0" option to both this command and "xargs".
1743 command, use the "-0" option to both this command and "xargs".
1744 This will avoid the problem of "xargs" treating single filenames
1744 This will avoid the problem of "xargs" treating single filenames
1745 that contain white space as multiple filenames.
1745 that contain white space as multiple filenames.
1746 """
1746 """
1747 end = opts['print0'] and '\0' or '\n'
1747 end = opts['print0'] and '\0' or '\n'
1748 rev = opts['rev']
1748 rev = opts['rev']
1749 if rev:
1749 if rev:
1750 node = repo.lookup(rev)
1750 node = repo.lookup(rev)
1751 else:
1751 else:
1752 node = None
1752 node = None
1753
1753
1754 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1754 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1755 head='(?:.*/|)'):
1755 head='(?:.*/|)'):
1756 if not node and repo.dirstate.state(abs) == '?':
1756 if not node and repo.dirstate.state(abs) == '?':
1757 continue
1757 continue
1758 if opts['fullpath']:
1758 if opts['fullpath']:
1759 ui.write(os.path.join(repo.root, abs), end)
1759 ui.write(os.path.join(repo.root, abs), end)
1760 else:
1760 else:
1761 ui.write(((pats and rel) or abs), end)
1761 ui.write(((pats and rel) or abs), end)
1762
1762
1763 def log(ui, repo, *pats, **opts):
1763 def log(ui, repo, *pats, **opts):
1764 """show revision history of entire repository or files
1764 """show revision history of entire repository or files
1765
1765
1766 Print the revision history of the specified files or the entire
1766 Print the revision history of the specified files or the entire
1767 project.
1767 project.
1768
1768
1769 File history is shown without following rename or copy history of
1769 File history is shown without following rename or copy history of
1770 files. Use -f/--follow with a file name to follow history across
1770 files. Use -f/--follow with a file name to follow history across
1771 renames and copies. --follow without a file name will only show
1771 renames and copies. --follow without a file name will only show
1772 ancestors or descendants of the starting revision. --follow-first
1772 ancestors or descendants of the starting revision. --follow-first
1773 only follows the first parent of merge revisions.
1773 only follows the first parent of merge revisions.
1774
1774
1775 If no revision range is specified, the default is tip:0 unless
1775 If no revision range is specified, the default is tip:0 unless
1776 --follow is set, in which case the working directory parent is
1776 --follow is set, in which case the working directory parent is
1777 used as the starting revision.
1777 used as the starting revision.
1778
1778
1779 By default this command outputs: changeset id and hash, tags,
1779 By default this command outputs: changeset id and hash, tags,
1780 non-trivial parents, user, date and time, and a summary for each
1780 non-trivial parents, user, date and time, and a summary for each
1781 commit. When the -v/--verbose switch is used, the list of changed
1781 commit. When the -v/--verbose switch is used, the list of changed
1782 files and the full commit message are shown.
1782 files and the full commit message are shown.
1783 """
1783 """
1784 class dui(object):
1784 class dui(object):
1785 # Implement and delegate some ui protocol. Save hunks of
1785 # Implement and delegate some ui protocol. Save hunks of
1786 # output for later display in the desired order.
1786 # output for later display in the desired order.
1787 def __init__(self, ui):
1787 def __init__(self, ui):
1788 self.ui = ui
1788 self.ui = ui
1789 self.hunk = {}
1789 self.hunk = {}
1790 self.header = {}
1790 self.header = {}
1791 def bump(self, rev):
1791 def bump(self, rev):
1792 self.rev = rev
1792 self.rev = rev
1793 self.hunk[rev] = []
1793 self.hunk[rev] = []
1794 self.header[rev] = []
1794 self.header[rev] = []
1795 def note(self, *args):
1795 def note(self, *args):
1796 if self.verbose:
1796 if self.verbose:
1797 self.write(*args)
1797 self.write(*args)
1798 def status(self, *args):
1798 def status(self, *args):
1799 if not self.quiet:
1799 if not self.quiet:
1800 self.write(*args)
1800 self.write(*args)
1801 def write(self, *args):
1801 def write(self, *args):
1802 self.hunk[self.rev].append(args)
1802 self.hunk[self.rev].append(args)
1803 def write_header(self, *args):
1803 def write_header(self, *args):
1804 self.header[self.rev].append(args)
1804 self.header[self.rev].append(args)
1805 def debug(self, *args):
1805 def debug(self, *args):
1806 if self.debugflag:
1806 if self.debugflag:
1807 self.write(*args)
1807 self.write(*args)
1808 def __getattr__(self, key):
1808 def __getattr__(self, key):
1809 return getattr(self.ui, key)
1809 return getattr(self.ui, key)
1810
1810
1811 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1811 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1812
1812
1813 if opts['limit']:
1813 if opts['limit']:
1814 try:
1814 try:
1815 limit = int(opts['limit'])
1815 limit = int(opts['limit'])
1816 except ValueError:
1816 except ValueError:
1817 raise util.Abort(_('limit must be a positive integer'))
1817 raise util.Abort(_('limit must be a positive integer'))
1818 if limit <= 0: raise util.Abort(_('limit must be positive'))
1818 if limit <= 0: raise util.Abort(_('limit must be positive'))
1819 else:
1819 else:
1820 limit = sys.maxint
1820 limit = sys.maxint
1821 count = 0
1821 count = 0
1822
1822
1823 if opts['copies'] and opts['rev']:
1823 if opts['copies'] and opts['rev']:
1824 endrev = max([int(i)
1824 endrev = max([int(i)
1825 for i in cmdutil.revrange(ui, repo, opts['rev'])]) + 1
1825 for i in cmdutil.revrange(ui, repo, opts['rev'])]) + 1
1826 else:
1826 else:
1827 endrev = repo.changelog.count()
1827 endrev = repo.changelog.count()
1828 rcache = {}
1828 rcache = {}
1829 ncache = {}
1829 ncache = {}
1830 dcache = []
1830 dcache = []
1831 def getrenamed(fn, rev, man):
1831 def getrenamed(fn, rev, man):
1832 '''looks up all renames for a file (up to endrev) the first
1832 '''looks up all renames for a file (up to endrev) the first
1833 time the file is given. It indexes on the changerev and only
1833 time the file is given. It indexes on the changerev and only
1834 parses the manifest if linkrev != changerev.
1834 parses the manifest if linkrev != changerev.
1835 Returns rename info for fn at changerev rev.'''
1835 Returns rename info for fn at changerev rev.'''
1836 if fn not in rcache:
1836 if fn not in rcache:
1837 rcache[fn] = {}
1837 rcache[fn] = {}
1838 ncache[fn] = {}
1838 ncache[fn] = {}
1839 fl = repo.file(fn)
1839 fl = repo.file(fn)
1840 for i in xrange(fl.count()):
1840 for i in xrange(fl.count()):
1841 node = fl.node(i)
1841 node = fl.node(i)
1842 lr = fl.linkrev(node)
1842 lr = fl.linkrev(node)
1843 renamed = fl.renamed(node)
1843 renamed = fl.renamed(node)
1844 rcache[fn][lr] = renamed
1844 rcache[fn][lr] = renamed
1845 if renamed:
1845 if renamed:
1846 ncache[fn][node] = renamed
1846 ncache[fn][node] = renamed
1847 if lr >= endrev:
1847 if lr >= endrev:
1848 break
1848 break
1849 if rev in rcache[fn]:
1849 if rev in rcache[fn]:
1850 return rcache[fn][rev]
1850 return rcache[fn][rev]
1851 mr = repo.manifest.rev(man)
1851 mr = repo.manifest.rev(man)
1852 if repo.manifest.parentrevs(mr) != (mr - 1, -1):
1852 if repo.manifest.parentrevs(mr) != (mr - 1, -1):
1853 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1853 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1854 if not dcache or dcache[0] != man:
1854 if not dcache or dcache[0] != man:
1855 dcache[:] = [man, repo.manifest.readdelta(man)]
1855 dcache[:] = [man, repo.manifest.readdelta(man)]
1856 if fn in dcache[1]:
1856 if fn in dcache[1]:
1857 return ncache[fn].get(dcache[1][fn])
1857 return ncache[fn].get(dcache[1][fn])
1858 return None
1858 return None
1859
1859
1860 displayer = show_changeset(ui, repo, opts)
1860 displayer = show_changeset(ui, repo, opts)
1861 for st, rev, fns in changeiter:
1861 for st, rev, fns in changeiter:
1862 if st == 'window':
1862 if st == 'window':
1863 du = dui(ui)
1863 du = dui(ui)
1864 displayer.ui = du
1864 displayer.ui = du
1865 elif st == 'add':
1865 elif st == 'add':
1866 du.bump(rev)
1866 du.bump(rev)
1867 changenode = repo.changelog.node(rev)
1867 changenode = repo.changelog.node(rev)
1868 parents = [p for p in repo.changelog.parents(changenode)
1868 parents = [p for p in repo.changelog.parents(changenode)
1869 if p != nullid]
1869 if p != nullid]
1870 if opts['no_merges'] and len(parents) == 2:
1870 if opts['no_merges'] and len(parents) == 2:
1871 continue
1871 continue
1872 if opts['only_merges'] and len(parents) != 2:
1872 if opts['only_merges'] and len(parents) != 2:
1873 continue
1873 continue
1874
1874
1875 if opts['keyword']:
1875 if opts['keyword']:
1876 changes = getchange(rev)
1876 changes = getchange(rev)
1877 miss = 0
1877 miss = 0
1878 for k in [kw.lower() for kw in opts['keyword']]:
1878 for k in [kw.lower() for kw in opts['keyword']]:
1879 if not (k in changes[1].lower() or
1879 if not (k in changes[1].lower() or
1880 k in changes[4].lower() or
1880 k in changes[4].lower() or
1881 k in " ".join(changes[3][:20]).lower()):
1881 k in " ".join(changes[3][:20]).lower()):
1882 miss = 1
1882 miss = 1
1883 break
1883 break
1884 if miss:
1884 if miss:
1885 continue
1885 continue
1886
1886
1887 br = None
1887 br = None
1888 if opts['branches']:
1888 if opts['branches']:
1889 br = repo.branchlookup([repo.changelog.node(rev)])
1889 br = repo.branchlookup([repo.changelog.node(rev)])
1890
1890
1891 copies = []
1891 copies = []
1892 if opts.get('copies') and rev:
1892 if opts.get('copies') and rev:
1893 mf = getchange(rev)[0]
1893 mf = getchange(rev)[0]
1894 for fn in getchange(rev)[3]:
1894 for fn in getchange(rev)[3]:
1895 rename = getrenamed(fn, rev, mf)
1895 rename = getrenamed(fn, rev, mf)
1896 if rename:
1896 if rename:
1897 copies.append((fn, rename[0]))
1897 copies.append((fn, rename[0]))
1898 displayer.show(rev, brinfo=br, copies=copies)
1898 displayer.show(rev, brinfo=br, copies=copies)
1899 if opts['patch']:
1899 if opts['patch']:
1900 prev = (parents and parents[0]) or nullid
1900 prev = (parents and parents[0]) or nullid
1901 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1901 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1902 du.write("\n\n")
1902 du.write("\n\n")
1903 elif st == 'iter':
1903 elif st == 'iter':
1904 if count == limit: break
1904 if count == limit: break
1905 if du.header[rev]:
1905 if du.header[rev]:
1906 for args in du.header[rev]:
1906 for args in du.header[rev]:
1907 ui.write_header(*args)
1907 ui.write_header(*args)
1908 if du.hunk[rev]:
1908 if du.hunk[rev]:
1909 count += 1
1909 count += 1
1910 for args in du.hunk[rev]:
1910 for args in du.hunk[rev]:
1911 ui.write(*args)
1911 ui.write(*args)
1912
1912
1913 def manifest(ui, repo, rev=None):
1913 def manifest(ui, repo, rev=None):
1914 """output the latest or given revision of the project manifest
1914 """output the latest or given revision of the project manifest
1915
1915
1916 Print a list of version controlled files for the given revision.
1916 Print a list of version controlled files for the given revision.
1917
1917
1918 The manifest is the list of files being version controlled. If no revision
1918 The manifest is the list of files being version controlled. If no revision
1919 is given then the tip is used.
1919 is given then the tip is used.
1920 """
1920 """
1921 if rev:
1921 if rev:
1922 try:
1922 try:
1923 # assume all revision numbers are for changesets
1923 # assume all revision numbers are for changesets
1924 n = repo.lookup(rev)
1924 n = repo.lookup(rev)
1925 change = repo.changelog.read(n)
1925 change = repo.changelog.read(n)
1926 n = change[0]
1926 n = change[0]
1927 except hg.RepoError:
1927 except hg.RepoError:
1928 n = repo.manifest.lookup(rev)
1928 n = repo.manifest.lookup(rev)
1929 else:
1929 else:
1930 n = repo.manifest.tip()
1930 n = repo.manifest.tip()
1931 m = repo.manifest.read(n)
1931 m = repo.manifest.read(n)
1932 files = m.keys()
1932 files = m.keys()
1933 files.sort()
1933 files.sort()
1934
1934
1935 for f in files:
1935 for f in files:
1936 ui.write("%40s %3s %s\n" % (hex(m[f]),
1936 ui.write("%40s %3s %s\n" % (hex(m[f]),
1937 m.execf(f) and "755" or "644", f))
1937 m.execf(f) and "755" or "644", f))
1938
1938
1939 def merge(ui, repo, node=None, force=None, branch=None):
1939 def merge(ui, repo, node=None, force=None, branch=None):
1940 """Merge working directory with another revision
1940 """Merge working directory with another revision
1941
1941
1942 Merge the contents of the current working directory and the
1942 Merge the contents of the current working directory and the
1943 requested revision. Files that changed between either parent are
1943 requested revision. Files that changed between either parent are
1944 marked as changed for the next commit and a commit must be
1944 marked as changed for the next commit and a commit must be
1945 performed before any further updates are allowed.
1945 performed before any further updates are allowed.
1946
1946
1947 If no revision is specified, the working directory's parent is a
1947 If no revision is specified, the working directory's parent is a
1948 head revision, and the repository contains exactly one other head,
1948 head revision, and the repository contains exactly one other head,
1949 that other head is merged by default. Otherwise, an explicit
1949 that other head is merged by default. Otherwise, an explicit
1950 revision to merge with must be provided.
1950 revision to merge with must be provided.
1951 """
1951 """
1952
1952
1953 if node or branch:
1953 if node or branch:
1954 node = _lookup(repo, node, branch)
1954 node = _lookup(repo, node, branch)
1955 else:
1955 else:
1956 heads = repo.heads()
1956 heads = repo.heads()
1957 if len(heads) > 2:
1957 if len(heads) > 2:
1958 raise util.Abort(_('repo has %d heads - '
1958 raise util.Abort(_('repo has %d heads - '
1959 'please merge with an explicit rev') %
1959 'please merge with an explicit rev') %
1960 len(heads))
1960 len(heads))
1961 if len(heads) == 1:
1961 if len(heads) == 1:
1962 raise util.Abort(_('there is nothing to merge - '
1962 raise util.Abort(_('there is nothing to merge - '
1963 'use "hg update" instead'))
1963 'use "hg update" instead'))
1964 parent = repo.dirstate.parents()[0]
1964 parent = repo.dirstate.parents()[0]
1965 if parent not in heads:
1965 if parent not in heads:
1966 raise util.Abort(_('working dir not at a head rev - '
1966 raise util.Abort(_('working dir not at a head rev - '
1967 'use "hg update" or merge with an explicit rev'))
1967 'use "hg update" or merge with an explicit rev'))
1968 node = parent == heads[0] and heads[-1] or heads[0]
1968 node = parent == heads[0] and heads[-1] or heads[0]
1969 return hg.merge(repo, node, force=force)
1969 return hg.merge(repo, node, force=force)
1970
1970
1971 def outgoing(ui, repo, dest=None, **opts):
1971 def outgoing(ui, repo, dest=None, **opts):
1972 """show changesets not found in destination
1972 """show changesets not found in destination
1973
1973
1974 Show changesets not found in the specified destination repository or
1974 Show changesets not found in the specified destination repository or
1975 the default push location. These are the changesets that would be pushed
1975 the default push location. These are the changesets that would be pushed
1976 if a push was requested.
1976 if a push was requested.
1977
1977
1978 See pull for valid destination format details.
1978 See pull for valid destination format details.
1979 """
1979 """
1980 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1980 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1981 setremoteconfig(ui, opts)
1981 setremoteconfig(ui, opts)
1982 revs = None
1982 revs = None
1983 if opts['rev']:
1983 if opts['rev']:
1984 revs = [repo.lookup(rev) for rev in opts['rev']]
1984 revs = [repo.lookup(rev) for rev in opts['rev']]
1985
1985
1986 other = hg.repository(ui, dest)
1986 other = hg.repository(ui, dest)
1987 o = repo.findoutgoing(other, force=opts['force'])
1987 o = repo.findoutgoing(other, force=opts['force'])
1988 if not o:
1988 if not o:
1989 ui.status(_("no changes found\n"))
1989 ui.status(_("no changes found\n"))
1990 return
1990 return
1991 o = repo.changelog.nodesbetween(o, revs)[0]
1991 o = repo.changelog.nodesbetween(o, revs)[0]
1992 if opts['newest_first']:
1992 if opts['newest_first']:
1993 o.reverse()
1993 o.reverse()
1994 displayer = show_changeset(ui, repo, opts)
1994 displayer = show_changeset(ui, repo, opts)
1995 for n in o:
1995 for n in o:
1996 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1996 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1997 if opts['no_merges'] and len(parents) == 2:
1997 if opts['no_merges'] and len(parents) == 2:
1998 continue
1998 continue
1999 displayer.show(changenode=n)
1999 displayer.show(changenode=n)
2000 if opts['patch']:
2000 if opts['patch']:
2001 prev = (parents and parents[0]) or nullid
2001 prev = (parents and parents[0]) or nullid
2002 patch.diff(repo, prev, n)
2002 patch.diff(repo, prev, n)
2003 ui.write("\n")
2003 ui.write("\n")
2004
2004
2005 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2005 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2006 """show the parents of the working dir or revision
2006 """show the parents of the working dir or revision
2007
2007
2008 Print the working directory's parent revisions.
2008 Print the working directory's parent revisions.
2009 """
2009 """
2010 # legacy
2010 # legacy
2011 if file_ and not rev:
2011 if file_ and not rev:
2012 try:
2012 try:
2013 rev = repo.lookup(file_)
2013 rev = repo.lookup(file_)
2014 file_ = None
2014 file_ = None
2015 except hg.RepoError:
2015 except hg.RepoError:
2016 pass
2016 pass
2017 else:
2017 else:
2018 ui.warn(_("'hg parent REV' is deprecated, "
2018 ui.warn(_("'hg parent REV' is deprecated, "
2019 "please use 'hg parents -r REV instead\n"))
2019 "please use 'hg parents -r REV instead\n"))
2020
2020
2021 if rev:
2021 if rev:
2022 if file_:
2022 if file_:
2023 ctx = repo.filectx(file_, changeid=rev)
2023 ctx = repo.filectx(file_, changeid=rev)
2024 else:
2024 else:
2025 ctx = repo.changectx(rev)
2025 ctx = repo.changectx(rev)
2026 p = [cp.node() for cp in ctx.parents()]
2026 p = [cp.node() for cp in ctx.parents()]
2027 else:
2027 else:
2028 p = repo.dirstate.parents()
2028 p = repo.dirstate.parents()
2029
2029
2030 br = None
2030 br = None
2031 if branches is not None:
2031 if branches is not None:
2032 br = repo.branchlookup(p)
2032 br = repo.branchlookup(p)
2033 displayer = show_changeset(ui, repo, opts)
2033 displayer = show_changeset(ui, repo, opts)
2034 for n in p:
2034 for n in p:
2035 if n != nullid:
2035 if n != nullid:
2036 displayer.show(changenode=n, brinfo=br)
2036 displayer.show(changenode=n, brinfo=br)
2037
2037
2038 def paths(ui, repo, search=None):
2038 def paths(ui, repo, search=None):
2039 """show definition of symbolic path names
2039 """show definition of symbolic path names
2040
2040
2041 Show definition of symbolic path name NAME. If no name is given, show
2041 Show definition of symbolic path name NAME. If no name is given, show
2042 definition of available names.
2042 definition of available names.
2043
2043
2044 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2044 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2045 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2045 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2046 """
2046 """
2047 if search:
2047 if search:
2048 for name, path in ui.configitems("paths"):
2048 for name, path in ui.configitems("paths"):
2049 if name == search:
2049 if name == search:
2050 ui.write("%s\n" % path)
2050 ui.write("%s\n" % path)
2051 return
2051 return
2052 ui.warn(_("not found!\n"))
2052 ui.warn(_("not found!\n"))
2053 return 1
2053 return 1
2054 else:
2054 else:
2055 for name, path in ui.configitems("paths"):
2055 for name, path in ui.configitems("paths"):
2056 ui.write("%s = %s\n" % (name, path))
2056 ui.write("%s = %s\n" % (name, path))
2057
2057
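# postincoming is the shared epilogue for pull and unbundle: modheads counts
# the heads added or changed, so 0 means nothing came in, 1 permits a plain
# update when one was requested, and more than 1 means a merge is needed.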
2058 def postincoming(ui, repo, modheads, optupdate):
2058 def postincoming(ui, repo, modheads, optupdate):
2059 if modheads == 0:
2059 if modheads == 0:
2060 return
2060 return
2061 if optupdate:
2061 if optupdate:
2062 if modheads == 1:
2062 if modheads == 1:
2063 return hg.update(repo, repo.changelog.tip()) # update
2063 return hg.update(repo, repo.changelog.tip()) # update
2064 else:
2064 else:
2065 ui.status(_("not updating, since new heads added\n"))
2065 ui.status(_("not updating, since new heads added\n"))
2066 if modheads > 1:
2066 if modheads > 1:
2067 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2067 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2068 else:
2068 else:
2069 ui.status(_("(run 'hg update' to get a working copy)\n"))
2069 ui.status(_("(run 'hg update' to get a working copy)\n"))
2070
2070
2071 def pull(ui, repo, source="default", **opts):
2071 def pull(ui, repo, source="default", **opts):
2072 """pull changes from the specified source
2072 """pull changes from the specified source
2073
2073
2074 Pull changes from a remote repository to a local one.
2074 Pull changes from a remote repository to a local one.
2075
2075
2076 This finds all changes from the repository at the specified path
2076 This finds all changes from the repository at the specified path
2077 or URL and adds them to the local repository. By default, this
2077 or URL and adds them to the local repository. By default, this
2078 does not update the copy of the project in the working directory.
2078 does not update the copy of the project in the working directory.
2079
2079
2080 Valid URLs are of the form:
2080 Valid URLs are of the form:
2081
2081
2082 local/filesystem/path
2082 local/filesystem/path
2083 http://[user@]host[:port]/[path]
2083 http://[user@]host[:port]/[path]
2084 https://[user@]host[:port]/[path]
2084 https://[user@]host[:port]/[path]
2085 ssh://[user@]host[:port]/[path]
2085 ssh://[user@]host[:port]/[path]
2086
2086
2087 Some notes about using SSH with Mercurial:
2087 Some notes about using SSH with Mercurial:
2088 - SSH requires an accessible shell account on the destination machine
2088 - SSH requires an accessible shell account on the destination machine
2089 and a copy of hg in the remote path, or specified with the --remotecmd option.
2089 and a copy of hg in the remote path, or specified with the --remotecmd option.
2090 - path is relative to the remote user's home directory by default.
2090 - path is relative to the remote user's home directory by default.
2091 Use an extra slash at the start of a path to specify an absolute path:
2091 Use an extra slash at the start of a path to specify an absolute path:
2092 ssh://example.com//tmp/repository
2092 ssh://example.com//tmp/repository
2093 - Mercurial doesn't use its own compression via SSH; the right thing
2093 - Mercurial doesn't use its own compression via SSH; the right thing
2094 to do is to configure it in your ~/.ssh/config, e.g.:
2094 to do is to configure it in your ~/.ssh/config, e.g.:
2095 Host *.mylocalnetwork.example.com
2095 Host *.mylocalnetwork.example.com
2096 Compression off
2096 Compression off
2097 Host *
2097 Host *
2098 Compression on
2098 Compression on
2099 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2099 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2100 with the --ssh command line option.
2100 with the --ssh command line option.
2101 """
2101 """
2102 source = ui.expandpath(source)
2102 source = ui.expandpath(source)
2103 setremoteconfig(ui, opts)
2103 setremoteconfig(ui, opts)
2104
2104
2105 other = hg.repository(ui, source)
2105 other = hg.repository(ui, source)
2106 ui.status(_('pulling from %s\n') % (source))
2106 ui.status(_('pulling from %s\n') % (source))
2107 revs = None
2107 revs = None
2108 if opts['rev'] and not other.local():
2108 if opts['rev']:
2109 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2109 if 'lookup' in other.capabilities:
2110 elif opts['rev']:
2111 revs = [other.lookup(rev) for rev in opts['rev']]
2110 revs = [other.lookup(rev) for rev in opts['rev']]
2111 else:
2112 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2113 raise util.Abort(error)
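# The branch above keys off the peer's advertised capability strings: a peer
# that lists 'lookup' can resolve a symbolic revision name to a node on its
# side, so -r is honoured; the assumption is that older servers omit the
# capability, and for them the request is refused with the message above.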
2112 modheads = repo.pull(other, heads=revs, force=opts['force'])
2114 modheads = repo.pull(other, heads=revs, force=opts['force'])
2113 return postincoming(ui, repo, modheads, opts['update'])
2115 return postincoming(ui, repo, modheads, opts['update'])
2114
2116
2115 def push(ui, repo, dest=None, **opts):
2117 def push(ui, repo, dest=None, **opts):
2116 """push changes to the specified destination
2118 """push changes to the specified destination
2117
2119
2118 Push changes from the local repository to the given destination.
2120 Push changes from the local repository to the given destination.
2119
2121
2120 This is the symmetrical operation for pull. It helps to move
2122 This is the symmetrical operation for pull. It helps to move
2121 changes from the current repository to a different one. If the
2123 changes from the current repository to a different one. If the
2122 destination is local this is identical to a pull in that directory
2124 destination is local this is identical to a pull in that directory
2123 from the current one.
2125 from the current one.
2124
2126
2125 By default, push will refuse to run if it detects the result would
2127 By default, push will refuse to run if it detects the result would
2126 increase the number of remote heads. This generally indicates that
2128 increase the number of remote heads. This generally indicates that
2127 the client has forgotten to sync and merge before pushing.
2129 the client has forgotten to sync and merge before pushing.
2128
2130
2129 Valid URLs are of the form:
2131 Valid URLs are of the form:
2130
2132
2131 local/filesystem/path
2133 local/filesystem/path
2132 ssh://[user@]host[:port]/[path]
2134 ssh://[user@]host[:port]/[path]
2133
2135
2134 Look at the help text for the pull command for important details
2136 Look at the help text for the pull command for important details
2135 about ssh:// URLs.
2137 about ssh:// URLs.
2136
2138
2137 Pushing to http:// and https:// URLs is possible, too, if this
2139 Pushing to http:// and https:// URLs is possible, too, if this
2138 feature is enabled on the remote Mercurial server.
2140 feature is enabled on the remote Mercurial server.
2139 """
2141 """
2140 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2142 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2141 setremoteconfig(ui, opts)
2143 setremoteconfig(ui, opts)
2142
2144
2143 other = hg.repository(ui, dest)
2145 other = hg.repository(ui, dest)
2144 ui.status('pushing to %s\n' % (dest))
2146 ui.status('pushing to %s\n' % (dest))
2145 revs = None
2147 revs = None
2146 if opts['rev']:
2148 if opts['rev']:
2147 revs = [repo.lookup(rev) for rev in opts['rev']]
2149 revs = [repo.lookup(rev) for rev in opts['rev']]
2148 r = repo.push(other, opts['force'], revs=revs)
2150 r = repo.push(other, opts['force'], revs=revs)
2149 return r == 0
2151 return r == 0
2150
2152
2151 def rawcommit(ui, repo, *flist, **rc):
2153 def rawcommit(ui, repo, *flist, **rc):
2152 """raw commit interface (DEPRECATED)
2154 """raw commit interface (DEPRECATED)
2153
2155
2154 (DEPRECATED)
2156 (DEPRECATED)
2155 Lowlevel commit, for use in helper scripts.
2157 Lowlevel commit, for use in helper scripts.
2156
2158
2157 This command is not intended to be used by normal users, as it is
2159 This command is not intended to be used by normal users, as it is
2158 primarily useful for importing from other SCMs.
2160 primarily useful for importing from other SCMs.
2159
2161
2160 This command is now deprecated and will be removed in a future
2162 This command is now deprecated and will be removed in a future
2161 release, please use debugsetparents and commit instead.
2163 release, please use debugsetparents and commit instead.
2162 """
2164 """
2163
2165
2164 ui.warn(_("(the rawcommit command is deprecated)\n"))
2166 ui.warn(_("(the rawcommit command is deprecated)\n"))
2165
2167
2166 message = rc['message']
2168 message = rc['message']
2167 if not message and rc['logfile']:
2169 if not message and rc['logfile']:
2168 try:
2170 try:
2169 message = open(rc['logfile']).read()
2171 message = open(rc['logfile']).read()
2170 except IOError:
2172 except IOError:
2171 pass
2173 pass
2172 if not message and not rc['logfile']:
2174 if not message and not rc['logfile']:
2173 raise util.Abort(_("missing commit message"))
2175 raise util.Abort(_("missing commit message"))
2174
2176
2175 files = relpath(repo, list(flist))
2177 files = relpath(repo, list(flist))
2176 if rc['files']:
2178 if rc['files']:
2177 files += open(rc['files']).read().splitlines()
2179 files += open(rc['files']).read().splitlines()
2178
2180
2179 rc['parent'] = map(repo.lookup, rc['parent'])
2181 rc['parent'] = map(repo.lookup, rc['parent'])
2180
2182
2181 try:
2183 try:
2182 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2184 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2183 except ValueError, inst:
2185 except ValueError, inst:
2184 raise util.Abort(str(inst))
2186 raise util.Abort(str(inst))
2185
2187
2186 def recover(ui, repo):
2188 def recover(ui, repo):
2187 """roll back an interrupted transaction
2189 """roll back an interrupted transaction
2188
2190
2189 Recover from an interrupted commit or pull.
2191 Recover from an interrupted commit or pull.
2190
2192
2191 This command tries to fix the repository status after an interrupted
2193 This command tries to fix the repository status after an interrupted
2192 operation. It should only be necessary when Mercurial suggests it.
2194 operation. It should only be necessary when Mercurial suggests it.
2193 """
2195 """
2194 if repo.recover():
2196 if repo.recover():
2195 return hg.verify(repo)
2197 return hg.verify(repo)
2196 return 1
2198 return 1
2197
2199
2198 def remove(ui, repo, *pats, **opts):
2200 def remove(ui, repo, *pats, **opts):
2199 """remove the specified files on the next commit
2201 """remove the specified files on the next commit
2200
2202
2201 Schedule the indicated files for removal from the repository.
2203 Schedule the indicated files for removal from the repository.
2202
2204
2203 This command schedules the files to be removed at the next commit.
2205 This command schedules the files to be removed at the next commit.
2204 This only removes files from the current branch, not from the
2206 This only removes files from the current branch, not from the
2205 entire project history. If the files still exist in the working
2207 entire project history. If the files still exist in the working
2206 directory, they will be deleted from it. If invoked with --after,
2208 directory, they will be deleted from it. If invoked with --after,
2207 files that have been manually deleted are marked as removed.
2209 files that have been manually deleted are marked as removed.
2208
2210
2209 Modified files and added files are not removed by default. To
2211 Modified files and added files are not removed by default. To
2210 remove them, use the -f/--force option.
2212 remove them, use the -f/--force option.
2211 """
2213 """
2212 names = []
2214 names = []
2213 if not opts['after'] and not pats:
2215 if not opts['after'] and not pats:
2214 raise util.Abort(_('no files specified'))
2216 raise util.Abort(_('no files specified'))
2215 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2217 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2216 exact = dict.fromkeys(files)
2218 exact = dict.fromkeys(files)
2217 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2219 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2218 modified, added, removed, deleted, unknown = mardu
2220 modified, added, removed, deleted, unknown = mardu
2219 remove, forget = [], []
2221 remove, forget = [], []
2220 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2222 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2221 reason = None
2223 reason = None
2222 if abs not in deleted and opts['after']:
2224 if abs not in deleted and opts['after']:
2223 reason = _('is still present')
2225 reason = _('is still present')
2224 elif abs in modified and not opts['force']:
2226 elif abs in modified and not opts['force']:
2225 reason = _('is modified (use -f to force removal)')
2227 reason = _('is modified (use -f to force removal)')
2226 elif abs in added:
2228 elif abs in added:
2227 if opts['force']:
2229 if opts['force']:
2228 forget.append(abs)
2230 forget.append(abs)
2229 continue
2231 continue
2230 reason = _('has been marked for add (use -f to force removal)')
2232 reason = _('has been marked for add (use -f to force removal)')
2231 elif abs in unknown:
2233 elif abs in unknown:
2232 reason = _('is not managed')
2234 reason = _('is not managed')
2233 elif abs in removed:
2235 elif abs in removed:
2234 continue
2236 continue
2235 if reason:
2237 if reason:
2236 if exact:
2238 if exact:
2237 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2239 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2238 else:
2240 else:
2239 if ui.verbose or not exact:
2241 if ui.verbose or not exact:
2240 ui.status(_('removing %s\n') % rel)
2242 ui.status(_('removing %s\n') % rel)
2241 remove.append(abs)
2243 remove.append(abs)
2242 repo.forget(forget)
2244 repo.forget(forget)
2243 repo.remove(remove, unlink=not opts['after'])
2245 repo.remove(remove, unlink=not opts['after'])
2244
2246
2245 def rename(ui, repo, *pats, **opts):
2247 def rename(ui, repo, *pats, **opts):
2246 """rename files; equivalent of copy + remove
2248 """rename files; equivalent of copy + remove
2247
2249
2248 Mark dest as copies of sources; mark sources for deletion. If
2250 Mark dest as copies of sources; mark sources for deletion. If
2249 dest is a directory, copies are put in that directory. If dest is
2251 dest is a directory, copies are put in that directory. If dest is
2250 a file, there can only be one source.
2252 a file, there can only be one source.
2251
2253
2252 By default, this command copies the contents of files as they
2254 By default, this command copies the contents of files as they
2253 stand in the working directory. If invoked with --after, the
2255 stand in the working directory. If invoked with --after, the
2254 operation is recorded, but no copying is performed.
2256 operation is recorded, but no copying is performed.
2255
2257
2256 This command takes effect in the next commit.
2258 This command takes effect in the next commit.
2257
2259
2258 NOTE: This command should be treated as experimental. While it
2260 NOTE: This command should be treated as experimental. While it
2259 should properly record renamed files, this information is not yet
2261 should properly record renamed files, this information is not yet
2260 fully used by merge, nor fully reported by log.
2262 fully used by merge, nor fully reported by log.
2261 """
2263 """
2262 wlock = repo.wlock(0)
2264 wlock = repo.wlock(0)
2263 errs, copied = docopy(ui, repo, pats, opts, wlock)
2265 errs, copied = docopy(ui, repo, pats, opts, wlock)
2264 names = []
2266 names = []
2265 for abs, rel, exact in copied:
2267 for abs, rel, exact in copied:
2266 if ui.verbose or not exact:
2268 if ui.verbose or not exact:
2267 ui.status(_('removing %s\n') % rel)
2269 ui.status(_('removing %s\n') % rel)
2268 names.append(abs)
2270 names.append(abs)
2269 if not opts.get('dry_run'):
2271 if not opts.get('dry_run'):
2270 repo.remove(names, True, wlock)
2272 repo.remove(names, True, wlock)
2271 return errs
2273 return errs
2272
2274
2273 def revert(ui, repo, *pats, **opts):
2275 def revert(ui, repo, *pats, **opts):
2274 """revert files or dirs to their states as of some revision
2276 """revert files or dirs to their states as of some revision
2275
2277
2276 With no revision specified, revert the named files or directories
2278 With no revision specified, revert the named files or directories
2277 to the contents they had in the parent of the working directory.
2279 to the contents they had in the parent of the working directory.
2278 This restores the contents of the affected files to an unmodified
2280 This restores the contents of the affected files to an unmodified
2279 state. If the working directory has two parents, you must
2281 state. If the working directory has two parents, you must
2280 explicitly specify the revision to revert to.
2282 explicitly specify the revision to revert to.
2281
2283
2282 Modified files are saved with a .orig suffix before reverting.
2284 Modified files are saved with a .orig suffix before reverting.
2283 To disable these backups, use --no-backup.
2285 To disable these backups, use --no-backup.
2284
2286
2285 Using the -r option, revert the given files or directories to their
2287 Using the -r option, revert the given files or directories to their
2286 contents as of a specific revision. This can be helpful to "roll
2288 contents as of a specific revision. This can be helpful to "roll
2287 back" some or all of a change that should not have been committed.
2289 back" some or all of a change that should not have been committed.
2288
2290
2289 Revert modifies the working directory. It does not commit any
2291 Revert modifies the working directory. It does not commit any
2290 changes, or change the parent of the working directory. If you
2292 changes, or change the parent of the working directory. If you
2291 revert to a revision other than the parent of the working
2293 revert to a revision other than the parent of the working
2292 directory, the reverted files will thus appear modified
2294 directory, the reverted files will thus appear modified
2293 afterwards.
2295 afterwards.
2294
2296
2295 If a file has been deleted, it is recreated. If the executable
2297 If a file has been deleted, it is recreated. If the executable
2296 mode of a file was changed, it is reset.
2298 mode of a file was changed, it is reset.
2297
2299
2298 If names are given, all files matching the names are reverted.
2300 If names are given, all files matching the names are reverted.
2299
2301
2300 If no arguments are given, no files are reverted.
2302 If no arguments are given, no files are reverted.
2301 """
2303 """
2302
2304
2303 if not pats and not opts['all']:
2305 if not pats and not opts['all']:
2304 raise util.Abort(_('no files or directories specified; '
2306 raise util.Abort(_('no files or directories specified; '
2305 'use --all to revert the whole repo'))
2307 'use --all to revert the whole repo'))
2306
2308
2307 parent, p2 = repo.dirstate.parents()
2309 parent, p2 = repo.dirstate.parents()
2308 if not opts['rev'] and p2 != nullid:
2310 if not opts['rev'] and p2 != nullid:
2309 raise util.Abort(_('uncommitted merge - please provide a '
2311 raise util.Abort(_('uncommitted merge - please provide a '
2310 'specific revision'))
2312 'specific revision'))
2311 node = repo.changectx(opts['rev']).node()
2313 node = repo.changectx(opts['rev']).node()
2312 mf = repo.manifest.read(repo.changelog.read(node)[0])
2314 mf = repo.manifest.read(repo.changelog.read(node)[0])
2313 if node == parent:
2315 if node == parent:
2314 pmf = mf
2316 pmf = mf
2315 else:
2317 else:
2316 pmf = None
2318 pmf = None
2317
2319
2318 wlock = repo.wlock()
2320 wlock = repo.wlock()
2319
2321
2320 # need all matching names in dirstate and manifest of target rev,
2322 # need all matching names in dirstate and manifest of target rev,
2321 # so have to walk both. do not print errors if files exist in one
2323 # so have to walk both. do not print errors if files exist in one
2322 # but not other.
2324 # but not other.
2323
2325
2324 names = {}
2326 names = {}
2325 target_only = {}
2327 target_only = {}
2326
2328
2327 # walk dirstate.
2329 # walk dirstate.
2328
2330
2329 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2331 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2330 badmatch=mf.has_key):
2332 badmatch=mf.has_key):
2331 names[abs] = (rel, exact)
2333 names[abs] = (rel, exact)
2332 if src == 'b':
2334 if src == 'b':
2333 target_only[abs] = True
2335 target_only[abs] = True
2334
2336
2335 # walk target manifest.
2337 # walk target manifest.
2336
2338
2337 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2339 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2338 badmatch=names.has_key):
2340 badmatch=names.has_key):
2339 if abs in names: continue
2341 if abs in names: continue
2340 names[abs] = (rel, exact)
2342 names[abs] = (rel, exact)
2341 target_only[abs] = True
2343 target_only[abs] = True
2342
2344
2343 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2345 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2344 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2346 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2345
2347
2346 revert = ([], _('reverting %s\n'))
2348 revert = ([], _('reverting %s\n'))
2347 add = ([], _('adding %s\n'))
2349 add = ([], _('adding %s\n'))
2348 remove = ([], _('removing %s\n'))
2350 remove = ([], _('removing %s\n'))
2349 forget = ([], _('forgetting %s\n'))
2351 forget = ([], _('forgetting %s\n'))
2350 undelete = ([], _('undeleting %s\n'))
2352 undelete = ([], _('undeleting %s\n'))
2351 update = {}
2353 update = {}
2352
2354
2353 disptable = (
2355 disptable = (
2354 # dispatch table:
2356 # dispatch table:
2355 # file state
2357 # file state
2356 # action if in target manifest
2358 # action if in target manifest
2357 # action if not in target manifest
2359 # action if not in target manifest
2358 # make backup if in target manifest
2360 # make backup if in target manifest
2359 # make backup if not in target manifest
2361 # make backup if not in target manifest
2360 (modified, revert, remove, True, True),
2362 (modified, revert, remove, True, True),
2361 (added, revert, forget, True, False),
2363 (added, revert, forget, True, False),
2362 (removed, undelete, None, False, False),
2364 (removed, undelete, None, False, False),
2363 (deleted, revert, remove, False, False),
2365 (deleted, revert, remove, False, False),
2364 (unknown, add, None, True, False),
2366 (unknown, add, None, True, False),
2365 (target_only, add, None, False, False),
2367 (target_only, add, None, False, False),
2366 )
2368 )
2367
2369
2368 entries = names.items()
2370 entries = names.items()
2369 entries.sort()
2371 entries.sort()
2370
2372
2371 for abs, (rel, exact) in entries:
2373 for abs, (rel, exact) in entries:
2372 mfentry = mf.get(abs)
2374 mfentry = mf.get(abs)
2373 def handle(xlist, dobackup):
2375 def handle(xlist, dobackup):
2374 xlist[0].append(abs)
2376 xlist[0].append(abs)
2375 update[abs] = 1
2377 update[abs] = 1
2376 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2378 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2377 bakname = "%s.orig" % rel
2379 bakname = "%s.orig" % rel
2378 ui.note(_('saving current version of %s as %s\n') %
2380 ui.note(_('saving current version of %s as %s\n') %
2379 (rel, bakname))
2381 (rel, bakname))
2380 if not opts.get('dry_run'):
2382 if not opts.get('dry_run'):
2381 shutil.copyfile(rel, bakname)
2383 shutil.copyfile(rel, bakname)
2382 shutil.copymode(rel, bakname)
2384 shutil.copymode(rel, bakname)
2383 if ui.verbose or not exact:
2385 if ui.verbose or not exact:
2384 ui.status(xlist[1] % rel)
2386 ui.status(xlist[1] % rel)
2385 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2387 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2386 if abs not in table: continue
2388 if abs not in table: continue
2387 # file has changed in dirstate
2389 # file has changed in dirstate
2388 if mfentry:
2390 if mfentry:
2389 handle(hitlist, backuphit)
2391 handle(hitlist, backuphit)
2390 elif misslist is not None:
2392 elif misslist is not None:
2391 handle(misslist, backupmiss)
2393 handle(misslist, backupmiss)
2392 else:
2394 else:
2393 if exact: ui.warn(_('file not managed: %s\n' % rel))
2395 if exact: ui.warn(_('file not managed: %s\n' % rel))
2394 break
2396 break
2395 else:
2397 else:
2396 # file has not changed in dirstate
2398 # file has not changed in dirstate
2397 if node == parent:
2399 if node == parent:
2398 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2400 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2399 continue
2401 continue
2400 if pmf is None:
2402 if pmf is None:
2401 # only need parent manifest in this unlikely case,
2403 # only need parent manifest in this unlikely case,
2402 # so do not read by default
2404 # so do not read by default
2403 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2405 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2404 if abs in pmf:
2406 if abs in pmf:
2405 if mfentry:
2407 if mfentry:
2406 # if version of file is same in parent and target
2408 # if version of file is same in parent and target
2407 # manifests, do nothing
2409 # manifests, do nothing
2408 if pmf[abs] != mfentry:
2410 if pmf[abs] != mfentry:
2409 handle(revert, False)
2411 handle(revert, False)
2410 else:
2412 else:
2411 handle(remove, False)
2413 handle(remove, False)
2412
2414
2413 if not opts.get('dry_run'):
2415 if not opts.get('dry_run'):
2414 repo.dirstate.forget(forget[0])
2416 repo.dirstate.forget(forget[0])
2415 r = hg.revert(repo, node, update.has_key, wlock)
2417 r = hg.revert(repo, node, update.has_key, wlock)
2416 repo.dirstate.update(add[0], 'a')
2418 repo.dirstate.update(add[0], 'a')
2417 repo.dirstate.update(undelete[0], 'n')
2419 repo.dirstate.update(undelete[0], 'n')
2418 repo.dirstate.update(remove[0], 'r')
2420 repo.dirstate.update(remove[0], 'r')
2419 return r
2421 return r
2420
2422
2421 def rollback(ui, repo):
2423 def rollback(ui, repo):
2422 """roll back the last transaction in this repository
2424 """roll back the last transaction in this repository
2423
2425
2424 Roll back the last transaction in this repository, restoring the
2426 Roll back the last transaction in this repository, restoring the
2425 project to its state prior to the transaction.
2427 project to its state prior to the transaction.
2426
2428
2427 Transactions are used to encapsulate the effects of all commands
2429 Transactions are used to encapsulate the effects of all commands
2428 that create new changesets or propagate existing changesets into a
2430 that create new changesets or propagate existing changesets into a
2429 repository. For example, the following commands are transactional,
2431 repository. For example, the following commands are transactional,
2430 and their effects can be rolled back:
2432 and their effects can be rolled back:
2431
2433
2432 commit
2434 commit
2433 import
2435 import
2434 pull
2436 pull
2435 push (with this repository as destination)
2437 push (with this repository as destination)
2436 unbundle
2438 unbundle
2437
2439
2438 This command should be used with care. There is only one level of
2440 This command should be used with care. There is only one level of
2439 rollback, and there is no way to undo a rollback.
2441 rollback, and there is no way to undo a rollback.
2440
2442
2441 This command is not intended for use on public repositories. Once
2443 This command is not intended for use on public repositories. Once
2442 changes are visible for pull by other users, rolling a transaction
2444 changes are visible for pull by other users, rolling a transaction
2443 back locally is ineffective (someone else may already have pulled
2445 back locally is ineffective (someone else may already have pulled
2444 the changes). Furthermore, a race is possible with readers of the
2446 the changes). Furthermore, a race is possible with readers of the
2445 repository; for example an in-progress pull from the repository
2447 repository; for example an in-progress pull from the repository
2446 may fail if a rollback is performed.
2448 may fail if a rollback is performed.
2447 """
2449 """
2448 repo.rollback()
2450 repo.rollback()
2449
2451
2450 def root(ui, repo):
2452 def root(ui, repo):
2451 """print the root (top) of the current working dir
2453 """print the root (top) of the current working dir
2452
2454
2453 Print the root directory of the current repository.
2455 Print the root directory of the current repository.
2454 """
2456 """
2455 ui.write(repo.root + "\n")
2457 ui.write(repo.root + "\n")
2456
2458
2457 def serve(ui, repo, **opts):
2459 def serve(ui, repo, **opts):
2458 """export the repository via HTTP
2460 """export the repository via HTTP
2459
2461
2460 Start a local HTTP repository browser and pull server.
2462 Start a local HTTP repository browser and pull server.
2461
2463
2462 By default, the server logs accesses to stdout and errors to
2464 By default, the server logs accesses to stdout and errors to
2463 stderr. Use the "-A" and "-E" options to log to files.
2465 stderr. Use the "-A" and "-E" options to log to files.
2464 """
2466 """
2465
2467
2466 if opts["stdio"]:
2468 if opts["stdio"]:
2467 if repo is None:
2469 if repo is None:
2468 raise hg.RepoError(_("There is no Mercurial repository here"
2470 raise hg.RepoError(_("There is no Mercurial repository here"
2469 " (.hg not found)"))
2471 " (.hg not found)"))
2470 s = sshserver.sshserver(ui, repo)
2472 s = sshserver.sshserver(ui, repo)
2471 s.serve_forever()
2473 s.serve_forever()
2472
2474
2473 optlist = ("name templates style address port ipv6"
2475 optlist = ("name templates style address port ipv6"
2474 " accesslog errorlog webdir_conf")
2476 " accesslog errorlog webdir_conf")
2475 for o in optlist.split():
2477 for o in optlist.split():
2476 if opts[o]:
2478 if opts[o]:
2477 ui.setconfig("web", o, str(opts[o]))
2479 ui.setconfig("web", o, str(opts[o]))
2478
2480
2479 if repo is None and not ui.config("web", "webdir_conf"):
2481 if repo is None and not ui.config("web", "webdir_conf"):
2480 raise hg.RepoError(_("There is no Mercurial repository here"
2482 raise hg.RepoError(_("There is no Mercurial repository here"
2481 " (.hg not found)"))
2483 " (.hg not found)"))
2482
2484
2483 if opts['daemon'] and not opts['daemon_pipefds']:
2485 if opts['daemon'] and not opts['daemon_pipefds']:
2484 rfd, wfd = os.pipe()
2486 rfd, wfd = os.pipe()
2485 args = sys.argv[:]
2487 args = sys.argv[:]
2486 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2488 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2487 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2489 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2488 args[0], args)
2490 args[0], args)
2489 os.close(wfd)
2491 os.close(wfd)
2490 os.read(rfd, 1)
2492 os.read(rfd, 1)
2491 os._exit(0)
2493 os._exit(0)
2492
2494
2493 try:
2495 try:
2494 httpd = hgweb.server.create_server(ui, repo)
2496 httpd = hgweb.server.create_server(ui, repo)
2495 except socket.error, inst:
2497 except socket.error, inst:
2496 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2498 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2497
2499
2498 if ui.verbose:
2500 if ui.verbose:
2499 addr, port = httpd.socket.getsockname()
2501 addr, port = httpd.socket.getsockname()
2500 if addr == '0.0.0.0':
2502 if addr == '0.0.0.0':
2501 addr = socket.gethostname()
2503 addr = socket.gethostname()
2502 else:
2504 else:
2503 try:
2505 try:
2504 addr = socket.gethostbyaddr(addr)[0]
2506 addr = socket.gethostbyaddr(addr)[0]
2505 except socket.error:
2507 except socket.error:
2506 pass
2508 pass
2507 if port != 80:
2509 if port != 80:
2508 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2510 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2509 else:
2511 else:
2510 ui.status(_('listening at http://%s/\n') % addr)
2512 ui.status(_('listening at http://%s/\n') % addr)
2511
2513
2512 if opts['pid_file']:
2514 if opts['pid_file']:
2513 fp = open(opts['pid_file'], 'w')
2515 fp = open(opts['pid_file'], 'w')
2514 fp.write(str(os.getpid()) + '\n')
2516 fp.write(str(os.getpid()) + '\n')
2515 fp.close()
2517 fp.close()
2516
2518
2517 if opts['daemon_pipefds']:
2519 if opts['daemon_pipefds']:
2518 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2520 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2519 os.close(rfd)
2521 os.close(rfd)
2520 os.write(wfd, 'y')
2522 os.write(wfd, 'y')
2521 os.close(wfd)
2523 os.close(wfd)
2522 sys.stdout.flush()
2524 sys.stdout.flush()
2523 sys.stderr.flush()
2525 sys.stderr.flush()
2524 fd = os.open(util.nulldev, os.O_RDWR)
2526 fd = os.open(util.nulldev, os.O_RDWR)
2525 if fd != 0: os.dup2(fd, 0)
2527 if fd != 0: os.dup2(fd, 0)
2526 if fd != 1: os.dup2(fd, 1)
2528 if fd != 1: os.dup2(fd, 1)
2527 if fd != 2: os.dup2(fd, 2)
2529 if fd != 2: os.dup2(fd, 2)
2528 if fd not in (0, 1, 2): os.close(fd)
2530 if fd not in (0, 1, 2): os.close(fd)
2529
2531
2530 httpd.serve_forever()
2532 httpd.serve_forever()
2531
2533
2532 def status(ui, repo, *pats, **opts):
2534 def status(ui, repo, *pats, **opts):
2533 """show changed files in the working directory
2535 """show changed files in the working directory
2534
2536
2535 Show status of files in the repository. If names are given, only
2537 Show status of files in the repository. If names are given, only
2536 files that match are shown. Files that are clean or ignored are
2538 files that match are shown. Files that are clean or ignored are
2537 not listed unless -c (clean), -i (ignored) or -A is given.
2539 not listed unless -c (clean), -i (ignored) or -A is given.
2538
2540
2539 The codes used to show the status of files are:
2541 The codes used to show the status of files are:
2540 M = modified
2542 M = modified
2541 A = added
2543 A = added
2542 R = removed
2544 R = removed
2543 C = clean
2545 C = clean
2544 ! = deleted, but still tracked
2546 ! = deleted, but still tracked
2545 ? = not tracked
2547 ? = not tracked
2546 I = ignored (not shown by default)
2548 I = ignored (not shown by default)
2547 = the previously added file was copied from here
2549 = the previously added file was copied from here
2548 """
2550 """
2549
2551
2550 all = opts['all']
2552 all = opts['all']
2551
2553
2552 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2554 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2553 cwd = (pats and repo.getcwd()) or ''
2555 cwd = (pats and repo.getcwd()) or ''
2554 modified, added, removed, deleted, unknown, ignored, clean = [
2556 modified, added, removed, deleted, unknown, ignored, clean = [
2555 [util.pathto(cwd, x) for x in n]
2557 [util.pathto(cwd, x) for x in n]
2556 for n in repo.status(files=files, match=matchfn,
2558 for n in repo.status(files=files, match=matchfn,
2557 list_ignored=all or opts['ignored'],
2559 list_ignored=all or opts['ignored'],
2558 list_clean=all or opts['clean'])]
2560 list_clean=all or opts['clean'])]
2559
2561
2560 changetypes = (('modified', 'M', modified),
2562 changetypes = (('modified', 'M', modified),
2561 ('added', 'A', added),
2563 ('added', 'A', added),
2562 ('removed', 'R', removed),
2564 ('removed', 'R', removed),
2563 ('deleted', '!', deleted),
2565 ('deleted', '!', deleted),
2564 ('unknown', '?', unknown),
2566 ('unknown', '?', unknown),
2565 ('ignored', 'I', ignored))
2567 ('ignored', 'I', ignored))
2566
2568
2567 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2569 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2568
2570
2569 end = opts['print0'] and '\0' or '\n'
2571 end = opts['print0'] and '\0' or '\n'
2570
2572
2571 for opt, char, changes in ([ct for ct in explicit_changetypes
2573 for opt, char, changes in ([ct for ct in explicit_changetypes
2572 if all or opts[ct[0]]]
2574 if all or opts[ct[0]]]
2573 or changetypes):
2575 or changetypes):
2574 if opts['no_status']:
2576 if opts['no_status']:
2575 format = "%%s%s" % end
2577 format = "%%s%s" % end
2576 else:
2578 else:
2577 format = "%s %%s%s" % (char, end)
2579 format = "%s %%s%s" % (char, end)
2578
2580
2579 for f in changes:
2581 for f in changes:
2580 ui.write(format % f)
2582 ui.write(format % f)
2581 if ((all or opts.get('copies')) and not opts.get('no_status')):
2583 if ((all or opts.get('copies')) and not opts.get('no_status')):
2582 copied = repo.dirstate.copied(f)
2584 copied = repo.dirstate.copied(f)
2583 if copied:
2585 if copied:
2584 ui.write(' %s%s' % (copied, end))
2586 ui.write(' %s%s' % (copied, end))
2585
2587
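# --- editorial sketch (added for clarity, not part of the original module) ---
# The selection logic above, read in isolation: with -A/--all or any explicit
# per-type flag the user gets exactly the requested change types (including
# 'clean'), otherwise the default set without clean/ignored entries is used.
# The helper below mirrors that expression over plain dictionaries; the keys
# of 'opts' are the long option names from the status entry in the command
# table further down.
def _select_changetypes_sketch(opts, changetypes, explicit_changetypes):
    show_all = opts.get('all')
    return ([ct for ct in explicit_changetypes if show_all or opts.get(ct[0])]
            or changetypes)

# e.g. with opts={'clean': True} only the ('clean', 'C', ...) entry survives;
# with no flags at all, the full default 'changetypes' tuple is returned.
# ------------------------------------------------------------------------------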
2586 def tag(ui, repo, name, rev_=None, **opts):
2588 def tag(ui, repo, name, rev_=None, **opts):
2587 """add a tag for the current tip or a given revision
2589 """add a tag for the current tip or a given revision
2588
2590
2589 Name a particular revision using <name>.
2591 Name a particular revision using <name>.
2590
2592
2591 Tags are used to name particular revisions of the repository and are
2593 Tags are used to name particular revisions of the repository and are
2592 very useful to compare different revisions, to go back to significant
2594 very useful to compare different revisions, to go back to significant
2593 earlier versions, or to mark branch points as releases, etc.
2595 earlier versions, or to mark branch points as releases, etc.
2594
2596
2595 If no revision is given, the parent of the working directory is used.
2597 If no revision is given, the parent of the working directory is used.
2596
2598
2597 To facilitate version control, distribution, and merging of tags,
2599 To facilitate version control, distribution, and merging of tags,
2598 they are stored as a file named ".hgtags" which is managed
2600 they are stored as a file named ".hgtags" which is managed
2599 similarly to other project files and can be hand-edited if
2601 similarly to other project files and can be hand-edited if
2600 necessary. The file '.hg/localtags' is used for local tags (not
2602 necessary. The file '.hg/localtags' is used for local tags (not
2601 shared among repositories).
2603 shared among repositories).
2602 """
2604 """
2603 if name in ['tip', '.']:
2605 if name in ['tip', '.']:
2604 raise util.Abort(_("the name '%s' is reserved") % name)
2606 raise util.Abort(_("the name '%s' is reserved") % name)
2605 if rev_ is not None:
2607 if rev_ is not None:
2606 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2608 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2607 "please use 'hg tag [-r REV] NAME' instead\n"))
2609 "please use 'hg tag [-r REV] NAME' instead\n"))
2608 if opts['rev']:
2610 if opts['rev']:
2609 raise util.Abort(_("use only one form to specify the revision"))
2611 raise util.Abort(_("use only one form to specify the revision"))
2610 if opts['rev']:
2612 if opts['rev']:
2611 rev_ = opts['rev']
2613 rev_ = opts['rev']
2612 if not rev_ and repo.dirstate.parents()[1] != nullid:
2614 if not rev_ and repo.dirstate.parents()[1] != nullid:
2613 raise util.Abort(_('uncommitted merge - please provide a '
2615 raise util.Abort(_('uncommitted merge - please provide a '
2614 'specific revision'))
2616 'specific revision'))
2615 r = repo.changectx(rev_).node()
2617 r = repo.changectx(rev_).node()
2616
2618
2617 message = opts['message']
2619 message = opts['message']
2618 if not message:
2620 if not message:
2619 message = _('Added tag %s for changeset %s') % (name, short(r))
2621 message = _('Added tag %s for changeset %s') % (name, short(r))
2620
2622
2621 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2623 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2622
2624
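# --- editorial sketch (added, not original code) ------------------------------
# The revision-selection rules above restated over plain values: a positional
# REV is still accepted but deprecated, -r/--rev is preferred, giving both
# aborts, and with neither the single working directory parent is used (an
# uncommitted merge, i.e. two parents, aborts).  The 'parents' pair below is a
# hypothetical stand-in for repo.dirstate.parents().
def _pick_tag_rev_sketch(positional, optrev, parents):
    if positional and optrev:
        raise ValueError("use only one form to specify the revision")
    rev = optrev or positional
    if not rev:
        if parents[1] is not None:   # stand-in for the nullid comparison
            raise ValueError("uncommitted merge - please provide a revision")
        rev = parents[0]
    return rev
# --------------------------------------------------------------------------------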
2623 def tags(ui, repo):
2625 def tags(ui, repo):
2624 """list repository tags
2626 """list repository tags
2625
2627
2626 List the repository tags.
2628 List the repository tags.
2627
2629
2628 This lists both regular and local tags.
2630 This lists both regular and local tags.
2629 """
2631 """
2630
2632
2631 l = repo.tagslist()
2633 l = repo.tagslist()
2632 l.reverse()
2634 l.reverse()
2633 hexfunc = ui.debugflag and hex or short
2635 hexfunc = ui.debugflag and hex or short
2634 for t, n in l:
2636 for t, n in l:
2635 try:
2637 try:
2636 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2638 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2637 except KeyError:
2639 except KeyError:
2638 r = " ?:?"
2640 r = " ?:?"
2639 if ui.quiet:
2641 if ui.quiet:
2640 ui.write("%s\n" % t)
2642 ui.write("%s\n" % t)
2641 else:
2643 else:
2642 ui.write("%-30s %s\n" % (t, r))
2644 ui.write("%-30s %s\n" % (t, r))
2643
2645
2644 def tip(ui, repo, **opts):
2646 def tip(ui, repo, **opts):
2645 """show the tip revision
2647 """show the tip revision
2646
2648
2647 Show the tip revision.
2649 Show the tip revision.
2648 """
2650 """
2649 n = repo.changelog.tip()
2651 n = repo.changelog.tip()
2650 br = None
2652 br = None
2651 if opts['branches']:
2653 if opts['branches']:
2652 br = repo.branchlookup([n])
2654 br = repo.branchlookup([n])
2653 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2655 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2654 if opts['patch']:
2656 if opts['patch']:
2655 patch.diff(repo, repo.changelog.parents(n)[0], n)
2657 patch.diff(repo, repo.changelog.parents(n)[0], n)
2656
2658
2657 def unbundle(ui, repo, fname, **opts):
2659 def unbundle(ui, repo, fname, **opts):
2658 """apply a changegroup file
2660 """apply a changegroup file
2659
2661
2660 Apply a compressed changegroup file generated by the bundle
2662 Apply a compressed changegroup file generated by the bundle
2661 command.
2663 command.
2662 """
2664 """
2663 f = urllib.urlopen(fname)
2665 f = urllib.urlopen(fname)
2664
2666
2665 header = f.read(6)
2667 header = f.read(6)
2666 if not header.startswith("HG"):
2668 if not header.startswith("HG"):
2667 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2669 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2668 elif not header.startswith("HG10"):
2670 elif not header.startswith("HG10"):
2669 raise util.Abort(_("%s: unknown bundle version") % fname)
2671 raise util.Abort(_("%s: unknown bundle version") % fname)
2670 elif header == "HG10BZ":
2672 elif header == "HG10BZ":
2671 def generator(f):
2673 def generator(f):
2672 zd = bz2.BZ2Decompressor()
2674 zd = bz2.BZ2Decompressor()
2673 zd.decompress("BZ")
2675 zd.decompress("BZ")
2674 for chunk in f:
2676 for chunk in f:
2675 yield zd.decompress(chunk)
2677 yield zd.decompress(chunk)
2676 elif header == "HG10UN":
2678 elif header == "HG10UN":
2677 def generator(f):
2679 def generator(f):
2678 for chunk in f:
2680 for chunk in f:
2679 yield chunk
2681 yield chunk
2680 else:
2682 else:
2681 raise util.Abort(_("%s: unknown bundle compression type")
2683 raise util.Abort(_("%s: unknown bundle compression type")
2682 % fname)
2684 % fname)
2683 gen = generator(util.filechunkiter(f, 4096))
2685 gen = generator(util.filechunkiter(f, 4096))
2684 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2686 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2685 'bundle:' + fname)
2687 'bundle:' + fname)
2686 return postincoming(ui, repo, modheads, opts['update'])
2688 return postincoming(ui, repo, modheads, opts['update'])
2687
2689
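# --- editorial sketch (added, not original code) ------------------------------
# The header dispatch above in one self-contained function: a bundle file
# starts with a 6-byte magic, "HG10UN" for an uncompressed changegroup or
# "HG10BZ" for a bzip2 stream whose leading "BZ" bytes were consumed as part
# of the magic and must be fed back to the decompressor.  The real command
# streams chunks; this sketch reads the file in one go.
def _read_bundle_sketch(fileobj):
    import bz2
    header = fileobj.read(6)
    if not header.startswith("HG"):
        raise ValueError("not a Mercurial bundle file")
    if not header.startswith("HG10"):
        raise ValueError("unknown bundle version")
    if header == "HG10UN":
        return fileobj.read()
    if header == "HG10BZ":
        zd = bz2.BZ2Decompressor()
        return zd.decompress("BZ" + fileobj.read())
    raise ValueError("unknown bundle compression type")
# --------------------------------------------------------------------------------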
2688 def undo(ui, repo):
2690 def undo(ui, repo):
2689 """undo the last commit or pull (DEPRECATED)
2691 """undo the last commit or pull (DEPRECATED)
2690
2692
2691 (DEPRECATED)
2693 (DEPRECATED)
2692 This command is now deprecated and will be removed in a future
2694 This command is now deprecated and will be removed in a future
2693 release. Please use the rollback command instead. For usage
2695 release. Please use the rollback command instead. For usage
2694 instructions, see the rollback command.
2696 instructions, see the rollback command.
2695 """
2697 """
2696 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2698 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2697 repo.rollback()
2699 repo.rollback()
2698
2700
2699 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2701 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2700 branch=None):
2702 branch=None):
2701 """update or merge working directory
2703 """update or merge working directory
2702
2704
2703 Update the working directory to the specified revision.
2705 Update the working directory to the specified revision.
2704
2706
2705 If there are no outstanding changes in the working directory and
2707 If there are no outstanding changes in the working directory and
2706 there is a linear relationship between the current version and the
2708 there is a linear relationship between the current version and the
2707 requested version, the result is the requested version.
2709 requested version, the result is the requested version.
2708
2710
2709 To merge the working directory with another revision, use the
2711 To merge the working directory with another revision, use the
2710 merge command.
2712 merge command.
2711
2713
2712 By default, update will refuse to run if doing so would require
2714 By default, update will refuse to run if doing so would require
2713 merging or discarding local changes.
2715 merging or discarding local changes.
2714 """
2716 """
2715 node = _lookup(repo, node, branch)
2717 node = _lookup(repo, node, branch)
2716 if merge:
2718 if merge:
2717 ui.warn(_('(the -m/--merge option is deprecated; '
2719 ui.warn(_('(the -m/--merge option is deprecated; '
2718 'use the merge command instead)\n'))
2720 'use the merge command instead)\n'))
2719 return hg.merge(repo, node, force=force)
2721 return hg.merge(repo, node, force=force)
2720 elif clean:
2722 elif clean:
2721 return hg.clean(repo, node)
2723 return hg.clean(repo, node)
2722 else:
2724 else:
2723 return hg.update(repo, node)
2725 return hg.update(repo, node)
2724
2726
2725 def _lookup(repo, node, branch=None):
2727 def _lookup(repo, node, branch=None):
2726 if branch:
2728 if branch:
2727 br = repo.branchlookup(branch=branch)
2729 br = repo.branchlookup(branch=branch)
2728 found = []
2730 found = []
2729 for x in br:
2731 for x in br:
2730 if branch in br[x]:
2732 if branch in br[x]:
2731 found.append(x)
2733 found.append(x)
2732 if len(found) > 1:
2734 if len(found) > 1:
2733 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2735 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2734 for x in found:
2736 for x in found:
2735 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2737 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2736 raise util.Abort("")
2738 raise util.Abort("")
2737 if len(found) == 1:
2739 if len(found) == 1:
2738 node = found[0]
2740 node = found[0]
2739 repo.ui.warn(_("Using head %s for branch %s\n")
2741 repo.ui.warn(_("Using head %s for branch %s\n")
2740 % (short(node), branch))
2742 % (short(node), branch))
2741 else:
2743 else:
2742 raise util.Abort(_("branch %s not found") % branch)
2744 raise util.Abort(_("branch %s not found") % branch)
2743 else:
2745 else:
2744 node = node and repo.lookup(node) or repo.changelog.tip()
2746 node = node and repo.lookup(node) or repo.changelog.tip()
2745 return node
2747 return node
2746
2748
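# --- editorial sketch (added, not original code) ------------------------------
# Branch resolution above, over plain data: the input is assumed to have the
# shape of a branchlookup() result, a mapping of head nodes to the list of
# branch names they carry; exactly one matching head is required.
def _branch_head_sketch(br, branch):
    found = [node for node, names in br.items() if branch in names]
    if not found:
        raise KeyError("branch %s not found" % branch)
    if len(found) > 1:
        raise ValueError("found multiple heads for %s: %r" % (branch, found))
    return found[0]

# _branch_head_sketch({"a1": ["stable"], "b2": ["default"]}, "stable") -> "a1"
# --------------------------------------------------------------------------------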
2747 def verify(ui, repo):
2749 def verify(ui, repo):
2748 """verify the integrity of the repository
2750 """verify the integrity of the repository
2749
2751
2750 Verify the integrity of the current repository.
2752 Verify the integrity of the current repository.
2751
2753
2752 This will perform an extensive check of the repository's
2754 This will perform an extensive check of the repository's
2753 integrity, validating the hashes and checksums of each entry in
2755 integrity, validating the hashes and checksums of each entry in
2754 the changelog, manifest, and tracked files, as well as the
2756 the changelog, manifest, and tracked files, as well as the
2755 integrity of their crosslinks and indices.
2757 integrity of their crosslinks and indices.
2756 """
2758 """
2757 return hg.verify(repo)
2759 return hg.verify(repo)
2758
2760
2759 # Command options and aliases are listed here, alphabetically
2761 # Command options and aliases are listed here, alphabetically
2760
2762
2761 globalopts = [
2763 globalopts = [
2762 ('R', 'repository', '',
2764 ('R', 'repository', '',
2763 _('repository root directory or symbolic path name')),
2765 _('repository root directory or symbolic path name')),
2764 ('', 'cwd', '', _('change working directory')),
2766 ('', 'cwd', '', _('change working directory')),
2765 ('y', 'noninteractive', None,
2767 ('y', 'noninteractive', None,
2766 _('do not prompt, assume \'yes\' for any required answers')),
2768 _('do not prompt, assume \'yes\' for any required answers')),
2767 ('q', 'quiet', None, _('suppress output')),
2769 ('q', 'quiet', None, _('suppress output')),
2768 ('v', 'verbose', None, _('enable additional output')),
2770 ('v', 'verbose', None, _('enable additional output')),
2769 ('', 'config', [], _('set/override config option')),
2771 ('', 'config', [], _('set/override config option')),
2770 ('', 'debug', None, _('enable debugging output')),
2772 ('', 'debug', None, _('enable debugging output')),
2771 ('', 'debugger', None, _('start debugger')),
2773 ('', 'debugger', None, _('start debugger')),
2772 ('', 'lsprof', None, _('print improved command execution profile')),
2774 ('', 'lsprof', None, _('print improved command execution profile')),
2773 ('', 'traceback', None, _('print traceback on exception')),
2775 ('', 'traceback', None, _('print traceback on exception')),
2774 ('', 'time', None, _('time how long the command takes')),
2776 ('', 'time', None, _('time how long the command takes')),
2775 ('', 'profile', None, _('print command execution profile')),
2777 ('', 'profile', None, _('print command execution profile')),
2776 ('', 'version', None, _('output version information and exit')),
2778 ('', 'version', None, _('output version information and exit')),
2777 ('h', 'help', None, _('display help and exit')),
2779 ('h', 'help', None, _('display help and exit')),
2778 ]
2780 ]
2779
2781
2780 dryrunopts = [('n', 'dry-run', None,
2782 dryrunopts = [('n', 'dry-run', None,
2781 _('do not perform actions, just print output'))]
2783 _('do not perform actions, just print output'))]
2782
2784
2783 remoteopts = [
2785 remoteopts = [
2784 ('e', 'ssh', '', _('specify ssh command to use')),
2786 ('e', 'ssh', '', _('specify ssh command to use')),
2785 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2787 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2786 ]
2788 ]
2787
2789
2788 walkopts = [
2790 walkopts = [
2789 ('I', 'include', [], _('include names matching the given patterns')),
2791 ('I', 'include', [], _('include names matching the given patterns')),
2790 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2792 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2791 ]
2793 ]
2792
2794
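# --- editorial note (added, not original code) --------------------------------
# Every entry in the option lists above and in the table below is a 4-tuple of
# (short flag, long name, default, help text); the type of the default (None,
# '', [], or an integer) tells the option parser whether the flag is boolean,
# takes a string, may be repeated, or takes a number.  A minimal getopt-based
# reading of that convention might look like the sketch below (fancyopts
# itself is not reproduced here, and the real parser also maps '-' in long
# names to '_' for the keys the commands read):
def _parse_opts_sketch(args, opttable):
    import getopt
    state, shortstr, longnames, byflag = {}, "", [], {}
    for short, name, default, helptext in opttable:
        state[name] = default
        hasarg = default is not None
        if short:
            shortstr += short + (hasarg and ":" or "")
            byflag["-" + short] = (name, hasarg, default)
        longnames.append(name + (hasarg and "=" or ""))
        byflag["--" + name] = (name, hasarg, default)
    flags, rest = getopt.getopt(args, shortstr, longnames)
    for flag, value in flags:
        name, hasarg, default = byflag[flag]
        if not hasarg:
            state[name] = True
        elif isinstance(default, list):
            state[name] = state[name] + [value]   # do not mutate the default
        elif isinstance(default, int):
            state[name] = int(value)
        else:
            state[name] = value
    return state, rest
# --------------------------------------------------------------------------------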
2793 table = {
2795 table = {
2794 "^add":
2796 "^add":
2795 (add,
2797 (add,
2796 walkopts + dryrunopts,
2798 walkopts + dryrunopts,
2797 _('hg add [OPTION]... [FILE]...')),
2799 _('hg add [OPTION]... [FILE]...')),
2798 "addremove":
2800 "addremove":
2799 (addremove,
2801 (addremove,
2800 [('s', 'similarity', '',
2802 [('s', 'similarity', '',
2801 _('guess renamed files by similarity (0<=s<=100)')),
2803 _('guess renamed files by similarity (0<=s<=100)')),
2802 ] + walkopts + dryrunopts,
2804 ] + walkopts + dryrunopts,
2803 _('hg addremove [OPTION]... [FILE]...')),
2805 _('hg addremove [OPTION]... [FILE]...')),
2804 "^annotate":
2806 "^annotate":
2805 (annotate,
2807 (annotate,
2806 [('r', 'rev', '', _('annotate the specified revision')),
2808 [('r', 'rev', '', _('annotate the specified revision')),
2807 ('f', 'follow', None, _('follow file copies and renames')),
2809 ('f', 'follow', None, _('follow file copies and renames')),
2808 ('a', 'text', None, _('treat all files as text')),
2810 ('a', 'text', None, _('treat all files as text')),
2809 ('u', 'user', None, _('list the author')),
2811 ('u', 'user', None, _('list the author')),
2810 ('d', 'date', None, _('list the date')),
2812 ('d', 'date', None, _('list the date')),
2811 ('n', 'number', None, _('list the revision number (default)')),
2813 ('n', 'number', None, _('list the revision number (default)')),
2812 ('c', 'changeset', None, _('list the changeset')),
2814 ('c', 'changeset', None, _('list the changeset')),
2813 ] + walkopts,
2815 ] + walkopts,
2814 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2816 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2815 "archive":
2817 "archive":
2816 (archive,
2818 (archive,
2817 [('', 'no-decode', None, _('do not pass files through decoders')),
2819 [('', 'no-decode', None, _('do not pass files through decoders')),
2818 ('p', 'prefix', '', _('directory prefix for files in archive')),
2820 ('p', 'prefix', '', _('directory prefix for files in archive')),
2819 ('r', 'rev', '', _('revision to distribute')),
2821 ('r', 'rev', '', _('revision to distribute')),
2820 ('t', 'type', '', _('type of distribution to create')),
2822 ('t', 'type', '', _('type of distribution to create')),
2821 ] + walkopts,
2823 ] + walkopts,
2822 _('hg archive [OPTION]... DEST')),
2824 _('hg archive [OPTION]... DEST')),
2823 "backout":
2825 "backout":
2824 (backout,
2826 (backout,
2825 [('', 'merge', None,
2827 [('', 'merge', None,
2826 _('merge with old dirstate parent after backout')),
2828 _('merge with old dirstate parent after backout')),
2827 ('m', 'message', '', _('use <text> as commit message')),
2829 ('m', 'message', '', _('use <text> as commit message')),
2828 ('l', 'logfile', '', _('read commit message from <file>')),
2830 ('l', 'logfile', '', _('read commit message from <file>')),
2829 ('d', 'date', '', _('record datecode as commit date')),
2831 ('d', 'date', '', _('record datecode as commit date')),
2830 ('', 'parent', '', _('parent to choose when backing out merge')),
2832 ('', 'parent', '', _('parent to choose when backing out merge')),
2831 ('u', 'user', '', _('record user as committer')),
2833 ('u', 'user', '', _('record user as committer')),
2832 ] + walkopts,
2834 ] + walkopts,
2833 _('hg backout [OPTION]... REV')),
2835 _('hg backout [OPTION]... REV')),
2834 "bundle":
2836 "bundle":
2835 (bundle,
2837 (bundle,
2836 [('f', 'force', None,
2838 [('f', 'force', None,
2837 _('run even when remote repository is unrelated')),
2839 _('run even when remote repository is unrelated')),
2838 ('r', 'rev', [],
2840 ('r', 'rev', [],
2839 _('a changeset you would like to bundle')),
2841 _('a changeset you would like to bundle')),
2840 ('', 'base', [],
2842 ('', 'base', [],
2841 _('a base changeset to specify instead of a destination')),
2843 _('a base changeset to specify instead of a destination')),
2842 ] + remoteopts,
2844 ] + remoteopts,
2843 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2845 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2844 "cat":
2846 "cat":
2845 (cat,
2847 (cat,
2846 [('o', 'output', '', _('print output to file with formatted name')),
2848 [('o', 'output', '', _('print output to file with formatted name')),
2847 ('r', 'rev', '', _('print the given revision')),
2849 ('r', 'rev', '', _('print the given revision')),
2848 ] + walkopts,
2850 ] + walkopts,
2849 _('hg cat [OPTION]... FILE...')),
2851 _('hg cat [OPTION]... FILE...')),
2850 "^clone":
2852 "^clone":
2851 (clone,
2853 (clone,
2852 [('U', 'noupdate', None, _('do not update the new working directory')),
2854 [('U', 'noupdate', None, _('do not update the new working directory')),
2853 ('r', 'rev', [],
2855 ('r', 'rev', [],
2854 _('a changeset you would like to have after cloning')),
2856 _('a changeset you would like to have after cloning')),
2855 ('', 'pull', None, _('use pull protocol to copy metadata')),
2857 ('', 'pull', None, _('use pull protocol to copy metadata')),
2856 ('', 'uncompressed', None,
2858 ('', 'uncompressed', None,
2857 _('use uncompressed transfer (fast over LAN)')),
2859 _('use uncompressed transfer (fast over LAN)')),
2858 ] + remoteopts,
2860 ] + remoteopts,
2859 _('hg clone [OPTION]... SOURCE [DEST]')),
2861 _('hg clone [OPTION]... SOURCE [DEST]')),
2860 "^commit|ci":
2862 "^commit|ci":
2861 (commit,
2863 (commit,
2862 [('A', 'addremove', None,
2864 [('A', 'addremove', None,
2863 _('mark new/missing files as added/removed before committing')),
2865 _('mark new/missing files as added/removed before committing')),
2864 ('m', 'message', '', _('use <text> as commit message')),
2866 ('m', 'message', '', _('use <text> as commit message')),
2865 ('l', 'logfile', '', _('read the commit message from <file>')),
2867 ('l', 'logfile', '', _('read the commit message from <file>')),
2866 ('d', 'date', '', _('record datecode as commit date')),
2868 ('d', 'date', '', _('record datecode as commit date')),
2867 ('u', 'user', '', _('record user as committer')),
2869 ('u', 'user', '', _('record user as committer')),
2868 ] + walkopts,
2870 ] + walkopts,
2869 _('hg commit [OPTION]... [FILE]...')),
2871 _('hg commit [OPTION]... [FILE]...')),
2870 "copy|cp":
2872 "copy|cp":
2871 (copy,
2873 (copy,
2872 [('A', 'after', None, _('record a copy that has already occurred')),
2874 [('A', 'after', None, _('record a copy that has already occurred')),
2873 ('f', 'force', None,
2875 ('f', 'force', None,
2874 _('forcibly copy over an existing managed file')),
2876 _('forcibly copy over an existing managed file')),
2875 ] + walkopts + dryrunopts,
2877 ] + walkopts + dryrunopts,
2876 _('hg copy [OPTION]... [SOURCE]... DEST')),
2878 _('hg copy [OPTION]... [SOURCE]... DEST')),
2877 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2879 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2878 "debugcomplete":
2880 "debugcomplete":
2879 (debugcomplete,
2881 (debugcomplete,
2880 [('o', 'options', None, _('show the command options'))],
2882 [('o', 'options', None, _('show the command options'))],
2881 _('debugcomplete [-o] CMD')),
2883 _('debugcomplete [-o] CMD')),
2882 "debugrebuildstate":
2884 "debugrebuildstate":
2883 (debugrebuildstate,
2885 (debugrebuildstate,
2884 [('r', 'rev', '', _('revision to rebuild to'))],
2886 [('r', 'rev', '', _('revision to rebuild to'))],
2885 _('debugrebuildstate [-r REV] [REV]')),
2887 _('debugrebuildstate [-r REV] [REV]')),
2886 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2888 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2887 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2889 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2888 "debugstate": (debugstate, [], _('debugstate')),
2890 "debugstate": (debugstate, [], _('debugstate')),
2889 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2891 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2890 "debugindex": (debugindex, [], _('debugindex FILE')),
2892 "debugindex": (debugindex, [], _('debugindex FILE')),
2891 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2893 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2892 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2894 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2893 "debugwalk":
2895 "debugwalk":
2894 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2896 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2895 "^diff":
2897 "^diff":
2896 (diff,
2898 (diff,
2897 [('r', 'rev', [], _('revision')),
2899 [('r', 'rev', [], _('revision')),
2898 ('a', 'text', None, _('treat all files as text')),
2900 ('a', 'text', None, _('treat all files as text')),
2899 ('p', 'show-function', None,
2901 ('p', 'show-function', None,
2900 _('show which function each change is in')),
2902 _('show which function each change is in')),
2901 ('g', 'git', None, _('use git extended diff format')),
2903 ('g', 'git', None, _('use git extended diff format')),
2902 ('', 'nodates', None, _("don't include dates in diff headers")),
2904 ('', 'nodates', None, _("don't include dates in diff headers")),
2903 ('w', 'ignore-all-space', None,
2905 ('w', 'ignore-all-space', None,
2904 _('ignore white space when comparing lines')),
2906 _('ignore white space when comparing lines')),
2905 ('b', 'ignore-space-change', None,
2907 ('b', 'ignore-space-change', None,
2906 _('ignore changes in the amount of white space')),
2908 _('ignore changes in the amount of white space')),
2907 ('B', 'ignore-blank-lines', None,
2909 ('B', 'ignore-blank-lines', None,
2908 _('ignore changes whose lines are all blank')),
2910 _('ignore changes whose lines are all blank')),
2909 ] + walkopts,
2911 ] + walkopts,
2910 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2912 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2911 "^export":
2913 "^export":
2912 (export,
2914 (export,
2913 [('o', 'output', '', _('print output to file with formatted name')),
2915 [('o', 'output', '', _('print output to file with formatted name')),
2914 ('a', 'text', None, _('treat all files as text')),
2916 ('a', 'text', None, _('treat all files as text')),
2915 ('g', 'git', None, _('use git extended diff format')),
2917 ('g', 'git', None, _('use git extended diff format')),
2916 ('', 'nodates', None, _("don't include dates in diff headers")),
2918 ('', 'nodates', None, _("don't include dates in diff headers")),
2917 ('', 'switch-parent', None, _('diff against the second parent'))],
2919 ('', 'switch-parent', None, _('diff against the second parent'))],
2918 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2920 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2919 "debugforget|forget":
2921 "debugforget|forget":
2920 (forget, walkopts, _('hg forget [OPTION]... FILE...')),
2922 (forget, walkopts, _('hg forget [OPTION]... FILE...')),
2921 "grep":
2923 "grep":
2922 (grep,
2924 (grep,
2923 [('0', 'print0', None, _('end fields with NUL')),
2925 [('0', 'print0', None, _('end fields with NUL')),
2924 ('', 'all', None, _('print all revisions that match')),
2926 ('', 'all', None, _('print all revisions that match')),
2925 ('f', 'follow', None,
2927 ('f', 'follow', None,
2926 _('follow changeset history, or file history across copies and renames')),
2928 _('follow changeset history, or file history across copies and renames')),
2927 ('i', 'ignore-case', None, _('ignore case when matching')),
2929 ('i', 'ignore-case', None, _('ignore case when matching')),
2928 ('l', 'files-with-matches', None,
2930 ('l', 'files-with-matches', None,
2929 _('print only filenames and revs that match')),
2931 _('print only filenames and revs that match')),
2930 ('n', 'line-number', None, _('print matching line numbers')),
2932 ('n', 'line-number', None, _('print matching line numbers')),
2931 ('r', 'rev', [], _('search in given revision range')),
2933 ('r', 'rev', [], _('search in given revision range')),
2932 ('u', 'user', None, _('print user who committed change')),
2934 ('u', 'user', None, _('print user who committed change')),
2933 ] + walkopts,
2935 ] + walkopts,
2934 _('hg grep [OPTION]... PATTERN [FILE]...')),
2936 _('hg grep [OPTION]... PATTERN [FILE]...')),
2935 "heads":
2937 "heads":
2936 (heads,
2938 (heads,
2937 [('b', 'branches', None, _('show branches')),
2939 [('b', 'branches', None, _('show branches')),
2938 ('', 'style', '', _('display using template map file')),
2940 ('', 'style', '', _('display using template map file')),
2939 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2941 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2940 ('', 'template', '', _('display with template'))],
2942 ('', 'template', '', _('display with template'))],
2941 _('hg heads [-b] [-r <rev>]')),
2943 _('hg heads [-b] [-r <rev>]')),
2942 "help": (help_, [], _('hg help [COMMAND]')),
2944 "help": (help_, [], _('hg help [COMMAND]')),
2943 "identify|id": (identify, [], _('hg identify')),
2945 "identify|id": (identify, [], _('hg identify')),
2944 "import|patch":
2946 "import|patch":
2945 (import_,
2947 (import_,
2946 [('p', 'strip', 1,
2948 [('p', 'strip', 1,
2947 _('directory strip option for patch. This has the same\n'
2949 _('directory strip option for patch. This has the same\n'
2948 'meaning as the corresponding patch option')),
2950 'meaning as the corresponding patch option')),
2949 ('m', 'message', '', _('use <text> as commit message')),
2951 ('m', 'message', '', _('use <text> as commit message')),
2950 ('b', 'base', '', _('base path')),
2952 ('b', 'base', '', _('base path')),
2951 ('f', 'force', None,
2953 ('f', 'force', None,
2952 _('skip check for outstanding uncommitted changes'))],
2954 _('skip check for outstanding uncommitted changes'))],
2953 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2955 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2954 "incoming|in": (incoming,
2956 "incoming|in": (incoming,
2955 [('M', 'no-merges', None, _('do not show merges')),
2957 [('M', 'no-merges', None, _('do not show merges')),
2956 ('f', 'force', None,
2958 ('f', 'force', None,
2957 _('run even when remote repository is unrelated')),
2959 _('run even when remote repository is unrelated')),
2958 ('', 'style', '', _('display using template map file')),
2960 ('', 'style', '', _('display using template map file')),
2959 ('n', 'newest-first', None, _('show newest record first')),
2961 ('n', 'newest-first', None, _('show newest record first')),
2960 ('', 'bundle', '', _('file to store the bundles into')),
2962 ('', 'bundle', '', _('file to store the bundles into')),
2961 ('p', 'patch', None, _('show patch')),
2963 ('p', 'patch', None, _('show patch')),
2962 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2964 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2963 ('', 'template', '', _('display with template')),
2965 ('', 'template', '', _('display with template')),
2964 ] + remoteopts,
2966 ] + remoteopts,
2965 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2967 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2966 ' [--bundle FILENAME] [SOURCE]')),
2968 ' [--bundle FILENAME] [SOURCE]')),
2967 "^init":
2969 "^init":
2968 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2970 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2969 "locate":
2971 "locate":
2970 (locate,
2972 (locate,
2971 [('r', 'rev', '', _('search the repository as it stood at rev')),
2973 [('r', 'rev', '', _('search the repository as it stood at rev')),
2972 ('0', 'print0', None,
2974 ('0', 'print0', None,
2973 _('end filenames with NUL, for use with xargs')),
2975 _('end filenames with NUL, for use with xargs')),
2974 ('f', 'fullpath', None,
2976 ('f', 'fullpath', None,
2975 _('print complete paths from the filesystem root')),
2977 _('print complete paths from the filesystem root')),
2976 ] + walkopts,
2978 ] + walkopts,
2977 _('hg locate [OPTION]... [PATTERN]...')),
2979 _('hg locate [OPTION]... [PATTERN]...')),
2978 "^log|history":
2980 "^log|history":
2979 (log,
2981 (log,
2980 [('b', 'branches', None, _('show branches')),
2982 [('b', 'branches', None, _('show branches')),
2981 ('f', 'follow', None,
2983 ('f', 'follow', None,
2982 _('follow changeset history, or file history across copies and renames')),
2984 _('follow changeset history, or file history across copies and renames')),
2983 ('', 'follow-first', None,
2985 ('', 'follow-first', None,
2984 _('only follow the first parent of merge changesets')),
2986 _('only follow the first parent of merge changesets')),
2985 ('C', 'copies', None, _('show copied files')),
2987 ('C', 'copies', None, _('show copied files')),
2986 ('k', 'keyword', [], _('search for a keyword')),
2988 ('k', 'keyword', [], _('search for a keyword')),
2987 ('l', 'limit', '', _('limit number of changes displayed')),
2989 ('l', 'limit', '', _('limit number of changes displayed')),
2988 ('r', 'rev', [], _('show the specified revision or range')),
2990 ('r', 'rev', [], _('show the specified revision or range')),
2989 ('M', 'no-merges', None, _('do not show merges')),
2991 ('M', 'no-merges', None, _('do not show merges')),
2990 ('', 'style', '', _('display using template map file')),
2992 ('', 'style', '', _('display using template map file')),
2991 ('m', 'only-merges', None, _('show only merges')),
2993 ('m', 'only-merges', None, _('show only merges')),
2992 ('p', 'patch', None, _('show patch')),
2994 ('p', 'patch', None, _('show patch')),
2993 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2995 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2994 ('', 'template', '', _('display with template')),
2996 ('', 'template', '', _('display with template')),
2995 ] + walkopts,
2997 ] + walkopts,
2996 _('hg log [OPTION]... [FILE]')),
2998 _('hg log [OPTION]... [FILE]')),
2997 "manifest": (manifest, [], _('hg manifest [REV]')),
2999 "manifest": (manifest, [], _('hg manifest [REV]')),
2998 "merge":
3000 "merge":
2999 (merge,
3001 (merge,
3000 [('b', 'branch', '', _('merge with head of a specific branch')),
3002 [('b', 'branch', '', _('merge with head of a specific branch')),
3001 ('f', 'force', None, _('force a merge with outstanding changes'))],
3003 ('f', 'force', None, _('force a merge with outstanding changes'))],
3002 _('hg merge [-b TAG] [-f] [REV]')),
3004 _('hg merge [-b TAG] [-f] [REV]')),
3003 "outgoing|out": (outgoing,
3005 "outgoing|out": (outgoing,
3004 [('M', 'no-merges', None, _('do not show merges')),
3006 [('M', 'no-merges', None, _('do not show merges')),
3005 ('f', 'force', None,
3007 ('f', 'force', None,
3006 _('run even when remote repository is unrelated')),
3008 _('run even when remote repository is unrelated')),
3007 ('p', 'patch', None, _('show patch')),
3009 ('p', 'patch', None, _('show patch')),
3008 ('', 'style', '', _('display using template map file')),
3010 ('', 'style', '', _('display using template map file')),
3009 ('r', 'rev', [], _('a specific revision you would like to push')),
3011 ('r', 'rev', [], _('a specific revision you would like to push')),
3010 ('n', 'newest-first', None, _('show newest record first')),
3012 ('n', 'newest-first', None, _('show newest record first')),
3011 ('', 'template', '', _('display with template')),
3013 ('', 'template', '', _('display with template')),
3012 ] + remoteopts,
3014 ] + remoteopts,
3013 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3015 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3014 "^parents":
3016 "^parents":
3015 (parents,
3017 (parents,
3016 [('b', 'branches', None, _('show branches')),
3018 [('b', 'branches', None, _('show branches')),
3017 ('r', 'rev', '', _('show parents from the specified rev')),
3019 ('r', 'rev', '', _('show parents from the specified rev')),
3018 ('', 'style', '', _('display using template map file')),
3020 ('', 'style', '', _('display using template map file')),
3019 ('', 'template', '', _('display with template'))],
3021 ('', 'template', '', _('display with template'))],
3020 _('hg parents [-b] [-r REV] [FILE]')),
3022 _('hg parents [-b] [-r REV] [FILE]')),
3021 "paths": (paths, [], _('hg paths [NAME]')),
3023 "paths": (paths, [], _('hg paths [NAME]')),
3022 "^pull":
3024 "^pull":
3023 (pull,
3025 (pull,
3024 [('u', 'update', None,
3026 [('u', 'update', None,
3025 _('update to new tip if changesets were pulled')),
3027 _('update to new tip if changesets were pulled')),
3026 ('f', 'force', None,
3028 ('f', 'force', None,
3027 _('run even when remote repository is unrelated')),
3029 _('run even when remote repository is unrelated')),
3028 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3030 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3029 ] + remoteopts,
3031 ] + remoteopts,
3030 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3032 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3031 "^push":
3033 "^push":
3032 (push,
3034 (push,
3033 [('f', 'force', None, _('force push')),
3035 [('f', 'force', None, _('force push')),
3034 ('r', 'rev', [], _('a specific revision you would like to push')),
3036 ('r', 'rev', [], _('a specific revision you would like to push')),
3035 ] + remoteopts,
3037 ] + remoteopts,
3036 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3038 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3037 "debugrawcommit|rawcommit":
3039 "debugrawcommit|rawcommit":
3038 (rawcommit,
3040 (rawcommit,
3039 [('p', 'parent', [], _('parent')),
3041 [('p', 'parent', [], _('parent')),
3040 ('d', 'date', '', _('date code')),
3042 ('d', 'date', '', _('date code')),
3041 ('u', 'user', '', _('user')),
3043 ('u', 'user', '', _('user')),
3042 ('F', 'files', '', _('file list')),
3044 ('F', 'files', '', _('file list')),
3043 ('m', 'message', '', _('commit message')),
3045 ('m', 'message', '', _('commit message')),
3044 ('l', 'logfile', '', _('commit message file'))],
3046 ('l', 'logfile', '', _('commit message file'))],
3045 _('hg debugrawcommit [OPTION]... [FILE]...')),
3047 _('hg debugrawcommit [OPTION]... [FILE]...')),
3046 "recover": (recover, [], _('hg recover')),
3048 "recover": (recover, [], _('hg recover')),
3047 "^remove|rm":
3049 "^remove|rm":
3048 (remove,
3050 (remove,
3049 [('A', 'after', None, _('record a remove that has already occurred')),
3051 [('A', 'after', None, _('record a remove that has already occurred')),
3050 ('f', 'force', None, _('remove file even if modified')),
3052 ('f', 'force', None, _('remove file even if modified')),
3051 ] + walkopts,
3053 ] + walkopts,
3052 _('hg remove [OPTION]... FILE...')),
3054 _('hg remove [OPTION]... FILE...')),
3053 "rename|mv":
3055 "rename|mv":
3054 (rename,
3056 (rename,
3055 [('A', 'after', None, _('record a rename that has already occurred')),
3057 [('A', 'after', None, _('record a rename that has already occurred')),
3056 ('f', 'force', None,
3058 ('f', 'force', None,
3057 _('forcibly copy over an existing managed file')),
3059 _('forcibly copy over an existing managed file')),
3058 ] + walkopts + dryrunopts,
3060 ] + walkopts + dryrunopts,
3059 _('hg rename [OPTION]... SOURCE... DEST')),
3061 _('hg rename [OPTION]... SOURCE... DEST')),
3060 "^revert":
3062 "^revert":
3061 (revert,
3063 (revert,
3062 [('a', 'all', None, _('revert all changes when no arguments given')),
3064 [('a', 'all', None, _('revert all changes when no arguments given')),
3063 ('r', 'rev', '', _('revision to revert to')),
3065 ('r', 'rev', '', _('revision to revert to')),
3064 ('', 'no-backup', None, _('do not save backup copies of files')),
3066 ('', 'no-backup', None, _('do not save backup copies of files')),
3065 ] + walkopts + dryrunopts,
3067 ] + walkopts + dryrunopts,
3066 _('hg revert [-r REV] [NAME]...')),
3068 _('hg revert [-r REV] [NAME]...')),
3067 "rollback": (rollback, [], _('hg rollback')),
3069 "rollback": (rollback, [], _('hg rollback')),
3068 "root": (root, [], _('hg root')),
3070 "root": (root, [], _('hg root')),
3069 "showconfig|debugconfig": (showconfig, [], _('showconfig [NAME]...')),
3071 "showconfig|debugconfig": (showconfig, [], _('showconfig [NAME]...')),
3070 "^serve":
3072 "^serve":
3071 (serve,
3073 (serve,
3072 [('A', 'accesslog', '', _('name of access log file to write to')),
3074 [('A', 'accesslog', '', _('name of access log file to write to')),
3073 ('d', 'daemon', None, _('run server in background')),
3075 ('d', 'daemon', None, _('run server in background')),
3074 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3076 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3075 ('E', 'errorlog', '', _('name of error log file to write to')),
3077 ('E', 'errorlog', '', _('name of error log file to write to')),
3076 ('p', 'port', 0, _('port to use (default: 8000)')),
3078 ('p', 'port', 0, _('port to use (default: 8000)')),
3077 ('a', 'address', '', _('address to use')),
3079 ('a', 'address', '', _('address to use')),
3078 ('n', 'name', '',
3080 ('n', 'name', '',
3079 _('name to show in web pages (default: working dir)')),
3081 _('name to show in web pages (default: working dir)')),
3080 ('', 'webdir-conf', '', _('name of the webdir config file'
3082 ('', 'webdir-conf', '', _('name of the webdir config file'
3081 ' (serve more than one repo)')),
3083 ' (serve more than one repo)')),
3082 ('', 'pid-file', '', _('name of file to write process ID to')),
3084 ('', 'pid-file', '', _('name of file to write process ID to')),
3083 ('', 'stdio', None, _('for remote clients')),
3085 ('', 'stdio', None, _('for remote clients')),
3084 ('t', 'templates', '', _('web templates to use')),
3086 ('t', 'templates', '', _('web templates to use')),
3085 ('', 'style', '', _('template style to use')),
3087 ('', 'style', '', _('template style to use')),
3086 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3088 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3087 _('hg serve [OPTION]...')),
3089 _('hg serve [OPTION]...')),
3088 "^status|st":
3090 "^status|st":
3089 (status,
3091 (status,
3090 [('A', 'all', None, _('show status of all files')),
3092 [('A', 'all', None, _('show status of all files')),
3091 ('m', 'modified', None, _('show only modified files')),
3093 ('m', 'modified', None, _('show only modified files')),
3092 ('a', 'added', None, _('show only added files')),
3094 ('a', 'added', None, _('show only added files')),
3093 ('r', 'removed', None, _('show only removed files')),
3095 ('r', 'removed', None, _('show only removed files')),
3094 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3096 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3095 ('c', 'clean', None, _('show only files without changes')),
3097 ('c', 'clean', None, _('show only files without changes')),
3096 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3098 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3097 ('i', 'ignored', None, _('show ignored files')),
3099 ('i', 'ignored', None, _('show ignored files')),
3098 ('n', 'no-status', None, _('hide status prefix')),
3100 ('n', 'no-status', None, _('hide status prefix')),
3099 ('C', 'copies', None, _('show source of copied files')),
3101 ('C', 'copies', None, _('show source of copied files')),
3100 ('0', 'print0', None,
3102 ('0', 'print0', None,
3101 _('end filenames with NUL, for use with xargs')),
3103 _('end filenames with NUL, for use with xargs')),
3102 ] + walkopts,
3104 ] + walkopts,
3103 _('hg status [OPTION]... [FILE]...')),
3105 _('hg status [OPTION]... [FILE]...')),
3104 "tag":
3106 "tag":
3105 (tag,
3107 (tag,
3106 [('l', 'local', None, _('make the tag local')),
3108 [('l', 'local', None, _('make the tag local')),
3107 ('m', 'message', '', _('message for tag commit log entry')),
3109 ('m', 'message', '', _('message for tag commit log entry')),
3108 ('d', 'date', '', _('record datecode as commit date')),
3110 ('d', 'date', '', _('record datecode as commit date')),
3109 ('u', 'user', '', _('record user as committer')),
3111 ('u', 'user', '', _('record user as committer')),
3110 ('r', 'rev', '', _('revision to tag'))],
3112 ('r', 'rev', '', _('revision to tag'))],
3111 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3113 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3112 "tags": (tags, [], _('hg tags')),
3114 "tags": (tags, [], _('hg tags')),
3113 "tip":
3115 "tip":
3114 (tip,
3116 (tip,
3115 [('b', 'branches', None, _('show branches')),
3117 [('b', 'branches', None, _('show branches')),
3116 ('', 'style', '', _('display using template map file')),
3118 ('', 'style', '', _('display using template map file')),
3117 ('p', 'patch', None, _('show patch')),
3119 ('p', 'patch', None, _('show patch')),
3118 ('', 'template', '', _('display with template'))],
3120 ('', 'template', '', _('display with template'))],
3119 _('hg tip [-b] [-p]')),
3121 _('hg tip [-b] [-p]')),
3120 "unbundle":
3122 "unbundle":
3121 (unbundle,
3123 (unbundle,
3122 [('u', 'update', None,
3124 [('u', 'update', None,
3123 _('update to new tip if changesets were unbundled'))],
3125 _('update to new tip if changesets were unbundled'))],
3124 _('hg unbundle [-u] FILE')),
3126 _('hg unbundle [-u] FILE')),
3125 "debugundo|undo": (undo, [], _('hg undo')),
3127 "debugundo|undo": (undo, [], _('hg undo')),
3126 "^update|up|checkout|co":
3128 "^update|up|checkout|co":
3127 (update,
3129 (update,
3128 [('b', 'branch', '', _('checkout the head of a specific branch')),
3130 [('b', 'branch', '', _('checkout the head of a specific branch')),
3129 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3131 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3130 ('C', 'clean', None, _('overwrite locally modified files')),
3132 ('C', 'clean', None, _('overwrite locally modified files')),
3131 ('f', 'force', None, _('force a merge with outstanding changes'))],
3133 ('f', 'force', None, _('force a merge with outstanding changes'))],
3132 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3134 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3133 "verify": (verify, [], _('hg verify')),
3135 "verify": (verify, [], _('hg verify')),
3134 "version": (show_version, [], _('hg version')),
3136 "version": (show_version, [], _('hg version')),
3135 }
3137 }
3136
3138
3137 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3139 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3138 " debugindex debugindexdot")
3140 " debugindex debugindexdot")
3139 optionalrepo = ("paths serve showconfig")
3141 optionalrepo = ("paths serve showconfig")
3140
3142
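# --- editorial sketch (added, not original code) ------------------------------
# Each value in the command table is a (function, options, usage) triple; a
# leading "^" on the key marks the command for the short help listing and "|"
# separates aliases.  norepo and optionalrepo are plain whitespace-separated
# membership strings consulted by dispatch().
def _command_info_sketch(name):
    for key, (func, options, usage) in table.items():
        if name in key.lstrip("^").split("|"):
            return func, options, usage, name not in norepo.split()
    raise KeyError(name)

# _command_info_sketch("st") returns the status entry and True (needs a repo);
# _command_info_sketch("init") returns the init entry and False.
# --------------------------------------------------------------------------------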
3141 def findpossible(ui, cmd):
3143 def findpossible(ui, cmd):
3142 """
3144 """
3143 Return cmd -> (aliases, command table entry)
3145 Return cmd -> (aliases, command table entry)
3144 for each matching command.
3146 for each matching command.
3145 Return debug commands (or their aliases) only if no normal command matches.
3147 Return debug commands (or their aliases) only if no normal command matches.
3146 """
3148 """
3147 choice = {}
3149 choice = {}
3148 debugchoice = {}
3150 debugchoice = {}
3149 for e in table.keys():
3151 for e in table.keys():
3150 aliases = e.lstrip("^").split("|")
3152 aliases = e.lstrip("^").split("|")
3151 found = None
3153 found = None
3152 if cmd in aliases:
3154 if cmd in aliases:
3153 found = cmd
3155 found = cmd
3154 elif not ui.config("ui", "strict"):
3156 elif not ui.config("ui", "strict"):
3155 for a in aliases:
3157 for a in aliases:
3156 if a.startswith(cmd):
3158 if a.startswith(cmd):
3157 found = a
3159 found = a
3158 break
3160 break
3159 if found is not None:
3161 if found is not None:
3160 if aliases[0].startswith("debug") or found.startswith("debug"):
3162 if aliases[0].startswith("debug") or found.startswith("debug"):
3161 debugchoice[found] = (aliases, table[e])
3163 debugchoice[found] = (aliases, table[e])
3162 else:
3164 else:
3163 choice[found] = (aliases, table[e])
3165 choice[found] = (aliases, table[e])
3164
3166
3165 if not choice and debugchoice:
3167 if not choice and debugchoice:
3166 choice = debugchoice
3168 choice = debugchoice
3167
3169
3168 return choice
3170 return choice
3169
3171
3170 def findcmd(ui, cmd):
3172 def findcmd(ui, cmd):
3171 """Return (aliases, command table entry) for command string."""
3173 """Return (aliases, command table entry) for command string."""
3172 choice = findpossible(ui, cmd)
3174 choice = findpossible(ui, cmd)
3173
3175
3174 if choice.has_key(cmd):
3176 if choice.has_key(cmd):
3175 return choice[cmd]
3177 return choice[cmd]
3176
3178
3177 if len(choice) > 1:
3179 if len(choice) > 1:
3178 clist = choice.keys()
3180 clist = choice.keys()
3179 clist.sort()
3181 clist.sort()
3180 raise AmbiguousCommand(cmd, clist)
3182 raise AmbiguousCommand(cmd, clist)
3181
3183
3182 if choice:
3184 if choice:
3183 return choice.values()[0]
3185 return choice.values()[0]
3184
3186
3185 raise UnknownCommand(cmd)
3187 raise UnknownCommand(cmd)
3186
3188
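# --- editorial example (added, not original code) -----------------------------
# Prefix matching in findpossible()/findcmd() behaves roughly like the toy
# matcher below: an exact alias hit wins, otherwise (unless ui.strict is set)
# any alias starting with the typed string is a candidate, and more than one
# surviving candidate means AmbiguousCommand.
def _match_command_sketch(cmd, keys, strict=False):
    matches = {}
    for key in keys:
        aliases = key.lstrip("^").split("|")
        if cmd in aliases:
            return {key: aliases}
        if not strict:
            for a in aliases:
                if a.startswith(cmd):
                    matches[key] = aliases
                    break
    return matches

# _match_command_sketch("st", ["^status|st", "tag", "tags"]) -> one entry
# _match_command_sketch("ta", ["^status|st", "tag", "tags"]) -> two (ambiguous)
# --------------------------------------------------------------------------------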
3187 def catchterm(*args):
3189 def catchterm(*args):
3188 raise util.SignalInterrupt
3190 raise util.SignalInterrupt
3189
3191
3190 def run():
3192 def run():
3191 sys.exit(dispatch(sys.argv[1:]))
3193 sys.exit(dispatch(sys.argv[1:]))
3192
3194
3193 class ParseError(Exception):
3195 class ParseError(Exception):
3194 """Exception raised on errors in parsing the command line."""
3196 """Exception raised on errors in parsing the command line."""
3195
3197
3196 def parse(ui, args):
3198 def parse(ui, args):
3197 options = {}
3199 options = {}
3198 cmdoptions = {}
3200 cmdoptions = {}
3199
3201
3200 try:
3202 try:
3201 args = fancyopts.fancyopts(args, globalopts, options)
3203 args = fancyopts.fancyopts(args, globalopts, options)
3202 except fancyopts.getopt.GetoptError, inst:
3204 except fancyopts.getopt.GetoptError, inst:
3203 raise ParseError(None, inst)
3205 raise ParseError(None, inst)
3204
3206
3205 if args:
3207 if args:
3206 cmd, args = args[0], args[1:]
3208 cmd, args = args[0], args[1:]
3207 aliases, i = findcmd(ui, cmd)
3209 aliases, i = findcmd(ui, cmd)
3208 cmd = aliases[0]
3210 cmd = aliases[0]
3209 defaults = ui.config("defaults", cmd)
3211 defaults = ui.config("defaults", cmd)
3210 if defaults:
3212 if defaults:
3211 args = shlex.split(defaults) + args
3213 args = shlex.split(defaults) + args
3212 c = list(i[1])
3214 c = list(i[1])
3213 else:
3215 else:
3214 cmd = None
3216 cmd = None
3215 c = []
3217 c = []
3216
3218
3217 # combine global options into local
3219 # combine global options into local
3218 for o in globalopts:
3220 for o in globalopts:
3219 c.append((o[0], o[1], options[o[1]], o[3]))
3221 c.append((o[0], o[1], options[o[1]], o[3]))
3220
3222
3221 try:
3223 try:
3222 args = fancyopts.fancyopts(args, c, cmdoptions)
3224 args = fancyopts.fancyopts(args, c, cmdoptions)
3223 except fancyopts.getopt.GetoptError, inst:
3225 except fancyopts.getopt.GetoptError, inst:
3224 raise ParseError(cmd, inst)
3226 raise ParseError(cmd, inst)
3225
3227
3226 # separate global options back out
3228 # separate global options back out
3227 for o in globalopts:
3229 for o in globalopts:
3228 n = o[1]
3230 n = o[1]
3229 options[n] = cmdoptions[n]
3231 options[n] = cmdoptions[n]
3230 del cmdoptions[n]
3232 del cmdoptions[n]
3231
3233
3232 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3234 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3233
3235
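# --- editorial example (added, not original code; the hgrc content below is
# hypothetical) -----------------------------------------------------------------
# With a [defaults] section such as
#     [defaults]
#     log = -l 10
# "hg log src/foo.c" reaches the option parser as if the user had typed
# "hg log -l 10 src/foo.c": parse() prepends shlex.split(defaults) to the
# remaining arguments before the per-command options are read.
def _apply_defaults_sketch(defaults, args):
    import shlex
    if defaults:
        return shlex.split(defaults) + args
    return args

# _apply_defaults_sketch("-l 10", ["src/foo.c"]) == ["-l", "10", "src/foo.c"]
# --------------------------------------------------------------------------------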
3234 external = {}
3236 external = {}
3235
3237
3236 def findext(name):
3238 def findext(name):
3237 '''return module with given extension name'''
3239 '''return module with given extension name'''
3238 try:
3240 try:
3239 return sys.modules[external[name]]
3241 return sys.modules[external[name]]
3240 except KeyError:
3242 except KeyError:
3241 for k, v in external.iteritems():
3243 for k, v in external.iteritems():
3242 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3244 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3243 return sys.modules[v]
3245 return sys.modules[v]
3244 raise KeyError(name)
3246 raise KeyError(name)
3245
3247
3246 def load_extensions(ui):
3248 def load_extensions(ui):
3247 added = []
3249 added = []
3248 for ext_name, load_from_name in ui.extensions():
3250 for ext_name, load_from_name in ui.extensions():
3249 if ext_name in external:
3251 if ext_name in external:
3250 continue
3252 continue
3251 try:
3253 try:
3252 if load_from_name:
3254 if load_from_name:
3253 # the module will be loaded in sys.modules
3255 # the module will be loaded in sys.modules
3254 # choose a unique name so that it doesn't
3256 # choose a unique name so that it doesn't
3255 # conflict with other modules
3257 # conflict with other modules
3256 module_name = "hgext_%s" % ext_name.replace('.', '_')
3258 module_name = "hgext_%s" % ext_name.replace('.', '_')
3257 mod = imp.load_source(module_name, load_from_name)
3259 mod = imp.load_source(module_name, load_from_name)
3258 else:
3260 else:
3259 def importh(name):
3261 def importh(name):
3260 mod = __import__(name)
3262 mod = __import__(name)
3261 components = name.split('.')
3263 components = name.split('.')
3262 for comp in components[1:]:
3264 for comp in components[1:]:
3263 mod = getattr(mod, comp)
3265 mod = getattr(mod, comp)
3264 return mod
3266 return mod
3265 try:
3267 try:
3266 mod = importh("hgext.%s" % ext_name)
3268 mod = importh("hgext.%s" % ext_name)
3267 except ImportError:
3269 except ImportError:
3268 mod = importh(ext_name)
3270 mod = importh(ext_name)
3269 external[ext_name] = mod.__name__
3271 external[ext_name] = mod.__name__
3270 added.append((mod, ext_name))
3272 added.append((mod, ext_name))
3271 except (util.SignalInterrupt, KeyboardInterrupt):
3273 except (util.SignalInterrupt, KeyboardInterrupt):
3272 raise
3274 raise
3273 except Exception, inst:
3275 except Exception, inst:
3274 ui.warn(_("*** failed to import extension %s: %s\n") %
3276 ui.warn(_("*** failed to import extension %s: %s\n") %
3275 (ext_name, inst))
3277 (ext_name, inst))
3276 if ui.print_exc():
3278 if ui.print_exc():
3277 return 1
3279 return 1
3278
3280
3279 for mod, name in added:
3281 for mod, name in added:
3280 uisetup = getattr(mod, 'uisetup', None)
3282 uisetup = getattr(mod, 'uisetup', None)
3281 if uisetup:
3283 if uisetup:
3282 uisetup(ui)
3284 uisetup(ui)
3283 cmdtable = getattr(mod, 'cmdtable', {})
3285 cmdtable = getattr(mod, 'cmdtable', {})
3284 for t in cmdtable:
3286 for t in cmdtable:
3285 if t in table:
3287 if t in table:
3286 ui.warn(_("module %s overrides %s\n") % (name, t))
3288 ui.warn(_("module %s overrides %s\n") % (name, t))
3287 table.update(cmdtable)
3289 table.update(cmdtable)
3288
3290
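# --- editorial example (added; the extension shown here is hypothetical, not
# a shipped one) ------------------------------------------------------------------
# load_extensions() and dispatch() only look for three module attributes:
# uisetup(ui), reposetup(ui, repo) and cmdtable.  A minimal extension kept in
# its own file (enabled via "[extensions] hello = /path/to/hello.py") could
# therefore look like:
#
#     def uisetup(ui):
#         ui.note("hello extension loaded\n")
#
#     def reposetup(ui, repo):
#         pass
#
#     def hello(ui, repo, **opts):
#         """print a toy greeting"""
#         ui.write("hello from %s\n" % repo.root)
#
#     cmdtable = {
#         "hello": (hello, [], "hg hello"),
#     }
# -----------------------------------------------------------------------------------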
3289 def parseconfig(config):
3291 def parseconfig(config):
3290 """parse the --config options from the command line"""
3292 """parse the --config options from the command line"""
3291 parsed = []
3293 parsed = []
3292 for cfg in config:
3294 for cfg in config:
3293 try:
3295 try:
3294 name, value = cfg.split('=', 1)
3296 name, value = cfg.split('=', 1)
3295 section, name = name.split('.', 1)
3297 section, name = name.split('.', 1)
3296 if not section or not name:
3298 if not section or not name:
3297 raise IndexError
3299 raise IndexError
3298 parsed.append((section, name, value))
3300 parsed.append((section, name, value))
3299 except (IndexError, ValueError):
3301 except (IndexError, ValueError):
3300 raise util.Abort(_('malformed --config option: %s') % cfg)
3302 raise util.Abort(_('malformed --config option: %s') % cfg)
3301 return parsed
3303 return parsed
3302
3304
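# --- editorial example (added, not original code; the values are made up) ------
# "hg --config ui.username=alice --config diff.git=1 ..." turns into
# [('ui', 'username', 'alice'), ('diff', 'git', '1')]; a string missing either
# the '.' or the '=' (e.g. "ui.username" or "uiusername=alice") aborts with
# "malformed --config option".
def _parseconfig_demo():
    return parseconfig(["ui.username=alice", "diff.git=1"])
# ----------------------------------------------------------------------------------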
3303 def dispatch(args):
3305 def dispatch(args):
3304 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3306 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3305 num = getattr(signal, name, None)
3307 num = getattr(signal, name, None)
3306 if num: signal.signal(num, catchterm)
3308 if num: signal.signal(num, catchterm)
3307
3309
3308 try:
3310 try:
3309 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3311 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3310 except util.Abort, inst:
3312 except util.Abort, inst:
3311 sys.stderr.write(_("abort: %s\n") % inst)
3313 sys.stderr.write(_("abort: %s\n") % inst)
3312 return -1
3314 return -1
3313
3315
3314 load_extensions(u)
3316 load_extensions(u)
3315 u.addreadhook(load_extensions)
3317 u.addreadhook(load_extensions)
3316
3318
3317 try:
3319 try:
3318 cmd, func, args, options, cmdoptions = parse(u, args)
3320 cmd, func, args, options, cmdoptions = parse(u, args)
3319 if options["time"]:
3321 if options["time"]:
3320 def get_times():
3322 def get_times():
3321 t = os.times()
3323 t = os.times()
3322 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3324 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3323 t = (t[0], t[1], t[2], t[3], time.clock())
3325 t = (t[0], t[1], t[2], t[3], time.clock())
3324 return t
3326 return t
3325 s = get_times()
3327 s = get_times()
3326 def print_time():
3328 def print_time():
3327 t = get_times()
3329 t = get_times()
3328 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3330 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3329 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3331 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3330 atexit.register(print_time)
3332 atexit.register(print_time)
3331
3333
3332 # enter the debugger before command execution
3334 # enter the debugger before command execution
3333 if options['debugger']:
3335 if options['debugger']:
3334 pdb.set_trace()
3336 pdb.set_trace()
3335
3337
3336 try:
3338 try:
3337 if options['cwd']:
3339 if options['cwd']:
3338 try:
3340 try:
3339 os.chdir(options['cwd'])
3341 os.chdir(options['cwd'])
3340 except OSError, inst:
3342 except OSError, inst:
3341 raise util.Abort('%s: %s' %
3343 raise util.Abort('%s: %s' %
3342 (options['cwd'], inst.strerror))
3344 (options['cwd'], inst.strerror))
3343
3345
3344 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3346 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3345 not options["noninteractive"], options["traceback"],
3347 not options["noninteractive"], options["traceback"],
3346 parseconfig(options["config"]))
3348 parseconfig(options["config"]))
3347
3349
3348 path = u.expandpath(options["repository"]) or ""
3350 path = u.expandpath(options["repository"]) or ""
3349 repo = path and hg.repository(u, path=path) or None
3351 repo = path and hg.repository(u, path=path) or None
3350 if repo and not repo.local():
3352 if repo and not repo.local():
3351 raise util.Abort(_("repository '%s' is not local") % path)
3353 raise util.Abort(_("repository '%s' is not local") % path)
3352
3354
3353 if options['help']:
3355 if options['help']:
3354 return help_(u, cmd, options['version'])
3356 return help_(u, cmd, options['version'])
3355 elif options['version']:
3357 elif options['version']:
3356 return show_version(u)
3358 return show_version(u)
3357 elif not cmd:
3359 elif not cmd:
3358 return help_(u, 'shortlist')
3360 return help_(u, 'shortlist')
3359
3361
3360 if cmd not in norepo.split():
3362 if cmd not in norepo.split():
3361 try:
3363 try:
3362 if not repo:
3364 if not repo:
3363 repo = hg.repository(u, path=path)
3365 repo = hg.repository(u, path=path)
3364 u = repo.ui
3366 u = repo.ui
3365 for name in external.itervalues():
3367 for name in external.itervalues():
3366 mod = sys.modules[name]
3368 mod = sys.modules[name]
3367 if hasattr(mod, 'reposetup'):
3369 if hasattr(mod, 'reposetup'):
3368 mod.reposetup(u, repo)
3370 mod.reposetup(u, repo)
3369 hg.repo_setup_hooks.append(mod.reposetup)
3371 hg.repo_setup_hooks.append(mod.reposetup)
3370 except hg.RepoError:
3372 except hg.RepoError:
3371 if cmd not in optionalrepo.split():
3373 if cmd not in optionalrepo.split():
3372 raise
3374 raise
3373 d = lambda: func(u, repo, *args, **cmdoptions)
3375 d = lambda: func(u, repo, *args, **cmdoptions)
3374 else:
3376 else:
3375 d = lambda: func(u, *args, **cmdoptions)
3377 d = lambda: func(u, *args, **cmdoptions)
3376
3378
3377 try:
3379 try:
3378 if options['profile']:
3380 if options['profile']:
3379 import hotshot, hotshot.stats
3381 import hotshot, hotshot.stats
3380 prof = hotshot.Profile("hg.prof")
3382 prof = hotshot.Profile("hg.prof")
3381 try:
3383 try:
3382 try:
3384 try:
3383 return prof.runcall(d)
3385 return prof.runcall(d)
3384 except:
3386 except:
3385 try:
3387 try:
3386 u.warn(_('exception raised - generating '
3388 u.warn(_('exception raised - generating '
3387 'profile anyway\n'))
3389 'profile anyway\n'))
3388 except:
3390 except:
3389 pass
3391 pass
3390 raise
3392 raise
3391 finally:
3393 finally:
3392 prof.close()
3394 prof.close()
3393 stats = hotshot.stats.load("hg.prof")
3395 stats = hotshot.stats.load("hg.prof")
3394 stats.strip_dirs()
3396 stats.strip_dirs()
3395 stats.sort_stats('time', 'calls')
3397 stats.sort_stats('time', 'calls')
3396 stats.print_stats(40)
3398 stats.print_stats(40)
3397 elif options['lsprof']:
3399 elif options['lsprof']:
3398 try:
3400 try:
3399 from mercurial import lsprof
3401 from mercurial import lsprof
3400 except ImportError:
3402 except ImportError:
3401 raise util.Abort(_(
3403 raise util.Abort(_(
3402 'lsprof not available - install from '
3404 'lsprof not available - install from '
3403 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3405 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3404 p = lsprof.Profiler()
3406 p = lsprof.Profiler()
3405 p.enable(subcalls=True)
3407 p.enable(subcalls=True)
3406 try:
3408 try:
3407 return d()
3409 return d()
3408 finally:
3410 finally:
3409 p.disable()
3411 p.disable()
3410 stats = lsprof.Stats(p.getstats())
3412 stats = lsprof.Stats(p.getstats())
3411 stats.sort()
3413 stats.sort()
3412 stats.pprint(top=10, file=sys.stderr, climit=5)
3414 stats.pprint(top=10, file=sys.stderr, climit=5)
3413 else:
3415 else:
3414 return d()
3416 return d()
3415 finally:
3417 finally:
3416 u.flush()
3418 u.flush()
3417 except:
3419 except:
3418 # enter the debugger when we hit an exception
3420 # enter the debugger when we hit an exception
3419 if options['debugger']:
3421 if options['debugger']:
3420 pdb.post_mortem(sys.exc_info()[2])
3422 pdb.post_mortem(sys.exc_info()[2])
3421 u.print_exc()
3423 u.print_exc()
3422 raise
3424 raise
3423 except ParseError, inst:
3425 except ParseError, inst:
3424 if inst.args[0]:
3426 if inst.args[0]:
3425 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3427 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3426 help_(u, inst.args[0])
3428 help_(u, inst.args[0])
3427 else:
3429 else:
3428 u.warn(_("hg: %s\n") % inst.args[1])
3430 u.warn(_("hg: %s\n") % inst.args[1])
3429 help_(u, 'shortlist')
3431 help_(u, 'shortlist')
3430 except AmbiguousCommand, inst:
3432 except AmbiguousCommand, inst:
3431 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3433 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3432 (inst.args[0], " ".join(inst.args[1])))
3434 (inst.args[0], " ".join(inst.args[1])))
3433 except UnknownCommand, inst:
3435 except UnknownCommand, inst:
3434 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3436 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3435 help_(u, 'shortlist')
3437 help_(u, 'shortlist')
3436 except hg.RepoError, inst:
3438 except hg.RepoError, inst:
3437 u.warn(_("abort: %s!\n") % inst)
3439 u.warn(_("abort: %s!\n") % inst)
3438 except lock.LockHeld, inst:
3440 except lock.LockHeld, inst:
3439 if inst.errno == errno.ETIMEDOUT:
3441 if inst.errno == errno.ETIMEDOUT:
3440 reason = _('timed out waiting for lock held by %s') % inst.locker
3442 reason = _('timed out waiting for lock held by %s') % inst.locker
3441 else:
3443 else:
3442 reason = _('lock held by %s') % inst.locker
3444 reason = _('lock held by %s') % inst.locker
3443 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3445 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3444 except lock.LockUnavailable, inst:
3446 except lock.LockUnavailable, inst:
3445 u.warn(_("abort: could not lock %s: %s\n") %
3447 u.warn(_("abort: could not lock %s: %s\n") %
3446 (inst.desc or inst.filename, inst.strerror))
3448 (inst.desc or inst.filename, inst.strerror))
3447 except revlog.RevlogError, inst:
3449 except revlog.RevlogError, inst:
3448 u.warn(_("abort: %s!\n") % inst)
3450 u.warn(_("abort: %s!\n") % inst)
3449 except util.SignalInterrupt:
3451 except util.SignalInterrupt:
3450 u.warn(_("killed!\n"))
3452 u.warn(_("killed!\n"))
3451 except KeyboardInterrupt:
3453 except KeyboardInterrupt:
3452 try:
3454 try:
3453 u.warn(_("interrupted!\n"))
3455 u.warn(_("interrupted!\n"))
3454 except IOError, inst:
3456 except IOError, inst:
3455 if inst.errno == errno.EPIPE:
3457 if inst.errno == errno.EPIPE:
3456 if u.debugflag:
3458 if u.debugflag:
3457 u.warn(_("\nbroken pipe\n"))
3459 u.warn(_("\nbroken pipe\n"))
3458 else:
3460 else:
3459 raise
3461 raise
3460 except IOError, inst:
3462 except IOError, inst:
3461 if hasattr(inst, "code"):
3463 if hasattr(inst, "code"):
3462 u.warn(_("abort: %s\n") % inst)
3464 u.warn(_("abort: %s\n") % inst)
3463 elif hasattr(inst, "reason"):
3465 elif hasattr(inst, "reason"):
3464 u.warn(_("abort: error: %s\n") % inst.reason[1])
3466 u.warn(_("abort: error: %s\n") % inst.reason[1])
3465 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3467 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3466 if u.debugflag:
3468 if u.debugflag:
3467 u.warn(_("broken pipe\n"))
3469 u.warn(_("broken pipe\n"))
3468 elif getattr(inst, "strerror", None):
3470 elif getattr(inst, "strerror", None):
3469 if getattr(inst, "filename", None):
3471 if getattr(inst, "filename", None):
3470 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3472 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3471 else:
3473 else:
3472 u.warn(_("abort: %s\n") % inst.strerror)
3474 u.warn(_("abort: %s\n") % inst.strerror)
3473 else:
3475 else:
3474 raise
3476 raise
3475 except OSError, inst:
3477 except OSError, inst:
3476 if getattr(inst, "filename", None):
3478 if getattr(inst, "filename", None):
3477 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3479 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3478 else:
3480 else:
3479 u.warn(_("abort: %s\n") % inst.strerror)
3481 u.warn(_("abort: %s\n") % inst.strerror)
3480 except util.Abort, inst:
3482 except util.Abort, inst:
3481 u.warn(_("abort: %s\n") % inst)
3483 u.warn(_("abort: %s\n") % inst)
3482 except TypeError, inst:
3484 except TypeError, inst:
3483 # was this an argument error?
3485 # was this an argument error?
3484 tb = traceback.extract_tb(sys.exc_info()[2])
3486 tb = traceback.extract_tb(sys.exc_info()[2])
3485 if len(tb) > 2: # no
3487 if len(tb) > 2: # no
3486 raise
3488 raise
3487 u.debug(inst, "\n")
3489 u.debug(inst, "\n")
3488 u.warn(_("%s: invalid arguments\n") % cmd)
3490 u.warn(_("%s: invalid arguments\n") % cmd)
3489 help_(u, cmd)
3491 help_(u, cmd)
3490 except SystemExit, inst:
3492 except SystemExit, inst:
3491 # Commands shouldn't sys.exit directly, but give a return code.
3493 # Commands shouldn't sys.exit directly, but give a return code.
3492 # Just in case, catch this and pass the exit code to the caller.
3494 # Just in case, catch this and pass the exit code to the caller.
3493 return inst.code
3495 return inst.code
3494 except:
3496 except:
3495 u.warn(_("** unknown exception encountered, details follow\n"))
3497 u.warn(_("** unknown exception encountered, details follow\n"))
3496 u.warn(_("** report bug details to "
3498 u.warn(_("** report bug details to "
3497 "http://www.selenic.com/mercurial/bts\n"))
3499 "http://www.selenic.com/mercurial/bts\n"))
3498 u.warn(_("** or mercurial@selenic.com\n"))
3500 u.warn(_("** or mercurial@selenic.com\n"))
3499 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3501 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3500 % version.get_version())
3502 % version.get_version())
3501 raise
3503 raise
3502
3504
3503 return -1
3505 return -1
@@ -1,256 +1,257 @@
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from repo import *
10 from repo import *
11 from demandload import *
11 from demandload import *
12 from i18n import gettext as _
12 from i18n import gettext as _
13 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
13 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
14 demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify")
14 demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify")
15
15
16 def _local(path):
16 def _local(path):
17 return (os.path.isfile(util.drop_scheme('file', path)) and
17 return (os.path.isfile(util.drop_scheme('file', path)) and
18 bundlerepo or localrepo)
18 bundlerepo or localrepo)
19
19
20 schemes = {
20 schemes = {
21 'bundle': bundlerepo,
21 'bundle': bundlerepo,
22 'file': _local,
22 'file': _local,
23 'hg': httprepo,
23 'hg': httprepo,
24 'http': httprepo,
24 'http': httprepo,
25 'https': httprepo,
25 'https': httprepo,
26 'old-http': statichttprepo,
26 'old-http': statichttprepo,
27 'ssh': sshrepo,
27 'ssh': sshrepo,
28 'static-http': statichttprepo,
28 'static-http': statichttprepo,
29 }
29 }
30
30
31 def _lookup(path):
31 def _lookup(path):
32 scheme = 'file'
32 scheme = 'file'
33 if path:
33 if path:
34 c = path.find(':')
34 c = path.find(':')
35 if c > 0:
35 if c > 0:
36 scheme = path[:c]
36 scheme = path[:c]
37 thing = schemes.get(scheme) or schemes['file']
37 thing = schemes.get(scheme) or schemes['file']
38 try:
38 try:
39 return thing(path)
39 return thing(path)
40 except TypeError:
40 except TypeError:
41 return thing
41 return thing
42
42
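A sketch of how _lookup() picks a repository module from the schemes table above (URLs and paths are placeholders):

    # the scheme is everything before the first ':', defaulting to 'file'
    _lookup('http://example.com/repo')   # -> httprepo module
    _lookup('bundle:changes.hg')         # -> bundlerepo module
    _lookup('/some/local/path')          # -> _local() decides: bundlerepo for an
                                         #    existing plain file, localrepo otherwise
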
43 def islocal(repo):
43 def islocal(repo):
44 '''return true if repo or path is local'''
44 '''return true if repo or path is local'''
45 if isinstance(repo, str):
45 if isinstance(repo, str):
46 try:
46 try:
47 return _lookup(repo).islocal(repo)
47 return _lookup(repo).islocal(repo)
48 except AttributeError:
48 except AttributeError:
49 return False
49 return False
50 return repo.local()
50 return repo.local()
51
51
52 repo_setup_hooks = []
52 repo_setup_hooks = []
53
53
54 def repository(ui, path='', create=False):
54 def repository(ui, path='', create=False):
55 """return a repository object for the specified path"""
55 """return a repository object for the specified path"""
56 repo = _lookup(path).instance(ui, path, create)
56 repo = _lookup(path).instance(ui, path, create)
57 for hook in repo_setup_hooks:
57 for hook in repo_setup_hooks:
58 hook(ui, repo)
58 hook(ui, repo)
59 return repo
59 return repo
60
60
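A hedged usage sketch of repository(), the entry point the command layer uses (paths are placeholders):

    u = ui.ui()
    remote = repository(u, 'http://example.com/repo')    # remote repository object
    local = repository(u, '/tmp/newrepo', create=True)   # creates a new local repository
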
61 def defaultdest(source):
61 def defaultdest(source):
62 '''return default destination of clone if none is given'''
62 '''return default destination of clone if none is given'''
63 return os.path.basename(os.path.normpath(source))
63 return os.path.basename(os.path.normpath(source))
64
64
65 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
65 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
66 stream=False):
66 stream=False):
67 """Make a copy of an existing repository.
67 """Make a copy of an existing repository.
68
68
69 Create a copy of an existing repository in a new directory. The
69 Create a copy of an existing repository in a new directory. The
70 source and destination are URLs, as passed to the repository
70 source and destination are URLs, as passed to the repository
71 function. Returns a pair of repository objects, the source and
71 function. Returns a pair of repository objects, the source and
72 newly created destination.
72 newly created destination.
73
73
74 The location of the source is added to the new repository's
74 The location of the source is added to the new repository's
75 .hg/hgrc file, as the default to be used for future pulls and
75 .hg/hgrc file, as the default to be used for future pulls and
76 pushes.
76 pushes.
77
77
78 If an exception is raised, the partly cloned/updated destination
78 If an exception is raised, the partly cloned/updated destination
79 repository will be deleted.
79 repository will be deleted.
80
80
81 Arguments:
81 Arguments:
82
82
83 source: repository object or URL
83 source: repository object or URL
84
84
85 dest: URL of destination repository to create (defaults to base
85 dest: URL of destination repository to create (defaults to base
86 name of source repository)
86 name of source repository)
87
87
88 pull: always pull from source repository, even in local case
88 pull: always pull from source repository, even in local case
89
89
90 stream: stream raw data uncompressed from repository (fast over
90 stream: stream raw data uncompressed from repository (fast over
91 LAN, slow over WAN)
91 LAN, slow over WAN)
92
92
93 rev: revision to clone up to (implies pull=True)
93 rev: revision to clone up to (implies pull=True)
94
94
95 update: update working directory after clone completes, if
95 update: update working directory after clone completes, if
96 destination is local repository
96 destination is local repository
97 """
97 """
98 if isinstance(source, str):
98 if isinstance(source, str):
99 src_repo = repository(ui, source)
99 src_repo = repository(ui, source)
100 else:
100 else:
101 src_repo = source
101 src_repo = source
102 source = src_repo.url()
102 source = src_repo.url()
103
103
104 if dest is None:
104 if dest is None:
105 dest = defaultdest(source)
105 dest = defaultdest(source)
106
106
107 def localpath(path):
107 def localpath(path):
108 if path.startswith('file://'):
108 if path.startswith('file://'):
109 return path[7:]
109 return path[7:]
110 if path.startswith('file:'):
110 if path.startswith('file:'):
111 return path[5:]
111 return path[5:]
112 return path
112 return path
113
113
114 dest = localpath(dest)
114 dest = localpath(dest)
115 source = localpath(source)
115 source = localpath(source)
116
116
117 if os.path.exists(dest):
117 if os.path.exists(dest):
118 raise util.Abort(_("destination '%s' already exists") % dest)
118 raise util.Abort(_("destination '%s' already exists") % dest)
119
119
120 class DirCleanup(object):
120 class DirCleanup(object):
121 def __init__(self, dir_):
121 def __init__(self, dir_):
122 self.rmtree = shutil.rmtree
122 self.rmtree = shutil.rmtree
123 self.dir_ = dir_
123 self.dir_ = dir_
124 def close(self):
124 def close(self):
125 self.dir_ = None
125 self.dir_ = None
126 def __del__(self):
126 def __del__(self):
127 if self.dir_:
127 if self.dir_:
128 self.rmtree(self.dir_, True)
128 self.rmtree(self.dir_, True)
129
129
130 dest_repo = repository(ui, dest, create=True)
130 dest_repo = repository(ui, dest, create=True)
131
131
132 dest_path = None
132 dest_path = None
133 dir_cleanup = None
133 dir_cleanup = None
134 if dest_repo.local():
134 if dest_repo.local():
135 dest_path = os.path.realpath(dest_repo.root)
135 dest_path = os.path.realpath(dest_repo.root)
136 dir_cleanup = DirCleanup(dest_path)
136 dir_cleanup = DirCleanup(dest_path)
137
137
138 abspath = source
138 abspath = source
139 copy = False
139 copy = False
140 if src_repo.local() and dest_repo.local():
140 if src_repo.local() and dest_repo.local():
141 abspath = os.path.abspath(source)
141 abspath = os.path.abspath(source)
142 copy = not pull and not rev
142 copy = not pull and not rev
143
143
144 src_lock, dest_lock = None, None
144 src_lock, dest_lock = None, None
145 if copy:
145 if copy:
146 try:
146 try:
147 # we use a lock here because if we race with commit, we
147 # we use a lock here because if we race with commit, we
148 # can end up with extra data in the cloned revlogs that's
148 # can end up with extra data in the cloned revlogs that's
149 # not pointed to by changesets, thus causing verify to
149 # not pointed to by changesets, thus causing verify to
150 # fail
150 # fail
151 src_lock = src_repo.lock()
151 src_lock = src_repo.lock()
152 except lock.LockException:
152 except lock.LockException:
153 copy = False
153 copy = False
154
154
155 if copy:
155 if copy:
156 # we lock here to avoid premature writing to the target
156 # we lock here to avoid premature writing to the target
157 dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock"))
157 dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock"))
158
158
159 # we need to remove the (empty) data dir in dest so copyfiles
159 # we need to remove the (empty) data dir in dest so copyfiles
160 # can do its work
160 # can do its work
161 os.rmdir(os.path.join(dest_path, ".hg", "data"))
161 os.rmdir(os.path.join(dest_path, ".hg", "data"))
162 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
162 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
163 for f in files.split():
163 for f in files.split():
164 src = os.path.join(source, ".hg", f)
164 src = os.path.join(source, ".hg", f)
165 dst = os.path.join(dest_path, ".hg", f)
165 dst = os.path.join(dest_path, ".hg", f)
166 try:
166 try:
167 util.copyfiles(src, dst)
167 util.copyfiles(src, dst)
168 except OSError, inst:
168 except OSError, inst:
169 if inst.errno != errno.ENOENT:
169 if inst.errno != errno.ENOENT:
170 raise
170 raise
171
171
172 # we need to re-init the repo after manually copying the data
172 # we need to re-init the repo after manually copying the data
173 # into it
173 # into it
174 dest_repo = repository(ui, dest)
174 dest_repo = repository(ui, dest)
175
175
176 else:
176 else:
177 revs = None
177 revs = None
178 if rev:
178 if rev:
179 if not src_repo.local():
179 if 'lookup' not in src_repo.capabilities:
180 raise util.Abort(_("clone by revision not supported yet "
180 raise util.Abort(_("src repository does not support revision "
181 "for remote repositories"))
181 "lookup and so doesn't support clone by "
182 "revision"))
182 revs = [src_repo.lookup(r) for r in rev]
183 revs = [src_repo.lookup(r) for r in rev]
183
184
184 if dest_repo.local():
185 if dest_repo.local():
185 dest_repo.clone(src_repo, heads=revs, stream=stream)
186 dest_repo.clone(src_repo, heads=revs, stream=stream)
186 elif src_repo.local():
187 elif src_repo.local():
187 src_repo.push(dest_repo, revs=revs)
188 src_repo.push(dest_repo, revs=revs)
188 else:
189 else:
189 raise util.Abort(_("clone from remote to remote not supported"))
190 raise util.Abort(_("clone from remote to remote not supported"))
190
191
191 if src_lock:
192 if src_lock:
192 src_lock.release()
193 src_lock.release()
193
194
194 if dest_repo.local():
195 if dest_repo.local():
195 fp = dest_repo.opener("hgrc", "w", text=True)
196 fp = dest_repo.opener("hgrc", "w", text=True)
196 fp.write("[paths]\n")
197 fp.write("[paths]\n")
197 fp.write("default = %s\n" % abspath)
198 fp.write("default = %s\n" % abspath)
198 fp.close()
199 fp.close()
199
200
200 if dest_lock:
201 if dest_lock:
201 dest_lock.release()
202 dest_lock.release()
202
203
203 if update:
204 if update:
204 _update(dest_repo, dest_repo.changelog.tip())
205 _update(dest_repo, dest_repo.changelog.tip())
205 if dir_cleanup:
206 if dir_cleanup:
206 dir_cleanup.close()
207 dir_cleanup.close()
207
208
208 return src_repo, dest_repo
209 return src_repo, dest_repo
209
210
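The hunk above is the substance of this changeset: instead of refusing -r for any non-local source, clone() now checks whether the source advertises the 'lookup' capability (added to localrepository further down). A rough calling sketch, with placeholder paths and tag name:

    # clone only up to a given revision; works for any src_repo whose
    # capabilities include 'lookup', local or not
    src, dest = clone(u, 'ssh://example.com//repo', dest='copy', rev=['mytag'])
    # a source without 'lookup' now gets the explicit "does not support
    # revision lookup" abort instead of the old blanket refusal
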
210 def _showstats(repo, stats):
211 def _showstats(repo, stats):
211 stats = ((stats[0], _("updated")),
212 stats = ((stats[0], _("updated")),
212 (stats[1], _("merged")),
213 (stats[1], _("merged")),
213 (stats[2], _("removed")),
214 (stats[2], _("removed")),
214 (stats[3], _("unresolved")))
215 (stats[3], _("unresolved")))
215 note = ", ".join([_("%d files %s") % s for s in stats])
216 note = ", ".join([_("%d files %s") % s for s in stats])
216 repo.ui.status("%s\n" % note)
217 repo.ui.status("%s\n" % note)
217
218
218 def _update(repo, node): return update(repo, node)
219 def _update(repo, node): return update(repo, node)
219
220
220 def update(repo, node):
221 def update(repo, node):
221 """update the working directory to node, merging linear changes"""
222 """update the working directory to node, merging linear changes"""
222 stats = _merge.update(repo, node, False, False, None, None)
223 stats = _merge.update(repo, node, False, False, None, None)
223 _showstats(repo, stats)
224 _showstats(repo, stats)
224 if stats[3]:
225 if stats[3]:
225 repo.ui.status(_("There are unresolved merges with"
226 repo.ui.status(_("There are unresolved merges with"
226 " locally modified files.\n"))
227 " locally modified files.\n"))
227 return stats[3]
228 return stats[3]
228
229
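For reference, update() is what clone() invokes (via _update) after a successful local clone; it returns the number of unresolved files:

    unresolved = update(dest_repo, dest_repo.changelog.tip())
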
229 def clean(repo, node, wlock=None, show_stats=True):
230 def clean(repo, node, wlock=None, show_stats=True):
230 """forcibly switch the working directory to node, clobbering changes"""
231 """forcibly switch the working directory to node, clobbering changes"""
231 stats = _merge.update(repo, node, False, True, None, wlock)
232 stats = _merge.update(repo, node, False, True, None, wlock)
232 if show_stats: _showstats(repo, stats)
233 if show_stats: _showstats(repo, stats)
233 return stats[3]
234 return stats[3]
234
235
235 def merge(repo, node, force=None, remind=True, wlock=None):
236 def merge(repo, node, force=None, remind=True, wlock=None):
236 """branch merge with node, resolving changes"""
237 """branch merge with node, resolving changes"""
237 stats = _merge.update(repo, node, True, force, False, wlock)
238 stats = _merge.update(repo, node, True, force, False, wlock)
238 _showstats(repo, stats)
239 _showstats(repo, stats)
239 if stats[3]:
240 if stats[3]:
240 pl = repo.parents()
241 pl = repo.parents()
241 repo.ui.status(_("There are unresolved merges,"
242 repo.ui.status(_("There are unresolved merges,"
242 " you can redo the full merge using:\n"
243 " you can redo the full merge using:\n"
243 " hg update -C %s\n"
244 " hg update -C %s\n"
244 " hg merge %s\n"
245 " hg merge %s\n"
245 % (pl[0].rev(), pl[1].rev())))
246 % (pl[0].rev(), pl[1].rev())))
246 elif remind:
247 elif remind:
247 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
248 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
248 return stats[3]
249 return stats[3]
249
250
250 def revert(repo, node, choose, wlock):
251 def revert(repo, node, choose, wlock):
251 """revert changes to revision in node without updating dirstate"""
252 """revert changes to revision in node without updating dirstate"""
252 return _merge.update(repo, node, False, True, choose, wlock)[3]
253 return _merge.update(repo, node, False, True, choose, wlock)[3]
253
254
254 def verify(repo):
255 def verify(repo):
255 """verify the consistency of a repository"""
256 """verify the consistency of a repository"""
256 return _verify.verify(repo)
257 return _verify.verify(repo)
@@ -1,1811 +1,1813 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ('lookup', 'changegroupsubset')
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("There is no Mercurial repository"
30 raise repo.RepoError(_("There is no Mercurial repository"
31 " here (.hg not found)"))
31 " here (.hg not found)"))
32 path = p
32 path = p
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34
34
35 if not os.path.isdir(self.path):
35 if not os.path.isdir(self.path):
36 if create:
36 if create:
37 if not os.path.exists(path):
37 if not os.path.exists(path):
38 os.mkdir(path)
38 os.mkdir(path)
39 os.mkdir(self.path)
39 os.mkdir(self.path)
40 os.mkdir(self.join("data"))
40 os.mkdir(self.join("data"))
41 else:
41 else:
42 raise repo.RepoError(_("repository %s not found") % path)
42 raise repo.RepoError(_("repository %s not found") % path)
43 elif create:
43 elif create:
44 raise repo.RepoError(_("repository %s already exists") % path)
44 raise repo.RepoError(_("repository %s already exists") % path)
45
45
46 self.root = os.path.abspath(path)
46 self.root = os.path.abspath(path)
47 self.origroot = path
47 self.origroot = path
48 self.ui = ui.ui(parentui=parentui)
48 self.ui = ui.ui(parentui=parentui)
49 self.opener = util.opener(self.path)
49 self.opener = util.opener(self.path)
50 self.wopener = util.opener(self.root)
50 self.wopener = util.opener(self.root)
51
51
52 try:
52 try:
53 self.ui.readconfig(self.join("hgrc"), self.root)
53 self.ui.readconfig(self.join("hgrc"), self.root)
54 except IOError:
54 except IOError:
55 pass
55 pass
56
56
57 v = self.ui.configrevlog()
57 v = self.ui.configrevlog()
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
60 fl = v.get('flags', None)
60 fl = v.get('flags', None)
61 flags = 0
61 flags = 0
62 if fl != None:
62 if fl != None:
63 for x in fl.split():
63 for x in fl.split():
64 flags |= revlog.flagstr(x)
64 flags |= revlog.flagstr(x)
65 elif self.revlogv1:
65 elif self.revlogv1:
66 flags = revlog.REVLOG_DEFAULT_FLAGS
66 flags = revlog.REVLOG_DEFAULT_FLAGS
67
67
68 v = self.revlogversion | flags
68 v = self.revlogversion | flags
69 self.manifest = manifest.manifest(self.opener, v)
69 self.manifest = manifest.manifest(self.opener, v)
70 self.changelog = changelog.changelog(self.opener, v)
70 self.changelog = changelog.changelog(self.opener, v)
71
71
72 # the changelog might not have the inline index flag
72 # the changelog might not have the inline index flag
73 # on. If the format of the changelog is the same as found in
73 # on. If the format of the changelog is the same as found in
74 # .hgrc, apply any flags found in the .hgrc as well.
74 # .hgrc, apply any flags found in the .hgrc as well.
75 # Otherwise, just use the version from the changelog.
75 # Otherwise, just use the version from the changelog.
76 v = self.changelog.version
76 v = self.changelog.version
77 if v == self.revlogversion:
77 if v == self.revlogversion:
78 v |= flags
78 v |= flags
79 self.revlogversion = v
79 self.revlogversion = v
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self.branchcache = None
82 self.branchcache = None
83 self.nodetagscache = None
83 self.nodetagscache = None
84 self.encodepats = None
84 self.encodepats = None
85 self.decodepats = None
85 self.decodepats = None
86 self.transhandle = None
86 self.transhandle = None
87
87
88 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
88 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
89
89
90 def url(self):
90 def url(self):
91 return 'file:' + self.root
91 return 'file:' + self.root
92
92
93 def hook(self, name, throw=False, **args):
93 def hook(self, name, throw=False, **args):
94 def callhook(hname, funcname):
94 def callhook(hname, funcname):
95 '''call python hook. hook is callable object, looked up as
95 '''call python hook. hook is callable object, looked up as
96 name in python module. if callable returns "true", hook
96 name in python module. if callable returns "true", hook
97 fails, else passes. if hook raises exception, treated as
97 fails, else passes. if hook raises exception, treated as
98 hook failure. exception propagates if throw is "true".
98 hook failure. exception propagates if throw is "true".
99
99
100 reason for "true" meaning "hook failed" is so that
100 reason for "true" meaning "hook failed" is so that
101 unmodified commands (e.g. mercurial.commands.update) can
101 unmodified commands (e.g. mercurial.commands.update) can
102 be run as hooks without wrappers to convert return values.'''
102 be run as hooks without wrappers to convert return values.'''
103
103
104 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
104 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
105 d = funcname.rfind('.')
105 d = funcname.rfind('.')
106 if d == -1:
106 if d == -1:
107 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
107 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
108 % (hname, funcname))
108 % (hname, funcname))
109 modname = funcname[:d]
109 modname = funcname[:d]
110 try:
110 try:
111 obj = __import__(modname)
111 obj = __import__(modname)
112 except ImportError:
112 except ImportError:
113 try:
113 try:
114 # extensions are loaded with hgext_ prefix
114 # extensions are loaded with hgext_ prefix
115 obj = __import__("hgext_%s" % modname)
115 obj = __import__("hgext_%s" % modname)
116 except ImportError:
116 except ImportError:
117 raise util.Abort(_('%s hook is invalid '
117 raise util.Abort(_('%s hook is invalid '
118 '(import of "%s" failed)') %
118 '(import of "%s" failed)') %
119 (hname, modname))
119 (hname, modname))
120 try:
120 try:
121 for p in funcname.split('.')[1:]:
121 for p in funcname.split('.')[1:]:
122 obj = getattr(obj, p)
122 obj = getattr(obj, p)
123 except AttributeError, err:
123 except AttributeError, err:
124 raise util.Abort(_('%s hook is invalid '
124 raise util.Abort(_('%s hook is invalid '
125 '("%s" is not defined)') %
125 '("%s" is not defined)') %
126 (hname, funcname))
126 (hname, funcname))
127 if not callable(obj):
127 if not callable(obj):
128 raise util.Abort(_('%s hook is invalid '
128 raise util.Abort(_('%s hook is invalid '
129 '("%s" is not callable)') %
129 '("%s" is not callable)') %
130 (hname, funcname))
130 (hname, funcname))
131 try:
131 try:
132 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
132 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
133 except (KeyboardInterrupt, util.SignalInterrupt):
133 except (KeyboardInterrupt, util.SignalInterrupt):
134 raise
134 raise
135 except Exception, exc:
135 except Exception, exc:
136 if isinstance(exc, util.Abort):
136 if isinstance(exc, util.Abort):
137 self.ui.warn(_('error: %s hook failed: %s\n') %
137 self.ui.warn(_('error: %s hook failed: %s\n') %
138 (hname, exc.args[0]))
138 (hname, exc.args[0]))
139 else:
139 else:
140 self.ui.warn(_('error: %s hook raised an exception: '
140 self.ui.warn(_('error: %s hook raised an exception: '
141 '%s\n') % (hname, exc))
141 '%s\n') % (hname, exc))
142 if throw:
142 if throw:
143 raise
143 raise
144 self.ui.print_exc()
144 self.ui.print_exc()
145 return True
145 return True
146 if r:
146 if r:
147 if throw:
147 if throw:
148 raise util.Abort(_('%s hook failed') % hname)
148 raise util.Abort(_('%s hook failed') % hname)
149 self.ui.warn(_('warning: %s hook failed\n') % hname)
149 self.ui.warn(_('warning: %s hook failed\n') % hname)
150 return r
150 return r
151
151
152 def runhook(name, cmd):
152 def runhook(name, cmd):
153 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
153 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
154 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
154 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
155 r = util.system(cmd, environ=env, cwd=self.root)
155 r = util.system(cmd, environ=env, cwd=self.root)
156 if r:
156 if r:
157 desc, r = util.explain_exit(r)
157 desc, r = util.explain_exit(r)
158 if throw:
158 if throw:
159 raise util.Abort(_('%s hook %s') % (name, desc))
159 raise util.Abort(_('%s hook %s') % (name, desc))
160 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
160 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
161 return r
161 return r
162
162
163 r = False
163 r = False
164 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
164 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
165 if hname.split(".", 1)[0] == name and cmd]
165 if hname.split(".", 1)[0] == name and cmd]
166 hooks.sort()
166 hooks.sort()
167 for hname, cmd in hooks:
167 for hname, cmd in hooks:
168 if cmd.startswith('python:'):
168 if cmd.startswith('python:'):
169 r = callhook(hname, cmd[7:].strip()) or r
169 r = callhook(hname, cmd[7:].strip()) or r
170 else:
170 else:
171 r = runhook(hname, cmd) or r
171 r = runhook(hname, cmd) or r
172 return r
172 return r
173
173
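The (hname, cmd) pairs iterated above come from the [hooks] section of an hgrc; a hedged example with made-up script and module names — entries whose value starts with "python:" go through callhook(), everything else through runhook():

    [hooks]
    changegroup = /usr/local/bin/notify-build
    pretag.check = python:mytaghooks.checktag
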
174 tag_disallowed = ':\r\n'
174 tag_disallowed = ':\r\n'
175
175
176 def tag(self, name, node, message, local, user, date):
176 def tag(self, name, node, message, local, user, date):
177 '''tag a revision with a symbolic name.
177 '''tag a revision with a symbolic name.
178
178
179 if local is True, the tag is stored in a per-repository file.
179 if local is True, the tag is stored in a per-repository file.
180 otherwise, it is stored in the .hgtags file, and a new
180 otherwise, it is stored in the .hgtags file, and a new
181 changeset is committed with the change.
181 changeset is committed with the change.
182
182
183 keyword arguments:
183 keyword arguments:
184
184
185 local: whether to store tag in non-version-controlled file
185 local: whether to store tag in non-version-controlled file
186 (default False)
186 (default False)
187
187
188 message: commit message to use if committing
188 message: commit message to use if committing
189
189
190 user: name of user to use if committing
190 user: name of user to use if committing
191
191
192 date: date tuple to use if committing'''
192 date: date tuple to use if committing'''
193
193
194 for c in self.tag_disallowed:
194 for c in self.tag_disallowed:
195 if c in name:
195 if c in name:
196 raise util.Abort(_('%r cannot be used in a tag name') % c)
196 raise util.Abort(_('%r cannot be used in a tag name') % c)
197
197
198 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
198 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
199
199
200 if local:
200 if local:
201 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
201 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
202 self.hook('tag', node=hex(node), tag=name, local=local)
202 self.hook('tag', node=hex(node), tag=name, local=local)
203 return
203 return
204
204
205 for x in self.status()[:5]:
205 for x in self.status()[:5]:
206 if '.hgtags' in x:
206 if '.hgtags' in x:
207 raise util.Abort(_('working copy of .hgtags is changed '
207 raise util.Abort(_('working copy of .hgtags is changed '
208 '(please commit .hgtags manually)'))
208 '(please commit .hgtags manually)'))
209
209
210 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
210 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
211 if self.dirstate.state('.hgtags') == '?':
211 if self.dirstate.state('.hgtags') == '?':
212 self.add(['.hgtags'])
212 self.add(['.hgtags'])
213
213
214 self.commit(['.hgtags'], message, user, date)
214 self.commit(['.hgtags'], message, user, date)
215 self.hook('tag', node=hex(node), tag=name, local=local)
215 self.hook('tag', node=hex(node), tag=name, local=local)
216
216
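A usage sketch of tag() following its docstring (tag name, message and user values are placeholders):

    node = repo.changelog.tip()
    repo.tag('v1.0', node, 'Added tag v1.0', local=False, user=None, date=None)
    # local=True only appends to .hg/localtags and skips the .hgtags commit
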
217 def tags(self):
217 def tags(self):
218 '''return a mapping of tag to node'''
218 '''return a mapping of tag to node'''
219 if not self.tagscache:
219 if not self.tagscache:
220 self.tagscache = {}
220 self.tagscache = {}
221
221
222 def parsetag(line, context):
222 def parsetag(line, context):
223 if not line:
223 if not line:
224 return
224 return
225 s = l.split(" ", 1)
225 s = l.split(" ", 1)
226 if len(s) != 2:
226 if len(s) != 2:
227 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 self.ui.warn(_("%s: cannot parse entry\n") % context)
228 return
228 return
229 node, key = s
229 node, key = s
230 key = key.strip()
230 key = key.strip()
231 try:
231 try:
232 bin_n = bin(node)
232 bin_n = bin(node)
233 except TypeError:
233 except TypeError:
234 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 self.ui.warn(_("%s: node '%s' is not well formed\n") %
235 (context, node))
235 (context, node))
236 return
236 return
237 if bin_n not in self.changelog.nodemap:
237 if bin_n not in self.changelog.nodemap:
238 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
239 (context, key))
239 (context, key))
240 return
240 return
241 self.tagscache[key] = bin_n
241 self.tagscache[key] = bin_n
242
242
243 # read the tags file from each head, ending with the tip,
243 # read the tags file from each head, ending with the tip,
244 # and add each tag found to the map, with "newer" ones
244 # and add each tag found to the map, with "newer" ones
245 # taking precedence
245 # taking precedence
246 heads = self.heads()
246 heads = self.heads()
247 heads.reverse()
247 heads.reverse()
248 fl = self.file(".hgtags")
248 fl = self.file(".hgtags")
249 for node in heads:
249 for node in heads:
250 change = self.changelog.read(node)
250 change = self.changelog.read(node)
251 rev = self.changelog.rev(node)
251 rev = self.changelog.rev(node)
252 fn, ff = self.manifest.find(change[0], '.hgtags')
252 fn, ff = self.manifest.find(change[0], '.hgtags')
253 if fn is None: continue
253 if fn is None: continue
254 count = 0
254 count = 0
255 for l in fl.read(fn).splitlines():
255 for l in fl.read(fn).splitlines():
256 count += 1
256 count += 1
257 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
258 (rev, short(node), count))
258 (rev, short(node), count))
259 try:
259 try:
260 f = self.opener("localtags")
260 f = self.opener("localtags")
261 count = 0
261 count = 0
262 for l in f:
262 for l in f:
263 count += 1
263 count += 1
264 parsetag(l, _("localtags, line %d") % count)
264 parsetag(l, _("localtags, line %d") % count)
265 except IOError:
265 except IOError:
266 pass
266 pass
267
267
268 self.tagscache['tip'] = self.changelog.tip()
268 self.tagscache['tip'] = self.changelog.tip()
269
269
270 return self.tagscache
270 return self.tagscache
271
271
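Each line handed to parsetag() above has the form "<full 40-hex changeset id> <tag name>"; an invented example entry:

    0a9c5f3ed1a5c2e4b7d8091a2b3c4d5e6f708192 v1.0
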
272 def tagslist(self):
272 def tagslist(self):
273 '''return a list of tags ordered by revision'''
273 '''return a list of tags ordered by revision'''
274 l = []
274 l = []
275 for t, n in self.tags().items():
275 for t, n in self.tags().items():
276 try:
276 try:
277 r = self.changelog.rev(n)
277 r = self.changelog.rev(n)
278 except:
278 except:
279 r = -2 # sort to the beginning of the list if unknown
279 r = -2 # sort to the beginning of the list if unknown
280 l.append((r, t, n))
280 l.append((r, t, n))
281 l.sort()
281 l.sort()
282 return [(t, n) for r, t, n in l]
282 return [(t, n) for r, t, n in l]
283
283
284 def nodetags(self, node):
284 def nodetags(self, node):
285 '''return the tags associated with a node'''
285 '''return the tags associated with a node'''
286 if not self.nodetagscache:
286 if not self.nodetagscache:
287 self.nodetagscache = {}
287 self.nodetagscache = {}
288 for t, n in self.tags().items():
288 for t, n in self.tags().items():
289 self.nodetagscache.setdefault(n, []).append(t)
289 self.nodetagscache.setdefault(n, []).append(t)
290 return self.nodetagscache.get(node, [])
290 return self.nodetagscache.get(node, [])
291
291
292 def branchtags(self):
292 def branchtags(self):
293 if self.branchcache != None:
293 if self.branchcache != None:
294 return self.branchcache
294 return self.branchcache
295
295
296 self.branchcache = {} # avoid recursion in changectx
296 self.branchcache = {} # avoid recursion in changectx
297
297
298 try:
298 try:
299 f = self.opener("branches.cache")
299 f = self.opener("branches.cache")
300 last, lrev = f.readline().rstrip().split(" ", 1)
300 last, lrev = f.readline().rstrip().split(" ", 1)
301 last, lrev = bin(last), int(lrev)
301 last, lrev = bin(last), int(lrev)
302 if (lrev < self.changelog.count() and
302 if (lrev < self.changelog.count() and
303 self.changelog.node(lrev) == last): # sanity check
303 self.changelog.node(lrev) == last): # sanity check
304 for l in f:
304 for l in f:
305 node, label = l.rstrip().split(" ", 1)
305 node, label = l.rstrip().split(" ", 1)
306 self.branchcache[label] = bin(node)
306 self.branchcache[label] = bin(node)
307 f.close()
307 f.close()
308 except IOError:
308 except IOError:
309 last, lrev = nullid, -1
309 last, lrev = nullid, -1
310 lrev = self.changelog.rev(last)
310 lrev = self.changelog.rev(last)
311
311
312 tip = self.changelog.count() - 1
312 tip = self.changelog.count() - 1
313 if lrev != tip:
313 if lrev != tip:
314 for r in xrange(lrev + 1, tip + 1):
314 for r in xrange(lrev + 1, tip + 1):
315 c = self.changectx(r)
315 c = self.changectx(r)
316 b = c.branch()
316 b = c.branch()
317 if b:
317 if b:
318 self.branchcache[b] = c.node()
318 self.branchcache[b] = c.node()
319 self._writebranchcache()
319 self._writebranchcache()
320
320
321 return self.branchcache
321 return self.branchcache
322
322
323 def _writebranchcache(self):
323 def _writebranchcache(self):
324 f = self.opener("branches.cache", "w")
324 f = self.opener("branches.cache", "w")
325 t = self.changelog.tip()
325 t = self.changelog.tip()
326 f.write("%s %s\n" % (hex(t), self.changelog.count() - 1))
326 f.write("%s %s\n" % (hex(t), self.changelog.count() - 1))
327 for label, node in self.branchcache.iteritems():
327 for label, node in self.branchcache.iteritems():
328 f.write("%s %s\n" % (hex(node), label))
328 f.write("%s %s\n" % (hex(node), label))
329
329
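The branches.cache file written above is one "tip node, tip revision" line, used for the sanity check in branchtags(), followed by one "head node, branch label" line per named branch (schematic, not real data):

    <tip node hex> <tip rev>
    <branch head node hex> <branch label>
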
330 def lookup(self, key):
330 def lookup(self, key):
331 if key == '.':
331 if key == '.':
332 key = self.dirstate.parents()[0]
332 key = self.dirstate.parents()[0]
333 if key == nullid:
333 if key == nullid:
334 raise repo.RepoError(_("no revision checked out"))
334 raise repo.RepoError(_("no revision checked out"))
335 if key in self.tags():
335 if key in self.tags():
336 return self.tags()[key]
336 return self.tags()[key]
337 if key in self.branchtags():
337 if key in self.branchtags():
338 return self.branchtags()[key]
338 return self.branchtags()[key]
339 try:
339 try:
340 return self.changelog.lookup(key)
340 return self.changelog.lookup(key)
341 except:
341 except:
342 raise repo.RepoError(_("unknown revision '%s'") % key)
342 raise repo.RepoError(_("unknown revision '%s'") % key)
343
343
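lookup() tries the key forms in the order shown above; illustrative calls, assuming the names exist in the repository:

    repo.lookup('.')         # first parent of the working directory
    repo.lookup('tip')       # pseudo-tag always added by tags()
    repo.lookup('v1.0')      # a tag from .hgtags or localtags
    repo.lookup('stable')    # a branch label from branchtags()
    repo.lookup('0a9c5f3e')  # anything else falls through to changelog.lookup()
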
344 def dev(self):
344 def dev(self):
345 return os.lstat(self.path).st_dev
345 return os.lstat(self.path).st_dev
346
346
347 def local(self):
347 def local(self):
348 return True
348 return True
349
349
350 def join(self, f):
350 def join(self, f):
351 return os.path.join(self.path, f)
351 return os.path.join(self.path, f)
352
352
353 def wjoin(self, f):
353 def wjoin(self, f):
354 return os.path.join(self.root, f)
354 return os.path.join(self.root, f)
355
355
356 def file(self, f):
356 def file(self, f):
357 if f[0] == '/':
357 if f[0] == '/':
358 f = f[1:]
358 f = f[1:]
359 return filelog.filelog(self.opener, f, self.revlogversion)
359 return filelog.filelog(self.opener, f, self.revlogversion)
360
360
361 def changectx(self, changeid=None):
361 def changectx(self, changeid=None):
362 return context.changectx(self, changeid)
362 return context.changectx(self, changeid)
363
363
364 def workingctx(self):
364 def workingctx(self):
365 return context.workingctx(self)
365 return context.workingctx(self)
366
366
367 def parents(self, changeid=None):
367 def parents(self, changeid=None):
368 '''
368 '''
369 get list of changectxs for parents of changeid or working directory
369 get list of changectxs for parents of changeid or working directory
370 '''
370 '''
371 if changeid is None:
371 if changeid is None:
372 pl = self.dirstate.parents()
372 pl = self.dirstate.parents()
373 else:
373 else:
374 n = self.changelog.lookup(changeid)
374 n = self.changelog.lookup(changeid)
375 pl = self.changelog.parents(n)
375 pl = self.changelog.parents(n)
376 if pl[1] == nullid:
376 if pl[1] == nullid:
377 return [self.changectx(pl[0])]
377 return [self.changectx(pl[0])]
378 return [self.changectx(pl[0]), self.changectx(pl[1])]
378 return [self.changectx(pl[0]), self.changectx(pl[1])]
379
379
380 def filectx(self, path, changeid=None, fileid=None):
380 def filectx(self, path, changeid=None, fileid=None):
381 """changeid can be a changeset revision, node, or tag.
381 """changeid can be a changeset revision, node, or tag.
382 fileid can be a file revision or node."""
382 fileid can be a file revision or node."""
383 return context.filectx(self, path, changeid, fileid)
383 return context.filectx(self, path, changeid, fileid)
384
384
385 def getcwd(self):
385 def getcwd(self):
386 return self.dirstate.getcwd()
386 return self.dirstate.getcwd()
387
387
388 def wfile(self, f, mode='r'):
388 def wfile(self, f, mode='r'):
389 return self.wopener(f, mode)
389 return self.wopener(f, mode)
390
390
391 def wread(self, filename):
391 def wread(self, filename):
392 if self.encodepats == None:
392 if self.encodepats == None:
393 l = []
393 l = []
394 for pat, cmd in self.ui.configitems("encode"):
394 for pat, cmd in self.ui.configitems("encode"):
395 mf = util.matcher(self.root, "", [pat], [], [])[1]
395 mf = util.matcher(self.root, "", [pat], [], [])[1]
396 l.append((mf, cmd))
396 l.append((mf, cmd))
397 self.encodepats = l
397 self.encodepats = l
398
398
399 data = self.wopener(filename, 'r').read()
399 data = self.wopener(filename, 'r').read()
400
400
401 for mf, cmd in self.encodepats:
401 for mf, cmd in self.encodepats:
402 if mf(filename):
402 if mf(filename):
403 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
403 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
404 data = util.filter(data, cmd)
404 data = util.filter(data, cmd)
405 break
405 break
406
406
407 return data
407 return data
408
408
409 def wwrite(self, filename, data, fd=None):
409 def wwrite(self, filename, data, fd=None):
410 if self.decodepats == None:
410 if self.decodepats == None:
411 l = []
411 l = []
412 for pat, cmd in self.ui.configitems("decode"):
412 for pat, cmd in self.ui.configitems("decode"):
413 mf = util.matcher(self.root, "", [pat], [], [])[1]
413 mf = util.matcher(self.root, "", [pat], [], [])[1]
414 l.append((mf, cmd))
414 l.append((mf, cmd))
415 self.decodepats = l
415 self.decodepats = l
416
416
417 for mf, cmd in self.decodepats:
417 for mf, cmd in self.decodepats:
418 if mf(filename):
418 if mf(filename):
419 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
419 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
420 data = util.filter(data, cmd)
420 data = util.filter(data, cmd)
421 break
421 break
422
422
423 if fd:
423 if fd:
424 return fd.write(data)
424 return fd.write(data)
425 return self.wopener(filename, 'w').write(data)
425 return self.wopener(filename, 'w').write(data)
426
426
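wread() and wwrite() pipe working-directory data through shell filters configured in the [encode] and [decode] hgrc sections; the pattern and commands below are purely illustrative:

    [encode]
    **.txt = dos2unix    # applied by wread(): working directory -> repository
    [decode]
    **.txt = unix2dos    # applied by wwrite(): repository -> working directory
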
427 def transaction(self):
427 def transaction(self):
428 tr = self.transhandle
428 tr = self.transhandle
429 if tr != None and tr.running():
429 if tr != None and tr.running():
430 return tr.nest()
430 return tr.nest()
431
431
432 # save dirstate for rollback
432 # save dirstate for rollback
433 try:
433 try:
434 ds = self.opener("dirstate").read()
434 ds = self.opener("dirstate").read()
435 except IOError:
435 except IOError:
436 ds = ""
436 ds = ""
437 self.opener("journal.dirstate", "w").write(ds)
437 self.opener("journal.dirstate", "w").write(ds)
438
438
439 tr = transaction.transaction(self.ui.warn, self.opener,
439 tr = transaction.transaction(self.ui.warn, self.opener,
440 self.join("journal"),
440 self.join("journal"),
441 aftertrans(self.path))
441 aftertrans(self.path))
442 self.transhandle = tr
442 self.transhandle = tr
443 return tr
443 return tr
444
444
445 def recover(self):
445 def recover(self):
446 l = self.lock()
446 l = self.lock()
447 if os.path.exists(self.join("journal")):
447 if os.path.exists(self.join("journal")):
448 self.ui.status(_("rolling back interrupted transaction\n"))
448 self.ui.status(_("rolling back interrupted transaction\n"))
449 transaction.rollback(self.opener, self.join("journal"))
449 transaction.rollback(self.opener, self.join("journal"))
450 self.reload()
450 self.reload()
451 return True
451 return True
452 else:
452 else:
453 self.ui.warn(_("no interrupted transaction available\n"))
453 self.ui.warn(_("no interrupted transaction available\n"))
454 return False
454 return False
455
455
456 def rollback(self, wlock=None):
456 def rollback(self, wlock=None):
457 if not wlock:
457 if not wlock:
458 wlock = self.wlock()
458 wlock = self.wlock()
459 l = self.lock()
459 l = self.lock()
460 if os.path.exists(self.join("undo")):
460 if os.path.exists(self.join("undo")):
461 self.ui.status(_("rolling back last transaction\n"))
461 self.ui.status(_("rolling back last transaction\n"))
462 transaction.rollback(self.opener, self.join("undo"))
462 transaction.rollback(self.opener, self.join("undo"))
463 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
463 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
464 self.reload()
464 self.reload()
465 self.wreload()
465 self.wreload()
466 else:
466 else:
467 self.ui.warn(_("no rollback information available\n"))
467 self.ui.warn(_("no rollback information available\n"))
468
468
469 def wreload(self):
469 def wreload(self):
470 self.dirstate.read()
470 self.dirstate.read()
471
471
472 def reload(self):
472 def reload(self):
473 self.changelog.load()
473 self.changelog.load()
474 self.manifest.load()
474 self.manifest.load()
475 self.tagscache = None
475 self.tagscache = None
476 self.nodetagscache = None
476 self.nodetagscache = None
477
477
478 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
478 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
479 desc=None):
479 desc=None):
480 try:
480 try:
481 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
481 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
482 except lock.LockHeld, inst:
482 except lock.LockHeld, inst:
483 if not wait:
483 if not wait:
484 raise
484 raise
485 self.ui.warn(_("waiting for lock on %s held by %s\n") %
485 self.ui.warn(_("waiting for lock on %s held by %s\n") %
486 (desc, inst.args[0]))
486 (desc, inst.args[0]))
487 # default to 600 seconds timeout
487 # default to 600 seconds timeout
488 l = lock.lock(self.join(lockname),
488 l = lock.lock(self.join(lockname),
489 int(self.ui.config("ui", "timeout") or 600),
489 int(self.ui.config("ui", "timeout") or 600),
490 releasefn, desc=desc)
490 releasefn, desc=desc)
491 if acquirefn:
491 if acquirefn:
492 acquirefn()
492 acquirefn()
493 return l
493 return l
494
494
495 def lock(self, wait=1):
495 def lock(self, wait=1):
496 return self.do_lock("lock", wait, acquirefn=self.reload,
496 return self.do_lock("lock", wait, acquirefn=self.reload,
497 desc=_('repository %s') % self.origroot)
497 desc=_('repository %s') % self.origroot)
498
498
499 def wlock(self, wait=1):
499 def wlock(self, wait=1):
500 return self.do_lock("wlock", wait, self.dirstate.write,
500 return self.do_lock("wlock", wait, self.dirstate.write,
501 self.wreload,
501 self.wreload,
502 desc=_('working directory of %s') % self.origroot)
502 desc=_('working directory of %s') % self.origroot)
503
503
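When a lock is already held, do_lock() above warns and then waits up to ui.timeout seconds (600 if unset); the value comes from the hgrc:

    [ui]
    timeout = 30
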
504 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
504 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
505 """
505 """
506 commit an individual file as part of a larger transaction
506 commit an individual file as part of a larger transaction
507 """
507 """
508
508
509 t = self.wread(fn)
509 t = self.wread(fn)
510 fl = self.file(fn)
510 fl = self.file(fn)
511 fp1 = manifest1.get(fn, nullid)
511 fp1 = manifest1.get(fn, nullid)
512 fp2 = manifest2.get(fn, nullid)
512 fp2 = manifest2.get(fn, nullid)
513
513
514 meta = {}
514 meta = {}
515 cp = self.dirstate.copied(fn)
515 cp = self.dirstate.copied(fn)
516 if cp:
516 if cp:
517 meta["copy"] = cp
517 meta["copy"] = cp
518 if not manifest2: # not a branch merge
518 if not manifest2: # not a branch merge
519 meta["copyrev"] = hex(manifest1.get(cp, nullid))
519 meta["copyrev"] = hex(manifest1.get(cp, nullid))
520 fp2 = nullid
520 fp2 = nullid
521 elif fp2 != nullid: # copied on remote side
521 elif fp2 != nullid: # copied on remote side
522 meta["copyrev"] = hex(manifest1.get(cp, nullid))
522 meta["copyrev"] = hex(manifest1.get(cp, nullid))
523 else: # copied on local side, reversed
523 else: # copied on local side, reversed
524 meta["copyrev"] = hex(manifest2.get(cp))
524 meta["copyrev"] = hex(manifest2.get(cp))
525 fp2 = nullid
525 fp2 = nullid
526 self.ui.debug(_(" %s: copy %s:%s\n") %
526 self.ui.debug(_(" %s: copy %s:%s\n") %
527 (fn, cp, meta["copyrev"]))
527 (fn, cp, meta["copyrev"]))
528 fp1 = nullid
528 fp1 = nullid
529 elif fp2 != nullid:
529 elif fp2 != nullid:
530 # is one parent an ancestor of the other?
530 # is one parent an ancestor of the other?
531 fpa = fl.ancestor(fp1, fp2)
531 fpa = fl.ancestor(fp1, fp2)
532 if fpa == fp1:
532 if fpa == fp1:
533 fp1, fp2 = fp2, nullid
533 fp1, fp2 = fp2, nullid
534 elif fpa == fp2:
534 elif fpa == fp2:
535 fp2 = nullid
535 fp2 = nullid
536
536
537 # is the file unmodified from the parent? report existing entry
537 # is the file unmodified from the parent? report existing entry
538 if fp2 == nullid and not fl.cmp(fp1, t):
538 if fp2 == nullid and not fl.cmp(fp1, t):
539 return fp1
539 return fp1
540
540
541 changelist.append(fn)
541 changelist.append(fn)
542 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
542 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
543
543
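# Illustration of the copy metadata recorded above, assuming a working
# directory where `hg copy a.txt b.txt` was run before the commit: the
# filelog entry for b.txt would carry a meta dict along the lines of
#
#   meta = {"copy": "a.txt",
#           "copyrev": "0123456789abcdef0123456789abcdef01234567"}
#
# where "copyrev" holds the hex filenode of the copy source in the
# parent manifest (the 40-hex value here is only a placeholder).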
544 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
544 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
545 orig_parent = self.dirstate.parents()[0] or nullid
545 orig_parent = self.dirstate.parents()[0] or nullid
546 p1 = p1 or self.dirstate.parents()[0] or nullid
546 p1 = p1 or self.dirstate.parents()[0] or nullid
547 p2 = p2 or self.dirstate.parents()[1] or nullid
547 p2 = p2 or self.dirstate.parents()[1] or nullid
548 c1 = self.changelog.read(p1)
548 c1 = self.changelog.read(p1)
549 c2 = self.changelog.read(p2)
549 c2 = self.changelog.read(p2)
550 m1 = self.manifest.read(c1[0]).copy()
550 m1 = self.manifest.read(c1[0]).copy()
551 m2 = self.manifest.read(c2[0])
551 m2 = self.manifest.read(c2[0])
552 changed = []
552 changed = []
553 removed = []
553 removed = []
554
554
555 if orig_parent == p1:
555 if orig_parent == p1:
556 update_dirstate = 1
556 update_dirstate = 1
557 else:
557 else:
558 update_dirstate = 0
558 update_dirstate = 0
559
559
560 if not wlock:
560 if not wlock:
561 wlock = self.wlock()
561 wlock = self.wlock()
562 l = self.lock()
562 l = self.lock()
563 tr = self.transaction()
563 tr = self.transaction()
564 linkrev = self.changelog.count()
564 linkrev = self.changelog.count()
565 for f in files:
565 for f in files:
566 try:
566 try:
567 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
567 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
568 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
568 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
569 except IOError:
569 except IOError:
570 try:
570 try:
571 del m1[f]
571 del m1[f]
572 if update_dirstate:
572 if update_dirstate:
573 self.dirstate.forget([f])
573 self.dirstate.forget([f])
574 removed.append(f)
574 removed.append(f)
575 except:
575 except:
576 # deleted from p2?
576 # deleted from p2?
577 pass
577 pass
578
578
579 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
579 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
580 user = user or self.ui.username()
580 user = user or self.ui.username()
581 n = self.changelog.add(mnode, changed + removed, text,
581 n = self.changelog.add(mnode, changed + removed, text,
582 tr, p1, p2, user, date)
582 tr, p1, p2, user, date)
583 tr.close()
583 tr.close()
584 if update_dirstate:
584 if update_dirstate:
585 self.dirstate.setparents(n, nullid)
585 self.dirstate.setparents(n, nullid)
586
586
587 def commit(self, files=None, text="", user=None, date=None,
587 def commit(self, files=None, text="", user=None, date=None,
588 match=util.always, force=False, lock=None, wlock=None,
588 match=util.always, force=False, lock=None, wlock=None,
589 force_editor=False):
589 force_editor=False):
590 commit = []
590 commit = []
591 remove = []
591 remove = []
592 changed = []
592 changed = []
593
593
594 if files:
594 if files:
595 for f in files:
595 for f in files:
596 s = self.dirstate.state(f)
596 s = self.dirstate.state(f)
597 if s in 'nmai':
597 if s in 'nmai':
598 commit.append(f)
598 commit.append(f)
599 elif s == 'r':
599 elif s == 'r':
600 remove.append(f)
600 remove.append(f)
601 else:
601 else:
602 self.ui.warn(_("%s not tracked!\n") % f)
602 self.ui.warn(_("%s not tracked!\n") % f)
603 else:
603 else:
604 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
604 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
605 commit = modified + added
605 commit = modified + added
606 remove = removed
606 remove = removed
607
607
608 p1, p2 = self.dirstate.parents()
608 p1, p2 = self.dirstate.parents()
609 c1 = self.changelog.read(p1)
609 c1 = self.changelog.read(p1)
610 c2 = self.changelog.read(p2)
610 c2 = self.changelog.read(p2)
611 m1 = self.manifest.read(c1[0]).copy()
611 m1 = self.manifest.read(c1[0]).copy()
612 m2 = self.manifest.read(c2[0])
612 m2 = self.manifest.read(c2[0])
613
613
614 branchname = self.workingctx().branch()
614 branchname = self.workingctx().branch()
615 oldname = c1[5].get("branch", "")
615 oldname = c1[5].get("branch", "")
616
616
617 if not commit and not remove and not force and p2 == nullid and \
617 if not commit and not remove and not force and p2 == nullid and \
618 branchname == oldname:
618 branchname == oldname:
619 self.ui.status(_("nothing changed\n"))
619 self.ui.status(_("nothing changed\n"))
620 return None
620 return None
621
621
622 xp1 = hex(p1)
622 xp1 = hex(p1)
623 if p2 == nullid: xp2 = ''
623 if p2 == nullid: xp2 = ''
624 else: xp2 = hex(p2)
624 else: xp2 = hex(p2)
625
625
626 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
626 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
627
627
628 if not wlock:
628 if not wlock:
629 wlock = self.wlock()
629 wlock = self.wlock()
630 if not lock:
630 if not lock:
631 lock = self.lock()
631 lock = self.lock()
632 tr = self.transaction()
632 tr = self.transaction()
633
633
634 # check in files
634 # check in files
635 new = {}
635 new = {}
636 linkrev = self.changelog.count()
636 linkrev = self.changelog.count()
637 commit.sort()
637 commit.sort()
638 for f in commit:
638 for f in commit:
639 self.ui.note(f + "\n")
639 self.ui.note(f + "\n")
640 try:
640 try:
641 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
641 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
642 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
642 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
643 except IOError:
643 except IOError:
644 self.ui.warn(_("trouble committing %s!\n") % f)
644 self.ui.warn(_("trouble committing %s!\n") % f)
645 raise
645 raise
646
646
647 # update manifest
647 # update manifest
648 m1.update(new)
648 m1.update(new)
649 for f in remove:
649 for f in remove:
650 if f in m1:
650 if f in m1:
651 del m1[f]
651 del m1[f]
652 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
652 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
653
653
654 # add changeset
654 # add changeset
655 new = new.keys()
655 new = new.keys()
656 new.sort()
656 new.sort()
657
657
658 user = user or self.ui.username()
658 user = user or self.ui.username()
659 if not text or force_editor:
659 if not text or force_editor:
660 edittext = []
660 edittext = []
661 if text:
661 if text:
662 edittext.append(text)
662 edittext.append(text)
663 edittext.append("")
663 edittext.append("")
664 if p2 != nullid:
664 if p2 != nullid:
665 edittext.append("HG: branch merge")
665 edittext.append("HG: branch merge")
666 edittext.extend(["HG: changed %s" % f for f in changed])
666 edittext.extend(["HG: changed %s" % f for f in changed])
667 edittext.extend(["HG: removed %s" % f for f in remove])
667 edittext.extend(["HG: removed %s" % f for f in remove])
668 if not changed and not remove:
668 if not changed and not remove:
669 edittext.append("HG: no files changed")
669 edittext.append("HG: no files changed")
670 edittext.append("")
670 edittext.append("")
671 # run editor in the repository root
671 # run editor in the repository root
672 olddir = os.getcwd()
672 olddir = os.getcwd()
673 os.chdir(self.root)
673 os.chdir(self.root)
674 text = self.ui.edit("\n".join(edittext), user)
674 text = self.ui.edit("\n".join(edittext), user)
675 os.chdir(olddir)
675 os.chdir(olddir)
676
676
677 lines = [line.rstrip() for line in text.rstrip().splitlines()]
677 lines = [line.rstrip() for line in text.rstrip().splitlines()]
678 while lines and not lines[0]:
678 while lines and not lines[0]:
679 del lines[0]
679 del lines[0]
680 if not lines:
680 if not lines:
681 return None
681 return None
682 text = '\n'.join(lines)
682 text = '\n'.join(lines)
683 extra = {}
683 extra = {}
684 if branchname:
684 if branchname:
685 extra["branch"] = branchname
685 extra["branch"] = branchname
686 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
686 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
687 user, date, extra)
687 user, date, extra)
688 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
688 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
689 parent2=xp2)
689 parent2=xp2)
690 tr.close()
690 tr.close()
691
691
692 self.dirstate.setparents(n)
692 self.dirstate.setparents(n)
693 self.dirstate.update(new, "n")
693 self.dirstate.update(new, "n")
694 self.dirstate.forget(remove)
694 self.dirstate.forget(remove)
695
695
696 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
696 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
697 return n
697 return n
698
698
699 def walk(self, node=None, files=[], match=util.always, badmatch=None):
699 def walk(self, node=None, files=[], match=util.always, badmatch=None):
700 if node:
700 if node:
701 fdict = dict.fromkeys(files)
701 fdict = dict.fromkeys(files)
702 for fn in self.manifest.read(self.changelog.read(node)[0]):
702 for fn in self.manifest.read(self.changelog.read(node)[0]):
703 for ffn in fdict:
703 for ffn in fdict:
704 # match if the file is the exact name or a directory
704 # match if the file is the exact name or a directory
705 if ffn == fn or fn.startswith("%s/" % ffn):
705 if ffn == fn or fn.startswith("%s/" % ffn):
706 del fdict[ffn]
706 del fdict[ffn]
707 break
707 break
708 if match(fn):
708 if match(fn):
709 yield 'm', fn
709 yield 'm', fn
710 for fn in fdict:
710 for fn in fdict:
711 if badmatch and badmatch(fn):
711 if badmatch and badmatch(fn):
712 if match(fn):
712 if match(fn):
713 yield 'b', fn
713 yield 'b', fn
714 else:
714 else:
715 self.ui.warn(_('%s: No such file in rev %s\n') % (
715 self.ui.warn(_('%s: No such file in rev %s\n') % (
716 util.pathto(self.getcwd(), fn), short(node)))
716 util.pathto(self.getcwd(), fn), short(node)))
717 else:
717 else:
718 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
718 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
719 yield src, fn
719 yield src, fn
720
720
721 def status(self, node1=None, node2=None, files=[], match=util.always,
721 def status(self, node1=None, node2=None, files=[], match=util.always,
722 wlock=None, list_ignored=False, list_clean=False):
722 wlock=None, list_ignored=False, list_clean=False):
723 """return status of files between two nodes or node and working directory
723 """return status of files between two nodes or node and working directory
724
724
725 If node1 is None, use the first dirstate parent instead.
725 If node1 is None, use the first dirstate parent instead.
726 If node2 is None, compare node1 with working directory.
726 If node2 is None, compare node1 with working directory.
727 """
727 """
728
728
729 def fcmp(fn, mf):
729 def fcmp(fn, mf):
730 t1 = self.wread(fn)
730 t1 = self.wread(fn)
731 return self.file(fn).cmp(mf.get(fn, nullid), t1)
731 return self.file(fn).cmp(mf.get(fn, nullid), t1)
732
732
733 def mfmatches(node):
733 def mfmatches(node):
734 change = self.changelog.read(node)
734 change = self.changelog.read(node)
735 mf = self.manifest.read(change[0]).copy()
735 mf = self.manifest.read(change[0]).copy()
736 for fn in mf.keys():
736 for fn in mf.keys():
737 if not match(fn):
737 if not match(fn):
738 del mf[fn]
738 del mf[fn]
739 return mf
739 return mf
740
740
741 modified, added, removed, deleted, unknown = [], [], [], [], []
741 modified, added, removed, deleted, unknown = [], [], [], [], []
742 ignored, clean = [], []
742 ignored, clean = [], []
743
743
744 compareworking = False
744 compareworking = False
745 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
745 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
746 compareworking = True
746 compareworking = True
747
747
748 if not compareworking:
748 if not compareworking:
749 # read the manifest from node1 before the manifest from node2,
749 # read the manifest from node1 before the manifest from node2,
750 # so that we'll hit the manifest cache if we're going through
750 # so that we'll hit the manifest cache if we're going through
751 # all the revisions in parent->child order.
751 # all the revisions in parent->child order.
752 mf1 = mfmatches(node1)
752 mf1 = mfmatches(node1)
753
753
754 # are we comparing the working directory?
754 # are we comparing the working directory?
755 if not node2:
755 if not node2:
756 if not wlock:
756 if not wlock:
757 try:
757 try:
758 wlock = self.wlock(wait=0)
758 wlock = self.wlock(wait=0)
759 except lock.LockException:
759 except lock.LockException:
760 wlock = None
760 wlock = None
761 (lookup, modified, added, removed, deleted, unknown,
761 (lookup, modified, added, removed, deleted, unknown,
762 ignored, clean) = self.dirstate.status(files, match,
762 ignored, clean) = self.dirstate.status(files, match,
763 list_ignored, list_clean)
763 list_ignored, list_clean)
764
764
765 # are we comparing working dir against its parent?
765 # are we comparing working dir against its parent?
766 if compareworking:
766 if compareworking:
767 if lookup:
767 if lookup:
768 # do a full compare of any files that might have changed
768 # do a full compare of any files that might have changed
769 mf2 = mfmatches(self.dirstate.parents()[0])
769 mf2 = mfmatches(self.dirstate.parents()[0])
770 for f in lookup:
770 for f in lookup:
771 if fcmp(f, mf2):
771 if fcmp(f, mf2):
772 modified.append(f)
772 modified.append(f)
773 else:
773 else:
774 clean.append(f)
774 clean.append(f)
775 if wlock is not None:
775 if wlock is not None:
776 self.dirstate.update([f], "n")
776 self.dirstate.update([f], "n")
777 else:
777 else:
778 # we are comparing working dir against non-parent
778 # we are comparing working dir against non-parent
779 # generate a pseudo-manifest for the working dir
779 # generate a pseudo-manifest for the working dir
780 # XXX: create it in dirstate.py ?
780 # XXX: create it in dirstate.py ?
781 mf2 = mfmatches(self.dirstate.parents()[0])
781 mf2 = mfmatches(self.dirstate.parents()[0])
782 for f in lookup + modified + added:
782 for f in lookup + modified + added:
783 mf2[f] = ""
783 mf2[f] = ""
784 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
784 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
785 for f in removed:
785 for f in removed:
786 if f in mf2:
786 if f in mf2:
787 del mf2[f]
787 del mf2[f]
788 else:
788 else:
789 # we are comparing two revisions
789 # we are comparing two revisions
790 mf2 = mfmatches(node2)
790 mf2 = mfmatches(node2)
791
791
792 if not compareworking:
792 if not compareworking:
793 # flush lists from dirstate before comparing manifests
793 # flush lists from dirstate before comparing manifests
794 modified, added, clean = [], [], []
794 modified, added, clean = [], [], []
795
795
796 # make sure to sort the files so we talk to the disk in a
796 # make sure to sort the files so we talk to the disk in a
797 # reasonable order
797 # reasonable order
798 mf2keys = mf2.keys()
798 mf2keys = mf2.keys()
799 mf2keys.sort()
799 mf2keys.sort()
800 for fn in mf2keys:
800 for fn in mf2keys:
801 if mf1.has_key(fn):
801 if mf1.has_key(fn):
802 if mf1.flags(fn) != mf2.flags(fn) or \
802 if mf1.flags(fn) != mf2.flags(fn) or \
803 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
803 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
804 modified.append(fn)
804 modified.append(fn)
805 elif list_clean:
805 elif list_clean:
806 clean.append(fn)
806 clean.append(fn)
807 del mf1[fn]
807 del mf1[fn]
808 else:
808 else:
809 added.append(fn)
809 added.append(fn)
810
810
811 removed = mf1.keys()
811 removed = mf1.keys()
812
812
813 # sort and return results:
813 # sort and return results:
814 for l in modified, added, removed, deleted, unknown, ignored, clean:
814 for l in modified, added, removed, deleted, unknown, ignored, clean:
815 l.sort()
815 l.sort()
816 return (modified, added, removed, deleted, unknown, ignored, clean)
816 return (modified, added, removed, deleted, unknown, ignored, clean)
817
817
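# A minimal usage sketch for the status API above, assuming `repo` is a
# localrepository instance; with no nodes given it compares the working
# directory against its first parent, as described in the docstring:
#
#   (modified, added, removed, deleted,
#    unknown, ignored, clean) = repo.status(list_ignored=True,
#                                           list_clean=True)
#
# The ignored list is only populated when list_ignored is set.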
818 def add(self, list, wlock=None):
818 def add(self, list, wlock=None):
819 if not wlock:
819 if not wlock:
820 wlock = self.wlock()
820 wlock = self.wlock()
821 for f in list:
821 for f in list:
822 p = self.wjoin(f)
822 p = self.wjoin(f)
823 if not os.path.exists(p):
823 if not os.path.exists(p):
824 self.ui.warn(_("%s does not exist!\n") % f)
824 self.ui.warn(_("%s does not exist!\n") % f)
825 elif not os.path.isfile(p):
825 elif not os.path.isfile(p):
826 self.ui.warn(_("%s not added: only files supported currently\n")
826 self.ui.warn(_("%s not added: only files supported currently\n")
827 % f)
827 % f)
828 elif self.dirstate.state(f) in 'an':
828 elif self.dirstate.state(f) in 'an':
829 self.ui.warn(_("%s already tracked!\n") % f)
829 self.ui.warn(_("%s already tracked!\n") % f)
830 else:
830 else:
831 self.dirstate.update([f], "a")
831 self.dirstate.update([f], "a")
832
832
833 def forget(self, list, wlock=None):
833 def forget(self, list, wlock=None):
834 if not wlock:
834 if not wlock:
835 wlock = self.wlock()
835 wlock = self.wlock()
836 for f in list:
836 for f in list:
837 if self.dirstate.state(f) not in 'ai':
837 if self.dirstate.state(f) not in 'ai':
838 self.ui.warn(_("%s not added!\n") % f)
838 self.ui.warn(_("%s not added!\n") % f)
839 else:
839 else:
840 self.dirstate.forget([f])
840 self.dirstate.forget([f])
841
841
842 def remove(self, list, unlink=False, wlock=None):
842 def remove(self, list, unlink=False, wlock=None):
843 if unlink:
843 if unlink:
844 for f in list:
844 for f in list:
845 try:
845 try:
846 util.unlink(self.wjoin(f))
846 util.unlink(self.wjoin(f))
847 except OSError, inst:
847 except OSError, inst:
848 if inst.errno != errno.ENOENT:
848 if inst.errno != errno.ENOENT:
849 raise
849 raise
850 if not wlock:
850 if not wlock:
851 wlock = self.wlock()
851 wlock = self.wlock()
852 for f in list:
852 for f in list:
853 p = self.wjoin(f)
853 p = self.wjoin(f)
854 if os.path.exists(p):
854 if os.path.exists(p):
855 self.ui.warn(_("%s still exists!\n") % f)
855 self.ui.warn(_("%s still exists!\n") % f)
856 elif self.dirstate.state(f) == 'a':
856 elif self.dirstate.state(f) == 'a':
857 self.dirstate.forget([f])
857 self.dirstate.forget([f])
858 elif f not in self.dirstate:
858 elif f not in self.dirstate:
859 self.ui.warn(_("%s not tracked!\n") % f)
859 self.ui.warn(_("%s not tracked!\n") % f)
860 else:
860 else:
861 self.dirstate.update([f], "r")
861 self.dirstate.update([f], "r")
862
862
863 def undelete(self, list, wlock=None):
863 def undelete(self, list, wlock=None):
864 p = self.dirstate.parents()[0]
864 p = self.dirstate.parents()[0]
865 mn = self.changelog.read(p)[0]
865 mn = self.changelog.read(p)[0]
866 m = self.manifest.read(mn)
866 m = self.manifest.read(mn)
867 if not wlock:
867 if not wlock:
868 wlock = self.wlock()
868 wlock = self.wlock()
869 for f in list:
869 for f in list:
870 if self.dirstate.state(f) not in "r":
870 if self.dirstate.state(f) not in "r":
871 self.ui.warn("%s not removed!\n" % f)
871 self.ui.warn("%s not removed!\n" % f)
872 else:
872 else:
873 t = self.file(f).read(m[f])
873 t = self.file(f).read(m[f])
874 self.wwrite(f, t)
874 self.wwrite(f, t)
875 util.set_exec(self.wjoin(f), m.execf(f))
875 util.set_exec(self.wjoin(f), m.execf(f))
876 self.dirstate.update([f], "n")
876 self.dirstate.update([f], "n")
877
877
878 def copy(self, source, dest, wlock=None):
878 def copy(self, source, dest, wlock=None):
879 p = self.wjoin(dest)
879 p = self.wjoin(dest)
880 if not os.path.exists(p):
880 if not os.path.exists(p):
881 self.ui.warn(_("%s does not exist!\n") % dest)
881 self.ui.warn(_("%s does not exist!\n") % dest)
882 elif not os.path.isfile(p):
882 elif not os.path.isfile(p):
883 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
883 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
884 else:
884 else:
885 if not wlock:
885 if not wlock:
886 wlock = self.wlock()
886 wlock = self.wlock()
887 if self.dirstate.state(dest) == '?':
887 if self.dirstate.state(dest) == '?':
888 self.dirstate.update([dest], "a")
888 self.dirstate.update([dest], "a")
889 self.dirstate.copy(source, dest)
889 self.dirstate.copy(source, dest)
890
890
891 def heads(self, start=None):
891 def heads(self, start=None):
892 heads = self.changelog.heads(start)
892 heads = self.changelog.heads(start)
893 # sort the output in rev descending order
893 # sort the output in rev descending order
894 heads = [(-self.changelog.rev(h), h) for h in heads]
894 heads = [(-self.changelog.rev(h), h) for h in heads]
895 heads.sort()
895 heads.sort()
896 return [n for (r, n) in heads]
896 return [n for (r, n) in heads]
897
897
898 # branchlookup returns a dict giving a list of branches for
898 # branchlookup returns a dict giving a list of branches for
899 # each head. A branch is defined as the tag of a node or
899 # each head. A branch is defined as the tag of a node or
900 # the branch of the node's parents. If a node has multiple
900 # the branch of the node's parents. If a node has multiple
901 # branch tags, tags are eliminated if they are visible from other
901 # branch tags, tags are eliminated if they are visible from other
902 # branch tags.
902 # branch tags.
903 #
903 #
904 # So, for this graph: a->b->c->d->e
904 # So, for this graph: a->b->c->d->e
905 # \ /
905 # \ /
906 # aa -----/
906 # aa -----/
907 # a has tag 2.6.12
907 # a has tag 2.6.12
908 # d has tag 2.6.13
908 # d has tag 2.6.13
909 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
909 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
910 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
910 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
911 # from the list.
911 # from the list.
912 #
912 #
913 # It is possible that more than one head will have the same branch tag.
913 # It is possible that more than one head will have the same branch tag.
914 # Callers need to check the result for multiple heads under the same
914 # Callers need to check the result for multiple heads under the same
915 # branch tag if that is a problem for them (i.e. checkout of a specific
915 # branch tag if that is a problem for them (i.e. checkout of a specific
916 # branch).
916 # branch).
917 #
917 #
918 # passing in a specific branch will limit the depth of the search
918 # passing in a specific branch will limit the depth of the search
919 # through the parents. It won't limit the branches returned in the
919 # through the parents. It won't limit the branches returned in the
920 # result though.
920 # result though.
921 def branchlookup(self, heads=None, branch=None):
921 def branchlookup(self, heads=None, branch=None):
922 if not heads:
922 if not heads:
923 heads = self.heads()
923 heads = self.heads()
924 headt = [ h for h in heads ]
924 headt = [ h for h in heads ]
925 chlog = self.changelog
925 chlog = self.changelog
926 branches = {}
926 branches = {}
927 merges = []
927 merges = []
928 seenmerge = {}
928 seenmerge = {}
929
929
930 # traverse the tree once for each head, recording in the branches
930 # traverse the tree once for each head, recording in the branches
931 # dict which tags are visible from this head. The branches
931 # dict which tags are visible from this head. The branches
932 # dict also records which tags are visible from each tag
932 # dict also records which tags are visible from each tag
933 # while we traverse.
933 # while we traverse.
934 while headt or merges:
934 while headt or merges:
935 if merges:
935 if merges:
936 n, found = merges.pop()
936 n, found = merges.pop()
937 visit = [n]
937 visit = [n]
938 else:
938 else:
939 h = headt.pop()
939 h = headt.pop()
940 visit = [h]
940 visit = [h]
941 found = [h]
941 found = [h]
942 seen = {}
942 seen = {}
943 while visit:
943 while visit:
944 n = visit.pop()
944 n = visit.pop()
945 if n in seen:
945 if n in seen:
946 continue
946 continue
947 pp = chlog.parents(n)
947 pp = chlog.parents(n)
948 tags = self.nodetags(n)
948 tags = self.nodetags(n)
949 if tags:
949 if tags:
950 for x in tags:
950 for x in tags:
951 if x == 'tip':
951 if x == 'tip':
952 continue
952 continue
953 for f in found:
953 for f in found:
954 branches.setdefault(f, {})[n] = 1
954 branches.setdefault(f, {})[n] = 1
955 branches.setdefault(n, {})[n] = 1
955 branches.setdefault(n, {})[n] = 1
956 break
956 break
957 if n not in found:
957 if n not in found:
958 found.append(n)
958 found.append(n)
959 if branch in tags:
959 if branch in tags:
960 continue
960 continue
961 seen[n] = 1
961 seen[n] = 1
962 if pp[1] != nullid and n not in seenmerge:
962 if pp[1] != nullid and n not in seenmerge:
963 merges.append((pp[1], [x for x in found]))
963 merges.append((pp[1], [x for x in found]))
964 seenmerge[n] = 1
964 seenmerge[n] = 1
965 if pp[0] != nullid:
965 if pp[0] != nullid:
966 visit.append(pp[0])
966 visit.append(pp[0])
967 # traverse the branches dict, eliminating branch tags from each
967 # traverse the branches dict, eliminating branch tags from each
968 # head that are visible from another branch tag for that head.
968 # head that are visible from another branch tag for that head.
969 out = {}
969 out = {}
970 viscache = {}
970 viscache = {}
971 for h in heads:
971 for h in heads:
972 def visible(node):
972 def visible(node):
973 if node in viscache:
973 if node in viscache:
974 return viscache[node]
974 return viscache[node]
975 ret = {}
975 ret = {}
976 visit = [node]
976 visit = [node]
977 while visit:
977 while visit:
978 x = visit.pop()
978 x = visit.pop()
979 if x in viscache:
979 if x in viscache:
980 ret.update(viscache[x])
980 ret.update(viscache[x])
981 elif x not in ret:
981 elif x not in ret:
982 ret[x] = 1
982 ret[x] = 1
983 if x in branches:
983 if x in branches:
984 visit[len(visit):] = branches[x].keys()
984 visit[len(visit):] = branches[x].keys()
985 viscache[node] = ret
985 viscache[node] = ret
986 return ret
986 return ret
987 if h not in branches:
987 if h not in branches:
988 continue
988 continue
989 # O(n^2), but somewhat limited. This only searches the
989 # O(n^2), but somewhat limited. This only searches the
990 # tags visible from a specific head, not all the tags in the
990 # tags visible from a specific head, not all the tags in the
991 # whole repo.
991 # whole repo.
992 for b in branches[h]:
992 for b in branches[h]:
993 vis = False
993 vis = False
994 for bb in branches[h].keys():
994 for bb in branches[h].keys():
995 if b != bb:
995 if b != bb:
996 if b in visible(bb):
996 if b in visible(bb):
997 vis = True
997 vis = True
998 break
998 break
999 if not vis:
999 if not vis:
1000 l = out.setdefault(h, [])
1000 l = out.setdefault(h, [])
1001 l[len(l):] = self.nodetags(b)
1001 l[len(l):] = self.nodetags(b)
1002 return out
1002 return out
1003
1003
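# For the example graph in the comment above branchlookup (a->b->c->d->e
# with tag 2.6.12 on a and tag 2.6.13 on d), the returned mapping would
# look roughly like
#
#   {e: ['2.6.13']}
#
# i.e. the head e keeps only the branch tag that is not visible from
# another branch tag reachable from that same head.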
1004 def branches(self, nodes):
1004 def branches(self, nodes):
1005 if not nodes:
1005 if not nodes:
1006 nodes = [self.changelog.tip()]
1006 nodes = [self.changelog.tip()]
1007 b = []
1007 b = []
1008 for n in nodes:
1008 for n in nodes:
1009 t = n
1009 t = n
1010 while 1:
1010 while 1:
1011 p = self.changelog.parents(n)
1011 p = self.changelog.parents(n)
1012 if p[1] != nullid or p[0] == nullid:
1012 if p[1] != nullid or p[0] == nullid:
1013 b.append((t, n, p[0], p[1]))
1013 b.append((t, n, p[0], p[1]))
1014 break
1014 break
1015 n = p[0]
1015 n = p[0]
1016 return b
1016 return b
1017
1017
1018 def between(self, pairs):
1018 def between(self, pairs):
1019 r = []
1019 r = []
1020
1020
1021 for top, bottom in pairs:
1021 for top, bottom in pairs:
1022 n, l, i = top, [], 0
1022 n, l, i = top, [], 0
1023 f = 1
1023 f = 1
1024
1024
1025 while n != bottom:
1025 while n != bottom:
1026 p = self.changelog.parents(n)[0]
1026 p = self.changelog.parents(n)[0]
1027 if i == f:
1027 if i == f:
1028 l.append(n)
1028 l.append(n)
1029 f = f * 2
1029 f = f * 2
1030 n = p
1030 n = p
1031 i += 1
1031 i += 1
1032
1032
1033 r.append(l)
1033 r.append(l)
1034
1034
1035 return r
1035 return r
1036
1036
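# The sampling in between() walks first parents from top towards bottom
# and records nodes at exponentially growing distances, so the discovery
# code below needs far fewer round trips than walking the branch one
# changeset at a time.  A rough sketch of the spacing on a purely linear
# history:
#
#   distances = []
#   i, f = 0, 1
#   while i < 20:          # 20 parent steps from top towards bottom
#       if i == f:
#           distances.append(i)
#           f *= 2
#       i += 1
#   # distances == [1, 2, 4, 8, 16]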
1037 def findincoming(self, remote, base=None, heads=None, force=False):
1037 def findincoming(self, remote, base=None, heads=None, force=False):
1038 """Return list of roots of the subsets of missing nodes from remote
1038 """Return list of roots of the subsets of missing nodes from remote
1039
1039
1040 If base dict is specified, assume that these nodes and their parents
1040 If base dict is specified, assume that these nodes and their parents
1041 exist on the remote side and that no child of a node of base exists
1041 exist on the remote side and that no child of a node of base exists
1042 in both remote and self.
1042 in both remote and self.
1043 Furthermore base will be updated to include the nodes that exist
1043 Furthermore base will be updated to include the nodes that exist
1044 in both self and remote but none of whose children exist in both.
1044 in both self and remote but none of whose children exist in both.
1045 If a list of heads is specified, return only nodes which are heads
1045 If a list of heads is specified, return only nodes which are heads
1046 or ancestors of these heads.
1046 or ancestors of these heads.
1047
1047
1048 All the ancestors of base are in self and in remote.
1048 All the ancestors of base are in self and in remote.
1049 All the descendants of the list returned are missing in self.
1049 All the descendants of the list returned are missing in self.
1050 (and so we know that the rest of the nodes are missing in remote, see
1050 (and so we know that the rest of the nodes are missing in remote, see
1051 outgoing)
1051 outgoing)
1052 """
1052 """
1053 m = self.changelog.nodemap
1053 m = self.changelog.nodemap
1054 search = []
1054 search = []
1055 fetch = {}
1055 fetch = {}
1056 seen = {}
1056 seen = {}
1057 seenbranch = {}
1057 seenbranch = {}
1058 if base == None:
1058 if base == None:
1059 base = {}
1059 base = {}
1060
1060
1061 if not heads:
1061 if not heads:
1062 heads = remote.heads()
1062 heads = remote.heads()
1063
1063
1064 if self.changelog.tip() == nullid:
1064 if self.changelog.tip() == nullid:
1065 base[nullid] = 1
1065 base[nullid] = 1
1066 if heads != [nullid]:
1066 if heads != [nullid]:
1067 return [nullid]
1067 return [nullid]
1068 return []
1068 return []
1069
1069
1070 # assume we're closer to the tip than the root
1070 # assume we're closer to the tip than the root
1071 # and start by examining the heads
1071 # and start by examining the heads
1072 self.ui.status(_("searching for changes\n"))
1072 self.ui.status(_("searching for changes\n"))
1073
1073
1074 unknown = []
1074 unknown = []
1075 for h in heads:
1075 for h in heads:
1076 if h not in m:
1076 if h not in m:
1077 unknown.append(h)
1077 unknown.append(h)
1078 else:
1078 else:
1079 base[h] = 1
1079 base[h] = 1
1080
1080
1081 if not unknown:
1081 if not unknown:
1082 return []
1082 return []
1083
1083
1084 req = dict.fromkeys(unknown)
1084 req = dict.fromkeys(unknown)
1085 reqcnt = 0
1085 reqcnt = 0
1086
1086
1087 # search through remote branches
1087 # search through remote branches
1088 # a 'branch' here is a linear segment of history, with four parts:
1088 # a 'branch' here is a linear segment of history, with four parts:
1089 # head, root, first parent, second parent
1089 # head, root, first parent, second parent
1090 # (a branch always has two parents (or none) by definition)
1090 # (a branch always has two parents (or none) by definition)
1091 unknown = remote.branches(unknown)
1091 unknown = remote.branches(unknown)
1092 while unknown:
1092 while unknown:
1093 r = []
1093 r = []
1094 while unknown:
1094 while unknown:
1095 n = unknown.pop(0)
1095 n = unknown.pop(0)
1096 if n[0] in seen:
1096 if n[0] in seen:
1097 continue
1097 continue
1098
1098
1099 self.ui.debug(_("examining %s:%s\n")
1099 self.ui.debug(_("examining %s:%s\n")
1100 % (short(n[0]), short(n[1])))
1100 % (short(n[0]), short(n[1])))
1101 if n[0] == nullid: # found the end of the branch
1101 if n[0] == nullid: # found the end of the branch
1102 pass
1102 pass
1103 elif n in seenbranch:
1103 elif n in seenbranch:
1104 self.ui.debug(_("branch already found\n"))
1104 self.ui.debug(_("branch already found\n"))
1105 continue
1105 continue
1106 elif n[1] and n[1] in m: # do we know the base?
1106 elif n[1] and n[1] in m: # do we know the base?
1107 self.ui.debug(_("found incomplete branch %s:%s\n")
1107 self.ui.debug(_("found incomplete branch %s:%s\n")
1108 % (short(n[0]), short(n[1])))
1108 % (short(n[0]), short(n[1])))
1109 search.append(n) # schedule branch range for scanning
1109 search.append(n) # schedule branch range for scanning
1110 seenbranch[n] = 1
1110 seenbranch[n] = 1
1111 else:
1111 else:
1112 if n[1] not in seen and n[1] not in fetch:
1112 if n[1] not in seen and n[1] not in fetch:
1113 if n[2] in m and n[3] in m:
1113 if n[2] in m and n[3] in m:
1114 self.ui.debug(_("found new changeset %s\n") %
1114 self.ui.debug(_("found new changeset %s\n") %
1115 short(n[1]))
1115 short(n[1]))
1116 fetch[n[1]] = 1 # earliest unknown
1116 fetch[n[1]] = 1 # earliest unknown
1117 for p in n[2:4]:
1117 for p in n[2:4]:
1118 if p in m:
1118 if p in m:
1119 base[p] = 1 # latest known
1119 base[p] = 1 # latest known
1120
1120
1121 for p in n[2:4]:
1121 for p in n[2:4]:
1122 if p not in req and p not in m:
1122 if p not in req and p not in m:
1123 r.append(p)
1123 r.append(p)
1124 req[p] = 1
1124 req[p] = 1
1125 seen[n[0]] = 1
1125 seen[n[0]] = 1
1126
1126
1127 if r:
1127 if r:
1128 reqcnt += 1
1128 reqcnt += 1
1129 self.ui.debug(_("request %d: %s\n") %
1129 self.ui.debug(_("request %d: %s\n") %
1130 (reqcnt, " ".join(map(short, r))))
1130 (reqcnt, " ".join(map(short, r))))
1131 for p in range(0, len(r), 10):
1131 for p in range(0, len(r), 10):
1132 for b in remote.branches(r[p:p+10]):
1132 for b in remote.branches(r[p:p+10]):
1133 self.ui.debug(_("received %s:%s\n") %
1133 self.ui.debug(_("received %s:%s\n") %
1134 (short(b[0]), short(b[1])))
1134 (short(b[0]), short(b[1])))
1135 unknown.append(b)
1135 unknown.append(b)
1136
1136
1137 # do binary search on the branches we found
1137 # do binary search on the branches we found
1138 while search:
1138 while search:
1139 n = search.pop(0)
1139 n = search.pop(0)
1140 reqcnt += 1
1140 reqcnt += 1
1141 l = remote.between([(n[0], n[1])])[0]
1141 l = remote.between([(n[0], n[1])])[0]
1142 l.append(n[1])
1142 l.append(n[1])
1143 p = n[0]
1143 p = n[0]
1144 f = 1
1144 f = 1
1145 for i in l:
1145 for i in l:
1146 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1146 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1147 if i in m:
1147 if i in m:
1148 if f <= 2:
1148 if f <= 2:
1149 self.ui.debug(_("found new branch changeset %s\n") %
1149 self.ui.debug(_("found new branch changeset %s\n") %
1150 short(p))
1150 short(p))
1151 fetch[p] = 1
1151 fetch[p] = 1
1152 base[i] = 1
1152 base[i] = 1
1153 else:
1153 else:
1154 self.ui.debug(_("narrowed branch search to %s:%s\n")
1154 self.ui.debug(_("narrowed branch search to %s:%s\n")
1155 % (short(p), short(i)))
1155 % (short(p), short(i)))
1156 search.append((p, i))
1156 search.append((p, i))
1157 break
1157 break
1158 p, f = i, f * 2
1158 p, f = i, f * 2
1159
1159
1160 # sanity check our fetch list
1160 # sanity check our fetch list
1161 for f in fetch.keys():
1161 for f in fetch.keys():
1162 if f in m:
1162 if f in m:
1163 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1163 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1164
1164
1165 if base.keys() == [nullid]:
1165 if base.keys() == [nullid]:
1166 if force:
1166 if force:
1167 self.ui.warn(_("warning: repository is unrelated\n"))
1167 self.ui.warn(_("warning: repository is unrelated\n"))
1168 else:
1168 else:
1169 raise util.Abort(_("repository is unrelated"))
1169 raise util.Abort(_("repository is unrelated"))
1170
1170
1171 self.ui.debug(_("found new changesets starting at ") +
1171 self.ui.debug(_("found new changesets starting at ") +
1172 " ".join([short(f) for f in fetch]) + "\n")
1172 " ".join([short(f) for f in fetch]) + "\n")
1173
1173
1174 self.ui.debug(_("%d total queries\n") % reqcnt)
1174 self.ui.debug(_("%d total queries\n") % reqcnt)
1175
1175
1176 return fetch.keys()
1176 return fetch.keys()
1177
1177
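# A sketch of the contract described in the docstring above, assuming
# `remote` is any repository object exposing the wire methods used here:
#
#   base = {}
#   fetch = repo.findincoming(remote, base)
#   # fetch: roots of the changesets missing locally (empty if in sync)
#   # base:  filled in with nodes known to be present on both sides
#
# pull() below is the main in-tree caller of this method.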
1178 def findoutgoing(self, remote, base=None, heads=None, force=False):
1178 def findoutgoing(self, remote, base=None, heads=None, force=False):
1179 """Return list of nodes that are roots of subsets not in remote
1179 """Return list of nodes that are roots of subsets not in remote
1180
1180
1181 If base dict is specified, assume that these nodes and their parents
1181 If base dict is specified, assume that these nodes and their parents
1182 exist on the remote side.
1182 exist on the remote side.
1183 If a list of heads is specified, return only nodes which are heads
1183 If a list of heads is specified, return only nodes which are heads
1184 or ancestors of these heads, and return a second element which
1184 or ancestors of these heads, and return a second element which
1185 contains all remote heads which get new children.
1185 contains all remote heads which get new children.
1186 """
1186 """
1187 if base == None:
1187 if base == None:
1188 base = {}
1188 base = {}
1189 self.findincoming(remote, base, heads, force=force)
1189 self.findincoming(remote, base, heads, force=force)
1190
1190
1191 self.ui.debug(_("common changesets up to ")
1191 self.ui.debug(_("common changesets up to ")
1192 + " ".join(map(short, base.keys())) + "\n")
1192 + " ".join(map(short, base.keys())) + "\n")
1193
1193
1194 remain = dict.fromkeys(self.changelog.nodemap)
1194 remain = dict.fromkeys(self.changelog.nodemap)
1195
1195
1196 # prune everything remote has from the tree
1196 # prune everything remote has from the tree
1197 del remain[nullid]
1197 del remain[nullid]
1198 remove = base.keys()
1198 remove = base.keys()
1199 while remove:
1199 while remove:
1200 n = remove.pop(0)
1200 n = remove.pop(0)
1201 if n in remain:
1201 if n in remain:
1202 del remain[n]
1202 del remain[n]
1203 for p in self.changelog.parents(n):
1203 for p in self.changelog.parents(n):
1204 remove.append(p)
1204 remove.append(p)
1205
1205
1206 # find every node whose parents have been pruned
1206 # find every node whose parents have been pruned
1207 subset = []
1207 subset = []
1208 # find every remote head that will get new children
1208 # find every remote head that will get new children
1209 updated_heads = {}
1209 updated_heads = {}
1210 for n in remain:
1210 for n in remain:
1211 p1, p2 = self.changelog.parents(n)
1211 p1, p2 = self.changelog.parents(n)
1212 if p1 not in remain and p2 not in remain:
1212 if p1 not in remain and p2 not in remain:
1213 subset.append(n)
1213 subset.append(n)
1214 if heads:
1214 if heads:
1215 if p1 in heads:
1215 if p1 in heads:
1216 updated_heads[p1] = True
1216 updated_heads[p1] = True
1217 if p2 in heads:
1217 if p2 in heads:
1218 updated_heads[p2] = True
1218 updated_heads[p2] = True
1219
1219
1220 # this is the set of all roots we have to push
1220 # this is the set of all roots we have to push
1221 if heads:
1221 if heads:
1222 return subset, updated_heads.keys()
1222 return subset, updated_heads.keys()
1223 else:
1223 else:
1224 return subset
1224 return subset
1225
1225
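# A corresponding sketch for the outgoing direction, assuming `remote`
# is the destination repository (this mirrors what push does):
#
#   base = {}
#   out = repo.findoutgoing(remote, base)
#   # out: roots of the changesets the destination does not yet have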
1226 def pull(self, remote, heads=None, force=False, lock=None):
1226 def pull(self, remote, heads=None, force=False, lock=None):
1227 mylock = False
1227 mylock = False
1228 if not lock:
1228 if not lock:
1229 lock = self.lock()
1229 lock = self.lock()
1230 mylock = True
1230 mylock = True
1231
1231
1232 try:
1232 try:
1233 fetch = self.findincoming(remote, force=force)
1233 fetch = self.findincoming(remote, force=force)
1234 if fetch == [nullid]:
1234 if fetch == [nullid]:
1235 self.ui.status(_("requesting all changes\n"))
1235 self.ui.status(_("requesting all changes\n"))
1236
1236
1237 if not fetch:
1237 if not fetch:
1238 self.ui.status(_("no changes found\n"))
1238 self.ui.status(_("no changes found\n"))
1239 return 0
1239 return 0
1240
1240
1241 if heads is None:
1241 if heads is None:
1242 cg = remote.changegroup(fetch, 'pull')
1242 cg = remote.changegroup(fetch, 'pull')
1243 else:
1243 else:
1244 if 'changegroupsubset' not in remote.capabilities:
1245 raise util.Abort(_("Partial pull cannot be done because the other repository doesn't support changegroupsubset."))
1244 cg = remote.changegroupsubset(fetch, heads, 'pull')
1246 cg = remote.changegroupsubset(fetch, heads, 'pull')
1245 return self.addchangegroup(cg, 'pull', remote.url())
1247 return self.addchangegroup(cg, 'pull', remote.url())
1246 finally:
1248 finally:
1247 if mylock:
1249 if mylock:
1248 lock.release()
1250 lock.release()
1249
1251
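# A rough sketch of how a caller such as 'hg pull -r' reaches the
# capability check above, assuming `other` is the remote repository and
# `revs` already holds the resolved binary nodes (names illustrative,
# not taken from commands.py):
#
#   repo.pull(other, heads=revs, force=opts['force'])
#
# With heads=None the full changegroup path is taken and no
# changegroupsubset support is required of the remote.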
1250 def push(self, remote, force=False, revs=None):
1252 def push(self, remote, force=False, revs=None):
1251 # there are two ways to push to remote repo:
1253 # there are two ways to push to remote repo:
1252 #
1254 #
1253 # addchangegroup assumes local user can lock remote
1255 # addchangegroup assumes local user can lock remote
1254 # repo (local filesystem, old ssh servers).
1256 # repo (local filesystem, old ssh servers).
1255 #
1257 #
1256 # unbundle assumes local user cannot lock remote repo (new ssh
1258 # unbundle assumes local user cannot lock remote repo (new ssh
1257 # servers, http servers).
1259 # servers, http servers).
1258
1260
1259 if remote.capable('unbundle'):
1261 if remote.capable('unbundle'):
1260 return self.push_unbundle(remote, force, revs)
1262 return self.push_unbundle(remote, force, revs)
1261 return self.push_addchangegroup(remote, force, revs)
1263 return self.push_addchangegroup(remote, force, revs)
1262
1264
1263 def prepush(self, remote, force, revs):
1265 def prepush(self, remote, force, revs):
1264 base = {}
1266 base = {}
1265 remote_heads = remote.heads()
1267 remote_heads = remote.heads()
1266 inc = self.findincoming(remote, base, remote_heads, force=force)
1268 inc = self.findincoming(remote, base, remote_heads, force=force)
1267 if not force and inc:
1269 if not force and inc:
1268 self.ui.warn(_("abort: unsynced remote changes!\n"))
1270 self.ui.warn(_("abort: unsynced remote changes!\n"))
1269 self.ui.status(_("(did you forget to sync?"
1271 self.ui.status(_("(did you forget to sync?"
1270 " use push -f to force)\n"))
1272 " use push -f to force)\n"))
1271 return None, 1
1273 return None, 1
1272
1274
1273 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1275 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1274 if revs is not None:
1276 if revs is not None:
1275 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1277 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1276 else:
1278 else:
1277 bases, heads = update, self.changelog.heads()
1279 bases, heads = update, self.changelog.heads()
1278
1280
1279 if not bases:
1281 if not bases:
1280 self.ui.status(_("no changes found\n"))
1282 self.ui.status(_("no changes found\n"))
1281 return None, 1
1283 return None, 1
1282 elif not force:
1284 elif not force:
1283 # FIXME we don't properly detect creation of new heads
1285 # FIXME we don't properly detect creation of new heads
1284 # in the push -r case, assume the user knows what he's doing
1286 # in the push -r case, assume the user knows what he's doing
1285 if not revs and len(remote_heads) < len(heads) \
1287 if not revs and len(remote_heads) < len(heads) \
1286 and remote_heads != [nullid]:
1288 and remote_heads != [nullid]:
1287 self.ui.warn(_("abort: push creates new remote branches!\n"))
1289 self.ui.warn(_("abort: push creates new remote branches!\n"))
1288 self.ui.status(_("(did you forget to merge?"
1290 self.ui.status(_("(did you forget to merge?"
1289 " use push -f to force)\n"))
1291 " use push -f to force)\n"))
1290 return None, 1
1292 return None, 1
1291
1293
1292 if revs is None:
1294 if revs is None:
1293 cg = self.changegroup(update, 'push')
1295 cg = self.changegroup(update, 'push')
1294 else:
1296 else:
1295 cg = self.changegroupsubset(update, revs, 'push')
1297 cg = self.changegroupsubset(update, revs, 'push')
1296 return cg, remote_heads
1298 return cg, remote_heads
1297
1299
1298 def push_addchangegroup(self, remote, force, revs):
1300 def push_addchangegroup(self, remote, force, revs):
1299 lock = remote.lock()
1301 lock = remote.lock()
1300
1302
1301 ret = self.prepush(remote, force, revs)
1303 ret = self.prepush(remote, force, revs)
1302 if ret[0] is not None:
1304 if ret[0] is not None:
1303 cg, remote_heads = ret
1305 cg, remote_heads = ret
1304 return remote.addchangegroup(cg, 'push', self.url())
1306 return remote.addchangegroup(cg, 'push', self.url())
1305 return ret[1]
1307 return ret[1]
1306
1308
1307 def push_unbundle(self, remote, force, revs):
1309 def push_unbundle(self, remote, force, revs):
1308 # local repo finds heads on server, finds out what revs it
1310 # local repo finds heads on server, finds out what revs it
1309 # must push. once revs transferred, if server finds it has
1311 # must push. once revs transferred, if server finds it has
1310 # different heads (someone else won commit/push race), server
1312 # different heads (someone else won commit/push race), server
1311 # aborts.
1313 # aborts.
1312
1314
1313 ret = self.prepush(remote, force, revs)
1315 ret = self.prepush(remote, force, revs)
1314 if ret[0] is not None:
1316 if ret[0] is not None:
1315 cg, remote_heads = ret
1317 cg, remote_heads = ret
1316 if force: remote_heads = ['force']
1318 if force: remote_heads = ['force']
1317 return remote.unbundle(cg, remote_heads, 'push')
1319 return remote.unbundle(cg, remote_heads, 'push')
1318 return ret[1]
1320 return ret[1]
1319
1321
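# The race protection sketched in the comment above works roughly like
# this on the receiving side, assuming the unbundle capability:
#
#   their_heads = heads the client saw, sent along with the bundle
#   if their_heads not in (['force'], self.heads()):
#       abort  # the repository changed while the client was pushing
#
# so a competing push that lands first changes self.heads() and makes
# the late unbundle fail instead of silently creating new heads.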
1320 def changegroupsubset(self, bases, heads, source):
1322 def changegroupsubset(self, bases, heads, source):
1321 """This function generates a changegroup consisting of all the nodes
1323 """This function generates a changegroup consisting of all the nodes
1322 that are descendants of any of the bases, and ancestors of any of
1324 that are descendants of any of the bases, and ancestors of any of
1323 the heads.
1325 the heads.
1324
1326
1325 It is fairly complex as determining which filenodes and which
1327 It is fairly complex as determining which filenodes and which
1326 manifest nodes need to be included for the changeset to be complete
1328 manifest nodes need to be included for the changeset to be complete
1327 is non-trivial.
1329 is non-trivial.
1328
1330
1329 Another wrinkle is doing the reverse, figuring out which changeset in
1331 Another wrinkle is doing the reverse, figuring out which changeset in
1330 the changegroup a particular filenode or manifestnode belongs to."""
1332 the changegroup a particular filenode or manifestnode belongs to."""
1331
1333
1332 self.hook('preoutgoing', throw=True, source=source)
1334 self.hook('preoutgoing', throw=True, source=source)
1333
1335
1334 # Set up some initial variables
1336 # Set up some initial variables
1335 # Make it easy to refer to self.changelog
1337 # Make it easy to refer to self.changelog
1336 cl = self.changelog
1338 cl = self.changelog
1337 # msng is short for missing - compute the list of changesets in this
1339 # msng is short for missing - compute the list of changesets in this
1338 # changegroup.
1340 # changegroup.
1339 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1341 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1340 # Some bases may turn out to be superfluous, and some heads may be
1342 # Some bases may turn out to be superfluous, and some heads may be
1341 # too. nodesbetween will return the minimal set of bases and heads
1343 # too. nodesbetween will return the minimal set of bases and heads
1342 # necessary to re-create the changegroup.
1344 # necessary to re-create the changegroup.
1343
1345
1344 # Known heads are the list of heads that it is assumed the recipient
1346 # Known heads are the list of heads that it is assumed the recipient
1345 # of this changegroup will know about.
1347 # of this changegroup will know about.
1346 knownheads = {}
1348 knownheads = {}
1347 # We assume that all parents of bases are known heads.
1349 # We assume that all parents of bases are known heads.
1348 for n in bases:
1350 for n in bases:
1349 for p in cl.parents(n):
1351 for p in cl.parents(n):
1350 if p != nullid:
1352 if p != nullid:
1351 knownheads[p] = 1
1353 knownheads[p] = 1
1352 knownheads = knownheads.keys()
1354 knownheads = knownheads.keys()
1353 if knownheads:
1355 if knownheads:
1354 # Now that we know what heads are known, we can compute which
1356 # Now that we know what heads are known, we can compute which
1355 # changesets are known. The recipient must know about all
1357 # changesets are known. The recipient must know about all
1356 # changesets required to reach the known heads from the null
1358 # changesets required to reach the known heads from the null
1357 # changeset.
1359 # changeset.
1358 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1360 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1359 junk = None
1361 junk = None
1360 # Transform the list into an ersatz set.
1362 # Transform the list into an ersatz set.
1361 has_cl_set = dict.fromkeys(has_cl_set)
1363 has_cl_set = dict.fromkeys(has_cl_set)
1362 else:
1364 else:
1363 # If there were no known heads, the recipient cannot be assumed to
1365 # If there were no known heads, the recipient cannot be assumed to
1364 # know about any changesets.
1366 # know about any changesets.
1365 has_cl_set = {}
1367 has_cl_set = {}
1366
1368
1367 # Make it easy to refer to self.manifest
1369 # Make it easy to refer to self.manifest
1368 mnfst = self.manifest
1370 mnfst = self.manifest
1369 # We don't know which manifests are missing yet
1371 # We don't know which manifests are missing yet
1370 msng_mnfst_set = {}
1372 msng_mnfst_set = {}
1371 # Nor do we know which filenodes are missing.
1373 # Nor do we know which filenodes are missing.
1372 msng_filenode_set = {}
1374 msng_filenode_set = {}
1373
1375
1374 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1376 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1375 junk = None
1377 junk = None
1376
1378
1377 # A changeset always belongs to itself, so the changenode lookup
1379 # A changeset always belongs to itself, so the changenode lookup
1378 # function for a changenode is identity.
1380 # function for a changenode is identity.
1379 def identity(x):
1381 def identity(x):
1380 return x
1382 return x
1381
1383
1382 # A function generating function. Sets up an environment for the
1384 # A function generating function. Sets up an environment for the
1383 # inner function.
1385 # inner function.
1384 def cmp_by_rev_func(revlog):
1386 def cmp_by_rev_func(revlog):
1385 # Compare two nodes by their revision number in the environment's
1387 # Compare two nodes by their revision number in the environment's
1386 # revision history. Since the revision number both represents the
1388 # revision history. Since the revision number both represents the
1387 # most efficient order to read the nodes in, and represents a
1389 # most efficient order to read the nodes in, and represents a
1388 # topological sorting of the nodes, this function is often useful.
1390 # topological sorting of the nodes, this function is often useful.
1389 def cmp_by_rev(a, b):
1391 def cmp_by_rev(a, b):
1390 return cmp(revlog.rev(a), revlog.rev(b))
1392 return cmp(revlog.rev(a), revlog.rev(b))
1391 return cmp_by_rev
1393 return cmp_by_rev
1392
1394
1393 # If we determine that a particular file or manifest node must be a
1395 # If we determine that a particular file or manifest node must be a
1394 # node that the recipient of the changegroup will already have, we can
1396 # node that the recipient of the changegroup will already have, we can
1395 # also assume the recipient will have all the parents. This function
1397 # also assume the recipient will have all the parents. This function
1396 # prunes them from the set of missing nodes.
1398 # prunes them from the set of missing nodes.
1397 def prune_parents(revlog, hasset, msngset):
1399 def prune_parents(revlog, hasset, msngset):
1398 haslst = hasset.keys()
1400 haslst = hasset.keys()
1399 haslst.sort(cmp_by_rev_func(revlog))
1401 haslst.sort(cmp_by_rev_func(revlog))
1400 for node in haslst:
1402 for node in haslst:
1401 parentlst = [p for p in revlog.parents(node) if p != nullid]
1403 parentlst = [p for p in revlog.parents(node) if p != nullid]
1402 while parentlst:
1404 while parentlst:
1403 n = parentlst.pop()
1405 n = parentlst.pop()
1404 if n not in hasset:
1406 if n not in hasset:
1405 hasset[n] = 1
1407 hasset[n] = 1
1406 p = [p for p in revlog.parents(n) if p != nullid]
1408 p = [p for p in revlog.parents(n) if p != nullid]
1407 parentlst.extend(p)
1409 parentlst.extend(p)
1408 for n in hasset:
1410 for n in hasset:
1409 msngset.pop(n, None)
1411 msngset.pop(n, None)
1410
1412
1411 # This is a function generating function used to set up an environment
1413 # This is a function generating function used to set up an environment
1412 # for the inner function to execute in.
1414 # for the inner function to execute in.
1413 def manifest_and_file_collector(changedfileset):
1415 def manifest_and_file_collector(changedfileset):
1414 # This is an information gathering function that gathers
1416 # This is an information gathering function that gathers
1415 # information from each changeset node that goes out as part of
1417 # information from each changeset node that goes out as part of
1416 # the changegroup. The information gathered is a list of which
1418 # the changegroup. The information gathered is a list of which
1417 # manifest nodes are potentially required (the recipient may
1419 # manifest nodes are potentially required (the recipient may
1418 # already have them) and total list of all files which were
1420 # already have them) and total list of all files which were
1419 # changed in any changeset in the changegroup.
1421 # changed in any changeset in the changegroup.
1420 #
1422 #
1421 # We also remember, for each manifest, the first changenode we saw it
1423 # We also remember, for each manifest, the first changenode we saw it
1422 # referenced by, so we can later determine which changenode 'owns'
1424 # referenced by, so we can later determine which changenode 'owns'
1423 # the manifest.
1425 # the manifest.
1424 def collect_manifests_and_files(clnode):
1426 def collect_manifests_and_files(clnode):
1425 c = cl.read(clnode)
1427 c = cl.read(clnode)
1426 for f in c[3]:
1428 for f in c[3]:
1427 # This is to make sure we only have one instance of each
1429 # This is to make sure we only have one instance of each
1428 # filename string for each filename.
1430 # filename string for each filename.
1429 changedfileset.setdefault(f, f)
1431 changedfileset.setdefault(f, f)
1430 msng_mnfst_set.setdefault(c[0], clnode)
1432 msng_mnfst_set.setdefault(c[0], clnode)
1431 return collect_manifests_and_files
1433 return collect_manifests_and_files
1432
1434
1433 # Figure out which manifest nodes (of the ones we think might be part
1435 # Figure out which manifest nodes (of the ones we think might be part
1434 # of the changegroup) the recipient must know about and remove them
1436 # of the changegroup) the recipient must know about and remove them
1435 # from the changegroup.
1437 # from the changegroup.
1436 def prune_manifests():
1438 def prune_manifests():
1437 has_mnfst_set = {}
1439 has_mnfst_set = {}
1438 for n in msng_mnfst_set:
1440 for n in msng_mnfst_set:
1439 # If a 'missing' manifest thinks it belongs to a changenode
1441 # If a 'missing' manifest thinks it belongs to a changenode
1440 # the recipient is assumed to have, obviously the recipient
1442 # the recipient is assumed to have, obviously the recipient
1441 # must have that manifest.
1443 # must have that manifest.
1442 linknode = cl.node(mnfst.linkrev(n))
1444 linknode = cl.node(mnfst.linkrev(n))
1443 if linknode in has_cl_set:
1445 if linknode in has_cl_set:
1444 has_mnfst_set[n] = 1
1446 has_mnfst_set[n] = 1
1445 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1447 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1446
1448
1447 # Use the information collected in collect_manifests_and_files to say
1449 # Use the information collected in collect_manifests_and_files to say
1448 # which changenode any manifestnode belongs to.
1450 # which changenode any manifestnode belongs to.
1449 def lookup_manifest_link(mnfstnode):
1451 def lookup_manifest_link(mnfstnode):
1450 return msng_mnfst_set[mnfstnode]
1452 return msng_mnfst_set[mnfstnode]
1451
1453
1452 # A function generating function that sets up the initial environment
1454 # A function generating function that sets up the initial environment
1453 # for the inner function.
1455 # for the inner function.
1454 def filenode_collector(changedfiles):
1456 def filenode_collector(changedfiles):
1455 next_rev = [0]
1457 next_rev = [0]
1456 # This gathers information from each manifestnode included in the
1458 # This gathers information from each manifestnode included in the
1457 # changegroup about which filenodes the manifest node references
1459 # changegroup about which filenodes the manifest node references
1458 # so we can include those in the changegroup too.
1460 # so we can include those in the changegroup too.
1459 #
1461 #
1460 # It also remembers which changenode each filenode belongs to. It
1462 # It also remembers which changenode each filenode belongs to. It
1461 # does this by assuming the a filenode belongs to the changenode
1463 # does this by assuming the a filenode belongs to the changenode
1462 # the first manifest that references it belongs to.
1464 # the first manifest that references it belongs to.
1463 def collect_msng_filenodes(mnfstnode):
1465 def collect_msng_filenodes(mnfstnode):
1464 r = mnfst.rev(mnfstnode)
1466 r = mnfst.rev(mnfstnode)
1465 if r == next_rev[0]:
1467 if r == next_rev[0]:
1466 # If the last rev we looked at was the one just previous,
1468 # If the last rev we looked at was the one just previous,
1467 # we only need to see a diff.
1469 # we only need to see a diff.
1468 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1470 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1469 # For each line in the delta
1471 # For each line in the delta
1470 for dline in delta.splitlines():
1472 for dline in delta.splitlines():
1471 # get the filename and filenode for that line
1473 # get the filename and filenode for that line
1472 f, fnode = dline.split('\0')
1474 f, fnode = dline.split('\0')
1473 fnode = bin(fnode[:40])
1475 fnode = bin(fnode[:40])
1474 f = changedfiles.get(f, None)
1476 f = changedfiles.get(f, None)
1475 # And if the file is in the list of files we care
1477 # And if the file is in the list of files we care
1476 # about.
1478 # about.
1477 if f is not None:
1479 if f is not None:
1478 # Get the changenode this manifest belongs to
1480 # Get the changenode this manifest belongs to
1479 clnode = msng_mnfst_set[mnfstnode]
1481 clnode = msng_mnfst_set[mnfstnode]
1480 # Create the set of filenodes for the file if
1482 # Create the set of filenodes for the file if
1481 # there isn't one already.
1483 # there isn't one already.
1482 ndset = msng_filenode_set.setdefault(f, {})
1484 ndset = msng_filenode_set.setdefault(f, {})
1483 # And set the filenode's changelog node to the
1485 # And set the filenode's changelog node to the
1484 # manifest's if it hasn't been set already.
1486 # manifest's if it hasn't been set already.
1485 ndset.setdefault(fnode, clnode)
1487 ndset.setdefault(fnode, clnode)
1486 else:
1488 else:
1487 # Otherwise we need a full manifest.
1489 # Otherwise we need a full manifest.
1488 m = mnfst.read(mnfstnode)
1490 m = mnfst.read(mnfstnode)
1489 # For every file in we care about.
1491 # For every file in we care about.
1490 for f in changedfiles:
1492 for f in changedfiles:
1491 fnode = m.get(f, None)
1493 fnode = m.get(f, None)
1492 # If it's in the manifest
1494 # If it's in the manifest
1493 if fnode is not None:
1495 if fnode is not None:
1494 # See comments above.
1496 # See comments above.
1495 clnode = msng_mnfst_set[mnfstnode]
1497 clnode = msng_mnfst_set[mnfstnode]
1496 ndset = msng_filenode_set.setdefault(f, {})
1498 ndset = msng_filenode_set.setdefault(f, {})
1497 ndset.setdefault(fnode, clnode)
1499 ndset.setdefault(fnode, clnode)
1498 # Remember the revision we hope to see next.
1500 # Remember the revision we hope to see next.
1499 next_rev[0] = r + 1
1501 next_rev[0] = r + 1
1500 return collect_msng_filenodes
1502 return collect_msng_filenodes
1501
1503
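        # Illustrative aside: each line of the manifest delta parsed above has
        # the form "<filename>\0<40 hex characters of filenode>", which is why
        # collect_msng_filenodes can split on '\0' and run bin() on the first
        # 40 characters; a line might look like "dir/file.txt" + "\0" + "ab" * 20.
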
        # We have a list of filenodes we think we need for a file; let's
        # remove all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generating function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())

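    # Illustrative sketch (assumed caller; names here are examples only): the
    # chunkbuffer returned above behaves like a read-only file, so code that
    # writes a bundle might simply copy it out in fixed-size pieces:
    #
    #     cg = repo.changegroupsubset(bases, heads, 'bundle')
    #     while True:
    #         piece = cg.read(4096)
    #         if not piece:
    #             break
    #         outfile.write(piece)
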
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])

        def identity(x):
            return x

        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())

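    # Illustrative usage (assumed caller): given base nodes the recipient is
    # known to have, something like
    #
    #     cg = repo.changegroup([basenode], 'push')
    #
    # yields a stream of the changesets descending from those bases, relying
    # on the docstring's assumption that the recipient already has everything
    # not included.
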
    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.
        returns number of heads modified or added + 1."""

        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.opener, self.changelog.version)

            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.opener, self.changelog.version)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in range(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        return newheads - oldheads + 1


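    # Illustrative aside: the hooks fired above run in the order
    # prechangegroup -> pretxnchangegroup (still inside the transaction, so it
    # can veto the whole group) -> changegroup -> one incoming hook per added
    # changeset.  A user could observe incoming changesets with an hgrc entry
    # such as (example configuration only):
    #
    #     [hooks]
    #     incoming = hg log -r $HG_NODE
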
    def stream_in(self, remote):
        fp = remote.stream_out()
        resp = int(fp.readline())
        if resp != 0:
            raise util.Abort(_('operation forbidden by server'))
        self.ui.status(_('streaming all changes\n'))
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        start = time.time()
        for i in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            size = int(size)
            self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
            ofp = self.opener(name, 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))
        self.reload()
        return len(self.heads()) + 1

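    # Illustrative aside: the stream consumed above is line framed -- a status
    # line ("0" on success), a "<file count> <total bytes>" summary line, and
    # then one "<store path>\0<size>" header per file, each followed by
    # exactly <size> bytes of raw revlog data.  A two-file stream might begin
    # "0\n2 8392\n" followed by "data/foo.i\x004196\n" and 4196 bytes of data
    # (values made up for illustration).
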
    def clone(self, remote, heads=[], stream=False):
        '''clone remote repository.

        keyword arguments:
        heads: list of revs to clone (forces use of pull)
        stream: use streaming clone if possible'''

        # now, all clients that can request uncompressed clones can
        # read repo formats supported by all servers that can serve
        # them.

        # if revlog format changes, client will have to check version
        # and format flags on "stream" capability, and use
        # uncompressed only if compatible.

        if stream and not heads and remote.capable('stream'):
            return self.stream_in(remote)
        return self.pull(remote, heads)

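    # Illustrative usage (assumed caller): a freshly created local repository
    # might be populated with
    #
    #     dest.clone(remote, stream=True)
    #
    # which takes the streaming path only when no specific heads are wanted
    # and the server advertises the 'stream' capability, and otherwise falls
    # back to a normal pull as shown above.
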
# used to avoid circular references so destructors work
def aftertrans(base):
    p = base
    def a():
        util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
        util.rename(os.path.join(p, "journal.dirstate"),
                    os.path.join(p, "undo.dirstate"))
    return a

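# Illustrative aside (assumed call site; the real one may differ): the closure
# returned by aftertrans() is meant to be handed to a transaction as its
# "after" callback, e.g.
#
#     tr = transaction.transaction(ui.warn, opener, "journal", aftertrans(path))
#
# so that a successfully closed transaction renames the journal files into
# the undo files.
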
def instance(ui, path, create):
    return localrepository(ui, util.drop_scheme('file', path), create)

def islocal(path):
    return True
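
# Illustrative aside: instance() and islocal() form the small interface that
# the repository-opening code is assumed to use when picking an implementation
# for a path -- instance() constructs the local repository object for plain
# filesystem (or file://) paths, and islocal() reports that such paths support
# purely local operations.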