New UnexpectedOutput exception to catch server errors in localrepo.stream_in...
Thomas Arendsen Hein
r3564:eda9e7c9 default
@@ -1,3548 +1,3557 @@
# commands.py - command processing for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
demandload(globals(), "fnmatch difflib patch random signal tempfile time")
demandload(globals(), "traceback errno socket version struct atexit sets bz2")
demandload(globals(), "archival cStringIO changegroup")
demandload(globals(), "cmdutil hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


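    # Illustration (not from the original file): for the forward case,
    # increasing_windows(0, 30) yields (0, 8), (8, 16), (24, 6); the caller
    # slices revs[i:i+window], so those windows cover revs 0..29 with the
    # window size doubling each step until it reaches sizelimit.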
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if repo.changelog.count() == 0:
        return [], False, matchfn

    if follow:
        defrange = '%s:0' % repo.changectx().rev()
    else:
        defrange = 'tip:0'
    revs = map(int, cmdutil.revrange(ui, repo, opts['rev'] or [defrange]))
    wanted = {}
    slowpath = anypats
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    class followfilter:
        def __init__(self, onlyfirst=False):
            self.startrev = -1
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))

            if self.startrev == -1:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x) and wanted.has_key(x):
                del wanted[x]

    def iterate():
        if follow and not files:
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn

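# A minimal sketch (not part of the original file) of how a caller typically
# consumes walkchangerevs(), e.g. from a log-style command; ui, repo, pats
# and opts are assumed to come from the calling command:
#
#   changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
#   for st, rev, fns in changeiter:
#       if st == 'window':
#           pass                      # reset any per-window state here
#       elif st == 'add':
#           changes = getchange(rev)  # gather data for rev; fns lists files
#       elif st == 'iter':
#           ui.write("%d\n" % rev)    # display in the requested order
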
def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists") % filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        if compress:
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
            fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)

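# Usage sketch (not part of the original file): write_bundle() is fed a
# changegroup such as the one produced in bundle() below, e.g.
#
#   cg = repo.changegroup(nodes, 'bundle')
#   write_bundle(cg, 'changes.hg')       # bz2-compressed, header "HG10"
#   # ...or, to skip compression (header "HG10UN") and use a temp file:
#   # write_bundle(cg, compress=False)
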
def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None, copies=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        hexfunc = self.ui.debugflag and hex or short

        parents = [(log.rev(p), hexfunc(p)) for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        if branch:
            self.ui.status(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3]:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))
        if copies:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.note(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            extraitems = extra.items()
            extraitems.sort()
            for key, value in extraitems:
                self.ui.debug(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")

def show_changeset(ui, repo, opts):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    tmpl = opts.get('template')
    mapfile = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        mapfile = opts.get('style')
        # ui settings
        if not mapfile:
            tmpl = ui.config('ui', 'logtemplate')
            if tmpl:
                tmpl = templater.parsestring(tmpl)
            else:
                mapfile = ui.config('ui', 'style')

    if tmpl or mapfile:
        if mapfile:
            if not os.path.split(mapfile)[0]:
                mapname = (templater.templatepath('map-cmdline.' + mapfile)
                           or templater.templatepath(mapfile))
                if mapname: mapfile = mapname
        try:
            t = templater.changeset_templater(ui, repo, mapfile)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo)

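# Configuration sketch (not part of the original file): the precedence above
# means a [ui] section like the following is only consulted when neither
# --template nor --style is given on the command line; the template string
# and style name are illustrative, using keyword syntax as documented in
# later Mercurial releases:
#
#   [ui]
#   logtemplate = {rev}:{node|short} {desc|firstline}\n
#   # or: style = compact
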
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def helpcmd(name):
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helpext(name):
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        try:
            helpcmd(name)
        except UnknownCommand:
            helpext(name)

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    sim = float(opts.get('similarity') or 0)
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)

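# Usage sketch (not part of the original file): "hg addremove -s 75" treats
# a removed/added file pair as a rename when their contents are at least 75%
# similar; the default of 0 disables rename detection entirely.
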
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)

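# Usage sketch (not part of the original file); the revision, prefix and
# file names are placeholders:
#
#   hg archive -t tgz -r 1.0 -p myproj-1.0 ../myproj-1.0.tar.gz
#   hg archive ../snapshot        # "files" archive of the working parent
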
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s' %
                               (short(p), short(node))))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            n = _lookup(repo, hex(op1))
            hg.merge(repo, n)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

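# Usage sketch (not part of the original file); REV is a placeholder:
#
#   hg backout --merge REV    # commit the reversal, then merge it with the
#                             # previous working-directory parent
#   hg backout REV            # leave the new head to be merged by hand
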
def branch(ui, repo, label=None):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.
    """

    if label is not None:
        repo.opener("branch", "w").write(label)
    else:
        b = repo.workingctx().branch()
        if b:
            ui.write("%s\n" % b)

def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    b = repo.branchtags()
    l = [(-repo.changelog.rev(n), n, t) for t,n in b.items()]
    l.sort()
    for r, n, t in l:
        hexfunc = ui.debugflag and hex or short
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has_set = sets.Set(base)
        for n in base:
            has_set.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = sets.Set(visit)
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n)
                       if p != nullid and p not in has_set]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen.add(p)
                        visit.append(p)
    else:
        setremoteconfig(ui, opts)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    write_bundle(cg, fname)

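# Usage sketch (not part of the original file); paths and revisions are
# placeholders:
#
#   hg bundle changes.hg ../other-repo   # changesets missing from other-repo
#   hg bundle --base REV changes.hg      # no destination; assume it has REV
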
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is
    used, or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repository root
    %p   root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())

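# Illustrative sketch (not part of the original source): cmdutil.make_file()
# does the real --output expansion; this standalone helper only shows how the
# three additions documented above (%s, %d, %p) derive from one
# repository-relative path.  The function name is hypothetical.
def _sketch_expand_output(fmt, repo_relative_path):
    """Expand %s, %d and %p in an --output format string for one file."""
    import os
    replacements = {
        '%s': os.path.basename(repo_relative_path),
        '%d': os.path.dirname(repo_relative_path) or '.',
        '%p': repo_relative_path,
    }
    out = fmt
    for key, value in replacements.items():
        out = out.replace(key, value)
    return out          # the real code also handles %% and the export keys

# e.g. _sketch_expand_output('%d/%s.orig', 'src/commands.py')
#      -> 'src/commands.py.orig'
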
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

      $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc will be created on the remote side. Look at the help text
    for the pull command for important details about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    hg.clone(ui, ui.expandpath(source), dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])

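# Illustrative sketch (not part of the original source): the hardlink fast
# path described above is only possible when source and destination sit on
# the same filesystem.  Under the assumption that both paths already exist on
# a POSIX system, comparing the st_dev field of os.stat() is one way to check:
def _sketch_same_filesystem(path_a, path_b):
    """Return True if both existing paths live on the same filesystem."""
    import os
    return os.stat(path_a).st_dev == os.stat(path_b).st_dev
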
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        modified, added, removed = repo.status(files=fns, match=match)[:3]
        files = modified + added + removed
    else:
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))

def docopy(ui, repo, pats, opts, wlock):
    # called with the repo lock held
    cwd = repo.getcwd()
    errors = 0
    copied = []
    targets = {}

    def okaytocopy(abs, rel, exact):
        reasons = {'?': _('is not managed'),
                   'a': _('has been marked for add'),
                   'r': _('has been marked for remove')}
        state = repo.dirstate.state(abs)
        reason = reasons.get(state)
        if reason:
            if state == 'a':
                origsrc = repo.dirstate.copied(abs)
                if origsrc is not None:
                    return origsrc
            if exact:
                ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
        else:
            return abs

    def copy(origsrc, abssrc, relsrc, target, exact):
        abstarget = util.canonpath(repo.root, cwd, target)
        reltarget = util.pathto(cwd, abstarget)
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, abssrc, prevsrc))
            return
        if (not opts['after'] and os.path.exists(reltarget) or
            opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return
            if not opts['after'] and not opts.get('dry_run'):
                os.unlink(reltarget)
        if opts['after']:
            if not os.path.exists(reltarget):
                return
        else:
            targetdir = os.path.dirname(reltarget) or '.'
            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                os.makedirs(targetdir)
            try:
                restore = repo.dirstate.state(abstarget) == 'r'
                if restore and not opts.get('dry_run'):
                    repo.undelete([abstarget], wlock)
                try:
                    if not opts.get('dry_run'):
                        shutil.copyfile(relsrc, reltarget)
                        shutil.copymode(relsrc, reltarget)
                    restore = False
                finally:
                    if restore:
                        repo.remove([abstarget], wlock)
            except shutil.Error, inst:
                raise util.Abort(str(inst))
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    errors += 1
                    return
        if ui.verbose or not exact:
            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
        targets[abstarget] = abssrc
        if abstarget != origsrc and not opts.get('dry_run'):
            repo.copy(origsrc, abstarget, wlock)
        copied.append((abssrc, relsrc, exact))

    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, p[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest, os.path.basename(p))
        else:
            res = lambda p: dest
        return res

    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest, os.path.basename(p))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, s[0][striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest, p[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest, os.path.basename(p))
                else:
                    res = lambda p: dest
        return res


    pats = list(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
        raise util.Abort(_('with multiple sources, destination must be an '
                           'existing directory'))
    if opts['after']:
        tfn = targetpathafterfn
    else:
        tfn = targetpathfn
    copylist = []
    for pat in pats:
        srcs = []
        for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
            origsrc = okaytocopy(abssrc, relsrc, exact)
            if origsrc:
                srcs.append((origsrc, abssrc, relsrc, exact))
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    for targetpath, srcs in copylist:
        for origsrc, abssrc, relsrc, exact in srcs:
            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)

    if errors:
        ui.warn(_('(consider using --after)\n'))
    return errors, copied

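# Illustrative sketch (not part of the original source): targetpathfn above
# rewrites each source path by stripping a common directory prefix (striplen)
# and re-rooting the remainder under dest.  The same idea in isolation, with
# hypothetical names and plain strings:
def _sketch_retarget(sources, srcdir, dest, dest_exists):
    """Map each source path under srcdir to its path under dest."""
    import os
    if dest_exists:
        # keep the last component of srcdir: 'a/b/f' -> 'dest/b/f'
        strip = len(os.path.split(srcdir)[0])
    else:
        # drop srcdir entirely: 'a/b/f' -> 'dest/f'
        strip = len(srcdir)
    if strip:
        strip += len(os.sep)
    return [os.path.join(dest, p[strip:]) for p in sources]

# e.g. _sketch_retarget(['a/b/f', 'a/b/c/g'], 'a/b', 'dest', True)
#      -> ['dest/b/f', 'dest/b/c/g']
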
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    return errs

def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
    a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))

def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = findcmd(ui, cmd)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    clist = findpossible(ui, cmd).keys()
    clist.sort()
    ui.write("%s\n" % "\n".join(clist))

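# Illustrative sketch (not part of the original source): the --options branch
# above flattens option tables of (shortname, longname, default, help) entries
# into the flag strings a shell completer wants.  Standalone, with a made-up
# table:
def _sketch_option_flags(option_table):
    """Return ['-s', '--long', ...] for every entry in an option table."""
    flags = []
    for short, long_, default, help_ in option_table:
        if short:
            flags.append('-%s' % short)
        flags.append('--%s' % long_)
    return flags

# e.g. _sketch_option_flags([('r', 'rev', [], 'revision'),
#                            ('', 'force', None, 'force')])
#      -> ['-r', '--rev', '--force']
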
def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look for the given revision"""
    if not rev:
        rev = repo.changelog.tip()
    else:
        rev = repo.lookup(rev)
    change = repo.changelog.read(rev)
    n = change[0]
    files = repo.manifest.read(n)
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files)

def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)

def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    ui.write(value, '\n')
        else:
            ui.write('%s=%s\n' % (sectname, value))

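# Illustrative sketch (not part of the original source): the loop above prints
# an item when an argument names either its whole section ('ui') or the exact
# key ('ui.username').  The same selection logic over plain (key, value)
# pairs, with hypothetical settings:
def _sketch_filter_config(items, args):
    """Yield (key, value) pairs selected by section or section.name args."""
    for key, value in items:
        section = key.split('.', 1)[0]
        if not args or section in args or key in args:
            yield key, value

# e.g. list(_sketch_filter_config([('ui.username', 'me'), ('web.port', '8000')],
#                                 ['ui']))
#      -> [('ui.username', 'me')]
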
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    if not rev2:
        rev2 = hex(nullid)

    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))

def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    for file_ in keys:
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    time.strftime("%x %X",
                                  time.localtime(dc[file_][3])), file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

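# Illustrative sketch (not part of the original source): each dirstate entry
# above is a (state, mode, size, mtime) tuple printed as one fixed-width line.
# Formatting a single hypothetical entry outside the repository (0o777 is the
# modern spelling of the octal literal used above):
def _sketch_format_dirstate_entry(name, state, mode, size, mtime):
    """Return one 'state mode size date name' line like debugstate prints."""
    import time
    stamp = time.strftime("%x %X", time.localtime(mtime))
    return "%c %3o %10d %s %s" % (state, mode & 0o777, size, stamp, name)

# e.g. _sketch_format_dirstate_entry('commands.py', 'n', 0o100644, 123456,
#                                    1162500000)
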
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)

def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))

def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("digraph G {\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

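# Illustrative sketch (not part of the original source): debugindexdot emits
# one Graphviz edge per parent link, which is all a revision DAG needs.  The
# same output built from a plain {rev: (p1, p2)} map, using -1 for the null
# parent:
def _sketch_dag_to_dot(parents_by_rev):
    """Return Graphviz 'digraph' text for a {rev: (p1, p2)} parent map."""
    lines = ["digraph G {"]
    for rev in sorted(parents_by_rev):
        for parent in parents_by_rev[rev]:
            if parent != -1:
                lines.append("\t%d -> %d" % (parent, rev))
    lines.append("}")
    return "\n".join(lines)

# e.g. _sketch_dag_to_dot({0: (-1, -1), 1: (0, -1), 2: (0, 1)})
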
def debugrename(ui, repo, file, rev=None):
    """dump rename information"""
    r = repo.file(relpath(repo, [file])[0])
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            m = repo.manifest.read(change[0])
            n = m[relpath(repo, [file])[0]]
        except (hg.RepoError, KeyError):
            n = r.lookup(rev)
    else:
        n = r.tip()
    m = r.renamed(n)
    if m:
        ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
    else:
        ui.write(_("not renamed\n"))

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

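# Illustrative sketch (not part of the original source): debugwalk builds its
# format string in two steps -- measure the widest value in each column, then
# bake those widths into '%-<N>s' fields.  Standalone, over hypothetical rows
# of strings:
def _sketch_columnize(rows):
    """Left-align each column of a list of equal-length string tuples."""
    widths = [max(len(row[i]) for row in rows) for i in range(len(rows[0]))]
    fmt = '  '.join(['%%-%ds' % w for w in widths])
    return [(fmt % row).rstrip() for row in rows]

# e.g. _sketch_columnize([('f', 'commands.py', 'commands.py'),
#                         ('f', 'README', 'README')])
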
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to the working directory's parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])

    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    patch.diff(repo, node1, node2, fns, match=matchfn,
               opts=patch.diffopts(ui, opts))

def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent and commit comment.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 hexadecimal digits)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 hexadecimal digits)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. This can be useful for reviewing a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = list(cmdutil.revrange(ui, repo, changesets))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, map(repo.lookup, revs), template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))

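# Illustrative sketch (not part of the original source): the zero padding of
# %n documented above only depends on how many patches are in the series, so
# the field width follows from the total count.  With hypothetical names:
def _sketch_sequence_field(seqno, total):
    """Return the zero-padded %n value, e.g. patch 3 of 12 -> '03'."""
    width = len(str(total))
    return '%0*d' % (width, seqno)

# e.g. ['%s-of-12.patch' % _sketch_sequence_field(i, 12) for i in (1, 12)]
#      -> ['01-of-12.patch', '12-of-12.patch']
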
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently from Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    prev = {}
    ucache = {}
    def display(fn, rev, states, prevstates):
        counts = {'-': 0, '+': 0}
        filerevmatches = {}
        if incrementing or not opts['all']:
            a, b = prevstates, states
        else:
            a, b = states, prevstates
        for change, l in difflinestates(a, b):
            if incrementing or not opts['all']:
                r = rev
            else:
                r = prev[fn]
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(trimuser(ui, getchange(r)[1], rev,
                                     ucache))
            if opts['files_with_matches']:
                c = (fn, rev)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            counts[change] += 1
        return counts['+'], counts['-']

    fstate = {}
    skip = {}
    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
    count = 0
    incrementing = False
    follow = opts.get('follow')
    for st, rev, fns in changeiter:
        if st == 'window':
            incrementing = rev
            matches.clear()
        elif st == 'add':
            change = repo.changelog.read(repo.lookup(str(rev)))
            mf = repo.manifest.read(change[0])
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                    if follow:
                        copied = getfile(fn).renamed(mf[fn])
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except KeyError:
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                if incrementing or not opts['all'] or fstate[fn]:
                    pos, neg = display(fn, rev, m, fstate[fn])
                    count += pos + neg
                    if pos and not opts['all']:
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    if not incrementing:
        fstate = fstate.items()
        fstate.sort()
        for fn, state in fstate:
            if fn in skip:
                continue
            if fn not in copies.get(prev[fn], {}):
                display(fn, rev, {}, state)
    return (count == 0 and 1) or 0

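# Illustrative sketch (not part of the original source): difflinestates above
# uses difflib.SequenceMatcher to turn two lists of matching lines into the
# "+" (match appeared) and "-" (match disappeared) markers printed by --all.
# The same comparison over plain lists of strings:
def _sketch_match_status_changes(old_lines, new_lines):
    """Yield ('+', line) / ('-', line) pairs describing match churn."""
    import difflib
    sm = difflib.SequenceMatcher(None, old_lines, new_lines)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in old_lines[alo:ahi]:
                yield ('-', line)
        if tag in ('insert', 'replace'):
            for line in new_lines[blo:bhi]:
                yield ('+', line)

# e.g. list(_sketch_match_status_changes(['foo()'], ['foo()', 'foo(bar)']))
#      -> [('+', 'foo(bar)')]
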
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    if opts['rev']:
        heads = repo.heads(repo.lookup(opts['rev']))
    else:
        heads = repo.heads()
    br = None
    if opts['branches']:
        ui.warn(_("the --branches option is deprecated, "
                  "please use 'hg branches' instead\n"))
        br = repo.branchlookup(heads)
    displayer = show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n, brinfo=br)

def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    hexfunc = ui.debugflag and hex or short
    modified, added, removed, deleted = repo.status()[:4]
    output = ["%s%s" %
              ('+'.join([hexfunc(parent) for parent in parents]),
               (modified or added or removed or deleted) and "+" or "")]

    if not ui.quiet:

        branch = repo.workingctx().branch()
        if branch:
            output.append("(%s)" % branch)

        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))

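# Illustrative sketch (not part of the original source): the identify output
# is the parent hashes joined with '+', a trailing '+' when the working
# directory has uncommitted changes, then the branch and tag summary.
# Assembled from hypothetical plain values:
def _sketch_identify_line(parent_hashes, dirty, branch, tags):
    """Build an 'hg identify' style summary line."""
    parts = ['+'.join(parent_hashes) + (dirty and '+' or '')]
    if branch:
        parts.append('(%s)' % branch)
    if tags:
        parts.append(' + '.join(tags))
    return ' '.join(parts)

# e.g. _sketch_identify_line(['eda9e7c9'], True, 'default', ['tip'])
#      -> 'eda9e7c9+ (default) tip'
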
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). The From and Subject headers of the email
    message are used as the default committer and commit message. All
    text/plain body parts before the first diff are added to the commit
    message.

    If the imported patch was generated by hg export, the user and
    description from the patch override values from the message headers
    and body. Values given on the command line with -m and -u override
    these.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if not opts['force']:
        bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    wlock = repo.wlock()
    lock = repo.lock()

    for p in patches:
        pf = os.path.join(d, p)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % p)
            tmpname, message, user, date = patch.extract(ui, file(pf))

        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            if opts['message']:
                # pickup the cmdline msg
                message = opts['message']
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                   files=files)
            finally:
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            repo.commit(files, message, user, date, wlock=wlock, lock=lock)
        finally:
            os.unlink(tmpname)

def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repositories, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        ui.status(_("no changes found\n"))
        return

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            fname = cleanup = write_bundle(cg, fname, compress=other.local())
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        revs = None
        if opts['rev']:
            revs = [other.lookup(rev) for rev in opts['rev']]
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                patch.diff(other, prev, n, fp=repo.ui)
                ui.write("\n")
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)

def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)

1759 def locate(ui, repo, *pats, **opts):
1759 def locate(ui, repo, *pats, **opts):
1760 """locate files matching specific patterns
1760 """locate files matching specific patterns
1761
1761
1762 Print all files under Mercurial control whose names match the
1762 Print all files under Mercurial control whose names match the
1763 given patterns.
1763 given patterns.
1764
1764
1765 This command searches the current directory and its
1765 This command searches the current directory and its
1766 subdirectories. To search an entire repository, move to the root
1766 subdirectories. To search an entire repository, move to the root
1767 of the repository.
1767 of the repository.
1768
1768
1769 If no patterns are given to match, this command prints all file
1769 If no patterns are given to match, this command prints all file
1770 names.
1770 names.
1771
1771
1772 If you want to feed the output of this command into the "xargs"
1772 If you want to feed the output of this command into the "xargs"
1773 command, use the "-0" option to both this command and "xargs".
1773 command, use the "-0" option to both this command and "xargs".
1774 This will avoid the problem of "xargs" treating single filenames
1774 This will avoid the problem of "xargs" treating single filenames
1775 that contain white space as multiple filenames.
1775 that contain white space as multiple filenames.
1776 """
1776 """
1777 end = opts['print0'] and '\0' or '\n'
1777 end = opts['print0'] and '\0' or '\n'
1778 rev = opts['rev']
1778 rev = opts['rev']
1779 if rev:
1779 if rev:
1780 node = repo.lookup(rev)
1780 node = repo.lookup(rev)
1781 else:
1781 else:
1782 node = None
1782 node = None
1783
1783
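# passing head='(?:.*/|)' lets bare patterns match file names in any
# subdirectory below the walk root, not just the current directory.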
1784 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1784 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1785 head='(?:.*/|)'):
1785 head='(?:.*/|)'):
1786 if not node and repo.dirstate.state(abs) == '?':
1786 if not node and repo.dirstate.state(abs) == '?':
1787 continue
1787 continue
1788 if opts['fullpath']:
1788 if opts['fullpath']:
1789 ui.write(os.path.join(repo.root, abs), end)
1789 ui.write(os.path.join(repo.root, abs), end)
1790 else:
1790 else:
1791 ui.write(((pats and rel) or abs), end)
1791 ui.write(((pats and rel) or abs), end)
1792
1792
1793 def log(ui, repo, *pats, **opts):
1793 def log(ui, repo, *pats, **opts):
1794 """show revision history of entire repository or files
1794 """show revision history of entire repository or files
1795
1795
1796 Print the revision history of the specified files or the entire
1796 Print the revision history of the specified files or the entire
1797 project.
1797 project.
1798
1798
1799 File history is shown without following rename or copy history of
1799 File history is shown without following rename or copy history of
1800 files. Use -f/--follow with a file name to follow history across
1800 files. Use -f/--follow with a file name to follow history across
1801 renames and copies. --follow without a file name will only show
1801 renames and copies. --follow without a file name will only show
1802 ancestors or descendants of the starting revision. --follow-first
1802 ancestors or descendants of the starting revision. --follow-first
1803 only follows the first parent of merge revisions.
1803 only follows the first parent of merge revisions.
1804
1804
1805 If no revision range is specified, the default is tip:0 unless
1805 If no revision range is specified, the default is tip:0 unless
1806 --follow is set, in which case the working directory parent is
1806 --follow is set, in which case the working directory parent is
1807 used as the starting revision.
1807 used as the starting revision.
1808
1808
1809 By default this command outputs: changeset id and hash, tags,
1809 By default this command outputs: changeset id and hash, tags,
1810 non-trivial parents, user, date and time, and a summary for each
1810 non-trivial parents, user, date and time, and a summary for each
1811 commit. When the -v/--verbose switch is used, the list of changed
1811 commit. When the -v/--verbose switch is used, the list of changed
1812 files and full commit message is shown.
1812 files and full commit message is shown.
1813 """
1813 """
1814 class dui(object):
1814 class dui(object):
1815 # Implement and delegate some ui protocol. Save hunks of
1815 # Implement and delegate some ui protocol. Save hunks of
1816 # output for later display in the desired order.
1816 # output for later display in the desired order.
1817 def __init__(self, ui):
1817 def __init__(self, ui):
1818 self.ui = ui
1818 self.ui = ui
1819 self.hunk = {}
1819 self.hunk = {}
1820 self.header = {}
1820 self.header = {}
1821 def bump(self, rev):
1821 def bump(self, rev):
1822 self.rev = rev
1822 self.rev = rev
1823 self.hunk[rev] = []
1823 self.hunk[rev] = []
1824 self.header[rev] = []
1824 self.header[rev] = []
1825 def note(self, *args):
1825 def note(self, *args):
1826 if self.verbose:
1826 if self.verbose:
1827 self.write(*args)
1827 self.write(*args)
1828 def status(self, *args):
1828 def status(self, *args):
1829 if not self.quiet:
1829 if not self.quiet:
1830 self.write(*args)
1830 self.write(*args)
1831 def write(self, *args):
1831 def write(self, *args):
1832 self.hunk[self.rev].append(args)
1832 self.hunk[self.rev].append(args)
1833 def write_header(self, *args):
1833 def write_header(self, *args):
1834 self.header[self.rev].append(args)
1834 self.header[self.rev].append(args)
1835 def debug(self, *args):
1835 def debug(self, *args):
1836 if self.debugflag:
1836 if self.debugflag:
1837 self.write(*args)
1837 self.write(*args)
1838 def __getattr__(self, key):
1838 def __getattr__(self, key):
1839 return getattr(self.ui, key)
1839 return getattr(self.ui, key)
1840
1840
1841 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1841 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1842
1842
1843 if opts['branches']:
1843 if opts['branches']:
1844 ui.warn(_("the --branches option is deprecated, "
1844 ui.warn(_("the --branches option is deprecated, "
1845 "please use 'hg branches' instead\n"))
1845 "please use 'hg branches' instead\n"))
1846
1846
1847 if opts['limit']:
1847 if opts['limit']:
1848 try:
1848 try:
1849 limit = int(opts['limit'])
1849 limit = int(opts['limit'])
1850 except ValueError:
1850 except ValueError:
1851 raise util.Abort(_('limit must be a positive integer'))
1851 raise util.Abort(_('limit must be a positive integer'))
1852 if limit <= 0: raise util.Abort(_('limit must be positive'))
1852 if limit <= 0: raise util.Abort(_('limit must be positive'))
1853 else:
1853 else:
1854 limit = sys.maxint
1854 limit = sys.maxint
1855 count = 0
1855 count = 0
1856
1856
1857 if opts['copies'] and opts['rev']:
1857 if opts['copies'] and opts['rev']:
1858 endrev = max([int(i)
1858 endrev = max([int(i)
1859 for i in cmdutil.revrange(ui, repo, opts['rev'])]) + 1
1859 for i in cmdutil.revrange(ui, repo, opts['rev'])]) + 1
1860 else:
1860 else:
1861 endrev = repo.changelog.count()
1861 endrev = repo.changelog.count()
1862 rcache = {}
1862 rcache = {}
1863 ncache = {}
1863 ncache = {}
1864 dcache = []
1864 dcache = []
1865 def getrenamed(fn, rev, man):
1865 def getrenamed(fn, rev, man):
1866 '''looks up all renames for a file (up to endrev) the first
1866 '''looks up all renames for a file (up to endrev) the first
1867 time the file is given. It indexes on the changerev and only
1867 time the file is given. It indexes on the changerev and only
1868 parses the manifest if linkrev != changerev.
1868 parses the manifest if linkrev != changerev.
1869 Returns rename info for fn at changerev rev.'''
1869 Returns rename info for fn at changerev rev.'''
1870 if fn not in rcache:
1870 if fn not in rcache:
1871 rcache[fn] = {}
1871 rcache[fn] = {}
1872 ncache[fn] = {}
1872 ncache[fn] = {}
1873 fl = repo.file(fn)
1873 fl = repo.file(fn)
1874 for i in xrange(fl.count()):
1874 for i in xrange(fl.count()):
1875 node = fl.node(i)
1875 node = fl.node(i)
1876 lr = fl.linkrev(node)
1876 lr = fl.linkrev(node)
1877 renamed = fl.renamed(node)
1877 renamed = fl.renamed(node)
1878 rcache[fn][lr] = renamed
1878 rcache[fn][lr] = renamed
1879 if renamed:
1879 if renamed:
1880 ncache[fn][node] = renamed
1880 ncache[fn][node] = renamed
1881 if lr >= endrev:
1881 if lr >= endrev:
1882 break
1882 break
1883 if rev in rcache[fn]:
1883 if rev in rcache[fn]:
1884 return rcache[fn][rev]
1884 return rcache[fn][rev]
1885 mr = repo.manifest.rev(man)
1885 mr = repo.manifest.rev(man)
1886 if repo.manifest.parentrevs(mr) != (mr - 1, -1):
1886 if repo.manifest.parentrevs(mr) != (mr - 1, -1):
1887 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1887 return ncache[fn].get(repo.manifest.find(man, fn)[0])
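# slow path: cache the delta of this manifest against its parent, so that
# successive files from the same changeset can reuse it.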
1888 if not dcache or dcache[0] != man:
1888 if not dcache or dcache[0] != man:
1889 dcache[:] = [man, repo.manifest.readdelta(man)]
1889 dcache[:] = [man, repo.manifest.readdelta(man)]
1890 if fn in dcache[1]:
1890 if fn in dcache[1]:
1891 return ncache[fn].get(dcache[1][fn])
1891 return ncache[fn].get(dcache[1][fn])
1892 return None
1892 return None
1893
1893
1894 displayer = show_changeset(ui, repo, opts)
1894 displayer = show_changeset(ui, repo, opts)
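# walkchangerevs drives a small state machine: 'window' starts a new batch
# (output is buffered in a dui), 'add' renders one revision into the buffer,
# and 'iter' flushes the buffered headers and hunks in display order.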
1895 for st, rev, fns in changeiter:
1895 for st, rev, fns in changeiter:
1896 if st == 'window':
1896 if st == 'window':
1897 du = dui(ui)
1897 du = dui(ui)
1898 displayer.ui = du
1898 displayer.ui = du
1899 elif st == 'add':
1899 elif st == 'add':
1900 du.bump(rev)
1900 du.bump(rev)
1901 changenode = repo.changelog.node(rev)
1901 changenode = repo.changelog.node(rev)
1902 parents = [p for p in repo.changelog.parents(changenode)
1902 parents = [p for p in repo.changelog.parents(changenode)
1903 if p != nullid]
1903 if p != nullid]
1904 if opts['no_merges'] and len(parents) == 2:
1904 if opts['no_merges'] and len(parents) == 2:
1905 continue
1905 continue
1906 if opts['only_merges'] and len(parents) != 2:
1906 if opts['only_merges'] and len(parents) != 2:
1907 continue
1907 continue
1908
1908
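# --keyword: only show a revision if every keyword occurs (case-insensitively)
# in its committer name, its description, or its first 20 changed files.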
1909 if opts['keyword']:
1909 if opts['keyword']:
1910 changes = getchange(rev)
1910 changes = getchange(rev)
1911 miss = 0
1911 miss = 0
1912 for k in [kw.lower() for kw in opts['keyword']]:
1912 for k in [kw.lower() for kw in opts['keyword']]:
1913 if not (k in changes[1].lower() or
1913 if not (k in changes[1].lower() or
1914 k in changes[4].lower() or
1914 k in changes[4].lower() or
1915 k in " ".join(changes[3][:20]).lower()):
1915 k in " ".join(changes[3][:20]).lower()):
1916 miss = 1
1916 miss = 1
1917 break
1917 break
1918 if miss:
1918 if miss:
1919 continue
1919 continue
1920
1920
1921 br = None
1921 br = None
1922 if opts['branches']:
1922 if opts['branches']:
1923 br = repo.branchlookup([repo.changelog.node(rev)])
1923 br = repo.branchlookup([repo.changelog.node(rev)])
1924
1924
1925 copies = []
1925 copies = []
1926 if opts.get('copies') and rev:
1926 if opts.get('copies') and rev:
1927 mf = getchange(rev)[0]
1927 mf = getchange(rev)[0]
1928 for fn in getchange(rev)[3]:
1928 for fn in getchange(rev)[3]:
1929 rename = getrenamed(fn, rev, mf)
1929 rename = getrenamed(fn, rev, mf)
1930 if rename:
1930 if rename:
1931 copies.append((fn, rename[0]))
1931 copies.append((fn, rename[0]))
1932 displayer.show(rev, brinfo=br, copies=copies)
1932 displayer.show(rev, brinfo=br, copies=copies)
1933 if opts['patch']:
1933 if opts['patch']:
1934 prev = (parents and parents[0]) or nullid
1934 prev = (parents and parents[0]) or nullid
1935 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1935 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1936 du.write("\n\n")
1936 du.write("\n\n")
1937 elif st == 'iter':
1937 elif st == 'iter':
1938 if count == limit: break
1938 if count == limit: break
1939 if du.header[rev]:
1939 if du.header[rev]:
1940 for args in du.header[rev]:
1940 for args in du.header[rev]:
1941 ui.write_header(*args)
1941 ui.write_header(*args)
1942 if du.hunk[rev]:
1942 if du.hunk[rev]:
1943 count += 1
1943 count += 1
1944 for args in du.hunk[rev]:
1944 for args in du.hunk[rev]:
1945 ui.write(*args)
1945 ui.write(*args)
1946
1946
1947 def manifest(ui, repo, rev=None):
1947 def manifest(ui, repo, rev=None):
1948 """output the latest or given revision of the project manifest
1948 """output the latest or given revision of the project manifest
1949
1949
1950 Print a list of version controlled files for the given revision.
1950 Print a list of version controlled files for the given revision.
1951
1951
1952 The manifest is the list of files being version controlled. If no revision
1952 The manifest is the list of files being version controlled. If no revision
1953 is given then the tip is used.
1953 is given then the tip is used.
1954 """
1954 """
1955 if rev:
1955 if rev:
1956 try:
1956 try:
1957 # assume all revision numbers are for changesets
1957 # assume all revision numbers are for changesets
1958 n = repo.lookup(rev)
1958 n = repo.lookup(rev)
1959 change = repo.changelog.read(n)
1959 change = repo.changelog.read(n)
1960 n = change[0]
1960 n = change[0]
1961 except hg.RepoError:
1961 except hg.RepoError:
1962 n = repo.manifest.lookup(rev)
1962 n = repo.manifest.lookup(rev)
1963 else:
1963 else:
1964 n = repo.manifest.tip()
1964 n = repo.manifest.tip()
1965 m = repo.manifest.read(n)
1965 m = repo.manifest.read(n)
1966 files = m.keys()
1966 files = m.keys()
1967 files.sort()
1967 files.sort()
1968
1968
1969 for f in files:
1969 for f in files:
1970 ui.write("%40s %3s %s\n" % (hex(m[f]),
1970 ui.write("%40s %3s %s\n" % (hex(m[f]),
1971 m.execf(f) and "755" or "644", f))
1971 m.execf(f) and "755" or "644", f))
1972
1972
1973 def merge(ui, repo, node=None, force=None, branch=None):
1973 def merge(ui, repo, node=None, force=None, branch=None):
1974 """Merge working directory with another revision
1974 """Merge working directory with another revision
1975
1975
1976 Merge the contents of the current working directory and the
1976 Merge the contents of the current working directory and the
1977 requested revision. Files that changed between either parent are
1977 requested revision. Files that changed between either parent are
1978 marked as changed for the next commit and a commit must be
1978 marked as changed for the next commit and a commit must be
1979 performed before any further updates are allowed.
1979 performed before any further updates are allowed.
1980
1980
1981 If no revision is specified, the working directory's parent is a
1981 If no revision is specified, the working directory's parent is a
1982 head revision, and the repository contains exactly one other head,
1982 head revision, and the repository contains exactly one other head,
1983 the other head is merged with by default. Otherwise, an explicit
1983 the other head is merged with by default. Otherwise, an explicit
1984 revision to merge with must be provided.
1984 revision to merge with must be provided.
1985 """
1985 """
1986
1986
1987 if node or branch:
1987 if node or branch:
1988 node = _lookup(repo, node, branch)
1988 node = _lookup(repo, node, branch)
1989 else:
1989 else:
1990 heads = repo.heads()
1990 heads = repo.heads()
1991 if len(heads) > 2:
1991 if len(heads) > 2:
1992 raise util.Abort(_('repo has %d heads - '
1992 raise util.Abort(_('repo has %d heads - '
1993 'please merge with an explicit rev') %
1993 'please merge with an explicit rev') %
1994 len(heads))
1994 len(heads))
1995 if len(heads) == 1:
1995 if len(heads) == 1:
1996 raise util.Abort(_('there is nothing to merge - '
1996 raise util.Abort(_('there is nothing to merge - '
1997 'use "hg update" instead'))
1997 'use "hg update" instead'))
1998 parent = repo.dirstate.parents()[0]
1998 parent = repo.dirstate.parents()[0]
1999 if parent not in heads:
1999 if parent not in heads:
2000 raise util.Abort(_('working dir not at a head rev - '
2000 raise util.Abort(_('working dir not at a head rev - '
2001 'use "hg update" or merge with an explicit rev'))
2001 'use "hg update" or merge with an explicit rev'))
2002 node = parent == heads[0] and heads[-1] or heads[0]
2002 node = parent == heads[0] and heads[-1] or heads[0]
2003 return hg.merge(repo, node, force=force)
2003 return hg.merge(repo, node, force=force)
2004
2004
2005 def outgoing(ui, repo, dest=None, **opts):
2005 def outgoing(ui, repo, dest=None, **opts):
2006 """show changesets not found in destination
2006 """show changesets not found in destination
2007
2007
2008 Show changesets not found in the specified destination repository or
2008 Show changesets not found in the specified destination repository or
2009 the default push location. These are the changesets that would be pushed
2009 the default push location. These are the changesets that would be pushed
2010 if a push was requested.
2010 if a push was requested.
2011
2011
2012 See pull for valid destination format details.
2012 See pull for valid destination format details.
2013 """
2013 """
2014 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2014 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2015 setremoteconfig(ui, opts)
2015 setremoteconfig(ui, opts)
2016 revs = None
2016 revs = None
2017 if opts['rev']:
2017 if opts['rev']:
2018 revs = [repo.lookup(rev) for rev in opts['rev']]
2018 revs = [repo.lookup(rev) for rev in opts['rev']]
2019
2019
2020 other = hg.repository(ui, dest)
2020 other = hg.repository(ui, dest)
2021 o = repo.findoutgoing(other, force=opts['force'])
2021 o = repo.findoutgoing(other, force=opts['force'])
2022 if not o:
2022 if not o:
2023 ui.status(_("no changes found\n"))
2023 ui.status(_("no changes found\n"))
2024 return
2024 return
2025 o = repo.changelog.nodesbetween(o, revs)[0]
2025 o = repo.changelog.nodesbetween(o, revs)[0]
2026 if opts['newest_first']:
2026 if opts['newest_first']:
2027 o.reverse()
2027 o.reverse()
2028 displayer = show_changeset(ui, repo, opts)
2028 displayer = show_changeset(ui, repo, opts)
2029 for n in o:
2029 for n in o:
2030 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2030 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2031 if opts['no_merges'] and len(parents) == 2:
2031 if opts['no_merges'] and len(parents) == 2:
2032 continue
2032 continue
2033 displayer.show(changenode=n)
2033 displayer.show(changenode=n)
2034 if opts['patch']:
2034 if opts['patch']:
2035 prev = (parents and parents[0]) or nullid
2035 prev = (parents and parents[0]) or nullid
2036 patch.diff(repo, prev, n)
2036 patch.diff(repo, prev, n)
2037 ui.write("\n")
2037 ui.write("\n")
2038
2038
2039 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2039 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2040 """show the parents of the working dir or revision
2040 """show the parents of the working dir or revision
2041
2041
2042 Print the working directory's parent revisions.
2042 Print the working directory's parent revisions.
2043 """
2043 """
2044 # legacy
2044 # legacy
2045 if file_ and not rev:
2045 if file_ and not rev:
2046 try:
2046 try:
2047 rev = repo.lookup(file_)
2047 rev = repo.lookup(file_)
2048 file_ = None
2048 file_ = None
2049 except hg.RepoError:
2049 except hg.RepoError:
2050 pass
2050 pass
2051 else:
2051 else:
2052 ui.warn(_("'hg parent REV' is deprecated, "
2052 ui.warn(_("'hg parent REV' is deprecated, "
2053 "please use 'hg parents -r REV instead\n"))
2053 "please use 'hg parents -r REV instead\n"))
2054
2054
2055 if rev:
2055 if rev:
2056 if file_:
2056 if file_:
2057 ctx = repo.filectx(file_, changeid=rev)
2057 ctx = repo.filectx(file_, changeid=rev)
2058 else:
2058 else:
2059 ctx = repo.changectx(rev)
2059 ctx = repo.changectx(rev)
2060 p = [cp.node() for cp in ctx.parents()]
2060 p = [cp.node() for cp in ctx.parents()]
2061 else:
2061 else:
2062 p = repo.dirstate.parents()
2062 p = repo.dirstate.parents()
2063
2063
2064 br = None
2064 br = None
2065 if branches is not None:
2065 if branches is not None:
2066 ui.warn(_("the --branches option is deprecated, "
2066 ui.warn(_("the --branches option is deprecated, "
2067 "please use 'hg branches' instead\n"))
2067 "please use 'hg branches' instead\n"))
2068 br = repo.branchlookup(p)
2068 br = repo.branchlookup(p)
2069 displayer = show_changeset(ui, repo, opts)
2069 displayer = show_changeset(ui, repo, opts)
2070 for n in p:
2070 for n in p:
2071 if n != nullid:
2071 if n != nullid:
2072 displayer.show(changenode=n, brinfo=br)
2072 displayer.show(changenode=n, brinfo=br)
2073
2073
2074 def paths(ui, repo, search=None):
2074 def paths(ui, repo, search=None):
2075 """show definition of symbolic path names
2075 """show definition of symbolic path names
2076
2076
2077 Show definition of symbolic path name NAME. If no name is given, show
2077 Show definition of symbolic path name NAME. If no name is given, show
2078 definition of available names.
2078 definition of available names.
2079
2079
2080 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2080 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2081 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2081 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2082 """
2082 """
2083 if search:
2083 if search:
2084 for name, path in ui.configitems("paths"):
2084 for name, path in ui.configitems("paths"):
2085 if name == search:
2085 if name == search:
2086 ui.write("%s\n" % path)
2086 ui.write("%s\n" % path)
2087 return
2087 return
2088 ui.warn(_("not found!\n"))
2088 ui.warn(_("not found!\n"))
2089 return 1
2089 return 1
2090 else:
2090 else:
2091 for name, path in ui.configitems("paths"):
2091 for name, path in ui.configitems("paths"):
2092 ui.write("%s = %s\n" % (name, path))
2092 ui.write("%s = %s\n" % (name, path))
2093
2093
2094 def postincoming(ui, repo, modheads, optupdate):
2094 def postincoming(ui, repo, modheads, optupdate):
2095 if modheads == 0:
2095 if modheads == 0:
2096 return
2096 return
2097 if optupdate:
2097 if optupdate:
2098 if modheads == 1:
2098 if modheads == 1:
2099 return hg.update(repo, repo.changelog.tip()) # update
2099 return hg.update(repo, repo.changelog.tip()) # update
2100 else:
2100 else:
2101 ui.status(_("not updating, since new heads added\n"))
2101 ui.status(_("not updating, since new heads added\n"))
2102 if modheads > 1:
2102 if modheads > 1:
2103 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2103 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2104 else:
2104 else:
2105 ui.status(_("(run 'hg update' to get a working copy)\n"))
2105 ui.status(_("(run 'hg update' to get a working copy)\n"))
2106
2106
2107 def pull(ui, repo, source="default", **opts):
2107 def pull(ui, repo, source="default", **opts):
2108 """pull changes from the specified source
2108 """pull changes from the specified source
2109
2109
2110 Pull changes from a remote repository to a local one.
2110 Pull changes from a remote repository to a local one.
2111
2111
2112 This finds all changes from the repository at the specified path
2112 This finds all changes from the repository at the specified path
2113 or URL and adds them to the local repository. By default, this
2113 or URL and adds them to the local repository. By default, this
2114 does not update the copy of the project in the working directory.
2114 does not update the copy of the project in the working directory.
2115
2115
2116 Valid URLs are of the form:
2116 Valid URLs are of the form:
2117
2117
2118 local/filesystem/path (or file://local/filesystem/path)
2118 local/filesystem/path (or file://local/filesystem/path)
2119 http://[user@]host[:port]/[path]
2119 http://[user@]host[:port]/[path]
2120 https://[user@]host[:port]/[path]
2120 https://[user@]host[:port]/[path]
2121 ssh://[user@]host[:port]/[path]
2121 ssh://[user@]host[:port]/[path]
2122 static-http://host[:port]/[path]
2122 static-http://host[:port]/[path]
2123
2123
2124 Paths in the local filesystem can either point to Mercurial
2124 Paths in the local filesystem can either point to Mercurial
2125 repositories or to bundle files (as created by 'hg bundle' or
2125 repositories or to bundle files (as created by 'hg bundle' or
2126 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2126 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2127 allows access to a Mercurial repository where you simply use a web
2127 allows access to a Mercurial repository where you simply use a web
2128 server to publish the .hg directory as static content.
2128 server to publish the .hg directory as static content.
2129
2129
2130 Some notes about using SSH with Mercurial:
2130 Some notes about using SSH with Mercurial:
2131 - SSH requires an accessible shell account on the destination machine
2131 - SSH requires an accessible shell account on the destination machine
2132 and a copy of hg in the remote path or specified with the --remotecmd option.
2132 and a copy of hg in the remote path or specified with the --remotecmd option.
2133 - path is relative to the remote user's home directory by default.
2133 - path is relative to the remote user's home directory by default.
2134 Use an extra slash at the start of a path to specify an absolute path:
2134 Use an extra slash at the start of a path to specify an absolute path:
2135 ssh://example.com//tmp/repository
2135 ssh://example.com//tmp/repository
2136 - Mercurial doesn't use its own compression via SSH; the right thing
2136 - Mercurial doesn't use its own compression via SSH; the right thing
2137 to do is to configure it in your ~/.ssh/config, e.g.:
2137 to do is to configure it in your ~/.ssh/config, e.g.:
2138 Host *.mylocalnetwork.example.com
2138 Host *.mylocalnetwork.example.com
2139 Compression no
2139 Compression no
2140 Host *
2140 Host *
2141 Compression yes
2141 Compression yes
2142 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2142 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2143 with the --ssh command line option.
2143 with the --ssh command line option.
2144 """
2144 """
2145 source = ui.expandpath(source)
2145 source = ui.expandpath(source)
2146 setremoteconfig(ui, opts)
2146 setremoteconfig(ui, opts)
2147
2147
2148 other = hg.repository(ui, source)
2148 other = hg.repository(ui, source)
2149 ui.status(_('pulling from %s\n') % (source))
2149 ui.status(_('pulling from %s\n') % (source))
2150 revs = None
2150 revs = None
2151 if opts['rev']:
2151 if opts['rev']:
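# -r/--rev needs the remote side to resolve symbolic revisions to nodes, which
# only repositories advertising the 'lookup' capability can do.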
2152 if 'lookup' in other.capabilities:
2152 if 'lookup' in other.capabilities:
2153 revs = [other.lookup(rev) for rev in opts['rev']]
2153 revs = [other.lookup(rev) for rev in opts['rev']]
2154 else:
2154 else:
2155 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2155 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2156 raise util.Abort(error)
2156 raise util.Abort(error)
2157 modheads = repo.pull(other, heads=revs, force=opts['force'])
2157 modheads = repo.pull(other, heads=revs, force=opts['force'])
2158 return postincoming(ui, repo, modheads, opts['update'])
2158 return postincoming(ui, repo, modheads, opts['update'])
2159
2159
2160 def push(ui, repo, dest=None, **opts):
2160 def push(ui, repo, dest=None, **opts):
2161 """push changes to the specified destination
2161 """push changes to the specified destination
2162
2162
2163 Push changes from the local repository to the given destination.
2163 Push changes from the local repository to the given destination.
2164
2164
2165 This is the symmetrical operation for pull. It helps to move
2165 This is the symmetrical operation for pull. It helps to move
2166 changes from the current repository to a different one. If the
2166 changes from the current repository to a different one. If the
2167 destination is local, this is identical to a pull in that directory
2167 destination is local, this is identical to a pull in that directory
2168 from the current one.
2168 from the current one.
2169
2169
2170 By default, push will refuse to run if it detects the result would
2170 By default, push will refuse to run if it detects the result would
2171 increase the number of remote heads. This generally indicates that
2171 increase the number of remote heads. This generally indicates that
2172 the client has forgotten to sync and merge before pushing.
2172 the client has forgotten to sync and merge before pushing.
2173
2173
2174 Valid URLs are of the form:
2174 Valid URLs are of the form:
2175
2175
2176 local/filesystem/path (or file://local/filesystem/path)
2176 local/filesystem/path (or file://local/filesystem/path)
2177 ssh://[user@]host[:port]/[path]
2177 ssh://[user@]host[:port]/[path]
2178 http://[user@]host[:port]/[path]
2178 http://[user@]host[:port]/[path]
2179 https://[user@]host[:port]/[path]
2179 https://[user@]host[:port]/[path]
2180
2180
2181 Look at the help text for the pull command for important details
2181 Look at the help text for the pull command for important details
2182 about ssh:// URLs.
2182 about ssh:// URLs.
2183
2183
2184 Pushing to http:// and https:// URLs is only possible if this
2184 Pushing to http:// and https:// URLs is only possible if this
2185 feature is explicitly enabled on the remote Mercurial server.
2185 feature is explicitly enabled on the remote Mercurial server.
2186 """
2186 """
2187 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2187 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2188 setremoteconfig(ui, opts)
2188 setremoteconfig(ui, opts)
2189
2189
2190 other = hg.repository(ui, dest)
2190 other = hg.repository(ui, dest)
2191 ui.status('pushing to %s\n' % (dest))
2191 ui.status('pushing to %s\n' % (dest))
2192 revs = None
2192 revs = None
2193 if opts['rev']:
2193 if opts['rev']:
2194 revs = [repo.lookup(rev) for rev in opts['rev']]
2194 revs = [repo.lookup(rev) for rev in opts['rev']]
2195 r = repo.push(other, opts['force'], revs=revs)
2195 r = repo.push(other, opts['force'], revs=revs)
2196 return r == 0
2196 return r == 0
2197
2197
2198 def rawcommit(ui, repo, *flist, **rc):
2198 def rawcommit(ui, repo, *flist, **rc):
2199 """raw commit interface (DEPRECATED)
2199 """raw commit interface (DEPRECATED)
2200
2200
2201 (DEPRECATED)
2201 (DEPRECATED)
2202 Lowlevel commit, for use in helper scripts.
2202 Lowlevel commit, for use in helper scripts.
2203
2203
2204 This command is not intended to be used by normal users, as it is
2204 This command is not intended to be used by normal users, as it is
2205 primarily useful for importing from other SCMs.
2205 primarily useful for importing from other SCMs.
2206
2206
2207 This command is now deprecated and will be removed in a future
2207 This command is now deprecated and will be removed in a future
2208 release, please use debugsetparents and commit instead.
2208 release, please use debugsetparents and commit instead.
2209 """
2209 """
2210
2210
2211 ui.warn(_("(the rawcommit command is deprecated)\n"))
2211 ui.warn(_("(the rawcommit command is deprecated)\n"))
2212
2212
2213 message = rc['message']
2213 message = rc['message']
2214 if not message and rc['logfile']:
2214 if not message and rc['logfile']:
2215 try:
2215 try:
2216 message = open(rc['logfile']).read()
2216 message = open(rc['logfile']).read()
2217 except IOError:
2217 except IOError:
2218 pass
2218 pass
2219 if not message and not rc['logfile']:
2219 if not message and not rc['logfile']:
2220 raise util.Abort(_("missing commit message"))
2220 raise util.Abort(_("missing commit message"))
2221
2221
2222 files = relpath(repo, list(flist))
2222 files = relpath(repo, list(flist))
2223 if rc['files']:
2223 if rc['files']:
2224 files += open(rc['files']).read().splitlines()
2224 files += open(rc['files']).read().splitlines()
2225
2225
2226 rc['parent'] = map(repo.lookup, rc['parent'])
2226 rc['parent'] = map(repo.lookup, rc['parent'])
2227
2227
2228 try:
2228 try:
2229 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2229 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2230 except ValueError, inst:
2230 except ValueError, inst:
2231 raise util.Abort(str(inst))
2231 raise util.Abort(str(inst))
2232
2232
2233 def recover(ui, repo):
2233 def recover(ui, repo):
2234 """roll back an interrupted transaction
2234 """roll back an interrupted transaction
2235
2235
2236 Recover from an interrupted commit or pull.
2236 Recover from an interrupted commit or pull.
2237
2237
2238 This command tries to fix the repository status after an interrupted
2238 This command tries to fix the repository status after an interrupted
2239 operation. It should only be necessary when Mercurial suggests it.
2239 operation. It should only be necessary when Mercurial suggests it.
2240 """
2240 """
2241 if repo.recover():
2241 if repo.recover():
2242 return hg.verify(repo)
2242 return hg.verify(repo)
2243 return 1
2243 return 1
2244
2244
2245 def remove(ui, repo, *pats, **opts):
2245 def remove(ui, repo, *pats, **opts):
2246 """remove the specified files on the next commit
2246 """remove the specified files on the next commit
2247
2247
2248 Schedule the indicated files for removal from the repository.
2248 Schedule the indicated files for removal from the repository.
2249
2249
2250 This command schedules the files to be removed at the next commit.
2250 This command schedules the files to be removed at the next commit.
2251 This only removes files from the current branch, not from the
2251 This only removes files from the current branch, not from the
2252 entire project history. If the files still exist in the working
2252 entire project history. If the files still exist in the working
2253 directory, they will be deleted from it. If invoked with --after,
2253 directory, they will be deleted from it. If invoked with --after,
2254 files that have been manually deleted are marked as removed.
2254 files that have been manually deleted are marked as removed.
2255
2255
2256 Modified files and added files are not removed by default. To
2256 Modified files and added files are not removed by default. To
2257 remove them, use the -f/--force option.
2257 remove them, use the -f/--force option.
2258 """
2258 """
2259 names = []
2259 names = []
2260 if not opts['after'] and not pats:
2260 if not opts['after'] and not pats:
2261 raise util.Abort(_('no files specified'))
2261 raise util.Abort(_('no files specified'))
2262 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2262 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2263 exact = dict.fromkeys(files)
2263 exact = dict.fromkeys(files)
2264 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2264 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2265 modified, added, removed, deleted, unknown = mardu
2265 modified, added, removed, deleted, unknown = mardu
2266 remove, forget = [], []
2266 remove, forget = [], []
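# walk the matched files and decide, for each one, whether to schedule a
# removal, undo a pending add (forget), or skip it with an explanatory warning.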
2267 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2267 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2268 reason = None
2268 reason = None
2269 if abs not in deleted and opts['after']:
2269 if abs not in deleted and opts['after']:
2270 reason = _('is still present')
2270 reason = _('is still present')
2271 elif abs in modified and not opts['force']:
2271 elif abs in modified and not opts['force']:
2272 reason = _('is modified (use -f to force removal)')
2272 reason = _('is modified (use -f to force removal)')
2273 elif abs in added:
2273 elif abs in added:
2274 if opts['force']:
2274 if opts['force']:
2275 forget.append(abs)
2275 forget.append(abs)
2276 continue
2276 continue
2277 reason = _('has been marked for add (use -f to force removal)')
2277 reason = _('has been marked for add (use -f to force removal)')
2278 elif abs in unknown:
2278 elif abs in unknown:
2279 reason = _('is not managed')
2279 reason = _('is not managed')
2280 elif abs in removed:
2280 elif abs in removed:
2281 continue
2281 continue
2282 if reason:
2282 if reason:
2283 if exact:
2283 if exact:
2284 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2284 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2285 else:
2285 else:
2286 if ui.verbose or not exact:
2286 if ui.verbose or not exact:
2287 ui.status(_('removing %s\n') % rel)
2287 ui.status(_('removing %s\n') % rel)
2288 remove.append(abs)
2288 remove.append(abs)
2289 repo.forget(forget)
2289 repo.forget(forget)
2290 repo.remove(remove, unlink=not opts['after'])
2290 repo.remove(remove, unlink=not opts['after'])
2291
2291
2292 def rename(ui, repo, *pats, **opts):
2292 def rename(ui, repo, *pats, **opts):
2293 """rename files; equivalent of copy + remove
2293 """rename files; equivalent of copy + remove
2294
2294
2295 Mark dests as copies of sources; mark sources for deletion. If
2295 Mark dests as copies of sources; mark sources for deletion. If
2296 dest is a directory, copies are put in that directory. If dest is
2296 dest is a directory, copies are put in that directory. If dest is
2297 a file, there can only be one source.
2297 a file, there can only be one source.
2298
2298
2299 By default, this command copies the contents of files as they
2299 By default, this command copies the contents of files as they
2300 stand in the working directory. If invoked with --after, the
2300 stand in the working directory. If invoked with --after, the
2301 operation is recorded, but no copying is performed.
2301 operation is recorded, but no copying is performed.
2302
2302
2303 This command takes effect in the next commit.
2303 This command takes effect in the next commit.
2304
2304
2305 NOTE: This command should be treated as experimental. While it
2305 NOTE: This command should be treated as experimental. While it
2306 should properly record renamed files, this information is not yet
2306 should properly record renamed files, this information is not yet
2307 fully used by merge, nor fully reported by log.
2307 fully used by merge, nor fully reported by log.
2308 """
2308 """
2309 wlock = repo.wlock(0)
2309 wlock = repo.wlock(0)
2310 errs, copied = docopy(ui, repo, pats, opts, wlock)
2310 errs, copied = docopy(ui, repo, pats, opts, wlock)
2311 names = []
2311 names = []
2312 for abs, rel, exact in copied:
2312 for abs, rel, exact in copied:
2313 if ui.verbose or not exact:
2313 if ui.verbose or not exact:
2314 ui.status(_('removing %s\n') % rel)
2314 ui.status(_('removing %s\n') % rel)
2315 names.append(abs)
2315 names.append(abs)
2316 if not opts.get('dry_run'):
2316 if not opts.get('dry_run'):
2317 repo.remove(names, True, wlock)
2317 repo.remove(names, True, wlock)
2318 return errs
2318 return errs
2319
2319
2320 def revert(ui, repo, *pats, **opts):
2320 def revert(ui, repo, *pats, **opts):
2321 """revert files or dirs to their states as of some revision
2321 """revert files or dirs to their states as of some revision
2322
2322
2323 With no revision specified, revert the named files or directories
2323 With no revision specified, revert the named files or directories
2324 to the contents they had in the parent of the working directory.
2324 to the contents they had in the parent of the working directory.
2325 This restores the contents of the affected files to an unmodified
2325 This restores the contents of the affected files to an unmodified
2326 state. If the working directory has two parents, you must
2326 state. If the working directory has two parents, you must
2327 explicitly specify the revision to revert to.
2327 explicitly specify the revision to revert to.
2328
2328
2329 Modified files are saved with a .orig suffix before reverting.
2329 Modified files are saved with a .orig suffix before reverting.
2330 To disable these backups, use --no-backup.
2330 To disable these backups, use --no-backup.
2331
2331
2332 Using the -r option, revert the given files or directories to their
2332 Using the -r option, revert the given files or directories to their
2333 contents as of a specific revision. This can be helpful to "roll
2333 contents as of a specific revision. This can be helpful to "roll
2334 back" some or all of a change that should not have been committed.
2334 back" some or all of a change that should not have been committed.
2335
2335
2336 Revert modifies the working directory. It does not commit any
2336 Revert modifies the working directory. It does not commit any
2337 changes, or change the parent of the working directory. If you
2337 changes, or change the parent of the working directory. If you
2338 revert to a revision other than the parent of the working
2338 revert to a revision other than the parent of the working
2339 directory, the reverted files will thus appear modified
2339 directory, the reverted files will thus appear modified
2340 afterwards.
2340 afterwards.
2341
2341
2342 If a file has been deleted, it is recreated. If the executable
2342 If a file has been deleted, it is recreated. If the executable
2343 mode of a file was changed, it is reset.
2343 mode of a file was changed, it is reset.
2344
2344
2345 If names are given, all files matching the names are reverted.
2345 If names are given, all files matching the names are reverted.
2346
2346
2347 If no arguments are given, no files are reverted.
2347 If no arguments are given, no files are reverted.
2348 """
2348 """
2349
2349
2350 if not pats and not opts['all']:
2350 if not pats and not opts['all']:
2351 raise util.Abort(_('no files or directories specified; '
2351 raise util.Abort(_('no files or directories specified; '
2352 'use --all to revert the whole repo'))
2352 'use --all to revert the whole repo'))
2353
2353
2354 parent, p2 = repo.dirstate.parents()
2354 parent, p2 = repo.dirstate.parents()
2355 if not opts['rev'] and p2 != nullid:
2355 if not opts['rev'] and p2 != nullid:
2356 raise util.Abort(_('uncommitted merge - please provide a '
2356 raise util.Abort(_('uncommitted merge - please provide a '
2357 'specific revision'))
2357 'specific revision'))
2358 node = repo.changectx(opts['rev']).node()
2358 node = repo.changectx(opts['rev']).node()
2359 mf = repo.manifest.read(repo.changelog.read(node)[0])
2359 mf = repo.manifest.read(repo.changelog.read(node)[0])
2360 if node == parent:
2360 if node == parent:
2361 pmf = mf
2361 pmf = mf
2362 else:
2362 else:
2363 pmf = None
2363 pmf = None
2364
2364
2365 wlock = repo.wlock()
2365 wlock = repo.wlock()
2366
2366
2367 # need all matching names in dirstate and manifest of target rev,
2367 # need all matching names in dirstate and manifest of target rev,
2368 # so have to walk both. do not print errors if files exist in one
2368 # so have to walk both. do not print errors if files exist in one
2369 # but not other.
2369 # but not other.
2370
2370
2371 names = {}
2371 names = {}
2372 target_only = {}
2372 target_only = {}
2373
2373
2374 # walk dirstate.
2374 # walk dirstate.
2375
2375
2376 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2376 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2377 badmatch=mf.has_key):
2377 badmatch=mf.has_key):
2378 names[abs] = (rel, exact)
2378 names[abs] = (rel, exact)
2379 if src == 'b':
2379 if src == 'b':
2380 target_only[abs] = True
2380 target_only[abs] = True
2381
2381
2382 # walk target manifest.
2382 # walk target manifest.
2383
2383
2384 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2384 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2385 badmatch=names.has_key):
2385 badmatch=names.has_key):
2386 if abs in names: continue
2386 if abs in names: continue
2387 names[abs] = (rel, exact)
2387 names[abs] = (rel, exact)
2388 target_only[abs] = True
2388 target_only[abs] = True
2389
2389
2390 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2390 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2391 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2391 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2392
2392
2393 revert = ([], _('reverting %s\n'))
2393 revert = ([], _('reverting %s\n'))
2394 add = ([], _('adding %s\n'))
2394 add = ([], _('adding %s\n'))
2395 remove = ([], _('removing %s\n'))
2395 remove = ([], _('removing %s\n'))
2396 forget = ([], _('forgetting %s\n'))
2396 forget = ([], _('forgetting %s\n'))
2397 undelete = ([], _('undeleting %s\n'))
2397 undelete = ([], _('undeleting %s\n'))
2398 update = {}
2398 update = {}
2399
2399
2400 disptable = (
2400 disptable = (
2401 # dispatch table:
2401 # dispatch table:
2402 # file state
2402 # file state
2403 # action if in target manifest
2403 # action if in target manifest
2404 # action if not in target manifest
2404 # action if not in target manifest
2405 # make backup if in target manifest
2405 # make backup if in target manifest
2406 # make backup if not in target manifest
2406 # make backup if not in target manifest
2407 (modified, revert, remove, True, True),
2407 (modified, revert, remove, True, True),
2408 (added, revert, forget, True, False),
2408 (added, revert, forget, True, False),
2409 (removed, undelete, None, False, False),
2409 (removed, undelete, None, False, False),
2410 (deleted, revert, remove, False, False),
2410 (deleted, revert, remove, False, False),
2411 (unknown, add, None, True, False),
2411 (unknown, add, None, True, False),
2412 (target_only, add, None, False, False),
2412 (target_only, add, None, False, False),
2413 )
2413 )
2414
2414
2415 entries = names.items()
2415 entries = names.items()
2416 entries.sort()
2416 entries.sort()
2417
2417
2418 for abs, (rel, exact) in entries:
2418 for abs, (rel, exact) in entries:
2419 mfentry = mf.get(abs)
2419 mfentry = mf.get(abs)
2420 def handle(xlist, dobackup):
2420 def handle(xlist, dobackup):
2421 xlist[0].append(abs)
2421 xlist[0].append(abs)
2422 update[abs] = 1
2422 update[abs] = 1
2423 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2423 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2424 bakname = "%s.orig" % rel
2424 bakname = "%s.orig" % rel
2425 ui.note(_('saving current version of %s as %s\n') %
2425 ui.note(_('saving current version of %s as %s\n') %
2426 (rel, bakname))
2426 (rel, bakname))
2427 if not opts.get('dry_run'):
2427 if not opts.get('dry_run'):
2428 shutil.copyfile(rel, bakname)
2428 shutil.copyfile(rel, bakname)
2429 shutil.copymode(rel, bakname)
2429 shutil.copymode(rel, bakname)
2430 if ui.verbose or not exact:
2430 if ui.verbose or not exact:
2431 ui.status(xlist[1] % rel)
2431 ui.status(xlist[1] % rel)
2432 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2432 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2433 if abs not in table: continue
2433 if abs not in table: continue
2434 # file has changed in dirstate
2434 # file has changed in dirstate
2435 if mfentry:
2435 if mfentry:
2436 handle(hitlist, backuphit)
2436 handle(hitlist, backuphit)
2437 elif misslist is not None:
2437 elif misslist is not None:
2438 handle(misslist, backupmiss)
2438 handle(misslist, backupmiss)
2439 else:
2439 else:
2440 if exact: ui.warn(_('file not managed: %s\n') % rel)
2440 if exact: ui.warn(_('file not managed: %s\n') % rel)
2441 break
2441 break
2442 else:
2442 else:
2443 # file has not changed in dirstate
2443 # file has not changed in dirstate
2444 if node == parent:
2444 if node == parent:
2445 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2445 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2446 continue
2446 continue
2447 if pmf is None:
2447 if pmf is None:
2448 # only need parent manifest in this unlikely case,
2448 # only need parent manifest in this unlikely case,
2449 # so do not read by default
2449 # so do not read by default
2450 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2450 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2451 if abs in pmf:
2451 if abs in pmf:
2452 if mfentry:
2452 if mfentry:
2453 # if version of file is same in parent and target
2453 # if version of file is same in parent and target
2454 # manifests, do nothing
2454 # manifests, do nothing
2455 if pmf[abs] != mfentry:
2455 if pmf[abs] != mfentry:
2456 handle(revert, False)
2456 handle(revert, False)
2457 else:
2457 else:
2458 handle(remove, False)
2458 handle(remove, False)
2459
2459
2460 if not opts.get('dry_run'):
2460 if not opts.get('dry_run'):
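# after restoring file contents, fix up the dirstate: drop pending adds, and
# mark entries as added ('a'), normal ('n') or removed ('r') as appropriate.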
2461 repo.dirstate.forget(forget[0])
2461 repo.dirstate.forget(forget[0])
2462 r = hg.revert(repo, node, update.has_key, wlock)
2462 r = hg.revert(repo, node, update.has_key, wlock)
2463 repo.dirstate.update(add[0], 'a')
2463 repo.dirstate.update(add[0], 'a')
2464 repo.dirstate.update(undelete[0], 'n')
2464 repo.dirstate.update(undelete[0], 'n')
2465 repo.dirstate.update(remove[0], 'r')
2465 repo.dirstate.update(remove[0], 'r')
2466 return r
2466 return r
2467
2467
2468 def rollback(ui, repo):
2468 def rollback(ui, repo):
2469 """roll back the last transaction in this repository
2469 """roll back the last transaction in this repository
2470
2470
2471 Roll back the last transaction in this repository, restoring the
2471 Roll back the last transaction in this repository, restoring the
2472 project to its state prior to the transaction.
2472 project to its state prior to the transaction.
2473
2473
2474 Transactions are used to encapsulate the effects of all commands
2474 Transactions are used to encapsulate the effects of all commands
2475 that create new changesets or propagate existing changesets into a
2475 that create new changesets or propagate existing changesets into a
2476 repository. For example, the following commands are transactional,
2476 repository. For example, the following commands are transactional,
2477 and their effects can be rolled back:
2477 and their effects can be rolled back:
2478
2478
2479 commit
2479 commit
2480 import
2480 import
2481 pull
2481 pull
2482 push (with this repository as destination)
2482 push (with this repository as destination)
2483 unbundle
2483 unbundle
2484
2484
2485 This command should be used with care. There is only one level of
2485 This command should be used with care. There is only one level of
2486 rollback, and there is no way to undo a rollback.
2486 rollback, and there is no way to undo a rollback.
2487
2487
2488 This command is not intended for use on public repositories. Once
2488 This command is not intended for use on public repositories. Once
2489 changes are visible for pull by other users, rolling a transaction
2489 changes are visible for pull by other users, rolling a transaction
2490 back locally is ineffective (someone else may already have pulled
2490 back locally is ineffective (someone else may already have pulled
2491 the changes). Furthermore, a race is possible with readers of the
2491 the changes). Furthermore, a race is possible with readers of the
2492 repository; for example an in-progress pull from the repository
2492 repository; for example an in-progress pull from the repository
2493 may fail if a rollback is performed.
2493 may fail if a rollback is performed.
2494 """
2494 """
2495 repo.rollback()
2495 repo.rollback()
2496
2496
2497 def root(ui, repo):
2497 def root(ui, repo):
2498 """print the root (top) of the current working dir
2498 """print the root (top) of the current working dir
2499
2499
2500 Print the root directory of the current repository.
2500 Print the root directory of the current repository.
2501 """
2501 """
2502 ui.write(repo.root + "\n")
2502 ui.write(repo.root + "\n")
2503
2503
2504 def serve(ui, repo, **opts):
2504 def serve(ui, repo, **opts):
2505 """export the repository via HTTP
2505 """export the repository via HTTP
2506
2506
2507 Start a local HTTP repository browser and pull server.
2507 Start a local HTTP repository browser and pull server.
2508
2508
2509 By default, the server logs accesses to stdout and errors to
2509 By default, the server logs accesses to stdout and errors to
2510 stderr. Use the "-A" and "-E" options to log to files.
2510 stderr. Use the "-A" and "-E" options to log to files.
2511 """
2511 """
2512
2512
2513 if opts["stdio"]:
2513 if opts["stdio"]:
2514 if repo is None:
2514 if repo is None:
2515 raise hg.RepoError(_("There is no Mercurial repository here"
2515 raise hg.RepoError(_("There is no Mercurial repository here"
2516 " (.hg not found)"))
2516 " (.hg not found)"))
2517 s = sshserver.sshserver(ui, repo)
2517 s = sshserver.sshserver(ui, repo)
2518 s.serve_forever()
2518 s.serve_forever()
2519
2519
2520 optlist = ("name templates style address port ipv6"
2520 optlist = ("name templates style address port ipv6"
2521 " accesslog errorlog webdir_conf")
2521 " accesslog errorlog webdir_conf")
2522 for o in optlist.split():
2522 for o in optlist.split():
2523 if opts[o]:
2523 if opts[o]:
2524 ui.setconfig("web", o, str(opts[o]))
2524 ui.setconfig("web", o, str(opts[o]))
2525
2525
2526 if repo is None and not ui.config("web", "webdir_conf"):
2526 if repo is None and not ui.config("web", "webdir_conf"):
2527 raise hg.RepoError(_("There is no Mercurial repository here"
2527 raise hg.RepoError(_("There is no Mercurial repository here"
2528 " (.hg not found)"))
2528 " (.hg not found)"))
2529
2529
2530 if opts['daemon'] and not opts['daemon_pipefds']:
2530 if opts['daemon'] and not opts['daemon_pipefds']:
2531 rfd, wfd = os.pipe()
2531 rfd, wfd = os.pipe()
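# daemonize by re-running ourselves with --daemon-pipefds: the parent blocks
# on the pipe until the child reports readiness (the 'y' written below), then
# exits.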
2532 args = sys.argv[:]
2532 args = sys.argv[:]
2533 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2533 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2535 args[0], args)
2535 args[0], args)
2536 os.close(wfd)
2536 os.close(wfd)
2537 os.read(rfd, 1)
2537 os.read(rfd, 1)
2538 os._exit(0)
2538 os._exit(0)
2539
2539
2540 try:
2540 try:
2541 httpd = hgweb.server.create_server(ui, repo)
2541 httpd = hgweb.server.create_server(ui, repo)
2542 except socket.error, inst:
2542 except socket.error, inst:
2543 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2543 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2544
2544
2545 if ui.verbose:
2545 if ui.verbose:
2546 addr, port = httpd.socket.getsockname()
2546 addr, port = httpd.socket.getsockname()
2547 if addr == '0.0.0.0':
2547 if addr == '0.0.0.0':
2548 addr = socket.gethostname()
2548 addr = socket.gethostname()
2549 else:
2549 else:
2550 try:
2550 try:
2551 addr = socket.gethostbyaddr(addr)[0]
2551 addr = socket.gethostbyaddr(addr)[0]
2552 except socket.error:
2552 except socket.error:
2553 pass
2553 pass
2554 if port != 80:
2554 if port != 80:
2555 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2555 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2556 else:
2556 else:
2557 ui.status(_('listening at http://%s/\n') % addr)
2557 ui.status(_('listening at http://%s/\n') % addr)
2558
2558
2559 if opts['pid_file']:
2559 if opts['pid_file']:
2560 fp = open(opts['pid_file'], 'w')
2560 fp = open(opts['pid_file'], 'w')
2561 fp.write(str(os.getpid()) + '\n')
2561 fp.write(str(os.getpid()) + '\n')
2562 fp.close()
2562 fp.close()
2563
2563
2564 if opts['daemon_pipefds']:
2564 if opts['daemon_pipefds']:
2565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
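# child side of the daemon handshake: signal the waiting parent, then detach
# by redirecting stdin/stdout/stderr to the null device.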
2566 os.close(rfd)
2566 os.close(rfd)
2567 os.write(wfd, 'y')
2567 os.write(wfd, 'y')
2568 os.close(wfd)
2568 os.close(wfd)
2569 sys.stdout.flush()
2569 sys.stdout.flush()
2570 sys.stderr.flush()
2570 sys.stderr.flush()
2571 fd = os.open(util.nulldev, os.O_RDWR)
2571 fd = os.open(util.nulldev, os.O_RDWR)
2572 if fd != 0: os.dup2(fd, 0)
2572 if fd != 0: os.dup2(fd, 0)
2573 if fd != 1: os.dup2(fd, 1)
2573 if fd != 1: os.dup2(fd, 1)
2574 if fd != 2: os.dup2(fd, 2)
2574 if fd != 2: os.dup2(fd, 2)
2575 if fd not in (0, 1, 2): os.close(fd)
2575 if fd not in (0, 1, 2): os.close(fd)
2576
2576
2577 httpd.serve_forever()
2577 httpd.serve_forever()
2578
2578
2579 def status(ui, repo, *pats, **opts):
2579 def status(ui, repo, *pats, **opts):
2580 """show changed files in the working directory
2580 """show changed files in the working directory
2581
2581
2582 Show status of files in the repository. If names are given, only
2582 Show status of files in the repository. If names are given, only
2583 files that match are shown. Files that are clean or ignored are
2583 files that match are shown. Files that are clean or ignored are
2584 not listed unless -c (clean), -i (ignored) or -A is given.
2584 not listed unless -c (clean), -i (ignored) or -A is given.
2585
2585
2586 If one revision is given, it is used as the base revision.
2586 If one revision is given, it is used as the base revision.
2587 If two revisions are given, the difference between them is shown.
2587 If two revisions are given, the difference between them is shown.
2588
2588
2589 The codes used to show the status of files are:
2589 The codes used to show the status of files are:
2590 M = modified
2590 M = modified
2591 A = added
2591 A = added
2592 R = removed
2592 R = removed
2593 C = clean
2593 C = clean
2594 ! = deleted, but still tracked
2594 ! = deleted, but still tracked
2595 ? = not tracked
2595 ? = not tracked
2596 I = ignored (not shown by default)
2596 I = ignored (not shown by default)
2597 = the previously added file was copied from here
2597 = the previously added file was copied from here
2598 """
2598 """
2599
2599
2600 all = opts['all']
2600 all = opts['all']
2601 node1, node2 = cmdutil.revpair(ui, repo, opts.get('rev'))
2601 node1, node2 = cmdutil.revpair(ui, repo, opts.get('rev'))
2602
2602
2603 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2603 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2604 cwd = (pats and repo.getcwd()) or ''
2604 cwd = (pats and repo.getcwd()) or ''
2605 modified, added, removed, deleted, unknown, ignored, clean = [
2605 modified, added, removed, deleted, unknown, ignored, clean = [
2606 [util.pathto(cwd, x) for x in n]
2606 [util.pathto(cwd, x) for x in n]
2607 for n in repo.status(node1=node1, node2=node2, files=files,
2607 for n in repo.status(node1=node1, node2=node2, files=files,
2608 match=matchfn,
2608 match=matchfn,
2609 list_ignored=all or opts['ignored'],
2609 list_ignored=all or opts['ignored'],
2610 list_clean=all or opts['clean'])]
2610 list_clean=all or opts['clean'])]
2611
2611
2612 changetypes = (('modified', 'M', modified),
2612 changetypes = (('modified', 'M', modified),
2613 ('added', 'A', added),
2613 ('added', 'A', added),
2614 ('removed', 'R', removed),
2614 ('removed', 'R', removed),
2615 ('deleted', '!', deleted),
2615 ('deleted', '!', deleted),
2616 ('unknown', '?', unknown),
2616 ('unknown', '?', unknown),
2617 ('ignored', 'I', ignored))
2617 ('ignored', 'I', ignored))
2618
2618
2619 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2619 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2620
2620
2621 end = opts['print0'] and '\0' or '\n'
2621 end = opts['print0'] and '\0' or '\n'
2622
2622
2623 for opt, char, changes in ([ct for ct in explicit_changetypes
2623 for opt, char, changes in ([ct for ct in explicit_changetypes
2624 if all or opts[ct[0]]]
2624 if all or opts[ct[0]]]
2625 or changetypes):
2625 or changetypes):
2626 if opts['no_status']:
2626 if opts['no_status']:
2627 format = "%%s%s" % end
2627 format = "%%s%s" % end
2628 else:
2628 else:
2629 format = "%s %%s%s" % (char, end)
2629 format = "%s %%s%s" % (char, end)
2630
2630
2631 for f in changes:
2631 for f in changes:
2632 ui.write(format % f)
2632 ui.write(format % f)
2633 if ((all or opts.get('copies')) and not opts.get('no_status')):
2633 if ((all or opts.get('copies')) and not opts.get('no_status')):
2634 copied = repo.dirstate.copied(f)
2634 copied = repo.dirstate.copied(f)
2635 if copied:
2635 if copied:
2636 ui.write(' %s%s' % (copied, end))
2636 ui.write(' %s%s' % (copied, end))
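# Editor's sketch (not part of the original changeset): the format strings
# above produce one line per file, "<code> <name>" by default, a bare
# "<name>" with --no-status, terminated by '\0' instead of '\n' under
# --print0, and followed by an indented copy-source line when --copies
# applies.  A hypothetical helper showing the same layout:
def _status_entry_sketch(char, name, copied=None, no_status=False, end='\n'):
    if no_status:
        entry = "%s%s" % (name, end)
    else:
        entry = "%s %s%s" % (char, name, end)
    if copied and not no_status:
        entry += " %s%s" % (copied, end)
    return entry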
2637
2637
2638 def tag(ui, repo, name, rev_=None, **opts):
2638 def tag(ui, repo, name, rev_=None, **opts):
2639 """add a tag for the current tip or a given revision
2639 """add a tag for the current tip or a given revision
2640
2640
2641 Name a particular revision using <name>.
2641 Name a particular revision using <name>.
2642
2642
2643 Tags are used to name particular revisions of the repository and are
2643 Tags are used to name particular revisions of the repository and are
2644 very useful to compare different revisions, to go back to significant
2644 very useful to compare different revisions, to go back to significant
2645 earlier versions or to mark branch points as releases, etc.
2645 earlier versions or to mark branch points as releases, etc.
2646
2646
2647 If no revision is given, the parent of the working directory is used.
2647 If no revision is given, the parent of the working directory is used.
2648
2648
2649 To facilitate version control, distribution, and merging of tags,
2649 To facilitate version control, distribution, and merging of tags,
2650 they are stored as a file named ".hgtags" which is managed
2650 they are stored as a file named ".hgtags" which is managed
2651 similarly to other project files and can be hand-edited if
2651 similarly to other project files and can be hand-edited if
2652 necessary. The file '.hg/localtags' is used for local tags (not
2652 necessary. The file '.hg/localtags' is used for local tags (not
2653 shared among repositories).
2653 shared among repositories).
2654 """
2654 """
2655 if name in ['tip', '.']:
2655 if name in ['tip', '.']:
2656 raise util.Abort(_("the name '%s' is reserved") % name)
2656 raise util.Abort(_("the name '%s' is reserved") % name)
2657 if rev_ is not None:
2657 if rev_ is not None:
2658 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2658 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2659 "please use 'hg tag [-r REV] NAME' instead\n"))
2659 "please use 'hg tag [-r REV] NAME' instead\n"))
2660 if opts['rev']:
2660 if opts['rev']:
2661 raise util.Abort(_("use only one form to specify the revision"))
2661 raise util.Abort(_("use only one form to specify the revision"))
2662 if opts['rev']:
2662 if opts['rev']:
2663 rev_ = opts['rev']
2663 rev_ = opts['rev']
2664 if not rev_ and repo.dirstate.parents()[1] != nullid:
2664 if not rev_ and repo.dirstate.parents()[1] != nullid:
2665 raise util.Abort(_('uncommitted merge - please provide a '
2665 raise util.Abort(_('uncommitted merge - please provide a '
2666 'specific revision'))
2666 'specific revision'))
2667 r = repo.changectx(rev_).node()
2667 r = repo.changectx(rev_).node()
2668
2668
2669 message = opts['message']
2669 message = opts['message']
2670 if not message:
2670 if not message:
2671 message = _('Added tag %s for changeset %s') % (name, short(r))
2671 message = _('Added tag %s for changeset %s') % (name, short(r))
2672
2672
2673 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2673 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
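# Editor's note (not part of the original changeset): as the docstring above
# says, the tag itself is ordinary repository data -- repo.tag() records a
# "<hex node> <name>" line in .hgtags (committed like any other tracked
# file) or in .hg/localtags when --local is given, using the default
# message built above when none was supplied.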
2674
2674
2675 def tags(ui, repo):
2675 def tags(ui, repo):
2676 """list repository tags
2676 """list repository tags
2677
2677
2678 List the repository tags.
2678 List the repository tags.
2679
2679
2680 This lists both regular and local tags.
2680 This lists both regular and local tags.
2681 """
2681 """
2682
2682
2683 l = repo.tagslist()
2683 l = repo.tagslist()
2684 l.reverse()
2684 l.reverse()
2685 hexfunc = ui.debugflag and hex or short
2685 hexfunc = ui.debugflag and hex or short
2686 for t, n in l:
2686 for t, n in l:
2687 try:
2687 try:
2688 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2688 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2689 except KeyError:
2689 except KeyError:
2690 r = " ?:?"
2690 r = " ?:?"
2691 if ui.quiet:
2691 if ui.quiet:
2692 ui.write("%s\n" % t)
2692 ui.write("%s\n" % t)
2693 else:
2693 else:
2694 ui.write("%-30s %s\n" % (t, r))
2694 ui.write("%-30s %s\n" % (t, r))
2695
2695
2696 def tip(ui, repo, **opts):
2696 def tip(ui, repo, **opts):
2697 """show the tip revision
2697 """show the tip revision
2698
2698
2699 Show the tip revision.
2699 Show the tip revision.
2700 """
2700 """
2701 n = repo.changelog.tip()
2701 n = repo.changelog.tip()
2702 br = None
2702 br = None
2703 if opts['branches']:
2703 if opts['branches']:
2704 ui.warn(_("the --branches option is deprecated, "
2704 ui.warn(_("the --branches option is deprecated, "
2705 "please use 'hg branches' instead\n"))
2705 "please use 'hg branches' instead\n"))
2706 br = repo.branchlookup([n])
2706 br = repo.branchlookup([n])
2707 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2707 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2708 if opts['patch']:
2708 if opts['patch']:
2709 patch.diff(repo, repo.changelog.parents(n)[0], n)
2709 patch.diff(repo, repo.changelog.parents(n)[0], n)
2710
2710
2711 def unbundle(ui, repo, fname, **opts):
2711 def unbundle(ui, repo, fname, **opts):
2712 """apply a changegroup file
2712 """apply a changegroup file
2713
2713
2714 Apply a compressed changegroup file generated by the bundle
2714 Apply a compressed changegroup file generated by the bundle
2715 command.
2715 command.
2716 """
2716 """
2717 f = urllib.urlopen(fname)
2717 f = urllib.urlopen(fname)
2718
2718
2719 header = f.read(6)
2719 header = f.read(6)
2720 if not header.startswith("HG"):
2720 if not header.startswith("HG"):
2721 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2721 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2722 elif not header.startswith("HG10"):
2722 elif not header.startswith("HG10"):
2723 raise util.Abort(_("%s: unknown bundle version") % fname)
2723 raise util.Abort(_("%s: unknown bundle version") % fname)
2724 elif header == "HG10BZ":
2724 elif header == "HG10BZ":
2725 def generator(f):
2725 def generator(f):
2726 zd = bz2.BZ2Decompressor()
2726 zd = bz2.BZ2Decompressor()
2727 zd.decompress("BZ")
2727 zd.decompress("BZ")
2728 for chunk in f:
2728 for chunk in f:
2729 yield zd.decompress(chunk)
2729 yield zd.decompress(chunk)
2730 elif header == "HG10UN":
2730 elif header == "HG10UN":
2731 def generator(f):
2731 def generator(f):
2732 for chunk in f:
2732 for chunk in f:
2733 yield chunk
2733 yield chunk
2734 else:
2734 else:
2735 raise util.Abort(_("%s: unknown bundle compression type")
2735 raise util.Abort(_("%s: unknown bundle compression type")
2736 % fname)
2736 % fname)
2737 gen = generator(util.filechunkiter(f, 4096))
2737 gen = generator(util.filechunkiter(f, 4096))
2738 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2738 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2739 'bundle:' + fname)
2739 'bundle:' + fname)
2740 return postincoming(ui, repo, modheads, opts['update'])
2740 return postincoming(ui, repo, modheads, opts['update'])
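# Editor's sketch (not part of the original changeset): the six-byte header
# picks the payload handling -- "HG10UN" is an uncompressed changegroup,
# while for "HG10BZ" the trailing "BZ" doubles as the bzip2 magic, so the
# decompressor is primed with it (the zd.decompress("BZ") call above).  A
# minimal, hypothetical dispatcher over the same idea:
def _bundle_payload_sketch(header, chunks):
    def plain(chunks):
        for chunk in chunks:
            yield chunk
    def bunzipped(chunks):
        zd = bz2.BZ2Decompressor()
        zd.decompress("BZ")          # re-feed the magic consumed by the header
        for chunk in chunks:
            yield zd.decompress(chunk)
    if header == "HG10UN":
        return plain(chunks)
    if header == "HG10BZ":
        return bunzipped(chunks)
    raise util.Abort(_("unknown bundle compression type"))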
2741
2741
2742 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2742 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2743 branch=None):
2743 branch=None):
2744 """update or merge working directory
2744 """update or merge working directory
2745
2745
2746 Update the working directory to the specified revision.
2746 Update the working directory to the specified revision.
2747
2747
2748 If there are no outstanding changes in the working directory and
2748 If there are no outstanding changes in the working directory and
2749 there is a linear relationship between the current version and the
2749 there is a linear relationship between the current version and the
2750 requested version, the result is the requested version.
2750 requested version, the result is the requested version.
2751
2751
2752 To merge the working directory with another revision, use the
2752 To merge the working directory with another revision, use the
2753 merge command.
2753 merge command.
2754
2754
2755 By default, update will refuse to run if doing so would require
2755 By default, update will refuse to run if doing so would require
2756 merging or discarding local changes.
2756 merging or discarding local changes.
2757 """
2757 """
2758 node = _lookup(repo, node, branch)
2758 node = _lookup(repo, node, branch)
2759 if clean:
2759 if clean:
2760 return hg.clean(repo, node)
2760 return hg.clean(repo, node)
2761 else:
2761 else:
2762 return hg.update(repo, node)
2762 return hg.update(repo, node)
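# Editor's note (not part of the original changeset): the split above mirrors
# the docstring -- hg.clean() (the -C path) overwrites local modifications to
# reach the target, while hg.update() refuses to merge or discard
# uncommitted changes and points the user at 'hg merge' or -C instead, e.g.
# "hg update -C tip" forces the working directory to tip.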
2763
2763
2764 def _lookup(repo, node, branch=None):
2764 def _lookup(repo, node, branch=None):
2765 if branch:
2765 if branch:
2766 repo.ui.warn(_("the --branch option is deprecated, "
2766 repo.ui.warn(_("the --branch option is deprecated, "
2767 "please use 'hg branch' instead\n"))
2767 "please use 'hg branch' instead\n"))
2768 br = repo.branchlookup(branch=branch)
2768 br = repo.branchlookup(branch=branch)
2769 found = []
2769 found = []
2770 for x in br:
2770 for x in br:
2771 if branch in br[x]:
2771 if branch in br[x]:
2772 found.append(x)
2772 found.append(x)
2773 if len(found) > 1:
2773 if len(found) > 1:
2774 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2774 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2775 for x in found:
2775 for x in found:
2776 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2776 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2777 raise util.Abort("")
2777 raise util.Abort("")
2778 if len(found) == 1:
2778 if len(found) == 1:
2779 node = found[0]
2779 node = found[0]
2780 repo.ui.warn(_("Using head %s for branch %s\n")
2780 repo.ui.warn(_("Using head %s for branch %s\n")
2781 % (short(node), branch))
2781 % (short(node), branch))
2782 else:
2782 else:
2783 raise util.Abort(_("branch %s not found") % branch)
2783 raise util.Abort(_("branch %s not found") % branch)
2784 else:
2784 else:
2785 node = node and repo.lookup(node) or repo.changelog.tip()
2785 node = node and repo.lookup(node) or repo.changelog.tip()
2786 return node
2786 return node
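# Editor's note (not part of the original changeset): resolution order in
# _lookup() is the deprecated --branch name first (aborting after listing
# the heads when the branch has several), then an explicit revision via
# repo.lookup(), and finally the changelog tip when neither was given.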
2787
2787
2788 def verify(ui, repo):
2788 def verify(ui, repo):
2789 """verify the integrity of the repository
2789 """verify the integrity of the repository
2790
2790
2791 Verify the integrity of the current repository.
2791 Verify the integrity of the current repository.
2792
2792
2793 This will perform an extensive check of the repository's
2793 This will perform an extensive check of the repository's
2794 integrity, validating the hashes and checksums of each entry in
2794 integrity, validating the hashes and checksums of each entry in
2795 the changelog, manifest, and tracked files, as well as the
2795 the changelog, manifest, and tracked files, as well as the
2796 integrity of their crosslinks and indices.
2796 integrity of their crosslinks and indices.
2797 """
2797 """
2798 return hg.verify(repo)
2798 return hg.verify(repo)
2799
2799
2800 # Command options and aliases are listed here, alphabetically
2800 # Command options and aliases are listed here, alphabetically
2801
2801
2802 globalopts = [
2802 globalopts = [
2803 ('R', 'repository', '',
2803 ('R', 'repository', '',
2804 _('repository root directory or symbolic path name')),
2804 _('repository root directory or symbolic path name')),
2805 ('', 'cwd', '', _('change working directory')),
2805 ('', 'cwd', '', _('change working directory')),
2806 ('y', 'noninteractive', None,
2806 ('y', 'noninteractive', None,
2807 _('do not prompt, assume \'yes\' for any required answers')),
2807 _('do not prompt, assume \'yes\' for any required answers')),
2808 ('q', 'quiet', None, _('suppress output')),
2808 ('q', 'quiet', None, _('suppress output')),
2809 ('v', 'verbose', None, _('enable additional output')),
2809 ('v', 'verbose', None, _('enable additional output')),
2810 ('', 'config', [], _('set/override config option')),
2810 ('', 'config', [], _('set/override config option')),
2811 ('', 'debug', None, _('enable debugging output')),
2811 ('', 'debug', None, _('enable debugging output')),
2812 ('', 'debugger', None, _('start debugger')),
2812 ('', 'debugger', None, _('start debugger')),
2813 ('', 'lsprof', None, _('print improved command execution profile')),
2813 ('', 'lsprof', None, _('print improved command execution profile')),
2814 ('', 'traceback', None, _('print traceback on exception')),
2814 ('', 'traceback', None, _('print traceback on exception')),
2815 ('', 'time', None, _('time how long the command takes')),
2815 ('', 'time', None, _('time how long the command takes')),
2816 ('', 'profile', None, _('print command execution profile')),
2816 ('', 'profile', None, _('print command execution profile')),
2817 ('', 'version', None, _('output version information and exit')),
2817 ('', 'version', None, _('output version information and exit')),
2818 ('h', 'help', None, _('display help and exit')),
2818 ('h', 'help', None, _('display help and exit')),
2819 ]
2819 ]
2820
2820
2821 dryrunopts = [('n', 'dry-run', None,
2821 dryrunopts = [('n', 'dry-run', None,
2822 _('do not perform actions, just print output'))]
2822 _('do not perform actions, just print output'))]
2823
2823
2824 remoteopts = [
2824 remoteopts = [
2825 ('e', 'ssh', '', _('specify ssh command to use')),
2825 ('e', 'ssh', '', _('specify ssh command to use')),
2826 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2826 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2827 ]
2827 ]
2828
2828
2829 walkopts = [
2829 walkopts = [
2830 ('I', 'include', [], _('include names matching the given patterns')),
2830 ('I', 'include', [], _('include names matching the given patterns')),
2831 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2831 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2832 ]
2832 ]
2833
2833
2834 table = {
2834 table = {
2835 "^add":
2835 "^add":
2836 (add,
2836 (add,
2837 walkopts + dryrunopts,
2837 walkopts + dryrunopts,
2838 _('hg add [OPTION]... [FILE]...')),
2838 _('hg add [OPTION]... [FILE]...')),
2839 "addremove":
2839 "addremove":
2840 (addremove,
2840 (addremove,
2841 [('s', 'similarity', '',
2841 [('s', 'similarity', '',
2842 _('guess renamed files by similarity (0<=s<=100)')),
2842 _('guess renamed files by similarity (0<=s<=100)')),
2843 ] + walkopts + dryrunopts,
2843 ] + walkopts + dryrunopts,
2844 _('hg addremove [OPTION]... [FILE]...')),
2844 _('hg addremove [OPTION]... [FILE]...')),
2845 "^annotate":
2845 "^annotate":
2846 (annotate,
2846 (annotate,
2847 [('r', 'rev', '', _('annotate the specified revision')),
2847 [('r', 'rev', '', _('annotate the specified revision')),
2848 ('f', 'follow', None, _('follow file copies and renames')),
2848 ('f', 'follow', None, _('follow file copies and renames')),
2849 ('a', 'text', None, _('treat all files as text')),
2849 ('a', 'text', None, _('treat all files as text')),
2850 ('u', 'user', None, _('list the author')),
2850 ('u', 'user', None, _('list the author')),
2851 ('d', 'date', None, _('list the date')),
2851 ('d', 'date', None, _('list the date')),
2852 ('n', 'number', None, _('list the revision number (default)')),
2852 ('n', 'number', None, _('list the revision number (default)')),
2853 ('c', 'changeset', None, _('list the changeset')),
2853 ('c', 'changeset', None, _('list the changeset')),
2854 ] + walkopts,
2854 ] + walkopts,
2855 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2855 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2856 "archive":
2856 "archive":
2857 (archive,
2857 (archive,
2858 [('', 'no-decode', None, _('do not pass files through decoders')),
2858 [('', 'no-decode', None, _('do not pass files through decoders')),
2859 ('p', 'prefix', '', _('directory prefix for files in archive')),
2859 ('p', 'prefix', '', _('directory prefix for files in archive')),
2860 ('r', 'rev', '', _('revision to distribute')),
2860 ('r', 'rev', '', _('revision to distribute')),
2861 ('t', 'type', '', _('type of distribution to create')),
2861 ('t', 'type', '', _('type of distribution to create')),
2862 ] + walkopts,
2862 ] + walkopts,
2863 _('hg archive [OPTION]... DEST')),
2863 _('hg archive [OPTION]... DEST')),
2864 "backout":
2864 "backout":
2865 (backout,
2865 (backout,
2866 [('', 'merge', None,
2866 [('', 'merge', None,
2867 _('merge with old dirstate parent after backout')),
2867 _('merge with old dirstate parent after backout')),
2868 ('m', 'message', '', _('use <text> as commit message')),
2868 ('m', 'message', '', _('use <text> as commit message')),
2869 ('l', 'logfile', '', _('read commit message from <file>')),
2869 ('l', 'logfile', '', _('read commit message from <file>')),
2870 ('d', 'date', '', _('record datecode as commit date')),
2870 ('d', 'date', '', _('record datecode as commit date')),
2871 ('', 'parent', '', _('parent to choose when backing out merge')),
2871 ('', 'parent', '', _('parent to choose when backing out merge')),
2872 ('u', 'user', '', _('record user as committer')),
2872 ('u', 'user', '', _('record user as committer')),
2873 ] + walkopts,
2873 ] + walkopts,
2874 _('hg backout [OPTION]... REV')),
2874 _('hg backout [OPTION]... REV')),
2875 "branch": (branch, [], _('hg branch [NAME]')),
2875 "branch": (branch, [], _('hg branch [NAME]')),
2876 "branches": (branches, [], _('hg branches')),
2876 "branches": (branches, [], _('hg branches')),
2877 "bundle":
2877 "bundle":
2878 (bundle,
2878 (bundle,
2879 [('f', 'force', None,
2879 [('f', 'force', None,
2880 _('run even when remote repository is unrelated')),
2880 _('run even when remote repository is unrelated')),
2881 ('r', 'rev', [],
2881 ('r', 'rev', [],
2882 _('a changeset you would like to bundle')),
2882 _('a changeset you would like to bundle')),
2883 ('', 'base', [],
2883 ('', 'base', [],
2884 _('a base changeset to specify instead of a destination')),
2884 _('a base changeset to specify instead of a destination')),
2885 ] + remoteopts,
2885 ] + remoteopts,
2886 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2886 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2887 "cat":
2887 "cat":
2888 (cat,
2888 (cat,
2889 [('o', 'output', '', _('print output to file with formatted name')),
2889 [('o', 'output', '', _('print output to file with formatted name')),
2890 ('r', 'rev', '', _('print the given revision')),
2890 ('r', 'rev', '', _('print the given revision')),
2891 ] + walkopts,
2891 ] + walkopts,
2892 _('hg cat [OPTION]... FILE...')),
2892 _('hg cat [OPTION]... FILE...')),
2893 "^clone":
2893 "^clone":
2894 (clone,
2894 (clone,
2895 [('U', 'noupdate', None, _('do not update the new working directory')),
2895 [('U', 'noupdate', None, _('do not update the new working directory')),
2896 ('r', 'rev', [],
2896 ('r', 'rev', [],
2897 _('a changeset you would like to have after cloning')),
2897 _('a changeset you would like to have after cloning')),
2898 ('', 'pull', None, _('use pull protocol to copy metadata')),
2898 ('', 'pull', None, _('use pull protocol to copy metadata')),
2899 ('', 'uncompressed', None,
2899 ('', 'uncompressed', None,
2900 _('use uncompressed transfer (fast over LAN)')),
2900 _('use uncompressed transfer (fast over LAN)')),
2901 ] + remoteopts,
2901 ] + remoteopts,
2902 _('hg clone [OPTION]... SOURCE [DEST]')),
2902 _('hg clone [OPTION]... SOURCE [DEST]')),
2903 "^commit|ci":
2903 "^commit|ci":
2904 (commit,
2904 (commit,
2905 [('A', 'addremove', None,
2905 [('A', 'addremove', None,
2906 _('mark new/missing files as added/removed before committing')),
2906 _('mark new/missing files as added/removed before committing')),
2907 ('m', 'message', '', _('use <text> as commit message')),
2907 ('m', 'message', '', _('use <text> as commit message')),
2908 ('l', 'logfile', '', _('read the commit message from <file>')),
2908 ('l', 'logfile', '', _('read the commit message from <file>')),
2909 ('d', 'date', '', _('record datecode as commit date')),
2909 ('d', 'date', '', _('record datecode as commit date')),
2910 ('u', 'user', '', _('record user as committer')),
2910 ('u', 'user', '', _('record user as committer')),
2911 ] + walkopts,
2911 ] + walkopts,
2912 _('hg commit [OPTION]... [FILE]...')),
2912 _('hg commit [OPTION]... [FILE]...')),
2913 "copy|cp":
2913 "copy|cp":
2914 (copy,
2914 (copy,
2915 [('A', 'after', None, _('record a copy that has already occurred')),
2915 [('A', 'after', None, _('record a copy that has already occurred')),
2916 ('f', 'force', None,
2916 ('f', 'force', None,
2917 _('forcibly copy over an existing managed file')),
2917 _('forcibly copy over an existing managed file')),
2918 ] + walkopts + dryrunopts,
2918 ] + walkopts + dryrunopts,
2919 _('hg copy [OPTION]... [SOURCE]... DEST')),
2919 _('hg copy [OPTION]... [SOURCE]... DEST')),
2920 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2920 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2921 "debugcomplete":
2921 "debugcomplete":
2922 (debugcomplete,
2922 (debugcomplete,
2923 [('o', 'options', None, _('show the command options'))],
2923 [('o', 'options', None, _('show the command options'))],
2924 _('debugcomplete [-o] CMD')),
2924 _('debugcomplete [-o] CMD')),
2925 "debugrebuildstate":
2925 "debugrebuildstate":
2926 (debugrebuildstate,
2926 (debugrebuildstate,
2927 [('r', 'rev', '', _('revision to rebuild to'))],
2927 [('r', 'rev', '', _('revision to rebuild to'))],
2928 _('debugrebuildstate [-r REV] [REV]')),
2928 _('debugrebuildstate [-r REV] [REV]')),
2929 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2929 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2930 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2930 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2931 "debugstate": (debugstate, [], _('debugstate')),
2931 "debugstate": (debugstate, [], _('debugstate')),
2932 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2932 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2933 "debugindex": (debugindex, [], _('debugindex FILE')),
2933 "debugindex": (debugindex, [], _('debugindex FILE')),
2934 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2934 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2935 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2935 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2936 "debugwalk":
2936 "debugwalk":
2937 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2937 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2938 "^diff":
2938 "^diff":
2939 (diff,
2939 (diff,
2940 [('r', 'rev', [], _('revision')),
2940 [('r', 'rev', [], _('revision')),
2941 ('a', 'text', None, _('treat all files as text')),
2941 ('a', 'text', None, _('treat all files as text')),
2942 ('p', 'show-function', None,
2942 ('p', 'show-function', None,
2943 _('show which function each change is in')),
2943 _('show which function each change is in')),
2944 ('g', 'git', None, _('use git extended diff format')),
2944 ('g', 'git', None, _('use git extended diff format')),
2945 ('', 'nodates', None, _("don't include dates in diff headers")),
2945 ('', 'nodates', None, _("don't include dates in diff headers")),
2946 ('w', 'ignore-all-space', None,
2946 ('w', 'ignore-all-space', None,
2947 _('ignore white space when comparing lines')),
2947 _('ignore white space when comparing lines')),
2948 ('b', 'ignore-space-change', None,
2948 ('b', 'ignore-space-change', None,
2949 _('ignore changes in the amount of white space')),
2949 _('ignore changes in the amount of white space')),
2950 ('B', 'ignore-blank-lines', None,
2950 ('B', 'ignore-blank-lines', None,
2951 _('ignore changes whose lines are all blank')),
2951 _('ignore changes whose lines are all blank')),
2952 ] + walkopts,
2952 ] + walkopts,
2953 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2953 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2954 "^export":
2954 "^export":
2955 (export,
2955 (export,
2956 [('o', 'output', '', _('print output to file with formatted name')),
2956 [('o', 'output', '', _('print output to file with formatted name')),
2957 ('a', 'text', None, _('treat all files as text')),
2957 ('a', 'text', None, _('treat all files as text')),
2958 ('g', 'git', None, _('use git extended diff format')),
2958 ('g', 'git', None, _('use git extended diff format')),
2959 ('', 'nodates', None, _("don't include dates in diff headers")),
2959 ('', 'nodates', None, _("don't include dates in diff headers")),
2960 ('', 'switch-parent', None, _('diff against the second parent'))],
2960 ('', 'switch-parent', None, _('diff against the second parent'))],
2961 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2961 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2962 "grep":
2962 "grep":
2963 (grep,
2963 (grep,
2964 [('0', 'print0', None, _('end fields with NUL')),
2964 [('0', 'print0', None, _('end fields with NUL')),
2965 ('', 'all', None, _('print all revisions that match')),
2965 ('', 'all', None, _('print all revisions that match')),
2966 ('f', 'follow', None,
2966 ('f', 'follow', None,
2967 _('follow changeset history, or file history across copies and renames')),
2967 _('follow changeset history, or file history across copies and renames')),
2968 ('i', 'ignore-case', None, _('ignore case when matching')),
2968 ('i', 'ignore-case', None, _('ignore case when matching')),
2969 ('l', 'files-with-matches', None,
2969 ('l', 'files-with-matches', None,
2970 _('print only filenames and revs that match')),
2970 _('print only filenames and revs that match')),
2971 ('n', 'line-number', None, _('print matching line numbers')),
2971 ('n', 'line-number', None, _('print matching line numbers')),
2972 ('r', 'rev', [], _('search in given revision range')),
2972 ('r', 'rev', [], _('search in given revision range')),
2973 ('u', 'user', None, _('print user who committed change')),
2973 ('u', 'user', None, _('print user who committed change')),
2974 ] + walkopts,
2974 ] + walkopts,
2975 _('hg grep [OPTION]... PATTERN [FILE]...')),
2975 _('hg grep [OPTION]... PATTERN [FILE]...')),
2976 "heads":
2976 "heads":
2977 (heads,
2977 (heads,
2978 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2978 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2979 ('', 'style', '', _('display using template map file')),
2979 ('', 'style', '', _('display using template map file')),
2980 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2980 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2981 ('', 'template', '', _('display with template'))],
2981 ('', 'template', '', _('display with template'))],
2982 _('hg heads [-r REV]')),
2982 _('hg heads [-r REV]')),
2983 "help": (help_, [], _('hg help [COMMAND]')),
2983 "help": (help_, [], _('hg help [COMMAND]')),
2984 "identify|id": (identify, [], _('hg identify')),
2984 "identify|id": (identify, [], _('hg identify')),
2985 "import|patch":
2985 "import|patch":
2986 (import_,
2986 (import_,
2987 [('p', 'strip', 1,
2987 [('p', 'strip', 1,
2988 _('directory strip option for patch. This has the same\n'
2988 _('directory strip option for patch. This has the same\n'
2989 'meaning as the corresponding patch option')),
2989 'meaning as the corresponding patch option')),
2990 ('m', 'message', '', _('use <text> as commit message')),
2990 ('m', 'message', '', _('use <text> as commit message')),
2991 ('b', 'base', '', _('base path (DEPRECATED)')),
2991 ('b', 'base', '', _('base path (DEPRECATED)')),
2992 ('f', 'force', None,
2992 ('f', 'force', None,
2993 _('skip check for outstanding uncommitted changes'))],
2993 _('skip check for outstanding uncommitted changes'))],
2994 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2994 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2995 "incoming|in": (incoming,
2995 "incoming|in": (incoming,
2996 [('M', 'no-merges', None, _('do not show merges')),
2996 [('M', 'no-merges', None, _('do not show merges')),
2997 ('f', 'force', None,
2997 ('f', 'force', None,
2998 _('run even when remote repository is unrelated')),
2998 _('run even when remote repository is unrelated')),
2999 ('', 'style', '', _('display using template map file')),
2999 ('', 'style', '', _('display using template map file')),
3000 ('n', 'newest-first', None, _('show newest record first')),
3000 ('n', 'newest-first', None, _('show newest record first')),
3001 ('', 'bundle', '', _('file to store the bundles into')),
3001 ('', 'bundle', '', _('file to store the bundles into')),
3002 ('p', 'patch', None, _('show patch')),
3002 ('p', 'patch', None, _('show patch')),
3003 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3003 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3004 ('', 'template', '', _('display with template')),
3004 ('', 'template', '', _('display with template')),
3005 ] + remoteopts,
3005 ] + remoteopts,
3006 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3006 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3007 ' [--bundle FILENAME] [SOURCE]')),
3007 ' [--bundle FILENAME] [SOURCE]')),
3008 "^init":
3008 "^init":
3009 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3009 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3010 "locate":
3010 "locate":
3011 (locate,
3011 (locate,
3012 [('r', 'rev', '', _('search the repository as it stood at rev')),
3012 [('r', 'rev', '', _('search the repository as it stood at rev')),
3013 ('0', 'print0', None,
3013 ('0', 'print0', None,
3014 _('end filenames with NUL, for use with xargs')),
3014 _('end filenames with NUL, for use with xargs')),
3015 ('f', 'fullpath', None,
3015 ('f', 'fullpath', None,
3016 _('print complete paths from the filesystem root')),
3016 _('print complete paths from the filesystem root')),
3017 ] + walkopts,
3017 ] + walkopts,
3018 _('hg locate [OPTION]... [PATTERN]...')),
3018 _('hg locate [OPTION]... [PATTERN]...')),
3019 "^log|history":
3019 "^log|history":
3020 (log,
3020 (log,
3021 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3021 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3022 ('f', 'follow', None,
3022 ('f', 'follow', None,
3023 _('follow changeset history, or file history across copies and renames')),
3023 _('follow changeset history, or file history across copies and renames')),
3024 ('', 'follow-first', None,
3024 ('', 'follow-first', None,
3025 _('only follow the first parent of merge changesets')),
3025 _('only follow the first parent of merge changesets')),
3026 ('C', 'copies', None, _('show copied files')),
3026 ('C', 'copies', None, _('show copied files')),
3027 ('k', 'keyword', [], _('search for a keyword')),
3027 ('k', 'keyword', [], _('search for a keyword')),
3028 ('l', 'limit', '', _('limit number of changes displayed')),
3028 ('l', 'limit', '', _('limit number of changes displayed')),
3029 ('r', 'rev', [], _('show the specified revision or range')),
3029 ('r', 'rev', [], _('show the specified revision or range')),
3030 ('M', 'no-merges', None, _('do not show merges')),
3030 ('M', 'no-merges', None, _('do not show merges')),
3031 ('', 'style', '', _('display using template map file')),
3031 ('', 'style', '', _('display using template map file')),
3032 ('m', 'only-merges', None, _('show only merges')),
3032 ('m', 'only-merges', None, _('show only merges')),
3033 ('p', 'patch', None, _('show patch')),
3033 ('p', 'patch', None, _('show patch')),
3034 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3034 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3035 ('', 'template', '', _('display with template')),
3035 ('', 'template', '', _('display with template')),
3036 ] + walkopts,
3036 ] + walkopts,
3037 _('hg log [OPTION]... [FILE]')),
3037 _('hg log [OPTION]... [FILE]')),
3038 "manifest": (manifest, [], _('hg manifest [REV]')),
3038 "manifest": (manifest, [], _('hg manifest [REV]')),
3039 "merge":
3039 "merge":
3040 (merge,
3040 (merge,
3041 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
3041 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
3042 ('f', 'force', None, _('force a merge with outstanding changes'))],
3042 ('f', 'force', None, _('force a merge with outstanding changes'))],
3043 _('hg merge [-f] [REV]')),
3043 _('hg merge [-f] [REV]')),
3044 "outgoing|out": (outgoing,
3044 "outgoing|out": (outgoing,
3045 [('M', 'no-merges', None, _('do not show merges')),
3045 [('M', 'no-merges', None, _('do not show merges')),
3046 ('f', 'force', None,
3046 ('f', 'force', None,
3047 _('run even when remote repository is unrelated')),
3047 _('run even when remote repository is unrelated')),
3048 ('p', 'patch', None, _('show patch')),
3048 ('p', 'patch', None, _('show patch')),
3049 ('', 'style', '', _('display using template map file')),
3049 ('', 'style', '', _('display using template map file')),
3050 ('r', 'rev', [], _('a specific revision you would like to push')),
3050 ('r', 'rev', [], _('a specific revision you would like to push')),
3051 ('n', 'newest-first', None, _('show newest record first')),
3051 ('n', 'newest-first', None, _('show newest record first')),
3052 ('', 'template', '', _('display with template')),
3052 ('', 'template', '', _('display with template')),
3053 ] + remoteopts,
3053 ] + remoteopts,
3054 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3054 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3055 "^parents":
3055 "^parents":
3056 (parents,
3056 (parents,
3057 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3057 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3058 ('r', 'rev', '', _('show parents from the specified rev')),
3058 ('r', 'rev', '', _('show parents from the specified rev')),
3059 ('', 'style', '', _('display using template map file')),
3059 ('', 'style', '', _('display using template map file')),
3060 ('', 'template', '', _('display with template'))],
3060 ('', 'template', '', _('display with template'))],
3061 _('hg parents [-r REV] [FILE]')),
3061 _('hg parents [-r REV] [FILE]')),
3062 "paths": (paths, [], _('hg paths [NAME]')),
3062 "paths": (paths, [], _('hg paths [NAME]')),
3063 "^pull":
3063 "^pull":
3064 (pull,
3064 (pull,
3065 [('u', 'update', None,
3065 [('u', 'update', None,
3066 _('update to new tip if changesets were pulled')),
3066 _('update to new tip if changesets were pulled')),
3067 ('f', 'force', None,
3067 ('f', 'force', None,
3068 _('run even when remote repository is unrelated')),
3068 _('run even when remote repository is unrelated')),
3069 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3069 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3070 ] + remoteopts,
3070 ] + remoteopts,
3071 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3071 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3072 "^push":
3072 "^push":
3073 (push,
3073 (push,
3074 [('f', 'force', None, _('force push')),
3074 [('f', 'force', None, _('force push')),
3075 ('r', 'rev', [], _('a specific revision you would like to push')),
3075 ('r', 'rev', [], _('a specific revision you would like to push')),
3076 ] + remoteopts,
3076 ] + remoteopts,
3077 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3077 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3078 "debugrawcommit|rawcommit":
3078 "debugrawcommit|rawcommit":
3079 (rawcommit,
3079 (rawcommit,
3080 [('p', 'parent', [], _('parent')),
3080 [('p', 'parent', [], _('parent')),
3081 ('d', 'date', '', _('date code')),
3081 ('d', 'date', '', _('date code')),
3082 ('u', 'user', '', _('user')),
3082 ('u', 'user', '', _('user')),
3083 ('F', 'files', '', _('file list')),
3083 ('F', 'files', '', _('file list')),
3084 ('m', 'message', '', _('commit message')),
3084 ('m', 'message', '', _('commit message')),
3085 ('l', 'logfile', '', _('commit message file'))],
3085 ('l', 'logfile', '', _('commit message file'))],
3086 _('hg debugrawcommit [OPTION]... [FILE]...')),
3086 _('hg debugrawcommit [OPTION]... [FILE]...')),
3087 "recover": (recover, [], _('hg recover')),
3087 "recover": (recover, [], _('hg recover')),
3088 "^remove|rm":
3088 "^remove|rm":
3089 (remove,
3089 (remove,
3090 [('A', 'after', None, _('record remove that has already occurred')),
3090 [('A', 'after', None, _('record remove that has already occurred')),
3091 ('f', 'force', None, _('remove file even if modified')),
3091 ('f', 'force', None, _('remove file even if modified')),
3092 ] + walkopts,
3092 ] + walkopts,
3093 _('hg remove [OPTION]... FILE...')),
3093 _('hg remove [OPTION]... FILE...')),
3094 "rename|mv":
3094 "rename|mv":
3095 (rename,
3095 (rename,
3096 [('A', 'after', None, _('record a rename that has already occurred')),
3096 [('A', 'after', None, _('record a rename that has already occurred')),
3097 ('f', 'force', None,
3097 ('f', 'force', None,
3098 _('forcibly copy over an existing managed file')),
3098 _('forcibly copy over an existing managed file')),
3099 ] + walkopts + dryrunopts,
3099 ] + walkopts + dryrunopts,
3100 _('hg rename [OPTION]... SOURCE... DEST')),
3100 _('hg rename [OPTION]... SOURCE... DEST')),
3101 "^revert":
3101 "^revert":
3102 (revert,
3102 (revert,
3103 [('a', 'all', None, _('revert all changes when no arguments given')),
3103 [('a', 'all', None, _('revert all changes when no arguments given')),
3104 ('r', 'rev', '', _('revision to revert to')),
3104 ('r', 'rev', '', _('revision to revert to')),
3105 ('', 'no-backup', None, _('do not save backup copies of files')),
3105 ('', 'no-backup', None, _('do not save backup copies of files')),
3106 ] + walkopts + dryrunopts,
3106 ] + walkopts + dryrunopts,
3107 _('hg revert [-r REV] [NAME]...')),
3107 _('hg revert [-r REV] [NAME]...')),
3108 "rollback": (rollback, [], _('hg rollback')),
3108 "rollback": (rollback, [], _('hg rollback')),
3109 "root": (root, [], _('hg root')),
3109 "root": (root, [], _('hg root')),
3110 "showconfig|debugconfig":
3110 "showconfig|debugconfig":
3111 (showconfig,
3111 (showconfig,
3112 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3112 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3113 _('showconfig [-u] [NAME]...')),
3113 _('showconfig [-u] [NAME]...')),
3114 "^serve":
3114 "^serve":
3115 (serve,
3115 (serve,
3116 [('A', 'accesslog', '', _('name of access log file to write to')),
3116 [('A', 'accesslog', '', _('name of access log file to write to')),
3117 ('d', 'daemon', None, _('run server in background')),
3117 ('d', 'daemon', None, _('run server in background')),
3118 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3118 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3119 ('E', 'errorlog', '', _('name of error log file to write to')),
3119 ('E', 'errorlog', '', _('name of error log file to write to')),
3120 ('p', 'port', 0, _('port to use (default: 8000)')),
3120 ('p', 'port', 0, _('port to use (default: 8000)')),
3121 ('a', 'address', '', _('address to use')),
3121 ('a', 'address', '', _('address to use')),
3122 ('n', 'name', '',
3122 ('n', 'name', '',
3123 _('name to show in web pages (default: working dir)')),
3123 _('name to show in web pages (default: working dir)')),
3124 ('', 'webdir-conf', '', _('name of the webdir config file'
3124 ('', 'webdir-conf', '', _('name of the webdir config file'
3125 ' (serve more than one repo)')),
3125 ' (serve more than one repo)')),
3126 ('', 'pid-file', '', _('name of file to write process ID to')),
3126 ('', 'pid-file', '', _('name of file to write process ID to')),
3127 ('', 'stdio', None, _('for remote clients')),
3127 ('', 'stdio', None, _('for remote clients')),
3128 ('t', 'templates', '', _('web templates to use')),
3128 ('t', 'templates', '', _('web templates to use')),
3129 ('', 'style', '', _('template style to use')),
3129 ('', 'style', '', _('template style to use')),
3130 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3130 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3131 _('hg serve [OPTION]...')),
3131 _('hg serve [OPTION]...')),
3132 "^status|st":
3132 "^status|st":
3133 (status,
3133 (status,
3134 [('A', 'all', None, _('show status of all files')),
3134 [('A', 'all', None, _('show status of all files')),
3135 ('m', 'modified', None, _('show only modified files')),
3135 ('m', 'modified', None, _('show only modified files')),
3136 ('a', 'added', None, _('show only added files')),
3136 ('a', 'added', None, _('show only added files')),
3137 ('r', 'removed', None, _('show only removed files')),
3137 ('r', 'removed', None, _('show only removed files')),
3138 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3138 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3139 ('c', 'clean', None, _('show only files without changes')),
3139 ('c', 'clean', None, _('show only files without changes')),
3140 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3140 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3141 ('i', 'ignored', None, _('show ignored files')),
3141 ('i', 'ignored', None, _('show ignored files')),
3142 ('n', 'no-status', None, _('hide status prefix')),
3142 ('n', 'no-status', None, _('hide status prefix')),
3143 ('C', 'copies', None, _('show source of copied files')),
3143 ('C', 'copies', None, _('show source of copied files')),
3144 ('0', 'print0', None,
3144 ('0', 'print0', None,
3145 _('end filenames with NUL, for use with xargs')),
3145 _('end filenames with NUL, for use with xargs')),
3146 ('', 'rev', [], _('show difference from revision')),
3146 ('', 'rev', [], _('show difference from revision')),
3147 ] + walkopts,
3147 ] + walkopts,
3148 _('hg status [OPTION]... [FILE]...')),
3148 _('hg status [OPTION]... [FILE]...')),
3149 "tag":
3149 "tag":
3150 (tag,
3150 (tag,
3151 [('l', 'local', None, _('make the tag local')),
3151 [('l', 'local', None, _('make the tag local')),
3152 ('m', 'message', '', _('message for tag commit log entry')),
3152 ('m', 'message', '', _('message for tag commit log entry')),
3153 ('d', 'date', '', _('record datecode as commit date')),
3153 ('d', 'date', '', _('record datecode as commit date')),
3154 ('u', 'user', '', _('record user as committer')),
3154 ('u', 'user', '', _('record user as committer')),
3155 ('r', 'rev', '', _('revision to tag'))],
3155 ('r', 'rev', '', _('revision to tag'))],
3156 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3156 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3157 "tags": (tags, [], _('hg tags')),
3157 "tags": (tags, [], _('hg tags')),
3158 "tip":
3158 "tip":
3159 (tip,
3159 (tip,
3160 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3160 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3161 ('', 'style', '', _('display using template map file')),
3161 ('', 'style', '', _('display using template map file')),
3162 ('p', 'patch', None, _('show patch')),
3162 ('p', 'patch', None, _('show patch')),
3163 ('', 'template', '', _('display with template'))],
3163 ('', 'template', '', _('display with template'))],
3164 _('hg tip [-p]')),
3164 _('hg tip [-p]')),
3165 "unbundle":
3165 "unbundle":
3166 (unbundle,
3166 (unbundle,
3167 [('u', 'update', None,
3167 [('u', 'update', None,
3168 _('update to new tip if changesets were unbundled'))],
3168 _('update to new tip if changesets were unbundled'))],
3169 _('hg unbundle [-u] FILE')),
3169 _('hg unbundle [-u] FILE')),
3170 "^update|up|checkout|co":
3170 "^update|up|checkout|co":
3171 (update,
3171 (update,
3172 [('b', 'branch', '',
3172 [('b', 'branch', '',
3173 _('checkout the head of a specific branch (DEPRECATED)')),
3173 _('checkout the head of a specific branch (DEPRECATED)')),
3174 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3174 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3175 ('C', 'clean', None, _('overwrite locally modified files')),
3175 ('C', 'clean', None, _('overwrite locally modified files')),
3176 ('f', 'force', None, _('force a merge with outstanding changes'))],
3176 ('f', 'force', None, _('force a merge with outstanding changes'))],
3177 _('hg update [-C] [-f] [REV]')),
3177 _('hg update [-C] [-f] [REV]')),
3178 "verify": (verify, [], _('hg verify')),
3178 "verify": (verify, [], _('hg verify')),
3179 "version": (show_version, [], _('hg version')),
3179 "version": (show_version, [], _('hg version')),
3180 }
3180 }
3181
3181
3182 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3182 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3183 " debugindex debugindexdot")
3183 " debugindex debugindexdot")
3184 optionalrepo = ("paths serve showconfig")
3184 optionalrepo = ("paths serve showconfig")
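# Editor's note (not part of the original changeset): each key above is
# "name|alias|...", optionally prefixed with '^' to mark the command for the
# short help listing, and each value is (function, options, synopsis) where
# every option is a (short, long, default, help) tuple.  A hypothetical
# extension command would be declared the same way:
#
#     "hello": (hello,
#               [('g', 'greeting', 'hello', _('text to print'))],
#               _('hg hello [-g TEXT]')),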
3185
3185
3186 def findpossible(ui, cmd):
3186 def findpossible(ui, cmd):
3187 """
3187 """
3188 Return cmd -> (aliases, command table entry)
3188 Return cmd -> (aliases, command table entry)
3189 for each matching command.
3189 for each matching command.
3190 Return debug commands (or their aliases) only if no normal command matches.
3190 Return debug commands (or their aliases) only if no normal command matches.
3191 """
3191 """
3192 choice = {}
3192 choice = {}
3193 debugchoice = {}
3193 debugchoice = {}
3194 for e in table.keys():
3194 for e in table.keys():
3195 aliases = e.lstrip("^").split("|")
3195 aliases = e.lstrip("^").split("|")
3196 found = None
3196 found = None
3197 if cmd in aliases:
3197 if cmd in aliases:
3198 found = cmd
3198 found = cmd
3199 elif not ui.config("ui", "strict"):
3199 elif not ui.config("ui", "strict"):
3200 for a in aliases:
3200 for a in aliases:
3201 if a.startswith(cmd):
3201 if a.startswith(cmd):
3202 found = a
3202 found = a
3203 break
3203 break
3204 if found is not None:
3204 if found is not None:
3205 if aliases[0].startswith("debug") or found.startswith("debug"):
3205 if aliases[0].startswith("debug") or found.startswith("debug"):
3206 debugchoice[found] = (aliases, table[e])
3206 debugchoice[found] = (aliases, table[e])
3207 else:
3207 else:
3208 choice[found] = (aliases, table[e])
3208 choice[found] = (aliases, table[e])
3209
3209
3210 if not choice and debugchoice:
3210 if not choice and debugchoice:
3211 choice = debugchoice
3211 choice = debugchoice
3212
3212
3213 return choice
3213 return choice
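# Editor's note (not part of the original changeset): unless ui.strict is
# set, any prefix of a command name or alias matches, so "hg stat" resolves
# to status, while a prefix such as "re" matches recover, remove, rename and
# revert at once and is reported as ambiguous by findcmd() below.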
3214
3214
3215 def findcmd(ui, cmd):
3215 def findcmd(ui, cmd):
3216 """Return (aliases, command table entry) for command string."""
3216 """Return (aliases, command table entry) for command string."""
3217 choice = findpossible(ui, cmd)
3217 choice = findpossible(ui, cmd)
3218
3218
3219 if choice.has_key(cmd):
3219 if choice.has_key(cmd):
3220 return choice[cmd]
3220 return choice[cmd]
3221
3221
3222 if len(choice) > 1:
3222 if len(choice) > 1:
3223 clist = choice.keys()
3223 clist = choice.keys()
3224 clist.sort()
3224 clist.sort()
3225 raise AmbiguousCommand(cmd, clist)
3225 raise AmbiguousCommand(cmd, clist)
3226
3226
3227 if choice:
3227 if choice:
3228 return choice.values()[0]
3228 return choice.values()[0]
3229
3229
3230 raise UnknownCommand(cmd)
3230 raise UnknownCommand(cmd)
3231
3231
3232 def catchterm(*args):
3232 def catchterm(*args):
3233 raise util.SignalInterrupt
3233 raise util.SignalInterrupt
3234
3234
3235 def run():
3235 def run():
3236 sys.exit(dispatch(sys.argv[1:]))
3236 sys.exit(dispatch(sys.argv[1:]))
3237
3237
3238 class ParseError(Exception):
3238 class ParseError(Exception):
3239 """Exception raised on errors in parsing the command line."""
3239 """Exception raised on errors in parsing the command line."""
3240
3240
3241 def parse(ui, args):
3241 def parse(ui, args):
3242 options = {}
3242 options = {}
3243 cmdoptions = {}
3243 cmdoptions = {}
3244
3244
3245 try:
3245 try:
3246 args = fancyopts.fancyopts(args, globalopts, options)
3246 args = fancyopts.fancyopts(args, globalopts, options)
3247 except fancyopts.getopt.GetoptError, inst:
3247 except fancyopts.getopt.GetoptError, inst:
3248 raise ParseError(None, inst)
3248 raise ParseError(None, inst)
3249
3249
3250 if args:
3250 if args:
3251 cmd, args = args[0], args[1:]
3251 cmd, args = args[0], args[1:]
3252 aliases, i = findcmd(ui, cmd)
3252 aliases, i = findcmd(ui, cmd)
3253 cmd = aliases[0]
3253 cmd = aliases[0]
3254 defaults = ui.config("defaults", cmd)
3254 defaults = ui.config("defaults", cmd)
3255 if defaults:
3255 if defaults:
3256 args = shlex.split(defaults) + args
3256 args = shlex.split(defaults) + args
3257 c = list(i[1])
3257 c = list(i[1])
3258 else:
3258 else:
3259 cmd = None
3259 cmd = None
3260 c = []
3260 c = []
3261
3261
3262 # combine global options into local
3262 # combine global options into local
3263 for o in globalopts:
3263 for o in globalopts:
3264 c.append((o[0], o[1], options[o[1]], o[3]))
3264 c.append((o[0], o[1], options[o[1]], o[3]))
3265
3265
3266 try:
3266 try:
3267 args = fancyopts.fancyopts(args, c, cmdoptions)
3267 args = fancyopts.fancyopts(args, c, cmdoptions)
3268 except fancyopts.getopt.GetoptError, inst:
3268 except fancyopts.getopt.GetoptError, inst:
3269 raise ParseError(cmd, inst)
3269 raise ParseError(cmd, inst)
3270
3270
3271 # separate global options back out
3271 # separate global options back out
3272 for o in globalopts:
3272 for o in globalopts:
3273 n = o[1]
3273 n = o[1]
3274 options[n] = cmdoptions[n]
3274 options[n] = cmdoptions[n]
3275 del cmdoptions[n]
3275 del cmdoptions[n]
3276
3276
3277 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3277 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
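# Editor's note (not part of the original changeset): a matching [defaults]
# entry is shlex-split and spliced in front of the command's own arguments
# before the second fancyopts pass, so with "log = -l 10" in [defaults],
# "hg log -v foo.c" parses as "hg log -l 10 -v foo.c"; global options are
# folded into the command's option table for that pass and then separated
# back out into the returned (cmd, func, args, options, cmdoptions) tuple.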
3278
3278
3279 external = {}
3279 external = {}
3280
3280
3281 def findext(name):
3281 def findext(name):
3282 '''return module with given extension name'''
3282 '''return module with given extension name'''
3283 try:
3283 try:
3284 return sys.modules[external[name]]
3284 return sys.modules[external[name]]
3285 except KeyError:
3285 except KeyError:
3286 for k, v in external.iteritems():
3286 for k, v in external.iteritems():
3287 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3287 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3288 return sys.modules[v]
3288 return sys.modules[v]
3289 raise KeyError(name)
3289 raise KeyError(name)
3290
3290
3291 def load_extensions(ui):
3291 def load_extensions(ui):
3292 added = []
3292 added = []
3293 for ext_name, load_from_name in ui.extensions():
3293 for ext_name, load_from_name in ui.extensions():
3294 if ext_name in external:
3294 if ext_name in external:
3295 continue
3295 continue
3296 try:
3296 try:
3297 if load_from_name:
3297 if load_from_name:
3298 # the module will be loaded in sys.modules
3298 # the module will be loaded in sys.modules
3299 # choose a unique name so that it doesn't
3299 # choose a unique name so that it doesn't
3300 # conflict with other modules
3300 # conflict with other modules
3301 module_name = "hgext_%s" % ext_name.replace('.', '_')
3301 module_name = "hgext_%s" % ext_name.replace('.', '_')
3302 mod = imp.load_source(module_name, load_from_name)
3302 mod = imp.load_source(module_name, load_from_name)
3303 else:
3303 else:
3304 def importh(name):
3304 def importh(name):
3305 mod = __import__(name)
3305 mod = __import__(name)
3306 components = name.split('.')
3306 components = name.split('.')
3307 for comp in components[1:]:
3307 for comp in components[1:]:
3308 mod = getattr(mod, comp)
3308 mod = getattr(mod, comp)
3309 return mod
3309 return mod
3310 try:
3310 try:
3311 mod = importh("hgext.%s" % ext_name)
3311 mod = importh("hgext.%s" % ext_name)
3312 except ImportError:
3312 except ImportError:
3313 mod = importh(ext_name)
3313 mod = importh(ext_name)
3314 external[ext_name] = mod.__name__
3314 external[ext_name] = mod.__name__
3315 added.append((mod, ext_name))
3315 added.append((mod, ext_name))
3316 except (util.SignalInterrupt, KeyboardInterrupt):
3316 except (util.SignalInterrupt, KeyboardInterrupt):
3317 raise
3317 raise
3318 except Exception, inst:
3318 except Exception, inst:
3319 ui.warn(_("*** failed to import extension %s: %s\n") %
3319 ui.warn(_("*** failed to import extension %s: %s\n") %
3320 (ext_name, inst))
3320 (ext_name, inst))
3321 if ui.print_exc():
3321 if ui.print_exc():
3322 return 1
3322 return 1
3323
3323
3324 for mod, name in added:
3324 for mod, name in added:
3325 uisetup = getattr(mod, 'uisetup', None)
3325 uisetup = getattr(mod, 'uisetup', None)
3326 if uisetup:
3326 if uisetup:
3327 uisetup(ui)
3327 uisetup(ui)
3328 cmdtable = getattr(mod, 'cmdtable', {})
3328 cmdtable = getattr(mod, 'cmdtable', {})
3329 for t in cmdtable:
3329 for t in cmdtable:
3330 if t in table:
3330 if t in table:
3331 ui.warn(_("module %s overrides %s\n") % (name, t))
3331 ui.warn(_("module %s overrides %s\n") % (name, t))
3332 table.update(cmdtable)
3332 table.update(cmdtable)
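# Editor's sketch (not part of the original changeset): the loader above
# expects an extension module to optionally define uisetup(ui) and a
# cmdtable shaped like the main table.  A minimal, hypothetical example
# (file path and names invented):
#
#     # hello.py -- enable with  hello = /path/to/hello.py  under [extensions]
#     def hello(ui, repo, **opts):
#         """print a short greeting"""
#         ui.write("hello from %s\n" % repo.root)
#     cmdtable = {"hello": (hello, [], "hg hello")}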
3333
3333
3334 def parseconfig(config):
3334 def parseconfig(config):
3335 """parse the --config options from the command line"""
3335 """parse the --config options from the command line"""
3336 parsed = []
3336 parsed = []
3337 for cfg in config:
3337 for cfg in config:
3338 try:
3338 try:
3339 name, value = cfg.split('=', 1)
3339 name, value = cfg.split('=', 1)
3340 section, name = name.split('.', 1)
3340 section, name = name.split('.', 1)
3341 if not section or not name:
3341 if not section or not name:
3342 raise IndexError
3342 raise IndexError
3343 parsed.append((section, name, value))
3343 parsed.append((section, name, value))
3344 except (IndexError, ValueError):
3344 except (IndexError, ValueError):
3345 raise util.Abort(_('malformed --config option: %s') % cfg)
3345 raise util.Abort(_('malformed --config option: %s') % cfg)
3346 return parsed
3346 return parsed
3347
3347
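A quick standalone check of the --config parsing rule above: each option is split on the first '=' and the section/name part on the first '.'; anything that does not yield a non-empty section, name and value is rejected. This sketch is not part of the changeset and only mirrors the logic for illustration:

def parse_one(cfg):
    # same splitting rule as parseconfig() above, for a single option
    try:
        name, value = cfg.split('=', 1)
        section, name = name.split('.', 1)
        if not section or not name:
            raise IndexError
        return (section, name, value)
    except (IndexError, ValueError):
        raise ValueError('malformed --config option: %s' % cfg)

assert parse_one('ui.username=alice') == ('ui', 'username', 'alice')
assert parse_one('web.name=x=y') == ('web', 'name', 'x=y')  # later '=' kept in value
for bad in ('uiusername=alice', 'ui.=alice', '.username=alice'):
    try:
        parse_one(bad)
    except ValueError:
        pass  # rejected as malformed, as expected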
3348 def dispatch(args):
3348 def dispatch(args):
3349 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3349 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3350 num = getattr(signal, name, None)
3350 num = getattr(signal, name, None)
3351 if num: signal.signal(num, catchterm)
3351 if num: signal.signal(num, catchterm)
3352
3352
3353 try:
3353 try:
3354 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3354 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3355 except util.Abort, inst:
3355 except util.Abort, inst:
3356 sys.stderr.write(_("abort: %s\n") % inst)
3356 sys.stderr.write(_("abort: %s\n") % inst)
3357 return -1
3357 return -1
3358
3358
3359 load_extensions(u)
3359 load_extensions(u)
3360 u.addreadhook(load_extensions)
3360 u.addreadhook(load_extensions)
3361
3361
3362 try:
3362 try:
3363 cmd, func, args, options, cmdoptions = parse(u, args)
3363 cmd, func, args, options, cmdoptions = parse(u, args)
3364 if options["time"]:
3364 if options["time"]:
3365 def get_times():
3365 def get_times():
3366 t = os.times()
3366 t = os.times()
3367 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3367 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3368 t = (t[0], t[1], t[2], t[3], time.clock())
3368 t = (t[0], t[1], t[2], t[3], time.clock())
3369 return t
3369 return t
3370 s = get_times()
3370 s = get_times()
3371 def print_time():
3371 def print_time():
3372 t = get_times()
3372 t = get_times()
3373 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3373 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3374 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3374 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3375 atexit.register(print_time)
3375 atexit.register(print_time)
3376
3376
3377 # enter the debugger before command execution
3377 # enter the debugger before command execution
3378 if options['debugger']:
3378 if options['debugger']:
3379 pdb.set_trace()
3379 pdb.set_trace()
3380
3380
3381 try:
3381 try:
3382 if options['cwd']:
3382 if options['cwd']:
3383 try:
3383 try:
3384 os.chdir(options['cwd'])
3384 os.chdir(options['cwd'])
3385 except OSError, inst:
3385 except OSError, inst:
3386 raise util.Abort('%s: %s' %
3386 raise util.Abort('%s: %s' %
3387 (options['cwd'], inst.strerror))
3387 (options['cwd'], inst.strerror))
3388
3388
3389 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3389 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3390 not options["noninteractive"], options["traceback"],
3390 not options["noninteractive"], options["traceback"],
3391 parseconfig(options["config"]))
3391 parseconfig(options["config"]))
3392
3392
3393 path = u.expandpath(options["repository"]) or ""
3393 path = u.expandpath(options["repository"]) or ""
3394 repo = path and hg.repository(u, path=path) or None
3394 repo = path and hg.repository(u, path=path) or None
3395 if repo and not repo.local():
3395 if repo and not repo.local():
3396 raise util.Abort(_("repository '%s' is not local") % path)
3396 raise util.Abort(_("repository '%s' is not local") % path)
3397
3397
3398 if options['help']:
3398 if options['help']:
3399 return help_(u, cmd, options['version'])
3399 return help_(u, cmd, options['version'])
3400 elif options['version']:
3400 elif options['version']:
3401 return show_version(u)
3401 return show_version(u)
3402 elif not cmd:
3402 elif not cmd:
3403 return help_(u, 'shortlist')
3403 return help_(u, 'shortlist')
3404
3404
3405 if cmd not in norepo.split():
3405 if cmd not in norepo.split():
3406 try:
3406 try:
3407 if not repo:
3407 if not repo:
3408 repo = hg.repository(u, path=path)
3408 repo = hg.repository(u, path=path)
3409 u = repo.ui
3409 u = repo.ui
3410 for name in external.itervalues():
3410 for name in external.itervalues():
3411 mod = sys.modules[name]
3411 mod = sys.modules[name]
3412 if hasattr(mod, 'reposetup'):
3412 if hasattr(mod, 'reposetup'):
3413 mod.reposetup(u, repo)
3413 mod.reposetup(u, repo)
3414 hg.repo_setup_hooks.append(mod.reposetup)
3414 hg.repo_setup_hooks.append(mod.reposetup)
3415 except hg.RepoError:
3415 except hg.RepoError:
3416 if cmd not in optionalrepo.split():
3416 if cmd not in optionalrepo.split():
3417 raise
3417 raise
3418 d = lambda: func(u, repo, *args, **cmdoptions)
3418 d = lambda: func(u, repo, *args, **cmdoptions)
3419 else:
3419 else:
3420 d = lambda: func(u, *args, **cmdoptions)
3420 d = lambda: func(u, *args, **cmdoptions)
3421
3421
3422 try:
3422 try:
3423 if options['profile']:
3423 if options['profile']:
3424 import hotshot, hotshot.stats
3424 import hotshot, hotshot.stats
3425 prof = hotshot.Profile("hg.prof")
3425 prof = hotshot.Profile("hg.prof")
3426 try:
3426 try:
3427 try:
3427 try:
3428 return prof.runcall(d)
3428 return prof.runcall(d)
3429 except:
3429 except:
3430 try:
3430 try:
3431 u.warn(_('exception raised - generating '
3431 u.warn(_('exception raised - generating '
3432 'profile anyway\n'))
3432 'profile anyway\n'))
3433 except:
3433 except:
3434 pass
3434 pass
3435 raise
3435 raise
3436 finally:
3436 finally:
3437 prof.close()
3437 prof.close()
3438 stats = hotshot.stats.load("hg.prof")
3438 stats = hotshot.stats.load("hg.prof")
3439 stats.strip_dirs()
3439 stats.strip_dirs()
3440 stats.sort_stats('time', 'calls')
3440 stats.sort_stats('time', 'calls')
3441 stats.print_stats(40)
3441 stats.print_stats(40)
3442 elif options['lsprof']:
3442 elif options['lsprof']:
3443 try:
3443 try:
3444 from mercurial import lsprof
3444 from mercurial import lsprof
3445 except ImportError:
3445 except ImportError:
3446 raise util.Abort(_(
3446 raise util.Abort(_(
3447 'lsprof not available - install from '
3447 'lsprof not available - install from '
3448 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3448 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3449 p = lsprof.Profiler()
3449 p = lsprof.Profiler()
3450 p.enable(subcalls=True)
3450 p.enable(subcalls=True)
3451 try:
3451 try:
3452 return d()
3452 return d()
3453 finally:
3453 finally:
3454 p.disable()
3454 p.disable()
3455 stats = lsprof.Stats(p.getstats())
3455 stats = lsprof.Stats(p.getstats())
3456 stats.sort()
3456 stats.sort()
3457 stats.pprint(top=10, file=sys.stderr, climit=5)
3457 stats.pprint(top=10, file=sys.stderr, climit=5)
3458 else:
3458 else:
3459 return d()
3459 return d()
3460 finally:
3460 finally:
3461 u.flush()
3461 u.flush()
3462 except:
3462 except:
3463 # enter the debugger when we hit an exception
3463 # enter the debugger when we hit an exception
3464 if options['debugger']:
3464 if options['debugger']:
3465 pdb.post_mortem(sys.exc_info()[2])
3465 pdb.post_mortem(sys.exc_info()[2])
3466 u.print_exc()
3466 u.print_exc()
3467 raise
3467 raise
3468 except ParseError, inst:
3468 except ParseError, inst:
3469 if inst.args[0]:
3469 if inst.args[0]:
3470 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3470 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3471 help_(u, inst.args[0])
3471 help_(u, inst.args[0])
3472 else:
3472 else:
3473 u.warn(_("hg: %s\n") % inst.args[1])
3473 u.warn(_("hg: %s\n") % inst.args[1])
3474 help_(u, 'shortlist')
3474 help_(u, 'shortlist')
3475 except AmbiguousCommand, inst:
3475 except AmbiguousCommand, inst:
3476 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3476 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3477 (inst.args[0], " ".join(inst.args[1])))
3477 (inst.args[0], " ".join(inst.args[1])))
3478 except UnknownCommand, inst:
3478 except UnknownCommand, inst:
3479 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3479 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3480 help_(u, 'shortlist')
3480 help_(u, 'shortlist')
3481 except hg.RepoError, inst:
3481 except hg.RepoError, inst:
3482 u.warn(_("abort: %s!\n") % inst)
3482 u.warn(_("abort: %s!\n") % inst)
3483 except lock.LockHeld, inst:
3483 except lock.LockHeld, inst:
3484 if inst.errno == errno.ETIMEDOUT:
3484 if inst.errno == errno.ETIMEDOUT:
3485 reason = _('timed out waiting for lock held by %s') % inst.locker
3485 reason = _('timed out waiting for lock held by %s') % inst.locker
3486 else:
3486 else:
3487 reason = _('lock held by %s') % inst.locker
3487 reason = _('lock held by %s') % inst.locker
3488 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3488 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3489 except lock.LockUnavailable, inst:
3489 except lock.LockUnavailable, inst:
3490 u.warn(_("abort: could not lock %s: %s\n") %
3490 u.warn(_("abort: could not lock %s: %s\n") %
3491 (inst.desc or inst.filename, inst.strerror))
3491 (inst.desc or inst.filename, inst.strerror))
3492 except revlog.RevlogError, inst:
3492 except revlog.RevlogError, inst:
3493 u.warn(_("abort: %s!\n") % inst)
3493 u.warn(_("abort: %s!\n") % inst)
3494 except util.SignalInterrupt:
3494 except util.SignalInterrupt:
3495 u.warn(_("killed!\n"))
3495 u.warn(_("killed!\n"))
3496 except KeyboardInterrupt:
3496 except KeyboardInterrupt:
3497 try:
3497 try:
3498 u.warn(_("interrupted!\n"))
3498 u.warn(_("interrupted!\n"))
3499 except IOError, inst:
3499 except IOError, inst:
3500 if inst.errno == errno.EPIPE:
3500 if inst.errno == errno.EPIPE:
3501 if u.debugflag:
3501 if u.debugflag:
3502 u.warn(_("\nbroken pipe\n"))
3502 u.warn(_("\nbroken pipe\n"))
3503 else:
3503 else:
3504 raise
3504 raise
3505 except IOError, inst:
3505 except IOError, inst:
3506 if hasattr(inst, "code"):
3506 if hasattr(inst, "code"):
3507 u.warn(_("abort: %s\n") % inst)
3507 u.warn(_("abort: %s\n") % inst)
3508 elif hasattr(inst, "reason"):
3508 elif hasattr(inst, "reason"):
3509 u.warn(_("abort: error: %s\n") % inst.reason[1])
3509 u.warn(_("abort: error: %s\n") % inst.reason[1])
3510 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3510 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3511 if u.debugflag:
3511 if u.debugflag:
3512 u.warn(_("broken pipe\n"))
3512 u.warn(_("broken pipe\n"))
3513 elif getattr(inst, "strerror", None):
3513 elif getattr(inst, "strerror", None):
3514 if getattr(inst, "filename", None):
3514 if getattr(inst, "filename", None):
3515 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3515 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3516 else:
3516 else:
3517 u.warn(_("abort: %s\n") % inst.strerror)
3517 u.warn(_("abort: %s\n") % inst.strerror)
3518 else:
3518 else:
3519 raise
3519 raise
3520 except OSError, inst:
3520 except OSError, inst:
3521 if getattr(inst, "filename", None):
3521 if getattr(inst, "filename", None):
3522 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3522 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3523 else:
3523 else:
3524 u.warn(_("abort: %s\n") % inst.strerror)
3524 u.warn(_("abort: %s\n") % inst.strerror)
3525 except util.UnexpectedOutput, inst:
3526 u.warn(_("abort: %s") % inst[0])
3527 if not isinstance(inst[1], basestring):
3528 u.warn(" %r\n" % (inst[1],))
3529 elif not inst[1]:
3530 u.warn(_(" empty string\n"))
3531 else:
3532 u.warn("\n%r%s\n" %
3533 (inst[1][:400], len(inst[1]) > 400 and '...' or ''))
3525 except util.Abort, inst:
3534 except util.Abort, inst:
3526 u.warn(_("abort: %s\n") % inst)
3535 u.warn(_("abort: %s\n") % inst)
3527 except TypeError, inst:
3536 except TypeError, inst:
3528 # was this an argument error?
3537 # was this an argument error?
3529 tb = traceback.extract_tb(sys.exc_info()[2])
3538 tb = traceback.extract_tb(sys.exc_info()[2])
3530 if len(tb) > 2: # no
3539 if len(tb) > 2: # no
3531 raise
3540 raise
3532 u.debug(inst, "\n")
3541 u.debug(inst, "\n")
3533 u.warn(_("%s: invalid arguments\n") % cmd)
3542 u.warn(_("%s: invalid arguments\n") % cmd)
3534 help_(u, cmd)
3543 help_(u, cmd)
3535 except SystemExit, inst:
3544 except SystemExit, inst:
3536 # Commands shouldn't sys.exit directly, but give a return code.
3545 # Commands shouldn't sys.exit directly, but give a return code.
3537 # Just in case catch this and pass exit code to caller.
3546 # Just in case catch this and pass exit code to caller.
3538 return inst.code
3547 return inst.code
3539 except:
3548 except:
3540 u.warn(_("** unknown exception encountered, details follow\n"))
3549 u.warn(_("** unknown exception encountered, details follow\n"))
3541 u.warn(_("** report bug details to "
3550 u.warn(_("** report bug details to "
3542 "http://www.selenic.com/mercurial/bts\n"))
3551 "http://www.selenic.com/mercurial/bts\n"))
3543 u.warn(_("** or mercurial@selenic.com\n"))
3552 u.warn(_("** or mercurial@selenic.com\n"))
3544 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3553 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3545 % version.get_version())
3554 % version.get_version())
3546 raise
3555 raise
3547
3556
3548 return -1
3557 return -1
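The new util.UnexpectedOutput handler added above (the point of this changeset) prints the abort message and then a repr of the offending server output, capped at 400 characters. A small standalone sketch of just that formatting rule, returning one string where the handler issues several warn() calls; str stands in for basestring here:

def format_unexpected(msg, output):
    # mirrors the UnexpectedOutput branch in dispatch() above
    if not isinstance(output, str):
        return "abort: %s %r\n" % (msg, output)
    elif not output:
        return "abort: %s empty string\n" % msg
    return "abort: %s\n%r%s\n" % (msg, output[:400],
                                  len(output) > 400 and '...' or '')

assert format_unexpected('unexpected response:', '') == \
       "abort: unexpected response: empty string\n"
assert format_unexpected('unexpected response:', 'x' * 500).endswith("...\n")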
@@ -1,1841 +1,1856
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ('lookup', 'changegroupsubset')
18 capabilities = ('lookup', 'changegroupsubset')
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("There is no Mercurial repository"
30 raise repo.RepoError(_("There is no Mercurial repository"
31 " here (.hg not found)"))
31 " here (.hg not found)"))
32 path = p
32 path = p
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34
34
35 if not os.path.isdir(self.path):
35 if not os.path.isdir(self.path):
36 if create:
36 if create:
37 if not os.path.exists(path):
37 if not os.path.exists(path):
38 os.mkdir(path)
38 os.mkdir(path)
39 os.mkdir(self.path)
39 os.mkdir(self.path)
40 os.mkdir(self.join("data"))
40 os.mkdir(self.join("data"))
41 else:
41 else:
42 raise repo.RepoError(_("repository %s not found") % path)
42 raise repo.RepoError(_("repository %s not found") % path)
43 elif create:
43 elif create:
44 raise repo.RepoError(_("repository %s already exists") % path)
44 raise repo.RepoError(_("repository %s already exists") % path)
45
45
46 self.root = os.path.abspath(path)
46 self.root = os.path.abspath(path)
47 self.origroot = path
47 self.origroot = path
48 self.ui = ui.ui(parentui=parentui)
48 self.ui = ui.ui(parentui=parentui)
49 self.opener = util.opener(self.path)
49 self.opener = util.opener(self.path)
50 self.sopener = util.opener(self.path)
50 self.sopener = util.opener(self.path)
51 self.wopener = util.opener(self.root)
51 self.wopener = util.opener(self.root)
52
52
53 try:
53 try:
54 self.ui.readconfig(self.join("hgrc"), self.root)
54 self.ui.readconfig(self.join("hgrc"), self.root)
55 except IOError:
55 except IOError:
56 pass
56 pass
57
57
58 v = self.ui.configrevlog()
58 v = self.ui.configrevlog()
59 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
59 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
60 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
60 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
61 fl = v.get('flags', None)
61 fl = v.get('flags', None)
62 flags = 0
62 flags = 0
63 if fl != None:
63 if fl != None:
64 for x in fl.split():
64 for x in fl.split():
65 flags |= revlog.flagstr(x)
65 flags |= revlog.flagstr(x)
66 elif self.revlogv1:
66 elif self.revlogv1:
67 flags = revlog.REVLOG_DEFAULT_FLAGS
67 flags = revlog.REVLOG_DEFAULT_FLAGS
68
68
69 v = self.revlogversion | flags
69 v = self.revlogversion | flags
70 self.manifest = manifest.manifest(self.sopener, v)
70 self.manifest = manifest.manifest(self.sopener, v)
71 self.changelog = changelog.changelog(self.sopener, v)
71 self.changelog = changelog.changelog(self.sopener, v)
72
72
73 # the changelog might not have the inline index flag
73 # the changelog might not have the inline index flag
74 # on. If the format of the changelog is the same as found in
74 # on. If the format of the changelog is the same as found in
75 # .hgrc, apply any flags found in the .hgrc as well.
75 # .hgrc, apply any flags found in the .hgrc as well.
76 # Otherwise, just version from the changelog
76 # Otherwise, just version from the changelog
77 v = self.changelog.version
77 v = self.changelog.version
78 if v == self.revlogversion:
78 if v == self.revlogversion:
79 v |= flags
79 v |= flags
80 self.revlogversion = v
80 self.revlogversion = v
81
81
82 self.tagscache = None
82 self.tagscache = None
83 self.branchcache = None
83 self.branchcache = None
84 self.nodetagscache = None
84 self.nodetagscache = None
85 self.encodepats = None
85 self.encodepats = None
86 self.decodepats = None
86 self.decodepats = None
87 self.transhandle = None
87 self.transhandle = None
88
88
89 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
89 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
90
90
91 def url(self):
91 def url(self):
92 return 'file:' + self.root
92 return 'file:' + self.root
93
93
94 def hook(self, name, throw=False, **args):
94 def hook(self, name, throw=False, **args):
95 def callhook(hname, funcname):
95 def callhook(hname, funcname):
96 '''call python hook. hook is callable object, looked up as
96 '''call python hook. hook is callable object, looked up as
97 name in python module. if callable returns "true", hook
97 name in python module. if callable returns "true", hook
98 fails, else passes. if hook raises exception, treated as
98 fails, else passes. if hook raises exception, treated as
99 hook failure. exception propagates if throw is "true".
99 hook failure. exception propagates if throw is "true".
100
100
101 reason for "true" meaning "hook failed" is so that
101 reason for "true" meaning "hook failed" is so that
102 unmodified commands (e.g. mercurial.commands.update) can
102 unmodified commands (e.g. mercurial.commands.update) can
103 be run as hooks without wrappers to convert return values.'''
103 be run as hooks without wrappers to convert return values.'''
104
104
105 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
105 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
106 d = funcname.rfind('.')
106 d = funcname.rfind('.')
107 if d == -1:
107 if d == -1:
108 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
108 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
109 % (hname, funcname))
109 % (hname, funcname))
110 modname = funcname[:d]
110 modname = funcname[:d]
111 try:
111 try:
112 obj = __import__(modname)
112 obj = __import__(modname)
113 except ImportError:
113 except ImportError:
114 try:
114 try:
115 # extensions are loaded with hgext_ prefix
115 # extensions are loaded with hgext_ prefix
116 obj = __import__("hgext_%s" % modname)
116 obj = __import__("hgext_%s" % modname)
117 except ImportError:
117 except ImportError:
118 raise util.Abort(_('%s hook is invalid '
118 raise util.Abort(_('%s hook is invalid '
119 '(import of "%s" failed)') %
119 '(import of "%s" failed)') %
120 (hname, modname))
120 (hname, modname))
121 try:
121 try:
122 for p in funcname.split('.')[1:]:
122 for p in funcname.split('.')[1:]:
123 obj = getattr(obj, p)
123 obj = getattr(obj, p)
124 except AttributeError, err:
124 except AttributeError, err:
125 raise util.Abort(_('%s hook is invalid '
125 raise util.Abort(_('%s hook is invalid '
126 '("%s" is not defined)') %
126 '("%s" is not defined)') %
127 (hname, funcname))
127 (hname, funcname))
128 if not callable(obj):
128 if not callable(obj):
129 raise util.Abort(_('%s hook is invalid '
129 raise util.Abort(_('%s hook is invalid '
130 '("%s" is not callable)') %
130 '("%s" is not callable)') %
131 (hname, funcname))
131 (hname, funcname))
132 try:
132 try:
133 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
133 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
134 except (KeyboardInterrupt, util.SignalInterrupt):
134 except (KeyboardInterrupt, util.SignalInterrupt):
135 raise
135 raise
136 except Exception, exc:
136 except Exception, exc:
137 if isinstance(exc, util.Abort):
137 if isinstance(exc, util.Abort):
138 self.ui.warn(_('error: %s hook failed: %s\n') %
138 self.ui.warn(_('error: %s hook failed: %s\n') %
139 (hname, exc.args[0]))
139 (hname, exc.args[0]))
140 else:
140 else:
141 self.ui.warn(_('error: %s hook raised an exception: '
141 self.ui.warn(_('error: %s hook raised an exception: '
142 '%s\n') % (hname, exc))
142 '%s\n') % (hname, exc))
143 if throw:
143 if throw:
144 raise
144 raise
145 self.ui.print_exc()
145 self.ui.print_exc()
146 return True
146 return True
147 if r:
147 if r:
148 if throw:
148 if throw:
149 raise util.Abort(_('%s hook failed') % hname)
149 raise util.Abort(_('%s hook failed') % hname)
150 self.ui.warn(_('warning: %s hook failed\n') % hname)
150 self.ui.warn(_('warning: %s hook failed\n') % hname)
151 return r
151 return r
152
152
153 def runhook(name, cmd):
153 def runhook(name, cmd):
154 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
154 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
155 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
155 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
156 r = util.system(cmd, environ=env, cwd=self.root)
156 r = util.system(cmd, environ=env, cwd=self.root)
157 if r:
157 if r:
158 desc, r = util.explain_exit(r)
158 desc, r = util.explain_exit(r)
159 if throw:
159 if throw:
160 raise util.Abort(_('%s hook %s') % (name, desc))
160 raise util.Abort(_('%s hook %s') % (name, desc))
161 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
161 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
162 return r
162 return r
163
163
164 r = False
164 r = False
165 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
165 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
166 if hname.split(".", 1)[0] == name and cmd]
166 if hname.split(".", 1)[0] == name and cmd]
167 hooks.sort()
167 hooks.sort()
168 for hname, cmd in hooks:
168 for hname, cmd in hooks:
169 if cmd.startswith('python:'):
169 if cmd.startswith('python:'):
170 r = callhook(hname, cmd[7:].strip()) or r
170 r = callhook(hname, cmd[7:].strip()) or r
171 else:
171 else:
172 r = runhook(hname, cmd) or r
172 r = runhook(hname, cmd) or r
173 return r
173 return r
174
174
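Given the contract documented in callhook() above (a true return value means the hook failed, exceptions count as failure), an in-process Python hook is just a function taking ui, repo, hooktype and the HG_* values as keywords. A hypothetical example, not part of this changeset; the hook name and hgrc wiring are invented:

def forbid_tag_spaces(ui, repo, hooktype, **kwargs):
    '''pretag hook refusing tag names that contain spaces'''
    tag = kwargs.get('tag', '')
    if ' ' in tag:
        ui.warn("tag name %r contains spaces\n" % tag)
        return True   # failure: pretag runs with throw=True, so this aborts
    return False

# enabled from a repository hgrc, e.g.:
#   [hooks]
#   pretag.nospaces = python:myhooks.forbid_tag_spaces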
175 tag_disallowed = ':\r\n'
175 tag_disallowed = ':\r\n'
176
176
177 def tag(self, name, node, message, local, user, date):
177 def tag(self, name, node, message, local, user, date):
178 '''tag a revision with a symbolic name.
178 '''tag a revision with a symbolic name.
179
179
180 if local is True, the tag is stored in a per-repository file.
180 if local is True, the tag is stored in a per-repository file.
181 otherwise, it is stored in the .hgtags file, and a new
181 otherwise, it is stored in the .hgtags file, and a new
182 changeset is committed with the change.
182 changeset is committed with the change.
183
183
184 keyword arguments:
184 keyword arguments:
185
185
186 local: whether to store tag in non-version-controlled file
186 local: whether to store tag in non-version-controlled file
187 (default False)
187 (default False)
188
188
189 message: commit message to use if committing
189 message: commit message to use if committing
190
190
191 user: name of user to use if committing
191 user: name of user to use if committing
192
192
193 date: date tuple to use if committing'''
193 date: date tuple to use if committing'''
194
194
195 for c in self.tag_disallowed:
195 for c in self.tag_disallowed:
196 if c in name:
196 if c in name:
197 raise util.Abort(_('%r cannot be used in a tag name') % c)
197 raise util.Abort(_('%r cannot be used in a tag name') % c)
198
198
199 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
199 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
200
200
201 if local:
201 if local:
202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
203 self.hook('tag', node=hex(node), tag=name, local=local)
203 self.hook('tag', node=hex(node), tag=name, local=local)
204 return
204 return
205
205
206 for x in self.status()[:5]:
206 for x in self.status()[:5]:
207 if '.hgtags' in x:
207 if '.hgtags' in x:
208 raise util.Abort(_('working copy of .hgtags is changed '
208 raise util.Abort(_('working copy of .hgtags is changed '
209 '(please commit .hgtags manually)'))
209 '(please commit .hgtags manually)'))
210
210
211 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
211 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
212 if self.dirstate.state('.hgtags') == '?':
212 if self.dirstate.state('.hgtags') == '?':
213 self.add(['.hgtags'])
213 self.add(['.hgtags'])
214
214
215 self.commit(['.hgtags'], message, user, date)
215 self.commit(['.hgtags'], message, user, date)
216 self.hook('tag', node=hex(node), tag=name, local=local)
216 self.hook('tag', node=hex(node), tag=name, local=local)
217
217
218 def tags(self):
218 def tags(self):
219 '''return a mapping of tag to node'''
219 '''return a mapping of tag to node'''
220 if not self.tagscache:
220 if not self.tagscache:
221 self.tagscache = {}
221 self.tagscache = {}
222
222
223 def parsetag(line, context):
223 def parsetag(line, context):
224 if not line:
224 if not line:
225 return
225 return
226 s = line.split(" ", 1)
226 s = line.split(" ", 1)
227 if len(s) != 2:
227 if len(s) != 2:
228 self.ui.warn(_("%s: cannot parse entry\n") % context)
228 self.ui.warn(_("%s: cannot parse entry\n") % context)
229 return
229 return
230 node, key = s
230 node, key = s
231 key = key.strip()
231 key = key.strip()
232 try:
232 try:
233 bin_n = bin(node)
233 bin_n = bin(node)
234 except TypeError:
234 except TypeError:
235 self.ui.warn(_("%s: node '%s' is not well formed\n") %
235 self.ui.warn(_("%s: node '%s' is not well formed\n") %
236 (context, node))
236 (context, node))
237 return
237 return
238 if bin_n not in self.changelog.nodemap:
238 if bin_n not in self.changelog.nodemap:
239 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
239 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
240 (context, key))
240 (context, key))
241 return
241 return
242 self.tagscache[key] = bin_n
242 self.tagscache[key] = bin_n
243
243
244 # read the tags file from each head, ending with the tip,
244 # read the tags file from each head, ending with the tip,
245 # and add each tag found to the map, with "newer" ones
245 # and add each tag found to the map, with "newer" ones
246 # taking precedence
246 # taking precedence
247 heads = self.heads()
247 heads = self.heads()
248 heads.reverse()
248 heads.reverse()
249 seen = {}
249 seen = {}
250 for node in heads:
250 for node in heads:
251 f = self.filectx('.hgtags', node)
251 f = self.filectx('.hgtags', node)
252 if not f or f.filerev() in seen: continue
252 if not f or f.filerev() in seen: continue
253 seen[f.filerev()] = 1
253 seen[f.filerev()] = 1
254 count = 0
254 count = 0
255 for l in f.data().splitlines():
255 for l in f.data().splitlines():
256 count += 1
256 count += 1
257 parsetag(l, _("%s, line %d") % (str(f), count))
257 parsetag(l, _("%s, line %d") % (str(f), count))
258
258
259 try:
259 try:
260 f = self.opener("localtags")
260 f = self.opener("localtags")
261 count = 0
261 count = 0
262 for l in f:
262 for l in f:
263 count += 1
263 count += 1
264 parsetag(l, _("localtags, line %d") % count)
264 parsetag(l, _("localtags, line %d") % count)
265 except IOError:
265 except IOError:
266 pass
266 pass
267
267
268 self.tagscache['tip'] = self.changelog.tip()
268 self.tagscache['tip'] = self.changelog.tip()
269
269
270 return self.tagscache
270 return self.tagscache
271
271
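The tag files read above (.hgtags per head, then localtags) are line-oriented: "<40-char hex node> <tag name>", with later entries overriding earlier ones. A standalone sketch of that parsing, using binascii in place of bin() and skipping the "node known to the changelog" check that the real code performs; the sample lines are invented:

import binascii

def parse_tag_line(line, cache):
    # mirrors parsetag() above, minus warnings and the changelog check
    s = line.split(" ", 1)
    if len(s) != 2:
        return                  # unparsable entry, skipped
    node, key = s[0], s[1].strip()
    try:
        cache[key] = binascii.unhexlify(node)   # bin() in the code above
    except (TypeError, ValueError):
        pass                    # malformed node, skipped

cache = {}
parse_tag_line("1c6e0db8f7tip-is-not-hex bad", cache)   # ignored: bad hex
parse_tag_line("00000000000000000000" * 2 + " v0.1\n", cache)
assert list(cache) == ['v0.1']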
272 def tagslist(self):
272 def tagslist(self):
273 '''return a list of tags ordered by revision'''
273 '''return a list of tags ordered by revision'''
274 l = []
274 l = []
275 for t, n in self.tags().items():
275 for t, n in self.tags().items():
276 try:
276 try:
277 r = self.changelog.rev(n)
277 r = self.changelog.rev(n)
278 except:
278 except:
279 r = -2 # sort to the beginning of the list if unknown
279 r = -2 # sort to the beginning of the list if unknown
280 l.append((r, t, n))
280 l.append((r, t, n))
281 l.sort()
281 l.sort()
282 return [(t, n) for r, t, n in l]
282 return [(t, n) for r, t, n in l]
283
283
284 def nodetags(self, node):
284 def nodetags(self, node):
285 '''return the tags associated with a node'''
285 '''return the tags associated with a node'''
286 if not self.nodetagscache:
286 if not self.nodetagscache:
287 self.nodetagscache = {}
287 self.nodetagscache = {}
288 for t, n in self.tags().items():
288 for t, n in self.tags().items():
289 self.nodetagscache.setdefault(n, []).append(t)
289 self.nodetagscache.setdefault(n, []).append(t)
290 return self.nodetagscache.get(node, [])
290 return self.nodetagscache.get(node, [])
291
291
292 def branchtags(self):
292 def branchtags(self):
293 if self.branchcache != None:
293 if self.branchcache != None:
294 return self.branchcache
294 return self.branchcache
295
295
296 self.branchcache = {} # avoid recursion in changectx
296 self.branchcache = {} # avoid recursion in changectx
297
297
298 partial, last, lrev = self._readbranchcache()
298 partial, last, lrev = self._readbranchcache()
299
299
300 tiprev = self.changelog.count() - 1
300 tiprev = self.changelog.count() - 1
301 if lrev != tiprev:
301 if lrev != tiprev:
302 self._updatebranchcache(partial, lrev+1, tiprev+1)
302 self._updatebranchcache(partial, lrev+1, tiprev+1)
303 self._writebranchcache(partial, self.changelog.tip(), tiprev)
303 self._writebranchcache(partial, self.changelog.tip(), tiprev)
304
304
305 self.branchcache = partial
305 self.branchcache = partial
306 return self.branchcache
306 return self.branchcache
307
307
308 def _readbranchcache(self):
308 def _readbranchcache(self):
309 partial = {}
309 partial = {}
310 try:
310 try:
311 f = self.opener("branches.cache")
311 f = self.opener("branches.cache")
312 last, lrev = f.readline().rstrip().split(" ", 1)
312 last, lrev = f.readline().rstrip().split(" ", 1)
313 last, lrev = bin(last), int(lrev)
313 last, lrev = bin(last), int(lrev)
314 if (lrev < self.changelog.count() and
314 if (lrev < self.changelog.count() and
315 self.changelog.node(lrev) == last): # sanity check
315 self.changelog.node(lrev) == last): # sanity check
316 for l in f:
316 for l in f:
317 node, label = l.rstrip().split(" ", 1)
317 node, label = l.rstrip().split(" ", 1)
318 partial[label] = bin(node)
318 partial[label] = bin(node)
319 else: # invalidate the cache
319 else: # invalidate the cache
320 last, lrev = nullid, -1
320 last, lrev = nullid, -1
321 f.close()
321 f.close()
322 except IOError:
322 except IOError:
323 last, lrev = nullid, -1
323 last, lrev = nullid, -1
324 return partial, last, lrev
324 return partial, last, lrev
325
325
326 def _writebranchcache(self, branches, tip, tiprev):
326 def _writebranchcache(self, branches, tip, tiprev):
327 try:
327 try:
328 f = self.opener("branches.cache", "w")
328 f = self.opener("branches.cache", "w")
329 f.write("%s %s\n" % (hex(tip), tiprev))
329 f.write("%s %s\n" % (hex(tip), tiprev))
330 for label, node in branches.iteritems():
330 for label, node in branches.iteritems():
331 f.write("%s %s\n" % (hex(node), label))
331 f.write("%s %s\n" % (hex(node), label))
332 except IOError:
332 except IOError:
333 pass
333 pass
334
334
335 def _updatebranchcache(self, partial, start, end):
335 def _updatebranchcache(self, partial, start, end):
336 for r in xrange(start, end):
336 for r in xrange(start, end):
337 c = self.changectx(r)
337 c = self.changectx(r)
338 b = c.branch()
338 b = c.branch()
339 if b:
339 if b:
340 partial[b] = c.node()
340 partial[b] = c.node()
341
341
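The branches.cache file handled by the three methods above has a simple layout: a header line "<tip hex> <tip rev>" followed by one "<node hex> <label>" line per branch. A standalone round-trip sketch of that format, keeping nodes as hex strings instead of converting with bin()/hex(); path and sample data are invented:

import os, tempfile

def write_branch_cache(path, tip_hex, tip_rev, branches):
    f = open(path, 'w')
    f.write("%s %s\n" % (tip_hex, tip_rev))          # header: tip node and rev
    for label, node_hex in branches.items():
        f.write("%s %s\n" % (node_hex, label))       # one line per branch
    f.close()

def read_branch_cache(path):
    f = open(path)
    last, lrev = f.readline().rstrip().split(" ", 1)
    partial = {}
    for l in f:
        node, label = l.rstrip().split(" ", 1)
        partial[label] = node
    f.close()
    return partial, last, int(lrev)

path = os.path.join(tempfile.mkdtemp(), 'branches.cache')
write_branch_cache(path, '0' * 40, 3, {'default': 'f' * 40})
assert read_branch_cache(path) == ({'default': 'f' * 40}, '0' * 40, 3)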
342 def lookup(self, key):
342 def lookup(self, key):
343 if key == '.':
343 if key == '.':
344 key = self.dirstate.parents()[0]
344 key = self.dirstate.parents()[0]
345 if key == nullid:
345 if key == nullid:
346 raise repo.RepoError(_("no revision checked out"))
346 raise repo.RepoError(_("no revision checked out"))
347 n = self.changelog._match(key)
347 n = self.changelog._match(key)
348 if n:
348 if n:
349 return n
349 return n
350 if key in self.tags():
350 if key in self.tags():
351 return self.tags()[key]
351 return self.tags()[key]
352 if key in self.branchtags():
352 if key in self.branchtags():
353 return self.branchtags()[key]
353 return self.branchtags()[key]
354 n = self.changelog._partialmatch(key)
354 n = self.changelog._partialmatch(key)
355 if n:
355 if n:
356 return n
356 return n
357 raise repo.RepoError(_("unknown revision '%s'") % key)
357 raise repo.RepoError(_("unknown revision '%s'") % key)
358
358
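The resolution order in lookup() above is: dirstate parent for '.', exact changelog match, tags, branch tags, then a partial match. A sketch of that cascade with plain dictionaries and callables standing in for the changelog, tag and branch lookups; all data is invented:

def resolve(key, dot_parent, exact_match, tags, branches, partial_match):
    if key == '.':
        if dot_parent is None:
            raise LookupError("no revision checked out")
        key = dot_parent
    n = exact_match(key)
    if n:
        return n
    if key in tags:
        return tags[key]
    if key in branches:
        return branches[key]
    n = partial_match(key)
    if n:
        return n
    raise LookupError("unknown revision '%s'" % key)

tags = {'v0.1': 'node-a'}
branches = {'default': 'node-b'}
assert resolve('v0.1', None, lambda k: None, tags, branches,
               lambda k: None) == 'node-a'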
359 def dev(self):
359 def dev(self):
360 return os.lstat(self.path).st_dev
360 return os.lstat(self.path).st_dev
361
361
362 def local(self):
362 def local(self):
363 return True
363 return True
364
364
365 def join(self, f):
365 def join(self, f):
366 return os.path.join(self.path, f)
366 return os.path.join(self.path, f)
367
367
368 def sjoin(self, f):
368 def sjoin(self, f):
369 return os.path.join(self.path, f)
369 return os.path.join(self.path, f)
370
370
371 def wjoin(self, f):
371 def wjoin(self, f):
372 return os.path.join(self.root, f)
372 return os.path.join(self.root, f)
373
373
374 def file(self, f):
374 def file(self, f):
375 if f[0] == '/':
375 if f[0] == '/':
376 f = f[1:]
376 f = f[1:]
377 return filelog.filelog(self.sopener, f, self.revlogversion)
377 return filelog.filelog(self.sopener, f, self.revlogversion)
378
378
379 def changectx(self, changeid=None):
379 def changectx(self, changeid=None):
380 return context.changectx(self, changeid)
380 return context.changectx(self, changeid)
381
381
382 def workingctx(self):
382 def workingctx(self):
383 return context.workingctx(self)
383 return context.workingctx(self)
384
384
385 def parents(self, changeid=None):
385 def parents(self, changeid=None):
386 '''
386 '''
387 get list of changectxs for parents of changeid or working directory
387 get list of changectxs for parents of changeid or working directory
388 '''
388 '''
389 if changeid is None:
389 if changeid is None:
390 pl = self.dirstate.parents()
390 pl = self.dirstate.parents()
391 else:
391 else:
392 n = self.changelog.lookup(changeid)
392 n = self.changelog.lookup(changeid)
393 pl = self.changelog.parents(n)
393 pl = self.changelog.parents(n)
394 if pl[1] == nullid:
394 if pl[1] == nullid:
395 return [self.changectx(pl[0])]
395 return [self.changectx(pl[0])]
396 return [self.changectx(pl[0]), self.changectx(pl[1])]
396 return [self.changectx(pl[0]), self.changectx(pl[1])]
397
397
398 def filectx(self, path, changeid=None, fileid=None):
398 def filectx(self, path, changeid=None, fileid=None):
399 """changeid can be a changeset revision, node, or tag.
399 """changeid can be a changeset revision, node, or tag.
400 fileid can be a file revision or node."""
400 fileid can be a file revision or node."""
401 return context.filectx(self, path, changeid, fileid)
401 return context.filectx(self, path, changeid, fileid)
402
402
403 def getcwd(self):
403 def getcwd(self):
404 return self.dirstate.getcwd()
404 return self.dirstate.getcwd()
405
405
406 def wfile(self, f, mode='r'):
406 def wfile(self, f, mode='r'):
407 return self.wopener(f, mode)
407 return self.wopener(f, mode)
408
408
409 def wread(self, filename):
409 def wread(self, filename):
410 if self.encodepats == None:
410 if self.encodepats == None:
411 l = []
411 l = []
412 for pat, cmd in self.ui.configitems("encode"):
412 for pat, cmd in self.ui.configitems("encode"):
413 mf = util.matcher(self.root, "", [pat], [], [])[1]
413 mf = util.matcher(self.root, "", [pat], [], [])[1]
414 l.append((mf, cmd))
414 l.append((mf, cmd))
415 self.encodepats = l
415 self.encodepats = l
416
416
417 data = self.wopener(filename, 'r').read()
417 data = self.wopener(filename, 'r').read()
418
418
419 for mf, cmd in self.encodepats:
419 for mf, cmd in self.encodepats:
420 if mf(filename):
420 if mf(filename):
421 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
421 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
422 data = util.filter(data, cmd)
422 data = util.filter(data, cmd)
423 break
423 break
424
424
425 return data
425 return data
426
426
427 def wwrite(self, filename, data, fd=None):
427 def wwrite(self, filename, data, fd=None):
428 if self.decodepats == None:
428 if self.decodepats == None:
429 l = []
429 l = []
430 for pat, cmd in self.ui.configitems("decode"):
430 for pat, cmd in self.ui.configitems("decode"):
431 mf = util.matcher(self.root, "", [pat], [], [])[1]
431 mf = util.matcher(self.root, "", [pat], [], [])[1]
432 l.append((mf, cmd))
432 l.append((mf, cmd))
433 self.decodepats = l
433 self.decodepats = l
434
434
435 for mf, cmd in self.decodepats:
435 for mf, cmd in self.decodepats:
436 if mf(filename):
436 if mf(filename):
437 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
437 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
438 data = util.filter(data, cmd)
438 data = util.filter(data, cmd)
439 break
439 break
440
440
441 if fd:
441 if fd:
442 return fd.write(data)
442 return fd.write(data)
443 return self.wopener(filename, 'w').write(data)
443 return self.wopener(filename, 'w').write(data)
444
444
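wread() and wwrite() above push working-directory data through the first [encode] or [decode] filter whose pattern matches the file name. A sketch of that idea with fnmatch patterns and Python callables standing in for the shell commands that util.filter would normally run; the patterns and filters are invented:

import fnmatch

filters = [
    ("*.txt", lambda data: data.replace("\r\n", "\n")),  # e.g. a cleanup filter
    ("*.bin", lambda data: data),                        # pass binary through
]

def apply_first_match(filename, data, filters):
    for pat, fn in filters:
        if fnmatch.fnmatch(filename, pat):
            return fn(data)      # only the first matching pattern is applied
    return data

assert apply_first_match("notes.txt", "a\r\nb\r\n", filters) == "a\nb\n"
assert apply_first_match("image.png", "raw", filters) == "raw"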
445 def transaction(self):
445 def transaction(self):
446 tr = self.transhandle
446 tr = self.transhandle
447 if tr != None and tr.running():
447 if tr != None and tr.running():
448 return tr.nest()
448 return tr.nest()
449
449
450 # save dirstate for rollback
450 # save dirstate for rollback
451 try:
451 try:
452 ds = self.opener("dirstate").read()
452 ds = self.opener("dirstate").read()
453 except IOError:
453 except IOError:
454 ds = ""
454 ds = ""
455 self.opener("journal.dirstate", "w").write(ds)
455 self.opener("journal.dirstate", "w").write(ds)
456
456
457 tr = transaction.transaction(self.ui.warn, self.sopener,
457 tr = transaction.transaction(self.ui.warn, self.sopener,
458 self.sjoin("journal"),
458 self.sjoin("journal"),
459 aftertrans(self.path))
459 aftertrans(self.path))
460 self.transhandle = tr
460 self.transhandle = tr
461 return tr
461 return tr
462
462
463 def recover(self):
463 def recover(self):
464 l = self.lock()
464 l = self.lock()
465 if os.path.exists(self.sjoin("journal")):
465 if os.path.exists(self.sjoin("journal")):
466 self.ui.status(_("rolling back interrupted transaction\n"))
466 self.ui.status(_("rolling back interrupted transaction\n"))
467 transaction.rollback(self.sopener, self.sjoin("journal"))
467 transaction.rollback(self.sopener, self.sjoin("journal"))
468 self.reload()
468 self.reload()
469 return True
469 return True
470 else:
470 else:
471 self.ui.warn(_("no interrupted transaction available\n"))
471 self.ui.warn(_("no interrupted transaction available\n"))
472 return False
472 return False
473
473
474 def rollback(self, wlock=None):
474 def rollback(self, wlock=None):
475 if not wlock:
475 if not wlock:
476 wlock = self.wlock()
476 wlock = self.wlock()
477 l = self.lock()
477 l = self.lock()
478 if os.path.exists(self.sjoin("undo")):
478 if os.path.exists(self.sjoin("undo")):
479 self.ui.status(_("rolling back last transaction\n"))
479 self.ui.status(_("rolling back last transaction\n"))
480 transaction.rollback(self.sopener, self.sjoin("undo"))
480 transaction.rollback(self.sopener, self.sjoin("undo"))
481 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
481 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
482 self.reload()
482 self.reload()
483 self.wreload()
483 self.wreload()
484 else:
484 else:
485 self.ui.warn(_("no rollback information available\n"))
485 self.ui.warn(_("no rollback information available\n"))
486
486
487 def wreload(self):
487 def wreload(self):
488 self.dirstate.read()
488 self.dirstate.read()
489
489
490 def reload(self):
490 def reload(self):
491 self.changelog.load()
491 self.changelog.load()
492 self.manifest.load()
492 self.manifest.load()
493 self.tagscache = None
493 self.tagscache = None
494 self.nodetagscache = None
494 self.nodetagscache = None
495
495
496 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
496 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
497 desc=None):
497 desc=None):
498 try:
498 try:
499 l = lock.lock(lockname, 0, releasefn, desc=desc)
499 l = lock.lock(lockname, 0, releasefn, desc=desc)
500 except lock.LockHeld, inst:
500 except lock.LockHeld, inst:
501 if not wait:
501 if not wait:
502 raise
502 raise
503 self.ui.warn(_("waiting for lock on %s held by %s\n") %
503 self.ui.warn(_("waiting for lock on %s held by %s\n") %
504 (desc, inst.args[0]))
504 (desc, inst.args[0]))
505 # default to 600 seconds timeout
505 # default to 600 seconds timeout
506 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
506 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
507 releasefn, desc=desc)
507 releasefn, desc=desc)
508 if acquirefn:
508 if acquirefn:
509 acquirefn()
509 acquirefn()
510 return l
510 return l
511
511
512 def lock(self, wait=1):
512 def lock(self, wait=1):
513 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
513 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
514 desc=_('repository %s') % self.origroot)
514 desc=_('repository %s') % self.origroot)
515
515
516 def wlock(self, wait=1):
516 def wlock(self, wait=1):
517 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
517 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
518 self.wreload,
518 self.wreload,
519 desc=_('working directory of %s') % self.origroot)
519 desc=_('working directory of %s') % self.origroot)
520
520
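do_lock() above first tries a non-blocking acquire and, if the lock is held and waiting is allowed, retries with the timeout taken from ui.timeout (600 seconds by default). A generic sketch of that try-then-wait pattern using a plain O_EXCL lock file; this is only an illustration and is much simpler than Mercurial's lock.lock:

import errno, os, time

def try_lock(path):
    try:
        fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
        os.close(fd)
        return True
    except OSError as inst:
        if inst.errno == errno.EEXIST:
            return False     # someone else holds the lock
        raise

def acquire(path, timeout=600, poll=0.1):
    if try_lock(path):               # the wait=0 attempt
        return
    deadline = time.time() + timeout
    while time.time() < deadline:    # the timed retry, like ui.timeout above
        time.sleep(poll)
        if try_lock(path):
            return
    raise RuntimeError("timed out waiting for lock %s" % path)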
521 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
521 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
522 """
522 """
523 commit an individual file as part of a larger transaction
523 commit an individual file as part of a larger transaction
524 """
524 """
525
525
526 t = self.wread(fn)
526 t = self.wread(fn)
527 fl = self.file(fn)
527 fl = self.file(fn)
528 fp1 = manifest1.get(fn, nullid)
528 fp1 = manifest1.get(fn, nullid)
529 fp2 = manifest2.get(fn, nullid)
529 fp2 = manifest2.get(fn, nullid)
530
530
531 meta = {}
531 meta = {}
532 cp = self.dirstate.copied(fn)
532 cp = self.dirstate.copied(fn)
533 if cp:
533 if cp:
534 meta["copy"] = cp
534 meta["copy"] = cp
535 if not manifest2: # not a branch merge
535 if not manifest2: # not a branch merge
536 meta["copyrev"] = hex(manifest1.get(cp, nullid))
536 meta["copyrev"] = hex(manifest1.get(cp, nullid))
537 fp2 = nullid
537 fp2 = nullid
538 elif fp2 != nullid: # copied on remote side
538 elif fp2 != nullid: # copied on remote side
539 meta["copyrev"] = hex(manifest1.get(cp, nullid))
539 meta["copyrev"] = hex(manifest1.get(cp, nullid))
540 else: # copied on local side, reversed
540 else: # copied on local side, reversed
541 meta["copyrev"] = hex(manifest2.get(cp))
541 meta["copyrev"] = hex(manifest2.get(cp))
542 fp2 = nullid
542 fp2 = nullid
543 self.ui.debug(_(" %s: copy %s:%s\n") %
543 self.ui.debug(_(" %s: copy %s:%s\n") %
544 (fn, cp, meta["copyrev"]))
544 (fn, cp, meta["copyrev"]))
545 fp1 = nullid
545 fp1 = nullid
546 elif fp2 != nullid:
546 elif fp2 != nullid:
547 # is one parent an ancestor of the other?
547 # is one parent an ancestor of the other?
548 fpa = fl.ancestor(fp1, fp2)
548 fpa = fl.ancestor(fp1, fp2)
549 if fpa == fp1:
549 if fpa == fp1:
550 fp1, fp2 = fp2, nullid
550 fp1, fp2 = fp2, nullid
551 elif fpa == fp2:
551 elif fpa == fp2:
552 fp2 = nullid
552 fp2 = nullid
553
553
554 # is the file unmodified from the parent? report existing entry
554 # is the file unmodified from the parent? report existing entry
555 if fp2 == nullid and not fl.cmp(fp1, t):
555 if fp2 == nullid and not fl.cmp(fp1, t):
556 return fp1
556 return fp1
557
557
558 changelist.append(fn)
558 changelist.append(fn)
559 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
559 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
560
560
561 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
561 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
562 orig_parent = self.dirstate.parents()[0] or nullid
562 orig_parent = self.dirstate.parents()[0] or nullid
563 p1 = p1 or self.dirstate.parents()[0] or nullid
563 p1 = p1 or self.dirstate.parents()[0] or nullid
564 p2 = p2 or self.dirstate.parents()[1] or nullid
564 p2 = p2 or self.dirstate.parents()[1] or nullid
565 c1 = self.changelog.read(p1)
565 c1 = self.changelog.read(p1)
566 c2 = self.changelog.read(p2)
566 c2 = self.changelog.read(p2)
567 m1 = self.manifest.read(c1[0]).copy()
567 m1 = self.manifest.read(c1[0]).copy()
568 m2 = self.manifest.read(c2[0])
568 m2 = self.manifest.read(c2[0])
569 changed = []
569 changed = []
570 removed = []
570 removed = []
571
571
572 if orig_parent == p1:
572 if orig_parent == p1:
573 update_dirstate = 1
573 update_dirstate = 1
574 else:
574 else:
575 update_dirstate = 0
575 update_dirstate = 0
576
576
577 if not wlock:
577 if not wlock:
578 wlock = self.wlock()
578 wlock = self.wlock()
579 l = self.lock()
579 l = self.lock()
580 tr = self.transaction()
580 tr = self.transaction()
581 linkrev = self.changelog.count()
581 linkrev = self.changelog.count()
582 for f in files:
582 for f in files:
583 try:
583 try:
584 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
584 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
585 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
585 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
586 except IOError:
586 except IOError:
587 try:
587 try:
588 del m1[f]
588 del m1[f]
589 if update_dirstate:
589 if update_dirstate:
590 self.dirstate.forget([f])
590 self.dirstate.forget([f])
591 removed.append(f)
591 removed.append(f)
592 except:
592 except:
593 # deleted from p2?
593 # deleted from p2?
594 pass
594 pass
595
595
596 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
596 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
597 user = user or self.ui.username()
597 user = user or self.ui.username()
598 n = self.changelog.add(mnode, changed + removed, text,
598 n = self.changelog.add(mnode, changed + removed, text,
599 tr, p1, p2, user, date)
599 tr, p1, p2, user, date)
600 tr.close()
600 tr.close()
601 if update_dirstate:
601 if update_dirstate:
602 self.dirstate.setparents(n, nullid)
602 self.dirstate.setparents(n, nullid)
603
603
604 def commit(self, files=None, text="", user=None, date=None,
604 def commit(self, files=None, text="", user=None, date=None,
605 match=util.always, force=False, lock=None, wlock=None,
605 match=util.always, force=False, lock=None, wlock=None,
606 force_editor=False):
606 force_editor=False):
607 commit = []
607 commit = []
608 remove = []
608 remove = []
609 changed = []
609 changed = []
610
610
611 if files:
611 if files:
612 for f in files:
612 for f in files:
613 s = self.dirstate.state(f)
613 s = self.dirstate.state(f)
614 if s in 'nmai':
614 if s in 'nmai':
615 commit.append(f)
615 commit.append(f)
616 elif s == 'r':
616 elif s == 'r':
617 remove.append(f)
617 remove.append(f)
618 else:
618 else:
619 self.ui.warn(_("%s not tracked!\n") % f)
619 self.ui.warn(_("%s not tracked!\n") % f)
620 else:
620 else:
621 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
621 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
622 commit = modified + added
622 commit = modified + added
623 remove = removed
623 remove = removed
624
624
625 p1, p2 = self.dirstate.parents()
625 p1, p2 = self.dirstate.parents()
626 c1 = self.changelog.read(p1)
626 c1 = self.changelog.read(p1)
627 c2 = self.changelog.read(p2)
627 c2 = self.changelog.read(p2)
628 m1 = self.manifest.read(c1[0]).copy()
628 m1 = self.manifest.read(c1[0]).copy()
629 m2 = self.manifest.read(c2[0])
629 m2 = self.manifest.read(c2[0])
630
630
631 branchname = self.workingctx().branch()
631 branchname = self.workingctx().branch()
632 oldname = c1[5].get("branch", "")
632 oldname = c1[5].get("branch", "")
633
633
634 if not commit and not remove and not force and p2 == nullid and \
634 if not commit and not remove and not force and p2 == nullid and \
635 branchname == oldname:
635 branchname == oldname:
636 self.ui.status(_("nothing changed\n"))
636 self.ui.status(_("nothing changed\n"))
637 return None
637 return None
638
638
639 xp1 = hex(p1)
639 xp1 = hex(p1)
640 if p2 == nullid: xp2 = ''
640 if p2 == nullid: xp2 = ''
641 else: xp2 = hex(p2)
641 else: xp2 = hex(p2)
642
642
643 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
643 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
644
644
645 if not wlock:
645 if not wlock:
646 wlock = self.wlock()
646 wlock = self.wlock()
647 if not lock:
647 if not lock:
648 lock = self.lock()
648 lock = self.lock()
649 tr = self.transaction()
649 tr = self.transaction()
650
650
651 # check in files
651 # check in files
652 new = {}
652 new = {}
653 linkrev = self.changelog.count()
653 linkrev = self.changelog.count()
654 commit.sort()
654 commit.sort()
655 for f in commit:
655 for f in commit:
656 self.ui.note(f + "\n")
656 self.ui.note(f + "\n")
657 try:
657 try:
658 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
658 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
659 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
659 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
660 except IOError:
660 except IOError:
661 self.ui.warn(_("trouble committing %s!\n") % f)
661 self.ui.warn(_("trouble committing %s!\n") % f)
662 raise
662 raise
663
663
664 # update manifest
664 # update manifest
665 m1.update(new)
665 m1.update(new)
666 for f in remove:
666 for f in remove:
667 if f in m1:
667 if f in m1:
668 del m1[f]
668 del m1[f]
669 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
669 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
670
670
671 # add changeset
671 # add changeset
672 new = new.keys()
672 new = new.keys()
673 new.sort()
673 new.sort()
674
674
675 user = user or self.ui.username()
675 user = user or self.ui.username()
676 if not text or force_editor:
676 if not text or force_editor:
677 edittext = []
677 edittext = []
678 if text:
678 if text:
679 edittext.append(text)
679 edittext.append(text)
680 edittext.append("")
680 edittext.append("")
681 if p2 != nullid:
681 if p2 != nullid:
682 edittext.append("HG: branch merge")
682 edittext.append("HG: branch merge")
683 edittext.extend(["HG: changed %s" % f for f in changed])
683 edittext.extend(["HG: changed %s" % f for f in changed])
684 edittext.extend(["HG: removed %s" % f for f in remove])
684 edittext.extend(["HG: removed %s" % f for f in remove])
685 if not changed and not remove:
685 if not changed and not remove:
686 edittext.append("HG: no files changed")
686 edittext.append("HG: no files changed")
687 edittext.append("")
687 edittext.append("")
688 # run editor in the repository root
688 # run editor in the repository root
689 olddir = os.getcwd()
689 olddir = os.getcwd()
690 os.chdir(self.root)
690 os.chdir(self.root)
691 text = self.ui.edit("\n".join(edittext), user)
691 text = self.ui.edit("\n".join(edittext), user)
692 os.chdir(olddir)
692 os.chdir(olddir)
693
693
694 lines = [line.rstrip() for line in text.rstrip().splitlines()]
694 lines = [line.rstrip() for line in text.rstrip().splitlines()]
695 while lines and not lines[0]:
695 while lines and not lines[0]:
696 del lines[0]
696 del lines[0]
697 if not lines:
697 if not lines:
698 return None
698 return None
699 text = '\n'.join(lines)
699 text = '\n'.join(lines)
700 extra = {}
700 extra = {}
701 if branchname:
701 if branchname:
702 extra["branch"] = branchname
702 extra["branch"] = branchname
703 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
703 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
704 user, date, extra)
704 user, date, extra)
705 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
705 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
706 parent2=xp2)
706 parent2=xp2)
707 tr.close()
707 tr.close()
708
708
709 self.dirstate.setparents(n)
709 self.dirstate.setparents(n)
710 self.dirstate.update(new, "n")
710 self.dirstate.update(new, "n")
711 self.dirstate.forget(remove)
711 self.dirstate.forget(remove)
712
712
713 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
713 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
714 return n
714 return n
715
715
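When no message is supplied, commit() above assembles an "HG:" editor template and, after the editor returns, strips trailing whitespace and leading blank lines. A small sketch of just that string handling, with invented file names; in the real flow the HG: lines are of course replaced by the user's message:

changed = ['a.txt', 'b/c.py']
remove = ['old.txt']
edittext = [""]                                   # empty message placeholder
edittext.extend(["HG: changed %s" % f for f in changed])
edittext.extend(["HG: removed %s" % f for f in remove])
if not changed and not remove:
    edittext.append("HG: no files changed")
edittext.append("")
template = "\n".join(edittext)
assert template.count("HG:") == 3

# post-editor cleanup, as in commit() above
lines = [line.rstrip() for line in template.rstrip().splitlines()]
while lines and not lines[0]:
    del lines[0]
text = "\n".join(lines)
assert text.startswith("HG: changed a.txt")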
716 def walk(self, node=None, files=[], match=util.always, badmatch=None):
716 def walk(self, node=None, files=[], match=util.always, badmatch=None):
717 if node:
717 if node:
718 fdict = dict.fromkeys(files)
718 fdict = dict.fromkeys(files)
719 for fn in self.manifest.read(self.changelog.read(node)[0]):
719 for fn in self.manifest.read(self.changelog.read(node)[0]):
720 for ffn in fdict:
720 for ffn in fdict:
721 # match if the file is the exact name or a directory
721 # match if the file is the exact name or a directory
722 if ffn == fn or fn.startswith("%s/" % ffn):
722 if ffn == fn or fn.startswith("%s/" % ffn):
723 del fdict[ffn]
723 del fdict[ffn]
724 break
724 break
725 if match(fn):
725 if match(fn):
726 yield 'm', fn
726 yield 'm', fn
727 for fn in fdict:
727 for fn in fdict:
728 if badmatch and badmatch(fn):
728 if badmatch and badmatch(fn):
729 if match(fn):
729 if match(fn):
730 yield 'b', fn
730 yield 'b', fn
731 else:
731 else:
732 self.ui.warn(_('%s: No such file in rev %s\n') % (
732 self.ui.warn(_('%s: No such file in rev %s\n') % (
733 util.pathto(self.getcwd(), fn), short(node)))
733 util.pathto(self.getcwd(), fn), short(node)))
734 else:
734 else:
735 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
735 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
736 yield src, fn
736 yield src, fn
737
737
738 def status(self, node1=None, node2=None, files=[], match=util.always,
738 def status(self, node1=None, node2=None, files=[], match=util.always,
739 wlock=None, list_ignored=False, list_clean=False):
739 wlock=None, list_ignored=False, list_clean=False):
740 """return status of files between two nodes or node and working directory
740 """return status of files between two nodes or node and working directory
741
741
742 If node1 is None, use the first dirstate parent instead.
742 If node1 is None, use the first dirstate parent instead.
743 If node2 is None, compare node1 with working directory.
743 If node2 is None, compare node1 with working directory.
744 """
744 """
745
745
746 def fcmp(fn, mf):
746 def fcmp(fn, mf):
747 t1 = self.wread(fn)
747 t1 = self.wread(fn)
748 return self.file(fn).cmp(mf.get(fn, nullid), t1)
748 return self.file(fn).cmp(mf.get(fn, nullid), t1)
749
749
750 def mfmatches(node):
750 def mfmatches(node):
751 change = self.changelog.read(node)
751 change = self.changelog.read(node)
752 mf = self.manifest.read(change[0]).copy()
752 mf = self.manifest.read(change[0]).copy()
753 for fn in mf.keys():
753 for fn in mf.keys():
754 if not match(fn):
754 if not match(fn):
755 del mf[fn]
755 del mf[fn]
756 return mf
756 return mf
757
757
758 modified, added, removed, deleted, unknown = [], [], [], [], []
758 modified, added, removed, deleted, unknown = [], [], [], [], []
759 ignored, clean = [], []
759 ignored, clean = [], []
760
760
761 compareworking = False
761 compareworking = False
762 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
762 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
763 compareworking = True
763 compareworking = True
764
764
765 if not compareworking:
765 if not compareworking:
766 # read the manifest from node1 before the manifest from node2,
766 # read the manifest from node1 before the manifest from node2,
767 # so that we'll hit the manifest cache if we're going through
767 # so that we'll hit the manifest cache if we're going through
768 # all the revisions in parent->child order.
768 # all the revisions in parent->child order.
769 mf1 = mfmatches(node1)
769 mf1 = mfmatches(node1)
770
770
771 # are we comparing the working directory?
771 # are we comparing the working directory?
772 if not node2:
772 if not node2:
773 if not wlock:
773 if not wlock:
774 try:
774 try:
775 wlock = self.wlock(wait=0)
775 wlock = self.wlock(wait=0)
776 except lock.LockException:
776 except lock.LockException:
777 wlock = None
777 wlock = None
778 (lookup, modified, added, removed, deleted, unknown,
778 (lookup, modified, added, removed, deleted, unknown,
779 ignored, clean) = self.dirstate.status(files, match,
779 ignored, clean) = self.dirstate.status(files, match,
780 list_ignored, list_clean)
780 list_ignored, list_clean)
781
781
782 # are we comparing working dir against its parent?
782 # are we comparing working dir against its parent?
783 if compareworking:
783 if compareworking:
784 if lookup:
784 if lookup:
785 # do a full compare of any files that might have changed
785 # do a full compare of any files that might have changed
786 mf2 = mfmatches(self.dirstate.parents()[0])
786 mf2 = mfmatches(self.dirstate.parents()[0])
787 for f in lookup:
787 for f in lookup:
788 if fcmp(f, mf2):
788 if fcmp(f, mf2):
789 modified.append(f)
789 modified.append(f)
790 else:
790 else:
791 clean.append(f)
791 clean.append(f)
792 if wlock is not None:
792 if wlock is not None:
793 self.dirstate.update([f], "n")
793 self.dirstate.update([f], "n")
794 else:
794 else:
795 # we are comparing working dir against non-parent
795 # we are comparing working dir against non-parent
796 # generate a pseudo-manifest for the working dir
796 # generate a pseudo-manifest for the working dir
797 # XXX: create it in dirstate.py ?
797 # XXX: create it in dirstate.py ?
798 mf2 = mfmatches(self.dirstate.parents()[0])
798 mf2 = mfmatches(self.dirstate.parents()[0])
799 for f in lookup + modified + added:
799 for f in lookup + modified + added:
800 mf2[f] = ""
800 mf2[f] = ""
801 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
801 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
802 for f in removed:
802 for f in removed:
803 if f in mf2:
803 if f in mf2:
804 del mf2[f]
804 del mf2[f]
805 else:
805 else:
806 # we are comparing two revisions
806 # we are comparing two revisions
807 mf2 = mfmatches(node2)
807 mf2 = mfmatches(node2)
808
808
809 if not compareworking:
809 if not compareworking:
810 # flush lists from dirstate before comparing manifests
810 # flush lists from dirstate before comparing manifests
811 modified, added, clean = [], [], []
811 modified, added, clean = [], [], []
812
812
813 # make sure to sort the files so we talk to the disk in a
813 # make sure to sort the files so we talk to the disk in a
814 # reasonable order
814 # reasonable order
815 mf2keys = mf2.keys()
815 mf2keys = mf2.keys()
816 mf2keys.sort()
816 mf2keys.sort()
817 for fn in mf2keys:
817 for fn in mf2keys:
818 if mf1.has_key(fn):
818 if mf1.has_key(fn):
819 if mf1.flags(fn) != mf2.flags(fn) or \
819 if mf1.flags(fn) != mf2.flags(fn) or \
820 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
820 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
821 modified.append(fn)
821 modified.append(fn)
822 elif list_clean:
822 elif list_clean:
823 clean.append(fn)
823 clean.append(fn)
824 del mf1[fn]
824 del mf1[fn]
825 else:
825 else:
826 added.append(fn)
826 added.append(fn)
827
827
828 removed = mf1.keys()
828 removed = mf1.keys()
829
829
830 # sort and return results:
830 # sort and return results:
831 for l in modified, added, removed, deleted, unknown, ignored, clean:
831 for l in modified, added, removed, deleted, unknown, ignored, clean:
832 l.sort()
832 l.sort()
833 return (modified, added, removed, deleted, unknown, ignored, clean)
833 return (modified, added, removed, deleted, unknown, ignored, clean)
834
834
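# [Illustrative sketch -- not part of the original file.]  The core of the
# two-revision comparison above (lines 817-828) is a walk over the second
# manifest against the first: same name with different flags or contents ->
# modified, name only in mf2 -> added, names left over in mf1 -> removed.
# Standalone sketch over plain dicts mapping filename -> content:

def diff_manifests(mf1, mf2):
    mf1 = dict(mf1)                      # copied; entries are deleted below
    modified, added = [], []
    for fn in sorted(mf2):
        if fn in mf1:
            if mf1[fn] != mf2[fn]:
                modified.append(fn)
            del mf1[fn]
        else:
            added.append(fn)
    removed = sorted(mf1)                # whatever mf2 never mentioned
    return modified, added, removed

assert diff_manifests({'a': '1', 'b': '2'}, {'a': '1', 'b': '3', 'c': '4'}) == \
       (['b'], ['c'], [])
assert diff_manifests({'a': '1'}, {}) == ([], [], ['a'])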
835 def add(self, list, wlock=None):
835 def add(self, list, wlock=None):
836 if not wlock:
836 if not wlock:
837 wlock = self.wlock()
837 wlock = self.wlock()
838 for f in list:
838 for f in list:
839 p = self.wjoin(f)
839 p = self.wjoin(f)
840 if not os.path.exists(p):
840 if not os.path.exists(p):
841 self.ui.warn(_("%s does not exist!\n") % f)
841 self.ui.warn(_("%s does not exist!\n") % f)
842 elif not os.path.isfile(p):
842 elif not os.path.isfile(p):
843 self.ui.warn(_("%s not added: only files supported currently\n")
843 self.ui.warn(_("%s not added: only files supported currently\n")
844 % f)
844 % f)
845 elif self.dirstate.state(f) in 'an':
845 elif self.dirstate.state(f) in 'an':
846 self.ui.warn(_("%s already tracked!\n") % f)
846 self.ui.warn(_("%s already tracked!\n") % f)
847 else:
847 else:
848 self.dirstate.update([f], "a")
848 self.dirstate.update([f], "a")
849
849
850 def forget(self, list, wlock=None):
850 def forget(self, list, wlock=None):
851 if not wlock:
851 if not wlock:
852 wlock = self.wlock()
852 wlock = self.wlock()
853 for f in list:
853 for f in list:
854 if self.dirstate.state(f) not in 'ai':
854 if self.dirstate.state(f) not in 'ai':
855 self.ui.warn(_("%s not added!\n") % f)
855 self.ui.warn(_("%s not added!\n") % f)
856 else:
856 else:
857 self.dirstate.forget([f])
857 self.dirstate.forget([f])
858
858
859 def remove(self, list, unlink=False, wlock=None):
859 def remove(self, list, unlink=False, wlock=None):
860 if unlink:
860 if unlink:
861 for f in list:
861 for f in list:
862 try:
862 try:
863 util.unlink(self.wjoin(f))
863 util.unlink(self.wjoin(f))
864 except OSError, inst:
864 except OSError, inst:
865 if inst.errno != errno.ENOENT:
865 if inst.errno != errno.ENOENT:
866 raise
866 raise
867 if not wlock:
867 if not wlock:
868 wlock = self.wlock()
868 wlock = self.wlock()
869 for f in list:
869 for f in list:
870 p = self.wjoin(f)
870 p = self.wjoin(f)
871 if os.path.exists(p):
871 if os.path.exists(p):
872 self.ui.warn(_("%s still exists!\n") % f)
872 self.ui.warn(_("%s still exists!\n") % f)
873 elif self.dirstate.state(f) == 'a':
873 elif self.dirstate.state(f) == 'a':
874 self.dirstate.forget([f])
874 self.dirstate.forget([f])
875 elif f not in self.dirstate:
875 elif f not in self.dirstate:
876 self.ui.warn(_("%s not tracked!\n") % f)
876 self.ui.warn(_("%s not tracked!\n") % f)
877 else:
877 else:
878 self.dirstate.update([f], "r")
878 self.dirstate.update([f], "r")
879
879
880 def undelete(self, list, wlock=None):
880 def undelete(self, list, wlock=None):
881 p = self.dirstate.parents()[0]
881 p = self.dirstate.parents()[0]
882 mn = self.changelog.read(p)[0]
882 mn = self.changelog.read(p)[0]
883 m = self.manifest.read(mn)
883 m = self.manifest.read(mn)
884 if not wlock:
884 if not wlock:
885 wlock = self.wlock()
885 wlock = self.wlock()
886 for f in list:
886 for f in list:
887 if self.dirstate.state(f) not in "r":
887 if self.dirstate.state(f) not in "r":
888 self.ui.warn("%s not removed!\n" % f)
888 self.ui.warn("%s not removed!\n" % f)
889 else:
889 else:
890 t = self.file(f).read(m[f])
890 t = self.file(f).read(m[f])
891 self.wwrite(f, t)
891 self.wwrite(f, t)
892 util.set_exec(self.wjoin(f), m.execf(f))
892 util.set_exec(self.wjoin(f), m.execf(f))
893 self.dirstate.update([f], "n")
893 self.dirstate.update([f], "n")
894
894
895 def copy(self, source, dest, wlock=None):
895 def copy(self, source, dest, wlock=None):
896 p = self.wjoin(dest)
896 p = self.wjoin(dest)
897 if not os.path.exists(p):
897 if not os.path.exists(p):
898 self.ui.warn(_("%s does not exist!\n") % dest)
898 self.ui.warn(_("%s does not exist!\n") % dest)
899 elif not os.path.isfile(p):
899 elif not os.path.isfile(p):
900 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
900 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
901 else:
901 else:
902 if not wlock:
902 if not wlock:
903 wlock = self.wlock()
903 wlock = self.wlock()
904 if self.dirstate.state(dest) == '?':
904 if self.dirstate.state(dest) == '?':
905 self.dirstate.update([dest], "a")
905 self.dirstate.update([dest], "a")
906 self.dirstate.copy(source, dest)
906 self.dirstate.copy(source, dest)
907
907
908 def heads(self, start=None):
908 def heads(self, start=None):
909 heads = self.changelog.heads(start)
909 heads = self.changelog.heads(start)
910 # sort the output in rev descending order
910 # sort the output in rev descending order
911 heads = [(-self.changelog.rev(h), h) for h in heads]
911 heads = [(-self.changelog.rev(h), h) for h in heads]
912 heads.sort()
912 heads.sort()
913 return [n for (r, n) in heads]
913 return [n for (r, n) in heads]
914
914
915 # branchlookup returns a dict giving a list of branches for
915 # branchlookup returns a dict giving a list of branches for
916 # each head. A branch is defined as the tag of a node or
916 # each head. A branch is defined as the tag of a node or
917 # the branch of the node's parents. If a node has multiple
917 # the branch of the node's parents. If a node has multiple
918 # branch tags, tags are eliminated if they are visible from other
918 # branch tags, tags are eliminated if they are visible from other
919 # branch tags.
919 # branch tags.
920 #
920 #
921 # So, for this graph: a->b->c->d->e
921 # So, for this graph: a->b->c->d->e
922 #                      \         /
922 #                      \         /
923 #                       aa -----/
923 #                       aa -----/
924 # a has tag 2.6.12
924 # a has tag 2.6.12
925 # d has tag 2.6.13
925 # d has tag 2.6.13
926 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
926 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
927 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
927 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
928 # from the list.
928 # from the list.
929 #
929 #
930 # It is possible that more than one head will have the same branch tag.
930 # It is possible that more than one head will have the same branch tag.
931 # callers need to check the result for multiple heads under the same
931 # callers need to check the result for multiple heads under the same
932 # branch tag if that is a problem for them (ie checkout of a specific
932 # branch tag if that is a problem for them (ie checkout of a specific
933 # branch).
933 # branch).
934 #
934 #
935 # passing in a specific branch will limit the depth of the search
935 # passing in a specific branch will limit the depth of the search
936 # through the parents. It won't limit the branches returned in the
936 # through the parents. It won't limit the branches returned in the
937 # result though.
937 # result though.
938 def branchlookup(self, heads=None, branch=None):
938 def branchlookup(self, heads=None, branch=None):
939 if not heads:
939 if not heads:
940 heads = self.heads()
940 heads = self.heads()
941 headt = [ h for h in heads ]
941 headt = [ h for h in heads ]
942 chlog = self.changelog
942 chlog = self.changelog
943 branches = {}
943 branches = {}
944 merges = []
944 merges = []
945 seenmerge = {}
945 seenmerge = {}
946
946
947 # traverse the tree once for each head, recording in the branches
947 # traverse the tree once for each head, recording in the branches
948 # dict which tags are visible from this head. The branches
948 # dict which tags are visible from this head. The branches
949 # dict also records which tags are visible from each tag
949 # dict also records which tags are visible from each tag
950 # while we traverse.
950 # while we traverse.
951 while headt or merges:
951 while headt or merges:
952 if merges:
952 if merges:
953 n, found = merges.pop()
953 n, found = merges.pop()
954 visit = [n]
954 visit = [n]
955 else:
955 else:
956 h = headt.pop()
956 h = headt.pop()
957 visit = [h]
957 visit = [h]
958 found = [h]
958 found = [h]
959 seen = {}
959 seen = {}
960 while visit:
960 while visit:
961 n = visit.pop()
961 n = visit.pop()
962 if n in seen:
962 if n in seen:
963 continue
963 continue
964 pp = chlog.parents(n)
964 pp = chlog.parents(n)
965 tags = self.nodetags(n)
965 tags = self.nodetags(n)
966 if tags:
966 if tags:
967 for x in tags:
967 for x in tags:
968 if x == 'tip':
968 if x == 'tip':
969 continue
969 continue
970 for f in found:
970 for f in found:
971 branches.setdefault(f, {})[n] = 1
971 branches.setdefault(f, {})[n] = 1
972 branches.setdefault(n, {})[n] = 1
972 branches.setdefault(n, {})[n] = 1
973 break
973 break
974 if n not in found:
974 if n not in found:
975 found.append(n)
975 found.append(n)
976 if branch in tags:
976 if branch in tags:
977 continue
977 continue
978 seen[n] = 1
978 seen[n] = 1
979 if pp[1] != nullid and n not in seenmerge:
979 if pp[1] != nullid and n not in seenmerge:
980 merges.append((pp[1], [x for x in found]))
980 merges.append((pp[1], [x for x in found]))
981 seenmerge[n] = 1
981 seenmerge[n] = 1
982 if pp[0] != nullid:
982 if pp[0] != nullid:
983 visit.append(pp[0])
983 visit.append(pp[0])
984 # traverse the branches dict, eliminating branch tags from each
984 # traverse the branches dict, eliminating branch tags from each
985 # head that are visible from another branch tag for that head.
985 # head that are visible from another branch tag for that head.
986 out = {}
986 out = {}
987 viscache = {}
987 viscache = {}
988 for h in heads:
988 for h in heads:
989 def visible(node):
989 def visible(node):
990 if node in viscache:
990 if node in viscache:
991 return viscache[node]
991 return viscache[node]
992 ret = {}
992 ret = {}
993 visit = [node]
993 visit = [node]
994 while visit:
994 while visit:
995 x = visit.pop()
995 x = visit.pop()
996 if x in viscache:
996 if x in viscache:
997 ret.update(viscache[x])
997 ret.update(viscache[x])
998 elif x not in ret:
998 elif x not in ret:
999 ret[x] = 1
999 ret[x] = 1
1000 if x in branches:
1000 if x in branches:
1001 visit[len(visit):] = branches[x].keys()
1001 visit[len(visit):] = branches[x].keys()
1002 viscache[node] = ret
1002 viscache[node] = ret
1003 return ret
1003 return ret
1004 if h not in branches:
1004 if h not in branches:
1005 continue
1005 continue
1006 # O(n^2), but somewhat limited. This only searches the
1006 # O(n^2), but somewhat limited. This only searches the
1007 # tags visible from a specific head, not all the tags in the
1007 # tags visible from a specific head, not all the tags in the
1008 # whole repo.
1008 # whole repo.
1009 for b in branches[h]:
1009 for b in branches[h]:
1010 vis = False
1010 vis = False
1011 for bb in branches[h].keys():
1011 for bb in branches[h].keys():
1012 if b != bb:
1012 if b != bb:
1013 if b in visible(bb):
1013 if b in visible(bb):
1014 vis = True
1014 vis = True
1015 break
1015 break
1016 if not vis:
1016 if not vis:
1017 l = out.setdefault(h, [])
1017 l = out.setdefault(h, [])
1018 l[len(l):] = self.nodetags(b)
1018 l[len(l):] = self.nodetags(b)
1019 return out
1019 return out
1020
1020
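# [Illustrative sketch -- not part of the original file.]  The elimination rule
# described in the comment above branchlookup() (lines 915-928), replayed on
# the example graph a->b->c->d->e with aa branching off a and merging back in
# at e.  Nothing here uses Mercurial itself; parents are a plain dict.

parents = {'a': [], 'aa': ['a'], 'b': ['a'], 'c': ['b'],
           'd': ['c'], 'e': ['d', 'aa']}
tags = {'a': '2.6.12', 'd': '2.6.13'}

def reachable(frm):
    seen, stack = set(), [frm]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents[n])
    return seen

def branch_tags(head):
    candidates = [n for n in reachable(head) if n in tags]
    kept = [n for n in candidates
            if not any(n in reachable(other) and n != other
                       for other in candidates)]
    return sorted(tags[n] for n in kept)

# 2.6.12 is reachable from the 2.6.13 node, so only 2.6.13 survives for head e.
assert branch_tags('e') == ['2.6.13']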
1021 def branches(self, nodes):
1021 def branches(self, nodes):
1022 if not nodes:
1022 if not nodes:
1023 nodes = [self.changelog.tip()]
1023 nodes = [self.changelog.tip()]
1024 b = []
1024 b = []
1025 for n in nodes:
1025 for n in nodes:
1026 t = n
1026 t = n
1027 while 1:
1027 while 1:
1028 p = self.changelog.parents(n)
1028 p = self.changelog.parents(n)
1029 if p[1] != nullid or p[0] == nullid:
1029 if p[1] != nullid or p[0] == nullid:
1030 b.append((t, n, p[0], p[1]))
1030 b.append((t, n, p[0], p[1]))
1031 break
1031 break
1032 n = p[0]
1032 n = p[0]
1033 return b
1033 return b
1034
1034
1035 def between(self, pairs):
1035 def between(self, pairs):
1036 r = []
1036 r = []
1037
1037
1038 for top, bottom in pairs:
1038 for top, bottom in pairs:
1039 n, l, i = top, [], 0
1039 n, l, i = top, [], 0
1040 f = 1
1040 f = 1
1041
1041
1042 while n != bottom:
1042 while n != bottom:
1043 p = self.changelog.parents(n)[0]
1043 p = self.changelog.parents(n)[0]
1044 if i == f:
1044 if i == f:
1045 l.append(n)
1045 l.append(n)
1046 f = f * 2
1046 f = f * 2
1047 n = p
1047 n = p
1048 i += 1
1048 i += 1
1049
1049
1050 r.append(l)
1050 r.append(l)
1051
1051
1052 return r
1052 return r
1053
1053
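# [Illustrative sketch -- not part of the original file.]  between() above
# samples the first-parent chain from `top` down to `bottom` at exponentially
# growing distances (1, 2, 4, 8, ... steps below top), which the discovery
# code later uses to narrow down where the known/unknown boundary lies.
# Standalone version over a chain modelled as a list of ids, newest first:

def sample_between(chain):
    """chain[0] is `top`, chain[-1] is `bottom`."""
    picked, f = [], 1
    for i, node in enumerate(chain[:-1]):   # stop before `bottom`
        if i == f:
            picked.append(node)
            f *= 2
    return picked

chain = ['n%d' % i for i in range(12)]      # n0 (top) .. n11 (bottom)
assert sample_between(chain) == ['n1', 'n2', 'n4', 'n8']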
1054 def findincoming(self, remote, base=None, heads=None, force=False):
1054 def findincoming(self, remote, base=None, heads=None, force=False):
1055 """Return list of roots of the subsets of missing nodes from remote
1055 """Return list of roots of the subsets of missing nodes from remote
1056
1056
1057 If base dict is specified, assume that these nodes and their parents
1057 If base dict is specified, assume that these nodes and their parents
1058 exist on the remote side and that no child of a node of base exists
1058 exist on the remote side and that no child of a node of base exists
1059 in both remote and self.
1059 in both remote and self.
1060 Furthermore base will be updated to include the nodes that exist
1060 Furthermore base will be updated to include the nodes that exist
1061 in self and remote but whose children do not exist in self and remote.
1061 in self and remote but whose children do not exist in self and remote.
1062 If a list of heads is specified, return only nodes which are heads
1062 If a list of heads is specified, return only nodes which are heads
1063 or ancestors of these heads.
1063 or ancestors of these heads.
1064
1064
1065 All the ancestors of base are in self and in remote.
1065 All the ancestors of base are in self and in remote.
1066 All the descendants of the list returned are missing in self.
1066 All the descendants of the list returned are missing in self.
1067 (and so we know that the rest of the nodes are missing in remote, see
1067 (and so we know that the rest of the nodes are missing in remote, see
1068 outgoing)
1068 outgoing)
1069 """
1069 """
1070 m = self.changelog.nodemap
1070 m = self.changelog.nodemap
1071 search = []
1071 search = []
1072 fetch = {}
1072 fetch = {}
1073 seen = {}
1073 seen = {}
1074 seenbranch = {}
1074 seenbranch = {}
1075 if base == None:
1075 if base == None:
1076 base = {}
1076 base = {}
1077
1077
1078 if not heads:
1078 if not heads:
1079 heads = remote.heads()
1079 heads = remote.heads()
1080
1080
1081 if self.changelog.tip() == nullid:
1081 if self.changelog.tip() == nullid:
1082 base[nullid] = 1
1082 base[nullid] = 1
1083 if heads != [nullid]:
1083 if heads != [nullid]:
1084 return [nullid]
1084 return [nullid]
1085 return []
1085 return []
1086
1086
1087 # assume we're closer to the tip than the root
1087 # assume we're closer to the tip than the root
1088 # and start by examining the heads
1088 # and start by examining the heads
1089 self.ui.status(_("searching for changes\n"))
1089 self.ui.status(_("searching for changes\n"))
1090
1090
1091 unknown = []
1091 unknown = []
1092 for h in heads:
1092 for h in heads:
1093 if h not in m:
1093 if h not in m:
1094 unknown.append(h)
1094 unknown.append(h)
1095 else:
1095 else:
1096 base[h] = 1
1096 base[h] = 1
1097
1097
1098 if not unknown:
1098 if not unknown:
1099 return []
1099 return []
1100
1100
1101 req = dict.fromkeys(unknown)
1101 req = dict.fromkeys(unknown)
1102 reqcnt = 0
1102 reqcnt = 0
1103
1103
1104 # search through remote branches
1104 # search through remote branches
1105 # a 'branch' here is a linear segment of history, with four parts:
1105 # a 'branch' here is a linear segment of history, with four parts:
1106 # head, root, first parent, second parent
1106 # head, root, first parent, second parent
1107 # (a branch always has two parents (or none) by definition)
1107 # (a branch always has two parents (or none) by definition)
1108 unknown = remote.branches(unknown)
1108 unknown = remote.branches(unknown)
1109 while unknown:
1109 while unknown:
1110 r = []
1110 r = []
1111 while unknown:
1111 while unknown:
1112 n = unknown.pop(0)
1112 n = unknown.pop(0)
1113 if n[0] in seen:
1113 if n[0] in seen:
1114 continue
1114 continue
1115
1115
1116 self.ui.debug(_("examining %s:%s\n")
1116 self.ui.debug(_("examining %s:%s\n")
1117 % (short(n[0]), short(n[1])))
1117 % (short(n[0]), short(n[1])))
1118 if n[0] == nullid: # found the end of the branch
1118 if n[0] == nullid: # found the end of the branch
1119 pass
1119 pass
1120 elif n in seenbranch:
1120 elif n in seenbranch:
1121 self.ui.debug(_("branch already found\n"))
1121 self.ui.debug(_("branch already found\n"))
1122 continue
1122 continue
1123 elif n[1] and n[1] in m: # do we know the base?
1123 elif n[1] and n[1] in m: # do we know the base?
1124 self.ui.debug(_("found incomplete branch %s:%s\n")
1124 self.ui.debug(_("found incomplete branch %s:%s\n")
1125 % (short(n[0]), short(n[1])))
1125 % (short(n[0]), short(n[1])))
1126 search.append(n) # schedule branch range for scanning
1126 search.append(n) # schedule branch range for scanning
1127 seenbranch[n] = 1
1127 seenbranch[n] = 1
1128 else:
1128 else:
1129 if n[1] not in seen and n[1] not in fetch:
1129 if n[1] not in seen and n[1] not in fetch:
1130 if n[2] in m and n[3] in m:
1130 if n[2] in m and n[3] in m:
1131 self.ui.debug(_("found new changeset %s\n") %
1131 self.ui.debug(_("found new changeset %s\n") %
1132 short(n[1]))
1132 short(n[1]))
1133 fetch[n[1]] = 1 # earliest unknown
1133 fetch[n[1]] = 1 # earliest unknown
1134 for p in n[2:4]:
1134 for p in n[2:4]:
1135 if p in m:
1135 if p in m:
1136 base[p] = 1 # latest known
1136 base[p] = 1 # latest known
1137
1137
1138 for p in n[2:4]:
1138 for p in n[2:4]:
1139 if p not in req and p not in m:
1139 if p not in req and p not in m:
1140 r.append(p)
1140 r.append(p)
1141 req[p] = 1
1141 req[p] = 1
1142 seen[n[0]] = 1
1142 seen[n[0]] = 1
1143
1143
1144 if r:
1144 if r:
1145 reqcnt += 1
1145 reqcnt += 1
1146 self.ui.debug(_("request %d: %s\n") %
1146 self.ui.debug(_("request %d: %s\n") %
1147 (reqcnt, " ".join(map(short, r))))
1147 (reqcnt, " ".join(map(short, r))))
1148 for p in xrange(0, len(r), 10):
1148 for p in xrange(0, len(r), 10):
1149 for b in remote.branches(r[p:p+10]):
1149 for b in remote.branches(r[p:p+10]):
1150 self.ui.debug(_("received %s:%s\n") %
1150 self.ui.debug(_("received %s:%s\n") %
1151 (short(b[0]), short(b[1])))
1151 (short(b[0]), short(b[1])))
1152 unknown.append(b)
1152 unknown.append(b)
1153
1153
1154 # do binary search on the branches we found
1154 # do binary search on the branches we found
1155 while search:
1155 while search:
1156 n = search.pop(0)
1156 n = search.pop(0)
1157 reqcnt += 1
1157 reqcnt += 1
1158 l = remote.between([(n[0], n[1])])[0]
1158 l = remote.between([(n[0], n[1])])[0]
1159 l.append(n[1])
1159 l.append(n[1])
1160 p = n[0]
1160 p = n[0]
1161 f = 1
1161 f = 1
1162 for i in l:
1162 for i in l:
1163 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1163 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1164 if i in m:
1164 if i in m:
1165 if f <= 2:
1165 if f <= 2:
1166 self.ui.debug(_("found new branch changeset %s\n") %
1166 self.ui.debug(_("found new branch changeset %s\n") %
1167 short(p))
1167 short(p))
1168 fetch[p] = 1
1168 fetch[p] = 1
1169 base[i] = 1
1169 base[i] = 1
1170 else:
1170 else:
1171 self.ui.debug(_("narrowed branch search to %s:%s\n")
1171 self.ui.debug(_("narrowed branch search to %s:%s\n")
1172 % (short(p), short(i)))
1172 % (short(p), short(i)))
1173 search.append((p, i))
1173 search.append((p, i))
1174 break
1174 break
1175 p, f = i, f * 2
1175 p, f = i, f * 2
1176
1176
1177 # sanity check our fetch list
1177 # sanity check our fetch list
1178 for f in fetch.keys():
1178 for f in fetch.keys():
1179 if f in m:
1179 if f in m:
1180 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1180 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1181
1181
1182 if base.keys() == [nullid]:
1182 if base.keys() == [nullid]:
1183 if force:
1183 if force:
1184 self.ui.warn(_("warning: repository is unrelated\n"))
1184 self.ui.warn(_("warning: repository is unrelated\n"))
1185 else:
1185 else:
1186 raise util.Abort(_("repository is unrelated"))
1186 raise util.Abort(_("repository is unrelated"))
1187
1187
1188 self.ui.debug(_("found new changesets starting at ") +
1188 self.ui.debug(_("found new changesets starting at ") +
1189 " ".join([short(f) for f in fetch]) + "\n")
1189 " ".join([short(f) for f in fetch]) + "\n")
1190
1190
1191 self.ui.debug(_("%d total queries\n") % reqcnt)
1191 self.ui.debug(_("%d total queries\n") % reqcnt)
1192
1192
1193 return fetch.keys()
1193 return fetch.keys()
1194
1194
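# [Illustrative sketch -- not part of the original file.]  The first step of
# the discovery above (lines 1091-1099): every remote head we already have
# goes straight into `base`, everything else seeds the list of branches to
# examine.  Standalone, with the local nodemap reduced to a plain set:

def split_remote_heads(remote_heads, local_nodes):
    base, unknown = {}, []
    for h in remote_heads:
        if h in local_nodes:
            base[h] = 1
        else:
            unknown.append(h)
    return base, unknown

base, unknown = split_remote_heads(['h1', 'h2', 'h3'], {'h1', 'h3', 'x'})
assert base == {'h1': 1, 'h3': 1} and unknown == ['h2']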
1195 def findoutgoing(self, remote, base=None, heads=None, force=False):
1195 def findoutgoing(self, remote, base=None, heads=None, force=False):
1196 """Return list of nodes that are roots of subsets not in remote
1196 """Return list of nodes that are roots of subsets not in remote
1197
1197
1198 If base dict is specified, assume that these nodes and their parents
1198 If base dict is specified, assume that these nodes and their parents
1199 exist on the remote side.
1199 exist on the remote side.
1200 If a list of heads is specified, return only nodes which are heads
1200 If a list of heads is specified, return only nodes which are heads
1201 or ancestors of these heads, and return a second element which
1201 or ancestors of these heads, and return a second element which
1202 contains all remote heads which get new children.
1202 contains all remote heads which get new children.
1203 """
1203 """
1204 if base == None:
1204 if base == None:
1205 base = {}
1205 base = {}
1206 self.findincoming(remote, base, heads, force=force)
1206 self.findincoming(remote, base, heads, force=force)
1207
1207
1208 self.ui.debug(_("common changesets up to ")
1208 self.ui.debug(_("common changesets up to ")
1209 + " ".join(map(short, base.keys())) + "\n")
1209 + " ".join(map(short, base.keys())) + "\n")
1210
1210
1211 remain = dict.fromkeys(self.changelog.nodemap)
1211 remain = dict.fromkeys(self.changelog.nodemap)
1212
1212
1213 # prune everything remote has from the tree
1213 # prune everything remote has from the tree
1214 del remain[nullid]
1214 del remain[nullid]
1215 remove = base.keys()
1215 remove = base.keys()
1216 while remove:
1216 while remove:
1217 n = remove.pop(0)
1217 n = remove.pop(0)
1218 if n in remain:
1218 if n in remain:
1219 del remain[n]
1219 del remain[n]
1220 for p in self.changelog.parents(n):
1220 for p in self.changelog.parents(n):
1221 remove.append(p)
1221 remove.append(p)
1222
1222
1223 # find every node whose parents have been pruned
1223 # find every node whose parents have been pruned
1224 subset = []
1224 subset = []
1225 # find every remote head that will get new children
1225 # find every remote head that will get new children
1226 updated_heads = {}
1226 updated_heads = {}
1227 for n in remain:
1227 for n in remain:
1228 p1, p2 = self.changelog.parents(n)
1228 p1, p2 = self.changelog.parents(n)
1229 if p1 not in remain and p2 not in remain:
1229 if p1 not in remain and p2 not in remain:
1230 subset.append(n)
1230 subset.append(n)
1231 if heads:
1231 if heads:
1232 if p1 in heads:
1232 if p1 in heads:
1233 updated_heads[p1] = True
1233 updated_heads[p1] = True
1234 if p2 in heads:
1234 if p2 in heads:
1235 updated_heads[p2] = True
1235 updated_heads[p2] = True
1236
1236
1237 # this is the set of all roots we have to push
1237 # this is the set of all roots we have to push
1238 if heads:
1238 if heads:
1239 return subset, updated_heads.keys()
1239 return subset, updated_heads.keys()
1240 else:
1240 else:
1241 return subset
1241 return subset
1242
1242
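# [Illustrative sketch -- not part of the original file.]  findoutgoing()
# above prunes everything the remote side has (the `base` nodes and all of
# their ancestors) from the local graph, then keeps the nodes whose parents
# were all pruned away -- those are the roots of the outgoing subsets.
# Standalone version over a parent-pointer dict:

def outgoing_roots(parents, base):
    remain = set(parents)
    stack = list(base)
    while stack:                       # drop base and its ancestors
        n = stack.pop()
        if n in remain:
            remain.discard(n)
            stack.extend(parents[n])
    return sorted(n for n in remain
                  if not any(p in remain for p in parents[n]))

#      a -> b -> c -> d      remote has up to b, so c is the only root
parents = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['c']}
assert outgoing_roots(parents, ['b']) == ['c']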
1243 def pull(self, remote, heads=None, force=False, lock=None):
1243 def pull(self, remote, heads=None, force=False, lock=None):
1244 mylock = False
1244 mylock = False
1245 if not lock:
1245 if not lock:
1246 lock = self.lock()
1246 lock = self.lock()
1247 mylock = True
1247 mylock = True
1248
1248
1249 try:
1249 try:
1250 fetch = self.findincoming(remote, force=force)
1250 fetch = self.findincoming(remote, force=force)
1251 if fetch == [nullid]:
1251 if fetch == [nullid]:
1252 self.ui.status(_("requesting all changes\n"))
1252 self.ui.status(_("requesting all changes\n"))
1253
1253
1254 if not fetch:
1254 if not fetch:
1255 self.ui.status(_("no changes found\n"))
1255 self.ui.status(_("no changes found\n"))
1256 return 0
1256 return 0
1257
1257
1258 if heads is None:
1258 if heads is None:
1259 cg = remote.changegroup(fetch, 'pull')
1259 cg = remote.changegroup(fetch, 'pull')
1260 else:
1260 else:
1261 if 'changegroupsubset' not in remote.capabilities:
1261 if 'changegroupsubset' not in remote.capabilities:
1262 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1262 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1263 cg = remote.changegroupsubset(fetch, heads, 'pull')
1263 cg = remote.changegroupsubset(fetch, heads, 'pull')
1264 return self.addchangegroup(cg, 'pull', remote.url())
1264 return self.addchangegroup(cg, 'pull', remote.url())
1265 finally:
1265 finally:
1266 if mylock:
1266 if mylock:
1267 lock.release()
1267 lock.release()
1268
1268
1269 def push(self, remote, force=False, revs=None):
1269 def push(self, remote, force=False, revs=None):
1270 # there are two ways to push to remote repo:
1270 # there are two ways to push to remote repo:
1271 #
1271 #
1272 # addchangegroup assumes local user can lock remote
1272 # addchangegroup assumes local user can lock remote
1273 # repo (local filesystem, old ssh servers).
1273 # repo (local filesystem, old ssh servers).
1274 #
1274 #
1275 # unbundle assumes local user cannot lock remote repo (new ssh
1275 # unbundle assumes local user cannot lock remote repo (new ssh
1276 # servers, http servers).
1276 # servers, http servers).
1277
1277
1278 if remote.capable('unbundle'):
1278 if remote.capable('unbundle'):
1279 return self.push_unbundle(remote, force, revs)
1279 return self.push_unbundle(remote, force, revs)
1280 return self.push_addchangegroup(remote, force, revs)
1280 return self.push_addchangegroup(remote, force, revs)
1281
1281
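# [Illustrative sketch -- not part of the original file.]  The dispatch in
# push() above in miniature: prefer the 'unbundle' path whenever the remote
# advertises that capability, otherwise fall back to taking the remote lock
# and calling addchangegroup directly.  `capabilities` is just a set here.

def choose_push_strategy(capabilities):
    if 'unbundle' in capabilities:
        return 'push_unbundle'         # remote applies the bundle itself
    return 'push_addchangegroup'       # we must be able to lock the remote

assert choose_push_strategy({'unbundle', 'lookup'}) == 'push_unbundle'
assert choose_push_strategy(set()) == 'push_addchangegroup'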
1282 def prepush(self, remote, force, revs):
1282 def prepush(self, remote, force, revs):
1283 base = {}
1283 base = {}
1284 remote_heads = remote.heads()
1284 remote_heads = remote.heads()
1285 inc = self.findincoming(remote, base, remote_heads, force=force)
1285 inc = self.findincoming(remote, base, remote_heads, force=force)
1286 if not force and inc:
1286 if not force and inc:
1287 self.ui.warn(_("abort: unsynced remote changes!\n"))
1287 self.ui.warn(_("abort: unsynced remote changes!\n"))
1288 self.ui.status(_("(did you forget to sync?"
1288 self.ui.status(_("(did you forget to sync?"
1289 " use push -f to force)\n"))
1289 " use push -f to force)\n"))
1290 return None, 1
1290 return None, 1
1291
1291
1292 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1292 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1293 if revs is not None:
1293 if revs is not None:
1294 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1294 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1295 else:
1295 else:
1296 bases, heads = update, self.changelog.heads()
1296 bases, heads = update, self.changelog.heads()
1297
1297
1298 if not bases:
1298 if not bases:
1299 self.ui.status(_("no changes found\n"))
1299 self.ui.status(_("no changes found\n"))
1300 return None, 1
1300 return None, 1
1301 elif not force:
1301 elif not force:
1302 # FIXME we don't properly detect creation of new heads
1302 # FIXME we don't properly detect creation of new heads
1303 # in the push -r case, assume the user knows what he's doing
1303 # in the push -r case, assume the user knows what he's doing
1304 if not revs and len(remote_heads) < len(heads) \
1304 if not revs and len(remote_heads) < len(heads) \
1305 and remote_heads != [nullid]:
1305 and remote_heads != [nullid]:
1306 self.ui.warn(_("abort: push creates new remote branches!\n"))
1306 self.ui.warn(_("abort: push creates new remote branches!\n"))
1307 self.ui.status(_("(did you forget to merge?"
1307 self.ui.status(_("(did you forget to merge?"
1308 " use push -f to force)\n"))
1308 " use push -f to force)\n"))
1309 return None, 1
1309 return None, 1
1310
1310
1311 if revs is None:
1311 if revs is None:
1312 cg = self.changegroup(update, 'push')
1312 cg = self.changegroup(update, 'push')
1313 else:
1313 else:
1314 cg = self.changegroupsubset(update, revs, 'push')
1314 cg = self.changegroupsubset(update, revs, 'push')
1315 return cg, remote_heads
1315 return cg, remote_heads
1316
1316
1317 def push_addchangegroup(self, remote, force, revs):
1317 def push_addchangegroup(self, remote, force, revs):
1318 lock = remote.lock()
1318 lock = remote.lock()
1319
1319
1320 ret = self.prepush(remote, force, revs)
1320 ret = self.prepush(remote, force, revs)
1321 if ret[0] is not None:
1321 if ret[0] is not None:
1322 cg, remote_heads = ret
1322 cg, remote_heads = ret
1323 return remote.addchangegroup(cg, 'push', self.url())
1323 return remote.addchangegroup(cg, 'push', self.url())
1324 return ret[1]
1324 return ret[1]
1325
1325
1326 def push_unbundle(self, remote, force, revs):
1326 def push_unbundle(self, remote, force, revs):
1327 # local repo finds heads on server, finds out what revs it
1327 # local repo finds heads on server, finds out what revs it
1328 # must push. once revs transferred, if server finds it has
1328 # must push. once revs transferred, if server finds it has
1329 # different heads (someone else won commit/push race), server
1329 # different heads (someone else won commit/push race), server
1330 # aborts.
1330 # aborts.
1331
1331
1332 ret = self.prepush(remote, force, revs)
1332 ret = self.prepush(remote, force, revs)
1333 if ret[0] is not None:
1333 if ret[0] is not None:
1334 cg, remote_heads = ret
1334 cg, remote_heads = ret
1335 if force: remote_heads = ['force']
1335 if force: remote_heads = ['force']
1336 return remote.unbundle(cg, remote_heads, 'push')
1336 return remote.unbundle(cg, remote_heads, 'push')
1337 return ret[1]
1337 return ret[1]
1338
1338
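# [Illustrative sketch -- not part of the original file.]  The commit/push
# race described in the comment above push_unbundle(): the client sends the
# remote heads it based its bundle on, and the server refuses the bundle if
# its heads have changed in the meantime (unless the client sent 'force').
# A rough standalone sketch of that server-side check, not the wire protocol:

def server_accepts(expected_heads, current_heads):
    if expected_heads == ['force']:
        return True
    return sorted(expected_heads) == sorted(current_heads)

assert server_accepts(['h1', 'h2'], ['h2', 'h1'])
assert not server_accepts(['h1'], ['h1', 'h2'])   # someone pushed in between
assert server_accepts(['force'], ['h1', 'h2'])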
1339 def changegroupinfo(self, nodes):
1339 def changegroupinfo(self, nodes):
1340 self.ui.note(_("%d changesets found\n") % len(nodes))
1340 self.ui.note(_("%d changesets found\n") % len(nodes))
1341 if self.ui.debugflag:
1341 if self.ui.debugflag:
1342 self.ui.debug(_("List of changesets:\n"))
1342 self.ui.debug(_("List of changesets:\n"))
1343 for node in nodes:
1343 for node in nodes:
1344 self.ui.debug("%s\n" % hex(node))
1344 self.ui.debug("%s\n" % hex(node))
1345
1345
1346 def changegroupsubset(self, bases, heads, source):
1346 def changegroupsubset(self, bases, heads, source):
1347 """This function generates a changegroup consisting of all the nodes
1347 """This function generates a changegroup consisting of all the nodes
1348 that are descendants of any of the bases, and ancestors of any of
1348 that are descendants of any of the bases, and ancestors of any of
1349 the heads.
1349 the heads.
1350
1350
1351 It is fairly complex as determining which filenodes and which
1351 It is fairly complex as determining which filenodes and which
1352 manifest nodes need to be included for the changeset to be complete
1352 manifest nodes need to be included for the changeset to be complete
1353 is non-trivial.
1353 is non-trivial.
1354
1354
1355 Another wrinkle is doing the reverse, figuring out which changeset in
1355 Another wrinkle is doing the reverse, figuring out which changeset in
1356 the changegroup a particular filenode or manifestnode belongs to."""
1356 the changegroup a particular filenode or manifestnode belongs to."""
1357
1357
1358 self.hook('preoutgoing', throw=True, source=source)
1358 self.hook('preoutgoing', throw=True, source=source)
1359
1359
1360 # Set up some initial variables
1360 # Set up some initial variables
1361 # Make it easy to refer to self.changelog
1361 # Make it easy to refer to self.changelog
1362 cl = self.changelog
1362 cl = self.changelog
1363 # msng is short for missing - compute the list of changesets in this
1363 # msng is short for missing - compute the list of changesets in this
1364 # changegroup.
1364 # changegroup.
1365 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1365 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1366 self.changegroupinfo(msng_cl_lst)
1366 self.changegroupinfo(msng_cl_lst)
1367 # Some bases may turn out to be superfluous, and some heads may be
1367 # Some bases may turn out to be superfluous, and some heads may be
1368 # too. nodesbetween will return the minimal set of bases and heads
1368 # too. nodesbetween will return the minimal set of bases and heads
1369 # necessary to re-create the changegroup.
1369 # necessary to re-create the changegroup.
1370
1370
1371 # Known heads are the list of heads that it is assumed the recipient
1371 # Known heads are the list of heads that it is assumed the recipient
1372 # of this changegroup will know about.
1372 # of this changegroup will know about.
1373 knownheads = {}
1373 knownheads = {}
1374 # We assume that all parents of bases are known heads.
1374 # We assume that all parents of bases are known heads.
1375 for n in bases:
1375 for n in bases:
1376 for p in cl.parents(n):
1376 for p in cl.parents(n):
1377 if p != nullid:
1377 if p != nullid:
1378 knownheads[p] = 1
1378 knownheads[p] = 1
1379 knownheads = knownheads.keys()
1379 knownheads = knownheads.keys()
1380 if knownheads:
1380 if knownheads:
1381 # Now that we know what heads are known, we can compute which
1381 # Now that we know what heads are known, we can compute which
1382 # changesets are known. The recipient must know about all
1382 # changesets are known. The recipient must know about all
1383 # changesets required to reach the known heads from the null
1383 # changesets required to reach the known heads from the null
1384 # changeset.
1384 # changeset.
1385 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1385 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1386 junk = None
1386 junk = None
1387 # Transform the list into an ersatz set.
1387 # Transform the list into an ersatz set.
1388 has_cl_set = dict.fromkeys(has_cl_set)
1388 has_cl_set = dict.fromkeys(has_cl_set)
1389 else:
1389 else:
1390 # If there were no known heads, the recipient cannot be assumed to
1390 # If there were no known heads, the recipient cannot be assumed to
1391 # know about any changesets.
1391 # know about any changesets.
1392 has_cl_set = {}
1392 has_cl_set = {}
1393
1393
1394 # Make it easy to refer to self.manifest
1394 # Make it easy to refer to self.manifest
1395 mnfst = self.manifest
1395 mnfst = self.manifest
1396 # We don't know which manifests are missing yet
1396 # We don't know which manifests are missing yet
1397 msng_mnfst_set = {}
1397 msng_mnfst_set = {}
1398 # Nor do we know which filenodes are missing.
1398 # Nor do we know which filenodes are missing.
1399 msng_filenode_set = {}
1399 msng_filenode_set = {}
1400
1400
1401 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1401 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1402 junk = None
1402 junk = None
1403
1403
1404 # A changeset always belongs to itself, so the changenode lookup
1404 # A changeset always belongs to itself, so the changenode lookup
1405 # function for a changenode is identity.
1405 # function for a changenode is identity.
1406 def identity(x):
1406 def identity(x):
1407 return x
1407 return x
1408
1408
1409 # A function generating function. Sets up an environment for the
1409 # A function generating function. Sets up an environment for the
1410 # inner function.
1410 # inner function.
1411 def cmp_by_rev_func(revlog):
1411 def cmp_by_rev_func(revlog):
1412 # Compare two nodes by their revision number in the environment's
1412 # Compare two nodes by their revision number in the environment's
1413 # revision history. Since the revision number both represents the
1413 # revision history. Since the revision number both represents the
1414 # most efficient order to read the nodes in, and represents a
1414 # most efficient order to read the nodes in, and represents a
1415 # topological sorting of the nodes, this function is often useful.
1415 # topological sorting of the nodes, this function is often useful.
1416 def cmp_by_rev(a, b):
1416 def cmp_by_rev(a, b):
1417 return cmp(revlog.rev(a), revlog.rev(b))
1417 return cmp(revlog.rev(a), revlog.rev(b))
1418 return cmp_by_rev
1418 return cmp_by_rev
1419
1419
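# [Illustrative sketch -- not part of the original file.]  cmp_by_rev_func()
# above builds a Python 2 style cmp() comparator closed over one revlog, so
# node lists can be sorted into revision (and therefore topological) order.
# The same idea with the revlog reduced to a node -> rev dict, written with a
# key function so it also runs on current Python:

def sort_by_rev(nodes, rev_of):
    return sorted(nodes, key=lambda n: rev_of[n])

rev_of = {'n-late': 7, 'n-early': 2, 'n-mid': 5}
assert sort_by_rev(['n-late', 'n-early', 'n-mid'], rev_of) == \
       ['n-early', 'n-mid', 'n-late']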
1420 # If we determine that a particular file or manifest node must be a
1420 # If we determine that a particular file or manifest node must be a
1421 # node that the recipient of the changegroup will already have, we can
1421 # node that the recipient of the changegroup will already have, we can
1422 # also assume the recipient will have all the parents. This function
1422 # also assume the recipient will have all the parents. This function
1423 # prunes them from the set of missing nodes.
1423 # prunes them from the set of missing nodes.
1424 def prune_parents(revlog, hasset, msngset):
1424 def prune_parents(revlog, hasset, msngset):
1425 haslst = hasset.keys()
1425 haslst = hasset.keys()
1426 haslst.sort(cmp_by_rev_func(revlog))
1426 haslst.sort(cmp_by_rev_func(revlog))
1427 for node in haslst:
1427 for node in haslst:
1428 parentlst = [p for p in revlog.parents(node) if p != nullid]
1428 parentlst = [p for p in revlog.parents(node) if p != nullid]
1429 while parentlst:
1429 while parentlst:
1430 n = parentlst.pop()
1430 n = parentlst.pop()
1431 if n not in hasset:
1431 if n not in hasset:
1432 hasset[n] = 1
1432 hasset[n] = 1
1433 p = [p for p in revlog.parents(n) if p != nullid]
1433 p = [p for p in revlog.parents(n) if p != nullid]
1434 parentlst.extend(p)
1434 parentlst.extend(p)
1435 for n in hasset:
1435 for n in hasset:
1436 msngset.pop(n, None)
1436 msngset.pop(n, None)
1437
1437
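# [Illustrative sketch -- not part of the original file.]  The idea behind
# prune_parents() above: once a node is known to be present on the receiving
# side, all of its ancestors must be present too, so they are moved out of
# the "missing" set.  Standalone version over a parent-pointer dict:

def prune_known_ancestors(parents, hasset, msngset):
    for node in list(hasset):
        stack = list(parents[node])
        while stack:
            n = stack.pop()
            if n not in hasset:
                hasset.add(n)
                stack.extend(parents[n])
    for n in hasset:
        msngset.pop(n, None)

parents = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['c']}
missing = {'a': 1, 'b': 1, 'c': 1, 'd': 1}
prune_known_ancestors(parents, {'c'}, missing)
assert sorted(missing) == ['d']                # only d is still missing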
1438 # This is a function generating function used to set up an environment
1438 # This is a function generating function used to set up an environment
1439 # for the inner function to execute in.
1439 # for the inner function to execute in.
1440 def manifest_and_file_collector(changedfileset):
1440 def manifest_and_file_collector(changedfileset):
1441 # This is an information gathering function that gathers
1441 # This is an information gathering function that gathers
1442 # information from each changeset node that goes out as part of
1442 # information from each changeset node that goes out as part of
1443 # the changegroup. The information gathered is a list of which
1443 # the changegroup. The information gathered is a list of which
1444 # manifest nodes are potentially required (the recipient may
1444 # manifest nodes are potentially required (the recipient may
1445 # already have them) and total list of all files which were
1445 # already have them) and total list of all files which were
1446 # changed in any changeset in the changegroup.
1446 # changed in any changeset in the changegroup.
1447 #
1447 #
1448 # We also remember the first changenode we saw each manifest
1448 # We also remember the first changenode we saw each manifest
1449 # referenced by, so we can later determine which changenode 'owns'
1449 # referenced by, so we can later determine which changenode 'owns'
1450 # the manifest.
1450 # the manifest.
1451 def collect_manifests_and_files(clnode):
1451 def collect_manifests_and_files(clnode):
1452 c = cl.read(clnode)
1452 c = cl.read(clnode)
1453 for f in c[3]:
1453 for f in c[3]:
1454 # This is to make sure we only have one instance of each
1454 # This is to make sure we only have one instance of each
1455 # filename string for each filename.
1455 # filename string for each filename.
1456 changedfileset.setdefault(f, f)
1456 changedfileset.setdefault(f, f)
1457 msng_mnfst_set.setdefault(c[0], clnode)
1457 msng_mnfst_set.setdefault(c[0], clnode)
1458 return collect_manifests_and_files
1458 return collect_manifests_and_files
1459
1459
1460 # Figure out which manifest nodes (of the ones we think might be part
1460 # Figure out which manifest nodes (of the ones we think might be part
1461 # of the changegroup) the recipient must know about and remove them
1461 # of the changegroup) the recipient must know about and remove them
1462 # from the changegroup.
1462 # from the changegroup.
1463 def prune_manifests():
1463 def prune_manifests():
1464 has_mnfst_set = {}
1464 has_mnfst_set = {}
1465 for n in msng_mnfst_set:
1465 for n in msng_mnfst_set:
1466 # If a 'missing' manifest thinks it belongs to a changenode
1466 # If a 'missing' manifest thinks it belongs to a changenode
1467 # the recipient is assumed to have, obviously the recipient
1467 # the recipient is assumed to have, obviously the recipient
1468 # must have that manifest.
1468 # must have that manifest.
1469 linknode = cl.node(mnfst.linkrev(n))
1469 linknode = cl.node(mnfst.linkrev(n))
1470 if linknode in has_cl_set:
1470 if linknode in has_cl_set:
1471 has_mnfst_set[n] = 1
1471 has_mnfst_set[n] = 1
1472 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1472 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1473
1473
1474 # Use the information collected in collect_manifests_and_files to say
1474 # Use the information collected in collect_manifests_and_files to say
1475 # which changenode any manifestnode belongs to.
1475 # which changenode any manifestnode belongs to.
1476 def lookup_manifest_link(mnfstnode):
1476 def lookup_manifest_link(mnfstnode):
1477 return msng_mnfst_set[mnfstnode]
1477 return msng_mnfst_set[mnfstnode]
1478
1478
1479 # A function generating function that sets up the initial environment
1479 # A function generating function that sets up the initial environment
1480 # for the inner function.
1480 # for the inner function.
1481 def filenode_collector(changedfiles):
1481 def filenode_collector(changedfiles):
1482 next_rev = [0]
1482 next_rev = [0]
1483 # This gathers information from each manifestnode included in the
1483 # This gathers information from each manifestnode included in the
1484 # changegroup about which filenodes the manifest node references
1484 # changegroup about which filenodes the manifest node references
1485 # so we can include those in the changegroup too.
1485 # so we can include those in the changegroup too.
1486 #
1486 #
1487 # It also remembers which changenode each filenode belongs to. It
1487 # It also remembers which changenode each filenode belongs to. It
1488 # does this by assuming that a filenode belongs to the changenode that
1488 # does this by assuming that a filenode belongs to the changenode that
1489 # the first manifest referencing it belongs to.
1489 # the first manifest referencing it belongs to.
1490 def collect_msng_filenodes(mnfstnode):
1490 def collect_msng_filenodes(mnfstnode):
1491 r = mnfst.rev(mnfstnode)
1491 r = mnfst.rev(mnfstnode)
1492 if r == next_rev[0]:
1492 if r == next_rev[0]:
1493 # If the last rev we looked at was the one just previous,
1493 # If the last rev we looked at was the one just previous,
1494 # we only need to see a diff.
1494 # we only need to see a diff.
1495 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1495 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1496 # For each line in the delta
1496 # For each line in the delta
1497 for dline in delta.splitlines():
1497 for dline in delta.splitlines():
1498 # get the filename and filenode for that line
1498 # get the filename and filenode for that line
1499 f, fnode = dline.split('\0')
1499 f, fnode = dline.split('\0')
1500 fnode = bin(fnode[:40])
1500 fnode = bin(fnode[:40])
1501 f = changedfiles.get(f, None)
1501 f = changedfiles.get(f, None)
1502 # And if the file is in the list of files we care
1502 # And if the file is in the list of files we care
1503 # about.
1503 # about.
1504 if f is not None:
1504 if f is not None:
1505 # Get the changenode this manifest belongs to
1505 # Get the changenode this manifest belongs to
1506 clnode = msng_mnfst_set[mnfstnode]
1506 clnode = msng_mnfst_set[mnfstnode]
1507 # Create the set of filenodes for the file if
1507 # Create the set of filenodes for the file if
1508 # there isn't one already.
1508 # there isn't one already.
1509 ndset = msng_filenode_set.setdefault(f, {})
1509 ndset = msng_filenode_set.setdefault(f, {})
1510 # And set the filenode's changelog node to the
1510 # And set the filenode's changelog node to the
1511 # manifest's if it hasn't been set already.
1511 # manifest's if it hasn't been set already.
1512 ndset.setdefault(fnode, clnode)
1512 ndset.setdefault(fnode, clnode)
1513 else:
1513 else:
1514 # Otherwise we need a full manifest.
1514 # Otherwise we need a full manifest.
1515 m = mnfst.read(mnfstnode)
1515 m = mnfst.read(mnfstnode)
1516 # For every file we care about.
1516 # For every file we care about.
1517 for f in changedfiles:
1517 for f in changedfiles:
1518 fnode = m.get(f, None)
1518 fnode = m.get(f, None)
1519 # If it's in the manifest
1519 # If it's in the manifest
1520 if fnode is not None:
1520 if fnode is not None:
1521 # See comments above.
1521 # See comments above.
1522 clnode = msng_mnfst_set[mnfstnode]
1522 clnode = msng_mnfst_set[mnfstnode]
1523 ndset = msng_filenode_set.setdefault(f, {})
1523 ndset = msng_filenode_set.setdefault(f, {})
1524 ndset.setdefault(fnode, clnode)
1524 ndset.setdefault(fnode, clnode)
1525 # Remember the revision we hope to see next.
1525 # Remember the revision we hope to see next.
1526 next_rev[0] = r + 1
1526 next_rev[0] = r + 1
1527 return collect_msng_filenodes
1527 return collect_msng_filenodes
1528
1528
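# [Illustrative sketch -- not part of the original file.]  The delta parsed in
# collect_msng_filenodes() above is a series of manifest lines of the form
# "<filename>\0<40 hex chars of filenode>[flags]".  Pulling the two fields out
# of one such line, as lines 1498-1500 do (the real code then turns the hex
# into binary with bin()):

def parse_manifest_line(dline):
    fname, rest = dline.split('\0')
    return fname, rest[:40]            # anything after the hash is a flag

line = 'dir/file.txt\0' + 'ab' * 20 + 'x'     # trailing 'x' plays an exec flag
assert parse_manifest_line(line) == ('dir/file.txt', 'ab' * 20)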
1529 # We have a list of filenodes we think we need for a file; let's remove
1529 # We have a list of filenodes we think we need for a file; let's remove
1530 # all those we know the recipient must have.
1530 # all those we know the recipient must have.
1531 def prune_filenodes(f, filerevlog):
1531 def prune_filenodes(f, filerevlog):
1532 msngset = msng_filenode_set[f]
1532 msngset = msng_filenode_set[f]
1533 hasset = {}
1533 hasset = {}
1534 # If a 'missing' filenode thinks it belongs to a changenode we
1534 # If a 'missing' filenode thinks it belongs to a changenode we
1535 # assume the recipient must have, then the recipient must have
1535 # assume the recipient must have, then the recipient must have
1536 # that filenode.
1536 # that filenode.
1537 for n in msngset:
1537 for n in msngset:
1538 clnode = cl.node(filerevlog.linkrev(n))
1538 clnode = cl.node(filerevlog.linkrev(n))
1539 if clnode in has_cl_set:
1539 if clnode in has_cl_set:
1540 hasset[n] = 1
1540 hasset[n] = 1
1541 prune_parents(filerevlog, hasset, msngset)
1541 prune_parents(filerevlog, hasset, msngset)
1542
1542
1543 # A function generating function that sets up a context for the
1543 # A function generating function that sets up a context for the
1544 # inner function.
1544 # inner function.
1545 def lookup_filenode_link_func(fname):
1545 def lookup_filenode_link_func(fname):
1546 msngset = msng_filenode_set[fname]
1546 msngset = msng_filenode_set[fname]
1547 # Lookup the changenode the filenode belongs to.
1547 # Lookup the changenode the filenode belongs to.
1548 def lookup_filenode_link(fnode):
1548 def lookup_filenode_link(fnode):
1549 return msngset[fnode]
1549 return msngset[fnode]
1550 return lookup_filenode_link
1550 return lookup_filenode_link
1551
1551
1552 # Now that we have all these utility functions to help out and
1552 # Now that we have all these utility functions to help out and
1553 # logically divide up the task, generate the group.
1553 # logically divide up the task, generate the group.
1554 def gengroup():
1554 def gengroup():
1555 # The set of changed files starts empty.
1555 # The set of changed files starts empty.
1556 changedfiles = {}
1556 changedfiles = {}
1557 # Create a changenode group generator that will call our functions
1557 # Create a changenode group generator that will call our functions
1558 # back to lookup the owning changenode and collect information.
1558 # back to lookup the owning changenode and collect information.
1559 group = cl.group(msng_cl_lst, identity,
1559 group = cl.group(msng_cl_lst, identity,
1560 manifest_and_file_collector(changedfiles))
1560 manifest_and_file_collector(changedfiles))
1561 for chnk in group:
1561 for chnk in group:
1562 yield chnk
1562 yield chnk
1563
1563
1564 # The list of manifests has been collected by the generator
1564 # The list of manifests has been collected by the generator
1565 # calling our functions back.
1565 # calling our functions back.
1566 prune_manifests()
1566 prune_manifests()
1567 msng_mnfst_lst = msng_mnfst_set.keys()
1567 msng_mnfst_lst = msng_mnfst_set.keys()
1568 # Sort the manifestnodes by revision number.
1568 # Sort the manifestnodes by revision number.
1569 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1569 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1570 # Create a generator for the manifestnodes that calls our lookup
1570 # Create a generator for the manifestnodes that calls our lookup
1571 # and data collection functions back.
1571 # and data collection functions back.
1572 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1572 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1573 filenode_collector(changedfiles))
1573 filenode_collector(changedfiles))
1574 for chnk in group:
1574 for chnk in group:
1575 yield chnk
1575 yield chnk
1576
1576
1577 # These are no longer needed, dereference and toss the memory for
1577 # These are no longer needed, dereference and toss the memory for
1578 # them.
1578 # them.
1579 msng_mnfst_lst = None
1579 msng_mnfst_lst = None
1580 msng_mnfst_set.clear()
1580 msng_mnfst_set.clear()
1581
1581
1582 changedfiles = changedfiles.keys()
1582 changedfiles = changedfiles.keys()
1583 changedfiles.sort()
1583 changedfiles.sort()
1584 # Go through all our files in order sorted by name.
1584 # Go through all our files in order sorted by name.
1585 for fname in changedfiles:
1585 for fname in changedfiles:
1586 filerevlog = self.file(fname)
1586 filerevlog = self.file(fname)
1587 # Toss out the filenodes that the recipient isn't really
1587 # Toss out the filenodes that the recipient isn't really
1588 # missing.
1588 # missing.
1589 if msng_filenode_set.has_key(fname):
1589 if msng_filenode_set.has_key(fname):
1590 prune_filenodes(fname, filerevlog)
1590 prune_filenodes(fname, filerevlog)
1591 msng_filenode_lst = msng_filenode_set[fname].keys()
1591 msng_filenode_lst = msng_filenode_set[fname].keys()
1592 else:
1592 else:
1593 msng_filenode_lst = []
1593 msng_filenode_lst = []
1594 # If any filenodes are left, generate the group for them,
1594 # If any filenodes are left, generate the group for them,
1595 # otherwise don't bother.
1595 # otherwise don't bother.
1596 if len(msng_filenode_lst) > 0:
1596 if len(msng_filenode_lst) > 0:
1597 yield changegroup.genchunk(fname)
1597 yield changegroup.genchunk(fname)
1598 # Sort the filenodes by their revision #
1598 # Sort the filenodes by their revision #
1599 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1599 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1600 # Create a group generator and only pass in a changenode
1600 # Create a group generator and only pass in a changenode
1601 # lookup function as we need to collect no information
1601 # lookup function as we need to collect no information
1602 # from filenodes.
1602 # from filenodes.
1603 group = filerevlog.group(msng_filenode_lst,
1603 group = filerevlog.group(msng_filenode_lst,
1604 lookup_filenode_link_func(fname))
1604 lookup_filenode_link_func(fname))
1605 for chnk in group:
1605 for chnk in group:
1606 yield chnk
1606 yield chnk
1607 if msng_filenode_set.has_key(fname):
1607 if msng_filenode_set.has_key(fname):
1608 # Don't need this anymore, toss it to free memory.
1608 # Don't need this anymore, toss it to free memory.
1609 del msng_filenode_set[fname]
1609 del msng_filenode_set[fname]
1610 # Signal that no more groups are left.
1610 # Signal that no more groups are left.
1611 yield changegroup.closechunk()
1611 yield changegroup.closechunk()
1612
1612
1613 if msng_cl_lst:
1613 if msng_cl_lst:
1614 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1614 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1615
1615
1616 return util.chunkbuffer(gengroup())
1616 return util.chunkbuffer(gengroup())
1617
1617
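Editor's note: the framing the generator above emits is worth spelling out. For every file with missing revisions it yields a chunk carrying the file name (changegroup.genchunk(fname)), then that file's delta group, and a final changegroup.closechunk() signals that no groups are left. A minimal, self-contained sketch of the same layout follows; the 4-byte length prefix is only a stand-in for the real chunk encoding, and fake_filegroups is a hypothetical helper, not a mercurial API.

    import struct

    def genchunk(data):
        # stand-in framing: 4-byte big-endian length header, then the payload
        return struct.pack(">l", len(data) + 4) + data

    def closechunk():
        # a zero-length header terminates a group (or, at the end, the stream)
        return struct.pack(">l", 0)

    def fake_filegroups(files):
        # files: list of (filename, [delta chunks]); mirrors the per-file loop above
        for fname, deltas in files:
            yield genchunk(fname)        # announce which file follows
            for d in deltas:
                yield genchunk(d)        # that file's delta group
            yield closechunk()           # end of this file's group (revlog.group does this)
        yield closechunk()               # no more file groups

    stream = "".join(fake_filegroups([("a.txt", ["delta-a"]), ("b.txt", ["delta-b"])]))
    print len(stream)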
1618 def changegroup(self, basenodes, source):
1618 def changegroup(self, basenodes, source):
1619 """Generate a changegroup of all nodes that we have that a recipient
1619 """Generate a changegroup of all nodes that we have that a recipient
1620 doesn't.
1620 doesn't.
1621
1621
1622 This is much easier than the previous function as we can assume that
1622 This is much easier than the previous function as we can assume that
1623 the recipient has any changenode we aren't sending them."""
1623 the recipient has any changenode we aren't sending them."""
1624
1624
1625 self.hook('preoutgoing', throw=True, source=source)
1625 self.hook('preoutgoing', throw=True, source=source)
1626
1626
1627 cl = self.changelog
1627 cl = self.changelog
1628 nodes = cl.nodesbetween(basenodes, None)[0]
1628 nodes = cl.nodesbetween(basenodes, None)[0]
1629 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1629 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1630 self.changegroupinfo(nodes)
1630 self.changegroupinfo(nodes)
1631
1631
1632 def identity(x):
1632 def identity(x):
1633 return x
1633 return x
1634
1634
1635 def gennodelst(revlog):
1635 def gennodelst(revlog):
1636 for r in xrange(0, revlog.count()):
1636 for r in xrange(0, revlog.count()):
1637 n = revlog.node(r)
1637 n = revlog.node(r)
1638 if revlog.linkrev(n) in revset:
1638 if revlog.linkrev(n) in revset:
1639 yield n
1639 yield n
1640
1640
1641 def changed_file_collector(changedfileset):
1641 def changed_file_collector(changedfileset):
1642 def collect_changed_files(clnode):
1642 def collect_changed_files(clnode):
1643 c = cl.read(clnode)
1643 c = cl.read(clnode)
1644 for fname in c[3]:
1644 for fname in c[3]:
1645 changedfileset[fname] = 1
1645 changedfileset[fname] = 1
1646 return collect_changed_files
1646 return collect_changed_files
1647
1647
1648 def lookuprevlink_func(revlog):
1648 def lookuprevlink_func(revlog):
1649 def lookuprevlink(n):
1649 def lookuprevlink(n):
1650 return cl.node(revlog.linkrev(n))
1650 return cl.node(revlog.linkrev(n))
1651 return lookuprevlink
1651 return lookuprevlink
1652
1652
1653 def gengroup():
1653 def gengroup():
1654 # construct a list of all changed files
1654 # construct a list of all changed files
1655 changedfiles = {}
1655 changedfiles = {}
1656
1656
1657 for chnk in cl.group(nodes, identity,
1657 for chnk in cl.group(nodes, identity,
1658 changed_file_collector(changedfiles)):
1658 changed_file_collector(changedfiles)):
1659 yield chnk
1659 yield chnk
1660 changedfiles = changedfiles.keys()
1660 changedfiles = changedfiles.keys()
1661 changedfiles.sort()
1661 changedfiles.sort()
1662
1662
1663 mnfst = self.manifest
1663 mnfst = self.manifest
1664 nodeiter = gennodelst(mnfst)
1664 nodeiter = gennodelst(mnfst)
1665 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1665 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1666 yield chnk
1666 yield chnk
1667
1667
1668 for fname in changedfiles:
1668 for fname in changedfiles:
1669 filerevlog = self.file(fname)
1669 filerevlog = self.file(fname)
1670 nodeiter = gennodelst(filerevlog)
1670 nodeiter = gennodelst(filerevlog)
1671 nodeiter = list(nodeiter)
1671 nodeiter = list(nodeiter)
1672 if nodeiter:
1672 if nodeiter:
1673 yield changegroup.genchunk(fname)
1673 yield changegroup.genchunk(fname)
1674 lookup = lookuprevlink_func(filerevlog)
1674 lookup = lookuprevlink_func(filerevlog)
1675 for chnk in filerevlog.group(nodeiter, lookup):
1675 for chnk in filerevlog.group(nodeiter, lookup):
1676 yield chnk
1676 yield chnk
1677
1677
1678 yield changegroup.closechunk()
1678 yield changegroup.closechunk()
1679
1679
1680 if nodes:
1680 if nodes:
1681 self.hook('outgoing', node=hex(nodes[0]), source=source)
1681 self.hook('outgoing', node=hex(nodes[0]), source=source)
1682
1682
1683 return util.chunkbuffer(gengroup())
1683 return util.chunkbuffer(gengroup())
1684
1684
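Editor's note: changegroup() leans on one small idiom: revset is a dict keyed by the changelog revisions being sent, and gennodelst() walks any revlog, keeping only nodes whose linkrev lands in that set. A toy version of the same filter, with a stand-in revlog class (hypothetical; only the count/node/linkrev interface mirrors the code above):

    class toyrevlog(object):
        """stand-in revlog: a list of (node, linkrev) pairs indexed by revision"""
        def __init__(self, entries):
            self.entries = entries
        def count(self):
            return len(self.entries)
        def node(self, rev):
            return self.entries[rev][0]
        def linkrev(self, node):
            return dict(self.entries)[node]

    revset = dict.fromkeys([2, 3])          # changelog revs selected for the bundle

    def gennodelst(revlog):
        # same shape as the helper above: keep nodes linked to a selected changeset
        for r in xrange(0, revlog.count()):
            n = revlog.node(r)
            if revlog.linkrev(n) in revset:
                yield n

    rl = toyrevlog([("n0", 0), ("n1", 2), ("n2", 3)])
    print list(gennodelst(rl))              # -> ['n1', 'n2']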
1685 def addchangegroup(self, source, srctype, url):
1685 def addchangegroup(self, source, srctype, url):
1686 """add changegroup to repo.
1686 """add changegroup to repo.
1687 returns number of heads modified or added + 1."""
1687 returns number of heads modified or added + 1."""
1688
1688
1689 def csmap(x):
1689 def csmap(x):
1690 self.ui.debug(_("add changeset %s\n") % short(x))
1690 self.ui.debug(_("add changeset %s\n") % short(x))
1691 return cl.count()
1691 return cl.count()
1692
1692
1693 def revmap(x):
1693 def revmap(x):
1694 return cl.rev(x)
1694 return cl.rev(x)
1695
1695
1696 if not source:
1696 if not source:
1697 return 0
1697 return 0
1698
1698
1699 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1699 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1700
1700
1701 changesets = files = revisions = 0
1701 changesets = files = revisions = 0
1702
1702
1703 tr = self.transaction()
1703 tr = self.transaction()
1704
1704
1705 # write changelog data to temp files so concurrent readers will not see
1705 # write changelog data to temp files so concurrent readers will not see
1706 # inconsistent view
1706 # inconsistent view
1707 cl = None
1707 cl = None
1708 try:
1708 try:
1709 cl = appendfile.appendchangelog(self.sopener,
1709 cl = appendfile.appendchangelog(self.sopener,
1710 self.changelog.version)
1710 self.changelog.version)
1711
1711
1712 oldheads = len(cl.heads())
1712 oldheads = len(cl.heads())
1713
1713
1714 # pull off the changeset group
1714 # pull off the changeset group
1715 self.ui.status(_("adding changesets\n"))
1715 self.ui.status(_("adding changesets\n"))
1716 cor = cl.count() - 1
1716 cor = cl.count() - 1
1717 chunkiter = changegroup.chunkiter(source)
1717 chunkiter = changegroup.chunkiter(source)
1718 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1718 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1719 raise util.Abort(_("received changelog group is empty"))
1719 raise util.Abort(_("received changelog group is empty"))
1720 cnr = cl.count() - 1
1720 cnr = cl.count() - 1
1721 changesets = cnr - cor
1721 changesets = cnr - cor
1722
1722
1723 # pull off the manifest group
1723 # pull off the manifest group
1724 self.ui.status(_("adding manifests\n"))
1724 self.ui.status(_("adding manifests\n"))
1725 chunkiter = changegroup.chunkiter(source)
1725 chunkiter = changegroup.chunkiter(source)
1726 # no need to check for empty manifest group here:
1726 # no need to check for empty manifest group here:
1727 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1727 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1728 # no new manifest will be created and the manifest group will
1728 # no new manifest will be created and the manifest group will
1729 # be empty during the pull
1729 # be empty during the pull
1730 self.manifest.addgroup(chunkiter, revmap, tr)
1730 self.manifest.addgroup(chunkiter, revmap, tr)
1731
1731
1732 # process the files
1732 # process the files
1733 self.ui.status(_("adding file changes\n"))
1733 self.ui.status(_("adding file changes\n"))
1734 while 1:
1734 while 1:
1735 f = changegroup.getchunk(source)
1735 f = changegroup.getchunk(source)
1736 if not f:
1736 if not f:
1737 break
1737 break
1738 self.ui.debug(_("adding %s revisions\n") % f)
1738 self.ui.debug(_("adding %s revisions\n") % f)
1739 fl = self.file(f)
1739 fl = self.file(f)
1740 o = fl.count()
1740 o = fl.count()
1741 chunkiter = changegroup.chunkiter(source)
1741 chunkiter = changegroup.chunkiter(source)
1742 if fl.addgroup(chunkiter, revmap, tr) is None:
1742 if fl.addgroup(chunkiter, revmap, tr) is None:
1743 raise util.Abort(_("received file revlog group is empty"))
1743 raise util.Abort(_("received file revlog group is empty"))
1744 revisions += fl.count() - o
1744 revisions += fl.count() - o
1745 files += 1
1745 files += 1
1746
1746
1747 cl.writedata()
1747 cl.writedata()
1748 finally:
1748 finally:
1749 if cl:
1749 if cl:
1750 cl.cleanup()
1750 cl.cleanup()
1751
1751
1752 # make changelog see real files again
1752 # make changelog see real files again
1753 self.changelog = changelog.changelog(self.sopener,
1753 self.changelog = changelog.changelog(self.sopener,
1754 self.changelog.version)
1754 self.changelog.version)
1755 self.changelog.checkinlinesize(tr)
1755 self.changelog.checkinlinesize(tr)
1756
1756
1757 newheads = len(self.changelog.heads())
1757 newheads = len(self.changelog.heads())
1758 heads = ""
1758 heads = ""
1759 if oldheads and newheads != oldheads:
1759 if oldheads and newheads != oldheads:
1760 heads = _(" (%+d heads)") % (newheads - oldheads)
1760 heads = _(" (%+d heads)") % (newheads - oldheads)
1761
1761
1762 self.ui.status(_("added %d changesets"
1762 self.ui.status(_("added %d changesets"
1763 " with %d changes to %d files%s\n")
1763 " with %d changes to %d files%s\n")
1764 % (changesets, revisions, files, heads))
1764 % (changesets, revisions, files, heads))
1765
1765
1766 if changesets > 0:
1766 if changesets > 0:
1767 self.hook('pretxnchangegroup', throw=True,
1767 self.hook('pretxnchangegroup', throw=True,
1768 node=hex(self.changelog.node(cor+1)), source=srctype,
1768 node=hex(self.changelog.node(cor+1)), source=srctype,
1769 url=url)
1769 url=url)
1770
1770
1771 tr.close()
1771 tr.close()
1772
1772
1773 if changesets > 0:
1773 if changesets > 0:
1774 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1774 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1775 source=srctype, url=url)
1775 source=srctype, url=url)
1776
1776
1777 for i in xrange(cor + 1, cnr + 1):
1777 for i in xrange(cor + 1, cnr + 1):
1778 self.hook("incoming", node=hex(self.changelog.node(i)),
1778 self.hook("incoming", node=hex(self.changelog.node(i)),
1779 source=srctype, url=url)
1779 source=srctype, url=url)
1780
1780
1781 return newheads - oldheads + 1
1781 return newheads - oldheads + 1
1782
1782
1783
1783
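Editor's note: addchangegroup() reports "number of heads modified or added + 1": an empty source returns 0, and otherwise modheads - 1 is the net change in the repository's head count (which can also be negative when incoming changesets merge existing heads). A small hedged sketch of interpreting that value; describe_modheads is illustrative, not code from this module.

    def describe_modheads(modheads):
        # modheads: the value addchangegroup() above returns
        # (newheads - oldheads + 1, or 0 for an empty source)
        if modheads < 1:
            return "no changesets, or fewer heads than before"
        if modheads == 1:
            return "changesets added, number of heads unchanged"
        return "changesets added, %d new head(s)" % (modheads - 1)

    print describe_modheads(0)   # empty changegroup source
    print describe_modheads(1)   # linear history added
    print describe_modheads(3)   # two more heads than before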
1784 def stream_in(self, remote):
1784 def stream_in(self, remote):
1785 fp = remote.stream_out()
1785 fp = remote.stream_out()
1786 resp = int(fp.readline())
1786 l = fp.readline()
1787 try:
1788 resp = int(l)
1789 except ValueError:
1790 raise util.UnexpectedOutput(
1791 _('Unexpected response from remote server:'), l)
1787 if resp != 0:
1792 if resp != 0:
1788 raise util.Abort(_('operation forbidden by server'))
1793 raise util.Abort(_('operation forbidden by server'))
1789 self.ui.status(_('streaming all changes\n'))
1794 self.ui.status(_('streaming all changes\n'))
1790 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1795 l = fp.readline()
1796 try:
1797 total_files, total_bytes = map(int, l.split(' ', 1))
1798 except (ValueError, TypeError):
1799 raise util.UnexpectedOutput(
1800 _('Unexpected response from remote server:'), l)
1791 self.ui.status(_('%d files to transfer, %s of data\n') %
1801 self.ui.status(_('%d files to transfer, %s of data\n') %
1792 (total_files, util.bytecount(total_bytes)))
1802 (total_files, util.bytecount(total_bytes)))
1793 start = time.time()
1803 start = time.time()
1794 for i in xrange(total_files):
1804 for i in xrange(total_files):
1795 name, size = fp.readline().split('\0', 1)
1805 l = fp.readline()
1806 try:
1807 name, size = l.split('\0', 1)
1796 size = int(size)
1808 size = int(size)
1809 except (ValueError, TypeError):
1810 raise util.UnexpectedOutput(
1811 _('Unexpected response from remote server:'), l)
1797 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1812 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1798 ofp = self.sopener(name, 'w')
1813 ofp = self.sopener(name, 'w')
1799 for chunk in util.filechunkiter(fp, limit=size):
1814 for chunk in util.filechunkiter(fp, limit=size):
1800 ofp.write(chunk)
1815 ofp.write(chunk)
1801 ofp.close()
1816 ofp.close()
1802 elapsed = time.time() - start
1817 elapsed = time.time() - start
1803 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1818 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1804 (util.bytecount(total_bytes), elapsed,
1819 (util.bytecount(total_bytes), elapsed,
1805 util.bytecount(total_bytes / elapsed)))
1820 util.bytecount(total_bytes / elapsed)))
1806 self.reload()
1821 self.reload()
1807 return len(self.heads()) + 1
1822 return len(self.heads()) + 1
1808
1823
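Editor's note: the pattern this change introduces in stream_in() is: read one protocol line, try to parse it, and on failure raise util.UnexpectedOutput with the offending line attached instead of letting a bare ValueError escape. A standalone sketch of that pattern, using a local stand-in class rather than importing mercurial.util:

    class UnexpectedOutput(Exception):
        """stand-in for util.UnexpectedOutput (added as an Abort subclass below)"""

    def parse_filecount(l):
        # mirrors the 'total_files total_bytes' line parsing in stream_in() above
        try:
            total_files, total_bytes = map(int, l.split(' ', 1))
        except (ValueError, TypeError):
            raise UnexpectedOutput('Unexpected response from remote server:', l)
        return total_files, total_bytes

    print parse_filecount('12 34567')             # -> (12, 34567)
    try:
        parse_filecount('<html>500 Internal Server Error</html>')
    except UnexpectedOutput, inst:
        print 'bad server response: %r' % (inst.args,)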
1809 def clone(self, remote, heads=[], stream=False):
1824 def clone(self, remote, heads=[], stream=False):
1810 '''clone remote repository.
1825 '''clone remote repository.
1811
1826
1812 keyword arguments:
1827 keyword arguments:
1813 heads: list of revs to clone (forces use of pull)
1828 heads: list of revs to clone (forces use of pull)
1814 stream: use streaming clone if possible'''
1829 stream: use streaming clone if possible'''
1815
1830
1816 # now, all clients that can request uncompressed clones can
1831 # now, all clients that can request uncompressed clones can
1817 # read repo formats supported by all servers that can serve
1832 # read repo formats supported by all servers that can serve
1818 # them.
1833 # them.
1819
1834
1820 # if revlog format changes, client will have to check version
1835 # if revlog format changes, client will have to check version
1821 # and format flags on "stream" capability, and use
1836 # and format flags on "stream" capability, and use
1822 # uncompressed only if compatible.
1837 # uncompressed only if compatible.
1823
1838
1824 if stream and not heads and remote.capable('stream'):
1839 if stream and not heads and remote.capable('stream'):
1825 return self.stream_in(remote)
1840 return self.stream_in(remote)
1826 return self.pull(remote, heads)
1841 return self.pull(remote, heads)
1827
1842
1828 # used to avoid circular references so destructors work
1843 # used to avoid circular references so destructors work
1829 def aftertrans(base):
1844 def aftertrans(base):
1830 p = base
1845 p = base
1831 def a():
1846 def a():
1832 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1847 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1833 util.rename(os.path.join(p, "journal.dirstate"),
1848 util.rename(os.path.join(p, "journal.dirstate"),
1834 os.path.join(p, "undo.dirstate"))
1849 os.path.join(p, "undo.dirstate"))
1835 return a
1850 return a
1836
1851
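Editor's note: aftertrans() exists purely to break a reference cycle: the callback it returns closes over the repository path string, not the repository object, so the transaction holding it never keeps the repo alive and destructors still run. A throwaway illustration of the same closure, simplified to a single rename on a temporary directory (not the real transaction wiring):

    import os
    import tempfile

    def aftertrans(base):
        p = base                       # capture only the path string
        def a():
            os.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
        return a

    d = tempfile.mkdtemp()
    f = open(os.path.join(d, "journal"), "w")
    f.write("journal data")
    f.close()
    cb = aftertrans(d)                 # what would be handed to the transaction
    cb()                               # the transaction calls this when it completes
    print os.path.exists(os.path.join(d, "undo"))   # -> True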
1837 def instance(ui, path, create):
1852 def instance(ui, path, create):
1838 return localrepository(ui, util.drop_scheme('file', path), create)
1853 return localrepository(ui, util.drop_scheme('file', path), create)
1839
1854
1840 def islocal(path):
1855 def islocal(path):
1841 return True
1856 return True
@@ -1,1051 +1,1054
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import gettext as _
15 from i18n import gettext as _
16 from demandload import *
16 from demandload import *
17 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
17 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
18 demandload(globals(), "os threading time calendar ConfigParser")
18 demandload(globals(), "os threading time calendar ConfigParser")
19
19
20 # used by parsedate
20 # used by parsedate
21 defaultdateformats = ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
21 defaultdateformats = ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
22 '%a %b %d %H:%M:%S %Y')
22 '%a %b %d %H:%M:%S %Y')
23
23
24 class SignalInterrupt(Exception):
24 class SignalInterrupt(Exception):
25 """Exception raised on SIGTERM and SIGHUP."""
25 """Exception raised on SIGTERM and SIGHUP."""
26
26
27 # like SafeConfigParser but with case-sensitive keys
27 # like SafeConfigParser but with case-sensitive keys
28 class configparser(ConfigParser.SafeConfigParser):
28 class configparser(ConfigParser.SafeConfigParser):
29 def optionxform(self, optionstr):
29 def optionxform(self, optionstr):
30 return optionstr
30 return optionstr
31
31
32 def cachefunc(func):
32 def cachefunc(func):
33 '''cache the result of function calls'''
33 '''cache the result of function calls'''
34 # XXX doesn't handle keyword args
34 # XXX doesn't handle keyword args
35 cache = {}
35 cache = {}
36 if func.func_code.co_argcount == 1:
36 if func.func_code.co_argcount == 1:
37 # we gain a small amount of time because
37 # we gain a small amount of time because
38 # we don't need to pack/unpack the list
38 # we don't need to pack/unpack the list
39 def f(arg):
39 def f(arg):
40 if arg not in cache:
40 if arg not in cache:
41 cache[arg] = func(arg)
41 cache[arg] = func(arg)
42 return cache[arg]
42 return cache[arg]
43 else:
43 else:
44 def f(*args):
44 def f(*args):
45 if args not in cache:
45 if args not in cache:
46 cache[args] = func(*args)
46 cache[args] = func(*args)
47 return cache[args]
47 return cache[args]
48
48
49 return f
49 return f
50
50
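Editor's note: a quick illustration of cachefunc(): the wrapped function is invoked only once per distinct argument, which is why it suits pure helper functions. This assumes the module above is importable as mercurial.util; otherwise paste the definition locally.

    from mercurial.util import cachefunc

    calls = []

    def slow_double(x):
        calls.append(x)                # record how often the real function runs
        return x * 2

    fast_double = cachefunc(slow_double)
    print fast_double(21), fast_double(21)   # -> 42 42
    print calls                              # -> [21]: computed only once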
51 def pipefilter(s, cmd):
51 def pipefilter(s, cmd):
52 '''filter string S through command CMD, returning its output'''
52 '''filter string S through command CMD, returning its output'''
53 (pout, pin) = popen2.popen2(cmd, -1, 'b')
53 (pout, pin) = popen2.popen2(cmd, -1, 'b')
54 def writer():
54 def writer():
55 try:
55 try:
56 pin.write(s)
56 pin.write(s)
57 pin.close()
57 pin.close()
58 except IOError, inst:
58 except IOError, inst:
59 if inst.errno != errno.EPIPE:
59 if inst.errno != errno.EPIPE:
60 raise
60 raise
61
61
62 # we should use select instead on UNIX, but this will work on most
62 # we should use select instead on UNIX, but this will work on most
63 # systems, including Windows
63 # systems, including Windows
64 w = threading.Thread(target=writer)
64 w = threading.Thread(target=writer)
65 w.start()
65 w.start()
66 f = pout.read()
66 f = pout.read()
67 pout.close()
67 pout.close()
68 w.join()
68 w.join()
69 return f
69 return f
70
70
71 def tempfilter(s, cmd):
71 def tempfilter(s, cmd):
72 '''filter string S through a pair of temporary files with CMD.
72 '''filter string S through a pair of temporary files with CMD.
73 CMD is used as a template to create the real command to be run,
73 CMD is used as a template to create the real command to be run,
74 with the strings INFILE and OUTFILE replaced by the real names of
74 with the strings INFILE and OUTFILE replaced by the real names of
75 the temporary files generated.'''
75 the temporary files generated.'''
76 inname, outname = None, None
76 inname, outname = None, None
77 try:
77 try:
78 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
78 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
79 fp = os.fdopen(infd, 'wb')
79 fp = os.fdopen(infd, 'wb')
80 fp.write(s)
80 fp.write(s)
81 fp.close()
81 fp.close()
82 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
82 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
83 os.close(outfd)
83 os.close(outfd)
84 cmd = cmd.replace('INFILE', inname)
84 cmd = cmd.replace('INFILE', inname)
85 cmd = cmd.replace('OUTFILE', outname)
85 cmd = cmd.replace('OUTFILE', outname)
86 code = os.system(cmd)
86 code = os.system(cmd)
87 if code: raise Abort(_("command '%s' failed: %s") %
87 if code: raise Abort(_("command '%s' failed: %s") %
88 (cmd, explain_exit(code)))
88 (cmd, explain_exit(code)))
89 return open(outname, 'rb').read()
89 return open(outname, 'rb').read()
90 finally:
90 finally:
91 try:
91 try:
92 if inname: os.unlink(inname)
92 if inname: os.unlink(inname)
93 except: pass
93 except: pass
94 try:
94 try:
95 if outname: os.unlink(outname)
95 if outname: os.unlink(outname)
96 except: pass
96 except: pass
97
97
98 filtertable = {
98 filtertable = {
99 'tempfile:': tempfilter,
99 'tempfile:': tempfilter,
100 'pipe:': pipefilter,
100 'pipe:': pipefilter,
101 }
101 }
102
102
103 def filter(s, cmd):
103 def filter(s, cmd):
104 "filter a string through a command that transforms its input to its output"
104 "filter a string through a command that transforms its input to its output"
105 for name, fn in filtertable.iteritems():
105 for name, fn in filtertable.iteritems():
106 if cmd.startswith(name):
106 if cmd.startswith(name):
107 return fn(s, cmd[len(name):].lstrip())
107 return fn(s, cmd[len(name):].lstrip())
108 return pipefilter(s, cmd)
108 return pipefilter(s, cmd)
109
109
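Editor's note: filter() dispatches on the command prefix: 'tempfile:' routes the data through a pair of temporary files, while 'pipe:' (or no prefix at all) pipes it through the command. A POSIX-only illustration, assuming this module is importable as mercurial.util:

    from mercurial import util

    print util.filter('hello\n', 'pipe: tr a-z A-Z')    # explicit pipe filter -> 'HELLO\n'
    print util.filter('hello\n', 'tr a-z A-Z')          # no prefix: pipefilter is the default
    print util.filter('hello\n', 'tempfile: tr a-z A-Z < INFILE > OUTFILE')  # via temp files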
110 def find_in_path(name, path, default=None):
110 def find_in_path(name, path, default=None):
111 '''find name in search path. path can be string (will be split
111 '''find name in search path. path can be string (will be split
112 with os.pathsep), or iterable thing that returns strings. if name
112 with os.pathsep), or iterable thing that returns strings. if name
113 found, return path to name. else return default.'''
113 found, return path to name. else return default.'''
114 if isinstance(path, str):
114 if isinstance(path, str):
115 path = path.split(os.pathsep)
115 path = path.split(os.pathsep)
116 for p in path:
116 for p in path:
117 p_name = os.path.join(p, name)
117 p_name = os.path.join(p, name)
118 if os.path.exists(p_name):
118 if os.path.exists(p_name):
119 return p_name
119 return p_name
120 return default
120 return default
121
121
122 def binary(s):
122 def binary(s):
123 """return true if a string is binary data using diff's heuristic"""
123 """return true if a string is binary data using diff's heuristic"""
124 if s and '\0' in s[:4096]:
124 if s and '\0' in s[:4096]:
125 return True
125 return True
126 return False
126 return False
127
127
128 def unique(g):
128 def unique(g):
129 """return the uniq elements of iterable g"""
129 """return the uniq elements of iterable g"""
130 seen = {}
130 seen = {}
131 for f in g:
131 for f in g:
132 if f not in seen:
132 if f not in seen:
133 seen[f] = 1
133 seen[f] = 1
134 yield f
134 yield f
135
135
136 class Abort(Exception):
136 class Abort(Exception):
137 """Raised if a command needs to print an error and exit."""
137 """Raised if a command needs to print an error and exit."""
138
138
139 class UnexpectedOutput(Abort):
140 """Raised to print an error with part of output and exit."""
141
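Editor's note: because UnexpectedOutput subclasses Abort, every existing handler that already catches util.Abort handles it for free; the only difference is that callers can attach the offending output. A tiny standalone sketch of that relationship, using stand-in classes that mirror the two definitions above:

    class Abort(Exception):
        """stand-in for util.Abort"""

    class UnexpectedOutput(Abort):
        """stand-in for util.UnexpectedOutput"""

    def talk_to_server(line):
        try:
            return int(line)
        except ValueError:
            raise UnexpectedOutput('Unexpected response from remote server:', line)

    try:
        talk_to_server('oops, not a number')
    except Abort, inst:                 # an Abort handler catches it too
        print 'abort: %s %r' % (inst.args[0], inst.args[1])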
139 def always(fn): return True
142 def always(fn): return True
140 def never(fn): return False
143 def never(fn): return False
141
144
142 def patkind(name, dflt_pat='glob'):
145 def patkind(name, dflt_pat='glob'):
143 """Split a string into an optional pattern kind prefix and the
146 """Split a string into an optional pattern kind prefix and the
144 actual pattern."""
147 actual pattern."""
145 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
148 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
146 if name.startswith(prefix + ':'): return name.split(':', 1)
149 if name.startswith(prefix + ':'): return name.split(':', 1)
147 return dflt_pat, name
150 return dflt_pat, name
148
151
149 def globre(pat, head='^', tail='$'):
152 def globre(pat, head='^', tail='$'):
150 "convert a glob pattern into a regexp"
153 "convert a glob pattern into a regexp"
151 i, n = 0, len(pat)
154 i, n = 0, len(pat)
152 res = ''
155 res = ''
153 group = False
156 group = False
154 def peek(): return i < n and pat[i]
157 def peek(): return i < n and pat[i]
155 while i < n:
158 while i < n:
156 c = pat[i]
159 c = pat[i]
157 i = i+1
160 i = i+1
158 if c == '*':
161 if c == '*':
159 if peek() == '*':
162 if peek() == '*':
160 i += 1
163 i += 1
161 res += '.*'
164 res += '.*'
162 else:
165 else:
163 res += '[^/]*'
166 res += '[^/]*'
164 elif c == '?':
167 elif c == '?':
165 res += '.'
168 res += '.'
166 elif c == '[':
169 elif c == '[':
167 j = i
170 j = i
168 if j < n and pat[j] in '!]':
171 if j < n and pat[j] in '!]':
169 j += 1
172 j += 1
170 while j < n and pat[j] != ']':
173 while j < n and pat[j] != ']':
171 j += 1
174 j += 1
172 if j >= n:
175 if j >= n:
173 res += '\\['
176 res += '\\['
174 else:
177 else:
175 stuff = pat[i:j].replace('\\','\\\\')
178 stuff = pat[i:j].replace('\\','\\\\')
176 i = j + 1
179 i = j + 1
177 if stuff[0] == '!':
180 if stuff[0] == '!':
178 stuff = '^' + stuff[1:]
181 stuff = '^' + stuff[1:]
179 elif stuff[0] == '^':
182 elif stuff[0] == '^':
180 stuff = '\\' + stuff
183 stuff = '\\' + stuff
181 res = '%s[%s]' % (res, stuff)
184 res = '%s[%s]' % (res, stuff)
182 elif c == '{':
185 elif c == '{':
183 group = True
186 group = True
184 res += '(?:'
187 res += '(?:'
185 elif c == '}' and group:
188 elif c == '}' and group:
186 res += ')'
189 res += ')'
187 group = False
190 group = False
188 elif c == ',' and group:
191 elif c == ',' and group:
189 res += '|'
192 res += '|'
190 elif c == '\\':
193 elif c == '\\':
191 p = peek()
194 p = peek()
192 if p:
195 if p:
193 i += 1
196 i += 1
194 res += re.escape(p)
197 res += re.escape(p)
195 else:
198 else:
196 res += re.escape(c)
199 res += re.escape(c)
197 else:
200 else:
198 res += re.escape(c)
201 res += re.escape(c)
199 return head + res + tail
202 return head + res + tail
200
203
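Editor's note: with globre() above, '*' stops at path separators while '**' crosses them, which is the practical difference between single-level and recursive glob matches. A short check, assuming this module is importable as mercurial.util:

    import re
    from mercurial.util import globre

    one_level = re.compile(globre('*.py'))      # '*' compiles to [^/]*
    any_level = re.compile(globre('**.py'))     # '**' compiles to .*

    print bool(one_level.match('setup.py'))          # True
    print bool(one_level.match('mercurial/util.py')) # False: '*' does not cross '/'
    print bool(any_level.match('mercurial/util.py')) # True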
201 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
204 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
202
205
203 def pathto(n1, n2):
206 def pathto(n1, n2):
204 '''return the relative path from one place to another.
207 '''return the relative path from one place to another.
205 this returns a path in the form used by the local filesystem, not hg.'''
208 this returns a path in the form used by the local filesystem, not hg.'''
206 if not n1: return localpath(n2)
209 if not n1: return localpath(n2)
207 a, b = n1.split('/'), n2.split('/')
210 a, b = n1.split('/'), n2.split('/')
208 a.reverse()
211 a.reverse()
209 b.reverse()
212 b.reverse()
210 while a and b and a[-1] == b[-1]:
213 while a and b and a[-1] == b[-1]:
211 a.pop()
214 a.pop()
212 b.pop()
215 b.pop()
213 b.reverse()
216 b.reverse()
214 return os.sep.join((['..'] * len(a)) + b)
217 return os.sep.join((['..'] * len(a)) + b)
215
218
216 def canonpath(root, cwd, myname):
219 def canonpath(root, cwd, myname):
217 """return the canonical path of myname, given cwd and root"""
220 """return the canonical path of myname, given cwd and root"""
218 if root == os.sep:
221 if root == os.sep:
219 rootsep = os.sep
222 rootsep = os.sep
220 elif root.endswith(os.sep):
223 elif root.endswith(os.sep):
221 rootsep = root
224 rootsep = root
222 else:
225 else:
223 rootsep = root + os.sep
226 rootsep = root + os.sep
224 name = myname
227 name = myname
225 if not os.path.isabs(name):
228 if not os.path.isabs(name):
226 name = os.path.join(root, cwd, name)
229 name = os.path.join(root, cwd, name)
227 name = os.path.normpath(name)
230 name = os.path.normpath(name)
228 if name != rootsep and name.startswith(rootsep):
231 if name != rootsep and name.startswith(rootsep):
229 name = name[len(rootsep):]
232 name = name[len(rootsep):]
230 audit_path(name)
233 audit_path(name)
231 return pconvert(name)
234 return pconvert(name)
232 elif name == root:
235 elif name == root:
233 return ''
236 return ''
234 else:
237 else:
235 # Determine whether `name' is in the hierarchy at or beneath `root',
238 # Determine whether `name' is in the hierarchy at or beneath `root',
236 # by iterating name=dirname(name) until that causes no change (can't
239 # by iterating name=dirname(name) until that causes no change (can't
237 # check name == '/', because that doesn't work on windows). For each
240 # check name == '/', because that doesn't work on windows). For each
238 # `name', compare dev/inode numbers. If they match, the list `rel'
241 # `name', compare dev/inode numbers. If they match, the list `rel'
239 # holds the reversed list of components making up the relative file
242 # holds the reversed list of components making up the relative file
240 # name we want.
243 # name we want.
241 root_st = os.stat(root)
244 root_st = os.stat(root)
242 rel = []
245 rel = []
243 while True:
246 while True:
244 try:
247 try:
245 name_st = os.stat(name)
248 name_st = os.stat(name)
246 except OSError:
249 except OSError:
247 break
250 break
248 if samestat(name_st, root_st):
251 if samestat(name_st, root_st):
249 rel.reverse()
252 rel.reverse()
250 name = os.path.join(*rel)
253 name = os.path.join(*rel)
251 audit_path(name)
254 audit_path(name)
252 return pconvert(name)
255 return pconvert(name)
253 dirname, basename = os.path.split(name)
256 dirname, basename = os.path.split(name)
254 rel.append(basename)
257 rel.append(basename)
255 if dirname == name:
258 if dirname == name:
256 break
259 break
257 name = dirname
260 name = dirname
258
261
259 raise Abort('%s not under root' % myname)
262 raise Abort('%s not under root' % myname)
260
263
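Editor's note: in the common case canonpath() never touches the filesystem: it joins root, cwd and the name, normalizes, and strips the root prefix; only names that fall outside the root trigger the stat-based walk. Two quick POSIX examples, assuming this module is importable as mercurial.util:

    from mercurial.util import canonpath

    print canonpath('/repo', 'src', 'main.c')          # -> 'src/main.c'
    print canonpath('/repo', '', '/repo/doc/README')   # -> 'doc/README'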
261 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
264 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
262 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
265 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
263
266
264 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
267 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
265 if os.name == 'nt':
268 if os.name == 'nt':
266 dflt_pat = 'glob'
269 dflt_pat = 'glob'
267 else:
270 else:
268 dflt_pat = 'relpath'
271 dflt_pat = 'relpath'
269 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
272 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
270
273
271 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
274 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
272 """build a function to match a set of file patterns
275 """build a function to match a set of file patterns
273
276
274 arguments:
277 arguments:
275 canonroot - the canonical root of the tree you're matching against
278 canonroot - the canonical root of the tree you're matching against
276 cwd - the current working directory, if relevant
279 cwd - the current working directory, if relevant
277 names - patterns to find
280 names - patterns to find
278 inc - patterns to include
281 inc - patterns to include
279 exc - patterns to exclude
282 exc - patterns to exclude
280 head - a regex to prepend to patterns to control whether a match is rooted
283 head - a regex to prepend to patterns to control whether a match is rooted
281
284
282 a pattern is one of:
285 a pattern is one of:
283 'glob:<rooted glob>'
286 'glob:<rooted glob>'
284 're:<rooted regexp>'
287 're:<rooted regexp>'
285 'path:<rooted path>'
288 'path:<rooted path>'
286 'relglob:<relative glob>'
289 'relglob:<relative glob>'
287 'relpath:<relative path>'
290 'relpath:<relative path>'
288 'relre:<relative regexp>'
291 'relre:<relative regexp>'
289 '<rooted path or regexp>'
292 '<rooted path or regexp>'
290
293
291 returns:
294 returns:
292 a 3-tuple containing
295 a 3-tuple containing
293 - list of explicit non-pattern names passed in
296 - list of explicit non-pattern names passed in
294 - a bool match(filename) function
297 - a bool match(filename) function
295 - a bool indicating if any patterns were passed in
298 - a bool indicating if any patterns were passed in
296
299
297 todo:
300 todo:
298 make head regex a rooted bool
301 make head regex a rooted bool
299 """
302 """
300
303
301 def contains_glob(name):
304 def contains_glob(name):
302 for c in name:
305 for c in name:
303 if c in _globchars: return True
306 if c in _globchars: return True
304 return False
307 return False
305
308
306 def regex(kind, name, tail):
309 def regex(kind, name, tail):
307 '''convert a pattern into a regular expression'''
310 '''convert a pattern into a regular expression'''
308 if kind == 're':
311 if kind == 're':
309 return name
312 return name
310 elif kind == 'path':
313 elif kind == 'path':
311 return '^' + re.escape(name) + '(?:/|$)'
314 return '^' + re.escape(name) + '(?:/|$)'
312 elif kind == 'relglob':
315 elif kind == 'relglob':
313 return head + globre(name, '(?:|.*/)', tail)
316 return head + globre(name, '(?:|.*/)', tail)
314 elif kind == 'relpath':
317 elif kind == 'relpath':
315 return head + re.escape(name) + tail
318 return head + re.escape(name) + tail
316 elif kind == 'relre':
319 elif kind == 'relre':
317 if name.startswith('^'):
320 if name.startswith('^'):
318 return name
321 return name
319 return '.*' + name
322 return '.*' + name
320 return head + globre(name, '', tail)
323 return head + globre(name, '', tail)
321
324
322 def matchfn(pats, tail):
325 def matchfn(pats, tail):
323 """build a matching function from a set of patterns"""
326 """build a matching function from a set of patterns"""
324 if not pats:
327 if not pats:
325 return
328 return
326 matches = []
329 matches = []
327 for k, p in pats:
330 for k, p in pats:
328 try:
331 try:
329 pat = '(?:%s)' % regex(k, p, tail)
332 pat = '(?:%s)' % regex(k, p, tail)
330 matches.append(re.compile(pat).match)
333 matches.append(re.compile(pat).match)
331 except re.error:
334 except re.error:
332 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
335 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
333 else: raise Abort("invalid pattern (%s): %s" % (k, p))
336 else: raise Abort("invalid pattern (%s): %s" % (k, p))
334
337
335 def buildfn(text):
338 def buildfn(text):
336 for m in matches:
339 for m in matches:
337 r = m(text)
340 r = m(text)
338 if r:
341 if r:
339 return r
342 return r
340
343
341 return buildfn
344 return buildfn
342
345
343 def globprefix(pat):
346 def globprefix(pat):
344 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
347 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
345 root = []
348 root = []
346 for p in pat.split(os.sep):
349 for p in pat.split(os.sep):
347 if contains_glob(p): break
350 if contains_glob(p): break
348 root.append(p)
351 root.append(p)
349 return '/'.join(root)
352 return '/'.join(root)
350
353
351 pats = []
354 pats = []
352 files = []
355 files = []
353 roots = []
356 roots = []
354 for kind, name in [patkind(p, dflt_pat) for p in names]:
357 for kind, name in [patkind(p, dflt_pat) for p in names]:
355 if kind in ('glob', 'relpath'):
358 if kind in ('glob', 'relpath'):
356 name = canonpath(canonroot, cwd, name)
359 name = canonpath(canonroot, cwd, name)
357 if name == '':
360 if name == '':
358 kind, name = 'glob', '**'
361 kind, name = 'glob', '**'
359 if kind in ('glob', 'path', 're'):
362 if kind in ('glob', 'path', 're'):
360 pats.append((kind, name))
363 pats.append((kind, name))
361 if kind == 'glob':
364 if kind == 'glob':
362 root = globprefix(name)
365 root = globprefix(name)
363 if root: roots.append(root)
366 if root: roots.append(root)
364 elif kind == 'relpath':
367 elif kind == 'relpath':
365 files.append((kind, name))
368 files.append((kind, name))
366 roots.append(name)
369 roots.append(name)
367
370
368 patmatch = matchfn(pats, '$') or always
371 patmatch = matchfn(pats, '$') or always
369 filematch = matchfn(files, '(?:/|$)') or always
372 filematch = matchfn(files, '(?:/|$)') or always
370 incmatch = always
373 incmatch = always
371 if inc:
374 if inc:
372 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
375 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
373 incmatch = matchfn(inckinds, '(?:/|$)')
376 incmatch = matchfn(inckinds, '(?:/|$)')
374 excmatch = lambda fn: False
377 excmatch = lambda fn: False
375 if exc:
378 if exc:
376 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
379 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
377 excmatch = matchfn(exckinds, '(?:/|$)')
380 excmatch = matchfn(exckinds, '(?:/|$)')
378
381
379 return (roots,
382 return (roots,
380 lambda fn: (incmatch(fn) and not excmatch(fn) and
383 lambda fn: (incmatch(fn) and not excmatch(fn) and
381 (fn.endswith('/') or
384 (fn.endswith('/') or
382 (not pats and not files) or
385 (not pats and not files) or
383 (pats and patmatch(fn)) or
386 (pats and patmatch(fn)) or
384 (files and filematch(fn)))),
387 (files and filematch(fn)))),
385 (inc or exc or (pats and pats != [('glob', '**')])) and True)
388 (inc or exc or (pats and pats != [('glob', '**')])) and True)
386
389
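Editor's note: matcher() and cmdmatcher() wrap _matcher() and hand back the (roots, match, anypats) triple described in the docstring. An example using an 're:' pattern, which skips canonpath() and therefore needs no real repository on disk (assuming this module is importable as mercurial.util):

    from mercurial.util import matcher

    roots, match, anypats = matcher('/repo', names=[r're:.*\.py$'])
    print bool(match('setup.py'))       # True: matches the pattern
    print bool(match('README'))         # False
    print roots, anypats                # -> [] True (a real pattern was given)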
387 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
390 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
388 '''enhanced shell command execution.
391 '''enhanced shell command execution.
389 run with environment maybe modified, maybe in different dir.
392 run with environment maybe modified, maybe in different dir.
390
393
391 if command fails and onerr is None, return status. if ui object,
394 if command fails and onerr is None, return status. if ui object,
392 print error message and return status, else raise onerr object as
395 print error message and return status, else raise onerr object as
393 exception.'''
396 exception.'''
394 def py2shell(val):
397 def py2shell(val):
395 'convert python object into string that is useful to shell'
398 'convert python object into string that is useful to shell'
396 if val in (None, False):
399 if val in (None, False):
397 return '0'
400 return '0'
398 if val == True:
401 if val == True:
399 return '1'
402 return '1'
400 return str(val)
403 return str(val)
401 oldenv = {}
404 oldenv = {}
402 for k in environ:
405 for k in environ:
403 oldenv[k] = os.environ.get(k)
406 oldenv[k] = os.environ.get(k)
404 if cwd is not None:
407 if cwd is not None:
405 oldcwd = os.getcwd()
408 oldcwd = os.getcwd()
406 try:
409 try:
407 for k, v in environ.iteritems():
410 for k, v in environ.iteritems():
408 os.environ[k] = py2shell(v)
411 os.environ[k] = py2shell(v)
409 if cwd is not None and oldcwd != cwd:
412 if cwd is not None and oldcwd != cwd:
410 os.chdir(cwd)
413 os.chdir(cwd)
411 rc = os.system(cmd)
414 rc = os.system(cmd)
412 if rc and onerr:
415 if rc and onerr:
413 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
416 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
414 explain_exit(rc)[0])
417 explain_exit(rc)[0])
415 if errprefix:
418 if errprefix:
416 errmsg = '%s: %s' % (errprefix, errmsg)
419 errmsg = '%s: %s' % (errprefix, errmsg)
417 try:
420 try:
418 onerr.warn(errmsg + '\n')
421 onerr.warn(errmsg + '\n')
419 except AttributeError:
422 except AttributeError:
420 raise onerr(errmsg)
423 raise onerr(errmsg)
421 return rc
424 return rc
422 finally:
425 finally:
423 for k, v in oldenv.iteritems():
426 for k, v in oldenv.iteritems():
424 if v is None:
427 if v is None:
425 del os.environ[k]
428 del os.environ[k]
426 else:
429 else:
427 os.environ[k] = v
430 os.environ[k] = v
428 if cwd is not None and oldcwd != cwd:
431 if cwd is not None and oldcwd != cwd:
429 os.chdir(oldcwd)
432 os.chdir(oldcwd)
430
433
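Editor's note: system() restores the environment and working directory afterwards and, with onerr left as None, simply hands back the raw exit status, which explain_exit() can decode. A POSIX illustration, assuming this module is importable as mercurial.util:

    from mercurial import util

    rc = util.system('test "$GREETING" = hi', environ={'GREETING': 'hi'})
    print rc                              # -> 0: the injected variable was visible
    rc = util.system('exit 3')
    print util.explain_exit(rc)[0]        # -> 'exited with status 3'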
431 def rename(src, dst):
434 def rename(src, dst):
432 """forcibly rename a file"""
435 """forcibly rename a file"""
433 try:
436 try:
434 os.rename(src, dst)
437 os.rename(src, dst)
435 except OSError, err:
438 except OSError, err:
436 # on windows, rename to existing file is not allowed, so we
439 # on windows, rename to existing file is not allowed, so we
437 # must delete destination first. but if file is open, unlink
440 # must delete destination first. but if file is open, unlink
438 # schedules it for delete but does not delete it. rename
441 # schedules it for delete but does not delete it. rename
439 # happens immediately even for open files, so we create
442 # happens immediately even for open files, so we create
440 # temporary file, delete it, rename destination to that name,
443 # temporary file, delete it, rename destination to that name,
441 # then delete that. then rename is safe to do.
444 # then delete that. then rename is safe to do.
442 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
445 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
443 os.close(fd)
446 os.close(fd)
444 os.unlink(temp)
447 os.unlink(temp)
445 os.rename(dst, temp)
448 os.rename(dst, temp)
446 os.unlink(temp)
449 os.unlink(temp)
447 os.rename(src, dst)
450 os.rename(src, dst)
448
451
449 def unlink(f):
452 def unlink(f):
450 """unlink and remove the directory if it is empty"""
453 """unlink and remove the directory if it is empty"""
451 os.unlink(f)
454 os.unlink(f)
452 # try removing directories that might now be empty
455 # try removing directories that might now be empty
453 try:
456 try:
454 os.removedirs(os.path.dirname(f))
457 os.removedirs(os.path.dirname(f))
455 except OSError:
458 except OSError:
456 pass
459 pass
457
460
458 def copyfiles(src, dst, hardlink=None):
461 def copyfiles(src, dst, hardlink=None):
459 """Copy a directory tree using hardlinks if possible"""
462 """Copy a directory tree using hardlinks if possible"""
460
463
461 if hardlink is None:
464 if hardlink is None:
462 hardlink = (os.stat(src).st_dev ==
465 hardlink = (os.stat(src).st_dev ==
463 os.stat(os.path.dirname(dst)).st_dev)
466 os.stat(os.path.dirname(dst)).st_dev)
464
467
465 if os.path.isdir(src):
468 if os.path.isdir(src):
466 os.mkdir(dst)
469 os.mkdir(dst)
467 for name in os.listdir(src):
470 for name in os.listdir(src):
468 srcname = os.path.join(src, name)
471 srcname = os.path.join(src, name)
469 dstname = os.path.join(dst, name)
472 dstname = os.path.join(dst, name)
470 copyfiles(srcname, dstname, hardlink)
473 copyfiles(srcname, dstname, hardlink)
471 else:
474 else:
472 if hardlink:
475 if hardlink:
473 try:
476 try:
474 os_link(src, dst)
477 os_link(src, dst)
475 except (IOError, OSError):
478 except (IOError, OSError):
476 hardlink = False
479 hardlink = False
477 shutil.copy(src, dst)
480 shutil.copy(src, dst)
478 else:
481 else:
479 shutil.copy(src, dst)
482 shutil.copy(src, dst)
480
483
481 def audit_path(path):
484 def audit_path(path):
482 """Abort if path contains dangerous components"""
485 """Abort if path contains dangerous components"""
483 parts = os.path.normcase(path).split(os.sep)
486 parts = os.path.normcase(path).split(os.sep)
484 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
487 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
485 or os.pardir in parts):
488 or os.pardir in parts):
486 raise Abort(_("path contains illegal component: %s\n") % path)
489 raise Abort(_("path contains illegal component: %s\n") % path)
487
490
488 def _makelock_file(info, pathname):
491 def _makelock_file(info, pathname):
489 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
492 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
490 os.write(ld, info)
493 os.write(ld, info)
491 os.close(ld)
494 os.close(ld)
492
495
493 def _readlock_file(pathname):
496 def _readlock_file(pathname):
494 return posixfile(pathname).read()
497 return posixfile(pathname).read()
495
498
496 def nlinks(pathname):
499 def nlinks(pathname):
497 """Return number of hardlinks for the given file."""
500 """Return number of hardlinks for the given file."""
498 return os.lstat(pathname).st_nlink
501 return os.lstat(pathname).st_nlink
499
502
500 if hasattr(os, 'link'):
503 if hasattr(os, 'link'):
501 os_link = os.link
504 os_link = os.link
502 else:
505 else:
503 def os_link(src, dst):
506 def os_link(src, dst):
504 raise OSError(0, _("Hardlinks not supported"))
507 raise OSError(0, _("Hardlinks not supported"))
505
508
506 def fstat(fp):
509 def fstat(fp):
507 '''stat file object that may not have fileno method.'''
510 '''stat file object that may not have fileno method.'''
508 try:
511 try:
509 return os.fstat(fp.fileno())
512 return os.fstat(fp.fileno())
510 except AttributeError:
513 except AttributeError:
511 return os.stat(fp.name)
514 return os.stat(fp.name)
512
515
513 posixfile = file
516 posixfile = file
514
517
515 def is_win_9x():
518 def is_win_9x():
516 '''return true if run on windows 95, 98 or me.'''
519 '''return true if run on windows 95, 98 or me.'''
517 try:
520 try:
518 return sys.getwindowsversion()[3] == 1
521 return sys.getwindowsversion()[3] == 1
519 except AttributeError:
522 except AttributeError:
520 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
523 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
521
524
522 def username(uid=None):
525 def username(uid=None):
523 """Return the name of the user with the given uid.
526 """Return the name of the user with the given uid.
524
527
525 If uid is None, return the name of the current user."""
528 If uid is None, return the name of the current user."""
526 try:
529 try:
527 import pwd
530 import pwd
528 if uid is None:
531 if uid is None:
529 uid = os.getuid()
532 uid = os.getuid()
530 try:
533 try:
531 return pwd.getpwuid(uid)[0]
534 return pwd.getpwuid(uid)[0]
532 except KeyError:
535 except KeyError:
533 return str(uid)
536 return str(uid)
534 except ImportError:
537 except ImportError:
535 return None
538 return None
536
539
537 def groupname(gid=None):
540 def groupname(gid=None):
538 """Return the name of the group with the given gid.
541 """Return the name of the group with the given gid.
539
542
540 If gid is None, return the name of the current group."""
543 If gid is None, return the name of the current group."""
541 try:
544 try:
542 import grp
545 import grp
543 if gid is None:
546 if gid is None:
544 gid = os.getgid()
547 gid = os.getgid()
545 try:
548 try:
546 return grp.getgrgid(gid)[0]
549 return grp.getgrgid(gid)[0]
547 except KeyError:
550 except KeyError:
548 return str(gid)
551 return str(gid)
549 except ImportError:
552 except ImportError:
550 return None
553 return None
551
554
552 # Platform specific variants
555 # Platform specific variants
553 if os.name == 'nt':
556 if os.name == 'nt':
554 demandload(globals(), "msvcrt")
557 demandload(globals(), "msvcrt")
555 nulldev = 'NUL:'
558 nulldev = 'NUL:'
556
559
557 class winstdout:
560 class winstdout:
558 '''stdout on windows misbehaves if sent through a pipe'''
561 '''stdout on windows misbehaves if sent through a pipe'''
559
562
560 def __init__(self, fp):
563 def __init__(self, fp):
561 self.fp = fp
564 self.fp = fp
562
565
563 def __getattr__(self, key):
566 def __getattr__(self, key):
564 return getattr(self.fp, key)
567 return getattr(self.fp, key)
565
568
566 def close(self):
569 def close(self):
567 try:
570 try:
568 self.fp.close()
571 self.fp.close()
569 except: pass
572 except: pass
570
573
571 def write(self, s):
574 def write(self, s):
572 try:
575 try:
573 return self.fp.write(s)
576 return self.fp.write(s)
574 except IOError, inst:
577 except IOError, inst:
575 if inst.errno != 0: raise
578 if inst.errno != 0: raise
576 self.close()
579 self.close()
577 raise IOError(errno.EPIPE, 'Broken pipe')
580 raise IOError(errno.EPIPE, 'Broken pipe')
578
581
579 sys.stdout = winstdout(sys.stdout)
582 sys.stdout = winstdout(sys.stdout)
580
583
581 def system_rcpath():
584 def system_rcpath():
582 try:
585 try:
583 return system_rcpath_win32()
586 return system_rcpath_win32()
584 except:
587 except:
585 return [r'c:\mercurial\mercurial.ini']
588 return [r'c:\mercurial\mercurial.ini']
586
589
587 def os_rcpath():
590 def os_rcpath():
588 '''return default os-specific hgrc search path'''
591 '''return default os-specific hgrc search path'''
589 path = system_rcpath()
592 path = system_rcpath()
590 path.append(user_rcpath())
593 path.append(user_rcpath())
591 userprofile = os.environ.get('USERPROFILE')
594 userprofile = os.environ.get('USERPROFILE')
592 if userprofile:
595 if userprofile:
593 path.append(os.path.join(userprofile, 'mercurial.ini'))
596 path.append(os.path.join(userprofile, 'mercurial.ini'))
594 return path
597 return path
595
598
596 def user_rcpath():
599 def user_rcpath():
597 '''return os-specific hgrc search path to the user dir'''
600 '''return os-specific hgrc search path to the user dir'''
598 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
601 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
599
602
600 def parse_patch_output(output_line):
603 def parse_patch_output(output_line):
601 """parses the output produced by patch and returns the file name"""
604 """parses the output produced by patch and returns the file name"""
602 pf = output_line[14:]
605 pf = output_line[14:]
603 if pf[0] == '`':
606 if pf[0] == '`':
604 pf = pf[1:-1] # Remove the quotes
607 pf = pf[1:-1] # Remove the quotes
605 return pf
608 return pf
606
609
607 def testpid(pid):
610 def testpid(pid):
608 '''return False if pid dead, True if running or not known'''
611 '''return False if pid dead, True if running or not known'''
609 return True
612 return True
610
613
611 def is_exec(f, last):
614 def is_exec(f, last):
612 return last
615 return last
613
616
614 def set_exec(f, mode):
617 def set_exec(f, mode):
615 pass
618 pass
616
619
617 def set_binary(fd):
620 def set_binary(fd):
618 msvcrt.setmode(fd.fileno(), os.O_BINARY)
621 msvcrt.setmode(fd.fileno(), os.O_BINARY)
619
622
620 def pconvert(path):
623 def pconvert(path):
621 return path.replace("\\", "/")
624 return path.replace("\\", "/")
622
625
623 def localpath(path):
626 def localpath(path):
624 return path.replace('/', '\\')
627 return path.replace('/', '\\')
625
628
626 def normpath(path):
629 def normpath(path):
627 return pconvert(os.path.normpath(path))
630 return pconvert(os.path.normpath(path))
628
631
629 makelock = _makelock_file
632 makelock = _makelock_file
630 readlock = _readlock_file
633 readlock = _readlock_file
631
634
632 def samestat(s1, s2):
635 def samestat(s1, s2):
633 return False
636 return False
634
637
635 def shellquote(s):
638 def shellquote(s):
636 return '"%s"' % s.replace('"', '\\"')
639 return '"%s"' % s.replace('"', '\\"')
637
640
638 def explain_exit(code):
641 def explain_exit(code):
639 return _("exited with status %d") % code, code
642 return _("exited with status %d") % code, code
640
643
641 try:
644 try:
642 # override functions with win32 versions if possible
645 # override functions with win32 versions if possible
643 from util_win32 import *
646 from util_win32 import *
644 if not is_win_9x():
647 if not is_win_9x():
645 posixfile = posixfile_nt
648 posixfile = posixfile_nt
646 except ImportError:
649 except ImportError:
647 pass
650 pass
648
651
649 else:
652 else:
650 nulldev = '/dev/null'
653 nulldev = '/dev/null'
651
654
652 def rcfiles(path):
655 def rcfiles(path):
653 rcs = [os.path.join(path, 'hgrc')]
656 rcs = [os.path.join(path, 'hgrc')]
654 rcdir = os.path.join(path, 'hgrc.d')
657 rcdir = os.path.join(path, 'hgrc.d')
655 try:
658 try:
656 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
659 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
657 if f.endswith(".rc")])
660 if f.endswith(".rc")])
658 except OSError:
661 except OSError:
659 pass
662 pass
660 return rcs
663 return rcs
661
664
662 def os_rcpath():
665 def os_rcpath():
663 '''return default os-specific hgrc search path'''
666 '''return default os-specific hgrc search path'''
664 path = []
667 path = []
665 # old mod_python does not set sys.argv
668 # old mod_python does not set sys.argv
666 if len(getattr(sys, 'argv', [])) > 0:
669 if len(getattr(sys, 'argv', [])) > 0:
667 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
670 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
668 '/../etc/mercurial'))
671 '/../etc/mercurial'))
669 path.extend(rcfiles('/etc/mercurial'))
672 path.extend(rcfiles('/etc/mercurial'))
670 path.append(os.path.expanduser('~/.hgrc'))
673 path.append(os.path.expanduser('~/.hgrc'))
671 path = [os.path.normpath(f) for f in path]
674 path = [os.path.normpath(f) for f in path]
672 return path
675 return path
673
676
674 def parse_patch_output(output_line):
677 def parse_patch_output(output_line):
675 """parses the output produced by patch and returns the file name"""
678 """parses the output produced by patch and returns the file name"""
676 pf = output_line[14:]
679 pf = output_line[14:]
677 if pf.startswith("'") and pf.endswith("'") and " " in pf:
680 if pf.startswith("'") and pf.endswith("'") and " " in pf:
678 pf = pf[1:-1] # Remove the quotes
681 pf = pf[1:-1] # Remove the quotes
679 return pf
682 return pf
680
683
681 def is_exec(f, last):
684 def is_exec(f, last):
682 """check whether a file is executable"""
685 """check whether a file is executable"""
683 return (os.lstat(f).st_mode & 0100 != 0)
686 return (os.lstat(f).st_mode & 0100 != 0)
684
687
685 def set_exec(f, mode):
688 def set_exec(f, mode):
686 s = os.lstat(f).st_mode
689 s = os.lstat(f).st_mode
687 if (s & 0100 != 0) == mode:
690 if (s & 0100 != 0) == mode:
688 return
691 return
689 if mode:
692 if mode:
690 # Turn on +x for every +r bit when making a file executable
693 # Turn on +x for every +r bit when making a file executable
691 # and obey umask.
694 # and obey umask.
692 umask = os.umask(0)
695 umask = os.umask(0)
693 os.umask(umask)
696 os.umask(umask)
694 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
697 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
695 else:
698 else:
696 os.chmod(f, s & 0666)
699 os.chmod(f, s & 0666)
697
700
698 def set_binary(fd):
701 def set_binary(fd):
699 pass
702 pass
700
703
701 def pconvert(path):
704 def pconvert(path):
702 return path
705 return path
703
706
704 def localpath(path):
707 def localpath(path):
705 return path
708 return path
706
709
707 normpath = os.path.normpath
710 normpath = os.path.normpath
708 samestat = os.path.samestat
711 samestat = os.path.samestat
709
712
710 def makelock(info, pathname):
713 def makelock(info, pathname):
711 try:
714 try:
712 os.symlink(info, pathname)
715 os.symlink(info, pathname)
713 except OSError, why:
716 except OSError, why:
714 if why.errno == errno.EEXIST:
717 if why.errno == errno.EEXIST:
715 raise
718 raise
716 else:
719 else:
717 _makelock_file(info, pathname)
720 _makelock_file(info, pathname)
718
721
719 def readlock(pathname):
722 def readlock(pathname):
720 try:
723 try:
721 return os.readlink(pathname)
724 return os.readlink(pathname)
722 except OSError, why:
725 except OSError, why:
723 if why.errno == errno.EINVAL:
726 if why.errno == errno.EINVAL:
724 return _readlock_file(pathname)
727 return _readlock_file(pathname)
725 else:
728 else:
726 raise
729 raise
727
730
728 def shellquote(s):
731 def shellquote(s):
729 return "'%s'" % s.replace("'", "'\\''")
732 return "'%s'" % s.replace("'", "'\\''")
730
733
731 def testpid(pid):
734 def testpid(pid):
732 '''return False if pid dead, True if running or not sure'''
735 '''return False if pid dead, True if running or not sure'''
733 try:
736 try:
734 os.kill(pid, 0)
737 os.kill(pid, 0)
735 return True
738 return True
736 except OSError, inst:
739 except OSError, inst:
737 return inst.errno != errno.ESRCH
740 return inst.errno != errno.ESRCH
738
741
739 def explain_exit(code):
742 def explain_exit(code):
740 """return a 2-tuple (desc, code) describing a process's status"""
743 """return a 2-tuple (desc, code) describing a process's status"""
741 if os.WIFEXITED(code):
744 if os.WIFEXITED(code):
742 val = os.WEXITSTATUS(code)
745 val = os.WEXITSTATUS(code)
743 return _("exited with status %d") % val, val
746 return _("exited with status %d") % val, val
744 elif os.WIFSIGNALED(code):
747 elif os.WIFSIGNALED(code):
745 val = os.WTERMSIG(code)
748 val = os.WTERMSIG(code)
746 return _("killed by signal %d") % val, val
749 return _("killed by signal %d") % val, val
747 elif os.WIFSTOPPED(code):
750 elif os.WIFSTOPPED(code):
748 val = os.WSTOPSIG(code)
751 val = os.WSTOPSIG(code)
749 return _("stopped by signal %d") % val, val
752 return _("stopped by signal %d") % val, val
750 raise ValueError(_("invalid exit code"))
753 raise ValueError(_("invalid exit code"))
751
754
def opener(base, audit=True):
    """
    return a function that opens files relative to base

    this function is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    p = base
    audit_p = audit

    def mktempcopy(name):
        d, fn = os.path.split(name)
        fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
        os.close(fd)
        ofp = posixfile(temp, "wb")
        try:
            try:
                ifp = posixfile(name, "rb")
            except IOError, inst:
                if not getattr(inst, 'filename', None):
                    inst.filename = name
                raise
            for chunk in filechunkiter(ifp):
                ofp.write(chunk)
            ifp.close()
            ofp.close()
        except:
            try: os.unlink(temp)
            except: pass
            raise
        st = os.lstat(name)
        os.chmod(temp, st.st_mode)
        return temp

    class atomictempfile(posixfile):
        """the file will only be copied when rename is called"""
        def __init__(self, name, mode):
            self.__name = name
            self.temp = mktempcopy(name)
            posixfile.__init__(self, self.temp, mode)
        def rename(self):
            if not self.closed:
                posixfile.close(self)
                rename(self.temp, localpath(self.__name))
        def __del__(self):
            if not self.closed:
                try:
                    os.unlink(self.temp)
                except: pass
                posixfile.close(self)

    class atomicfile(atomictempfile):
        """the file will only be copied on close"""
        def __init__(self, name, mode):
            atomictempfile.__init__(self, name, mode)
        def close(self):
            self.rename()
        def __del__(self):
            self.rename()

    def o(path, mode="r", text=False, atomic=False, atomictemp=False):
        if audit_p:
            audit_path(path)
        f = os.path.join(p, path)

        if not text:
            mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                nlink = nlinks(f)
            except OSError:
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            else:
                if atomic:
                    return atomicfile(f, mode)
                elif atomictemp:
                    return atomictempfile(f, mode)
                if nlink > 1:
                    rename(mktempcopy(f), f)
        return posixfile(f, mode)

    return o

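# Illustrative sketch (hypothetical, not part of the original file): writing a
# file through an opener rooted at a directory such as a repository's .hg
# directory.  With the default audit=True the opener refuses paths that escape
# the root; with atomic=True, an already existing target is rewritten via a
# temporary copy that only replaces it on close().
def _write_tracked_file(root, relpath, data):
    o = opener(root)
    f = o(relpath, 'w', atomic=True)
    f.write(data)
    f.close()   # for an existing target, close() renames the temp copy over it
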
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if not self.iterempty:
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            for ch in self.in_iter:
                collector.write(ch)
            self.buf = collector.getvalue()
            self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iterempty = True
            self.buf = collector.getvalue()
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

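# Illustrative sketch (hypothetical, not part of the original file): slicing a
# stream that arrives in arbitrarily sized pieces (for instance chunks read
# off a socket) into fixed-size records with chunkbuffer.  The final record
# may be shorter if the underlying iterator runs dry.
def _iter_records(chunks, recsize):
    buf = chunkbuffer(chunks)
    while True:
        rec = buf.read(recsize)
        if not rec:
            break
        yield rec
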
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

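# Illustrative sketch (hypothetical, not part of the original file): streaming
# at most `limit` bytes from one file-like object to another without loading
# everything into memory, much as mktempcopy() above streams a whole file.
def _copy_limited(src, dst, limit=None):
    copied = 0
    for chunk in filechunkiter(src, size=65536, limit=limit):
        dst.write(chunk)
        copied += len(chunk)
    return copied
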
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    s = time.strftime(format, time.gmtime(float(t) - tz))
    if timezone:
        s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
    return s

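# Illustrative sketch (hypothetical, not part of the original file): makedate()
# captures "now" as the (unixtime, offset) pair used for commit dates, and
# datestr() renders such a pair.  For example, datestr((0, 0)) should yield
# "Thu Jan 01 00:00:00 1970 +0000".
def _now_as_string():
    return datestr(makedate())
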
def strdate(string, format='%a %b %d %H:%M:%S %Y'):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def hastimezone(string):
        return (string[-4:].isdigit() and
                (string[-5] == '+' or string[-5] == '-') and
                string[-6].isspace())

    # NOTE: unixtime = localunixtime + offset
    if hastimezone(string):
        date, tz = string[:-6], string[-5:]
        tz = int(tz)
        offset = - 3600 * (tz / 100) - 60 * (tz % 100)
    else:
        date, offset = string, None
    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(string, formats=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    The date may be a "unixtime offset" string or in one of the specified
    formats."""
    if not formats:
        formats = defaultdateformats
    try:
        when, offset = map(int, string.split(' '))
    except ValueError:
        for format in formats:
            try:
                when, offset = strdate(string, format)
            except ValueError:
                pass
            else:
                break
        else:
            raise ValueError(_('invalid date: %r '
                               'see hg(1) manual page for details')
                             % string)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise ValueError(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise ValueError(_('impossible time zone offset: %d') % offset)
    return when, offset

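# Illustrative sketch (hypothetical, not part of the original file): parsedate()
# accepts either the internal "unixtime offset" form or any of the supplied
# formats, so both calls below should produce the same (1000000000, 0) tuple.
def _parsedate_demo():
    a = parsedate('1000000000 0')
    b = parsedate('Sun Sep 09 01:46:40 2001 +0000',
                  ['%a %b %d %H:%M:%S %Y'])
    return a, b
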
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    return user

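# Illustrative sketch (hypothetical, not part of the original file): shortuser()
# reduces a typical "Name <login@host>" committer string to the bare login, so
# both of these calls should return 'jrh'.
def _shortuser_demo():
    return (shortuser('J. Random Hacker <jrh@example.com>'),
            shortuser('jrh@example.com'))
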
def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err

    for root, dirs, files in os.walk(path, onerror=errhandler):
        for d in dirs:
            if d == '.hg':
                yield root
                dirs[:] = []
                break

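# Illustrative sketch (hypothetical, not part of the original file): collecting
# every repository below a directory, e.g. when serving a whole tree of
# repositories.
def _find_repos(root):
    repos = list(walkrepos(root))
    repos.sort()
    return repos
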
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f in os.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

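# Illustrative sketch (hypothetical, not part of the original file): bytecount()
# picks the largest unit that keeps the number short, e.g. bytecount(1) gives
# '1 bytes', bytecount(10 * 1024) gives '10.0 KB' and bytecount(1 << 30) gives
# '1.00 GB'.  A transfer summary might combine it with a rate:
def _transfer_summary(nbytes, seconds):
    return '%s transferred in %.1f seconds (%s/sec)' % (
        bytecount(nbytes), seconds, bytecount(nbytes / seconds))
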
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path
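
# Illustrative sketch (hypothetical, not part of the original file):
# drop_scheme() strips a known URL scheme, so drop_scheme('file',
# 'file:///tmp/repo') gives '/tmp/repo', while paths without that scheme are
# returned unchanged.
def _as_local_path(path):
    return drop_scheme('file', path)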