If local repository is missing, make error message clearer....
Thomas Arendsen Hein
r3079:4c9fcb5e default
@@ -1,3507 +1,3509 @@
# commands.py - command processing for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
demandload(globals(), "fnmatch difflib patch random signal tempfile time")
demandload(globals(), "traceback errno socket version struct atexit sets bz2")
demandload(globals(), "archival cStringIO changegroup")
demandload(globals(), "cmdutil hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
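
# Examples of how logmessage() resolves the options handled above:
#   {'message': 'fix bug', 'logfile': ''}  -> 'fix bug'
#   {'message': '', 'logfile': 'msg.txt'}  -> contents of msg.txt
#   {'message': '', 'logfile': '-'}        -> text read from stdin
# Passing both --message and --logfile aborts, as the first check shows.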

def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
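    # For example, with the defaults increasing_windows(0, 100) yields the
    # (start, size) pairs (0, 8), (8, 16), (24, 32), (56, 44): the window
    # doubles each step, capped at sizelimit, until start passes end.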


    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if repo.changelog.count() == 0:
        return [], False, matchfn

    if follow:
        p = repo.dirstate.parents()[0]
        if p == nullid:
            ui.warn(_('No working directory revision; defaulting to tip\n'))
            start = 'tip'
        else:
            start = repo.changelog.rev(p)
        defrange = '%s:0' % start
    else:
        defrange = 'tip:0'
    revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
    wanted = {}
    slowpath = anypats
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    class followfilter:
        def __init__(self, onlyfirst=False):
            self.startrev = -1
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))

            if self.startrev == -1:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in range(rev, stop-1, -1):
            if ff.match(x) and wanted.has_key(x):
                del wanted[x]

    def iterate():
        if follow and not files:
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn
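
# Illustrative sketch of how a caller might consume the tuple returned above,
# following the event protocol described in the docstring; 'gather' and
# 'display' are hypothetical callbacks standing in for the caller's own logic:
#
#   changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
#   for st, rev, fns in changeiter:
#       if st == 'window':
#           pass                           # reset per-window state
#       elif st == 'add':
#           gather(rev, fns)               # out of order: collect data only
#       elif st == 'iter':
#           display(rev, getchange(rev))   # in order: display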

revrangesep = ':'

def revfix(repo, val, defval):
    '''turn user-level id of changeset into rev number.
    user-level id can be tag, changeset, rev number, or negative rev
    number relative to number of revs (-1 is tip, etc).'''
    if not val:
        return defval
    try:
        num = int(val)
        if str(num) != val:
            raise ValueError
        if num < 0:
            num += repo.changelog.count()
        if num < 0:
            num = 0
        elif num >= repo.changelog.count():
            raise ValueError
    except ValueError:
        try:
            num = repo.changelog.rev(repo.lookup(val))
        except KeyError:
            raise util.Abort(_('invalid revision identifier %s') % val)
    return num
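
# For example, in a repository with 100 changesets:
#   revfix(repo, '-1', None)      -> 99 (negative numbers count back from the end)
#   revfix(repo, '', 42)          -> 42 (an empty value falls back to defval)
#   revfix(repo, 'sometag', None) -> whatever repo.lookup('sometag') resolves to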

def revpair(ui, repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''
    if not revs:
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        start = revs[0]
        if revrangesep in start:
            start, end = start.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, start, None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    if end is not None: end = repo.lookup(str(end))
    return repo.lookup(str(start)), end
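
# For example:
#   revpair(ui, repo, [])         -> (first working directory parent, None)
#   revpair(ui, repo, ['2:5'])    -> (node of rev 2, node of rev 5)
#   revpair(ui, repo, ['2', '5']) behaves the same; more than two revs abort.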

def revrange(ui, repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    seen = {}
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)
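
# For example, revrange(ui, repo, ['5:2', 'tip']) yields '5', '4', '3', '2'
# and then the tip revision (ranges run backwards when start > end, and
# already-seen revisions are skipped via the 'seen' dict).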

def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists") % filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        if compress:
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
            fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)
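
# Resulting file layout, as written above: the header "HG10" followed by a
# bz2 stream of changegroup chunks when compress=True, or the header "HG10UN"
# followed by the raw chunks otherwise; each chunk comes from
# changegroup.genchunk() and the stream ends with changegroup.closechunk().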

def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])

        hexfunc = self.ui.debugflag and hex or short

        parents = [(log.rev(p), hexfunc(p)) for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")

def show_changeset(ui, repo, opts):
    '''show one changeset. uses template or regular display. caller
    can pass in 'style' and 'template' options in opts.'''

    tmpl = opts.get('template')
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl: tmpl = templater.parsestring(tmpl)
    mapfile = opts.get('style') or ui.config('ui', 'style')
    if tmpl or mapfile:
        if mapfile:
            if not os.path.isfile(mapfile):
                mapname = templater.templatepath('map-cmdline.' + mapfile)
                if not mapname: mapname = templater.templatepath(mapfile)
                if mapname: mapfile = mapname
        try:
            t = templater.changeset_templater(ui, repo, mapfile)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo)
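
# Selection order implemented above: an explicit --template wins, then the
# ui.logtemplate config value, then a --style/ui.style map file (looked up
# via templater.templatepath if needed); with none of these, the plain
# changeset_printer is used.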

def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def helpcmd(name):
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helpext(name):
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        try:
            helpcmd(name)
        except UnknownCommand:
            helpext(name)

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files (DEPRECATED)

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    sim = float(opts.get('similarity') or 0)
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        try:
            return ucache[rev]
        except:
            u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
            ucache[rev] = u
            return u

    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate()
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
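
# Illustrative usage (assuming the standard option names wired to the opts
# dict above):
#   hg annotate --user --date src/foo.c
# prints each line of src/foo.c prefixed by the short user name and date of
# the revision that introduced it; with no selector given, the revision
# number is shown by default (opts['number'] is forced to 1 above).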

def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        node, p2 = repo.dirstate.parents()
        if p2 != nullid:
            raise util.Abort(_('uncommitted merge - please provide a '
                               'specific revision'))

    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
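
# Illustrative usage: "hg archive -t tgz -r 1000 ../myproject-1000.tgz"
# writes a gzip-compressed tar of revision 1000, while "hg archive snapshot"
# fills a "snapshot" directory with plain files (the default "files" type).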

def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s' %
                               (short(p), short(node))))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            n = _lookup(repo, hex(op1))
            hg.merge(repo, n)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    cg = repo.changegroup(o, 'bundle')
    write_bundle(cg, fname)
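
# Illustrative usage: "hg bundle changes.hg ../other-repo" collects every
# changeset missing from ../other-repo into changes.hg, which can later be
# applied there with "hg unbundle changes.hg".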

def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'] or "-1")
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())
885
885
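# Editor's sketch (hypothetical helper, not part of Mercurial): the same two
# context calls cat() uses above, reduced to fetching one file's data at a
# revision. "-1" selects the tip, matching the default above.
def example_cat_one(repo, path, rev="-1"):
    ctx = repo.changectx(rev)         # changeset context for the revision
    return ctx.filectx(path).data()   # raw contents of path at that revision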
886 def clone(ui, source, dest=None, **opts):
886 def clone(ui, source, dest=None, **opts):
887 """make a copy of an existing repository
887 """make a copy of an existing repository
888
888
889 Create a copy of an existing repository in a new directory.
889 Create a copy of an existing repository in a new directory.
890
890
891 If no destination directory name is specified, it defaults to the
891 If no destination directory name is specified, it defaults to the
892 basename of the source.
892 basename of the source.
893
893
894 The location of the source is added to the new repository's
894 The location of the source is added to the new repository's
895 .hg/hgrc file, as the default to be used for future pulls.
895 .hg/hgrc file, as the default to be used for future pulls.
896
896
897 For efficiency, hardlinks are used for cloning whenever the source
897 For efficiency, hardlinks are used for cloning whenever the source
898 and destination are on the same filesystem (note this applies only
898 and destination are on the same filesystem (note this applies only
899 to the repository data, not to the checked out files). Some
899 to the repository data, not to the checked out files). Some
900 filesystems, such as AFS, implement hardlinking incorrectly, but
900 filesystems, such as AFS, implement hardlinking incorrectly, but
901 do not report errors. In these cases, use the --pull option to
901 do not report errors. In these cases, use the --pull option to
902 avoid hardlinking.
902 avoid hardlinking.
903
903
904 You can safely clone repositories and checked out files using full
904 You can safely clone repositories and checked out files using full
905 hardlinks with
905 hardlinks with
906
906
907 $ cp -al REPO REPOCLONE
907 $ cp -al REPO REPOCLONE
908
908
909 which is the fastest way to clone. However, the operation is not
909 which is the fastest way to clone. However, the operation is not
910 atomic (making sure REPO is not modified during the operation is
910 atomic (making sure REPO is not modified during the operation is
911 up to you) and you have to make sure your editor breaks hardlinks
911 up to you) and you have to make sure your editor breaks hardlinks
912 (Emacs and most Linux kernel tools do so).
912 (Emacs and most Linux kernel tools do so).
913
913
914 If you use the -r option to clone up to a specific revision, no
914 If you use the -r option to clone up to a specific revision, no
915 subsequent revisions will be present in the cloned repository.
915 subsequent revisions will be present in the cloned repository.
916 This option implies --pull, even on local repositories.
916 This option implies --pull, even on local repositories.
917
917
918 See pull for valid source format details.
918 See pull for valid source format details.
919
919
920 It is possible to specify an ssh:// URL as the destination, but no
920 It is possible to specify an ssh:// URL as the destination, but no
921 .hg/hgrc will be created on the remote side. Look at the help text
921 .hg/hgrc will be created on the remote side. Look at the help text
922 for the pull command for important details about ssh:// URLs.
922 for the pull command for important details about ssh:// URLs.
923 """
923 """
924 setremoteconfig(ui, opts)
924 setremoteconfig(ui, opts)
925 hg.clone(ui, ui.expandpath(source), dest,
925 hg.clone(ui, ui.expandpath(source), dest,
926 pull=opts['pull'],
926 pull=opts['pull'],
927 stream=opts['uncompressed'],
927 stream=opts['uncompressed'],
928 rev=opts['rev'],
928 rev=opts['rev'],
929 update=not opts['noupdate'])
929 update=not opts['noupdate'])
930
930
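# Illustrative usage (editor's note; revision and paths are placeholders):
#
#   hg clone --rev REV SOURCE DEST    # clone up to REV only; implies --pull
#   hg clone --pull SOURCE DEST       # avoid hardlinks (e.g. on AFS)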
931 def commit(ui, repo, *pats, **opts):
931 def commit(ui, repo, *pats, **opts):
932 """commit the specified files or all outstanding changes
932 """commit the specified files or all outstanding changes
933
933
934 Commit changes to the given files into the repository.
934 Commit changes to the given files into the repository.
935
935
936 If a list of files is omitted, all changes reported by "hg status"
936 If a list of files is omitted, all changes reported by "hg status"
937 will be committed.
937 will be committed.
938
938
939 If no commit message is specified, the editor configured in your hgrc
939 If no commit message is specified, the editor configured in your hgrc
940 or in the EDITOR environment variable is started so you can enter a message.
940 or in the EDITOR environment variable is started so you can enter a message.
941 """
941 """
942 message = logmessage(opts)
942 message = logmessage(opts)
943
943
944 if opts['addremove']:
944 if opts['addremove']:
945 cmdutil.addremove(repo, pats, opts)
945 cmdutil.addremove(repo, pats, opts)
946 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
946 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
947 if pats:
947 if pats:
948 modified, added, removed = repo.status(files=fns, match=match)[:3]
948 modified, added, removed = repo.status(files=fns, match=match)[:3]
949 files = modified + added + removed
949 files = modified + added + removed
950 else:
950 else:
951 files = []
951 files = []
952 try:
952 try:
953 repo.commit(files, message, opts['user'], opts['date'], match,
953 repo.commit(files, message, opts['user'], opts['date'], match,
954 force_editor=opts.get('force_editor'))
954 force_editor=opts.get('force_editor'))
955 except ValueError, inst:
955 except ValueError, inst:
956 raise util.Abort(str(inst))
956 raise util.Abort(str(inst))
957
957
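# Illustrative usage (editor's note; long option names follow the opts keys
# used above, file names and messages are placeholders):
#
#   hg commit --message "fix bug" foo.c    # commit only foo.c
#   hg commit --logfile msg.txt            # read the commit message from a file
#   hg commit --addremove                  # add new and remove missing files first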
958 def docopy(ui, repo, pats, opts, wlock):
958 def docopy(ui, repo, pats, opts, wlock):
959 # called with the repo lock held
959 # called with the repo lock held
960 cwd = repo.getcwd()
960 cwd = repo.getcwd()
961 errors = 0
961 errors = 0
962 copied = []
962 copied = []
963 targets = {}
963 targets = {}
964
964
965 def okaytocopy(abs, rel, exact):
965 def okaytocopy(abs, rel, exact):
966 reasons = {'?': _('is not managed'),
966 reasons = {'?': _('is not managed'),
967 'a': _('has been marked for add'),
967 'a': _('has been marked for add'),
968 'r': _('has been marked for remove')}
968 'r': _('has been marked for remove')}
969 state = repo.dirstate.state(abs)
969 state = repo.dirstate.state(abs)
970 reason = reasons.get(state)
970 reason = reasons.get(state)
971 if reason:
971 if reason:
972 if state == 'a':
972 if state == 'a':
973 origsrc = repo.dirstate.copied(abs)
973 origsrc = repo.dirstate.copied(abs)
974 if origsrc is not None:
974 if origsrc is not None:
975 return origsrc
975 return origsrc
976 if exact:
976 if exact:
977 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
977 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
978 else:
978 else:
979 return abs
979 return abs
980
980
981 def copy(origsrc, abssrc, relsrc, target, exact):
981 def copy(origsrc, abssrc, relsrc, target, exact):
982 abstarget = util.canonpath(repo.root, cwd, target)
982 abstarget = util.canonpath(repo.root, cwd, target)
983 reltarget = util.pathto(cwd, abstarget)
983 reltarget = util.pathto(cwd, abstarget)
984 prevsrc = targets.get(abstarget)
984 prevsrc = targets.get(abstarget)
985 if prevsrc is not None:
985 if prevsrc is not None:
986 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
986 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
987 (reltarget, abssrc, prevsrc))
987 (reltarget, abssrc, prevsrc))
988 return
988 return
989 if (not opts['after'] and os.path.exists(reltarget) or
989 if (not opts['after'] and os.path.exists(reltarget) or
990 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
990 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
991 if not opts['force']:
991 if not opts['force']:
992 ui.warn(_('%s: not overwriting - file exists\n') %
992 ui.warn(_('%s: not overwriting - file exists\n') %
993 reltarget)
993 reltarget)
994 return
994 return
995 if not opts['after'] and not opts.get('dry_run'):
995 if not opts['after'] and not opts.get('dry_run'):
996 os.unlink(reltarget)
996 os.unlink(reltarget)
997 if opts['after']:
997 if opts['after']:
998 if not os.path.exists(reltarget):
998 if not os.path.exists(reltarget):
999 return
999 return
1000 else:
1000 else:
1001 targetdir = os.path.dirname(reltarget) or '.'
1001 targetdir = os.path.dirname(reltarget) or '.'
1002 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1002 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1003 os.makedirs(targetdir)
1003 os.makedirs(targetdir)
1004 try:
1004 try:
1005 restore = repo.dirstate.state(abstarget) == 'r'
1005 restore = repo.dirstate.state(abstarget) == 'r'
1006 if restore and not opts.get('dry_run'):
1006 if restore and not opts.get('dry_run'):
1007 repo.undelete([abstarget], wlock)
1007 repo.undelete([abstarget], wlock)
1008 try:
1008 try:
1009 if not opts.get('dry_run'):
1009 if not opts.get('dry_run'):
1010 shutil.copyfile(relsrc, reltarget)
1010 shutil.copyfile(relsrc, reltarget)
1011 shutil.copymode(relsrc, reltarget)
1011 shutil.copymode(relsrc, reltarget)
1012 restore = False
1012 restore = False
1013 finally:
1013 finally:
1014 if restore:
1014 if restore:
1015 repo.remove([abstarget], wlock)
1015 repo.remove([abstarget], wlock)
1016 except shutil.Error, inst:
1016 except shutil.Error, inst:
1017 raise util.Abort(str(inst))
1017 raise util.Abort(str(inst))
1018 except IOError, inst:
1018 except IOError, inst:
1019 if inst.errno == errno.ENOENT:
1019 if inst.errno == errno.ENOENT:
1020 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1020 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1021 else:
1021 else:
1022 ui.warn(_('%s: cannot copy - %s\n') %
1022 ui.warn(_('%s: cannot copy - %s\n') %
1023 (relsrc, inst.strerror))
1023 (relsrc, inst.strerror))
1024 errors += 1
1024 errors += 1
1025 return
1025 return
1026 if ui.verbose or not exact:
1026 if ui.verbose or not exact:
1027 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1027 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1028 targets[abstarget] = abssrc
1028 targets[abstarget] = abssrc
1029 if abstarget != origsrc and not opts.get('dry_run'):
1029 if abstarget != origsrc and not opts.get('dry_run'):
1030 repo.copy(origsrc, abstarget, wlock)
1030 repo.copy(origsrc, abstarget, wlock)
1031 copied.append((abssrc, relsrc, exact))
1031 copied.append((abssrc, relsrc, exact))
1032
1032
1033 def targetpathfn(pat, dest, srcs):
1033 def targetpathfn(pat, dest, srcs):
1034 if os.path.isdir(pat):
1034 if os.path.isdir(pat):
1035 abspfx = util.canonpath(repo.root, cwd, pat)
1035 abspfx = util.canonpath(repo.root, cwd, pat)
1036 if destdirexists:
1036 if destdirexists:
1037 striplen = len(os.path.split(abspfx)[0])
1037 striplen = len(os.path.split(abspfx)[0])
1038 else:
1038 else:
1039 striplen = len(abspfx)
1039 striplen = len(abspfx)
1040 if striplen:
1040 if striplen:
1041 striplen += len(os.sep)
1041 striplen += len(os.sep)
1042 res = lambda p: os.path.join(dest, p[striplen:])
1042 res = lambda p: os.path.join(dest, p[striplen:])
1043 elif destdirexists:
1043 elif destdirexists:
1044 res = lambda p: os.path.join(dest, os.path.basename(p))
1044 res = lambda p: os.path.join(dest, os.path.basename(p))
1045 else:
1045 else:
1046 res = lambda p: dest
1046 res = lambda p: dest
1047 return res
1047 return res
1048
1048
1049 def targetpathafterfn(pat, dest, srcs):
1049 def targetpathafterfn(pat, dest, srcs):
1050 if util.patkind(pat, None)[0]:
1050 if util.patkind(pat, None)[0]:
1051 # a mercurial pattern
1051 # a mercurial pattern
1052 res = lambda p: os.path.join(dest, os.path.basename(p))
1052 res = lambda p: os.path.join(dest, os.path.basename(p))
1053 else:
1053 else:
1054 abspfx = util.canonpath(repo.root, cwd, pat)
1054 abspfx = util.canonpath(repo.root, cwd, pat)
1055 if len(abspfx) < len(srcs[0][0]):
1055 if len(abspfx) < len(srcs[0][0]):
1056 # A directory. Either the target path contains the last
1056 # A directory. Either the target path contains the last
1057 # component of the source path or it does not.
1057 # component of the source path or it does not.
1058 def evalpath(striplen):
1058 def evalpath(striplen):
1059 score = 0
1059 score = 0
1060 for s in srcs:
1060 for s in srcs:
1061 t = os.path.join(dest, s[0][striplen:])
1061 t = os.path.join(dest, s[0][striplen:])
1062 if os.path.exists(t):
1062 if os.path.exists(t):
1063 score += 1
1063 score += 1
1064 return score
1064 return score
1065
1065
1066 striplen = len(abspfx)
1066 striplen = len(abspfx)
1067 if striplen:
1067 if striplen:
1068 striplen += len(os.sep)
1068 striplen += len(os.sep)
1069 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1069 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1070 score = evalpath(striplen)
1070 score = evalpath(striplen)
1071 striplen1 = len(os.path.split(abspfx)[0])
1071 striplen1 = len(os.path.split(abspfx)[0])
1072 if striplen1:
1072 if striplen1:
1073 striplen1 += len(os.sep)
1073 striplen1 += len(os.sep)
1074 if evalpath(striplen1) > score:
1074 if evalpath(striplen1) > score:
1075 striplen = striplen1
1075 striplen = striplen1
1076 res = lambda p: os.path.join(dest, p[striplen:])
1076 res = lambda p: os.path.join(dest, p[striplen:])
1077 else:
1077 else:
1078 # a file
1078 # a file
1079 if destdirexists:
1079 if destdirexists:
1080 res = lambda p: os.path.join(dest, os.path.basename(p))
1080 res = lambda p: os.path.join(dest, os.path.basename(p))
1081 else:
1081 else:
1082 res = lambda p: dest
1082 res = lambda p: dest
1083 return res
1083 return res
1084
1084
1085
1085
1086 pats = list(pats)
1086 pats = list(pats)
1087 if not pats:
1087 if not pats:
1088 raise util.Abort(_('no source or destination specified'))
1088 raise util.Abort(_('no source or destination specified'))
1089 if len(pats) == 1:
1089 if len(pats) == 1:
1090 raise util.Abort(_('no destination specified'))
1090 raise util.Abort(_('no destination specified'))
1091 dest = pats.pop()
1091 dest = pats.pop()
1092 destdirexists = os.path.isdir(dest)
1092 destdirexists = os.path.isdir(dest)
1093 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1093 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1094 raise util.Abort(_('with multiple sources, destination must be an '
1094 raise util.Abort(_('with multiple sources, destination must be an '
1095 'existing directory'))
1095 'existing directory'))
1096 if opts['after']:
1096 if opts['after']:
1097 tfn = targetpathafterfn
1097 tfn = targetpathafterfn
1098 else:
1098 else:
1099 tfn = targetpathfn
1099 tfn = targetpathfn
1100 copylist = []
1100 copylist = []
1101 for pat in pats:
1101 for pat in pats:
1102 srcs = []
1102 srcs = []
1103 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1103 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1104 origsrc = okaytocopy(abssrc, relsrc, exact)
1104 origsrc = okaytocopy(abssrc, relsrc, exact)
1105 if origsrc:
1105 if origsrc:
1106 srcs.append((origsrc, abssrc, relsrc, exact))
1106 srcs.append((origsrc, abssrc, relsrc, exact))
1107 if not srcs:
1107 if not srcs:
1108 continue
1108 continue
1109 copylist.append((tfn(pat, dest, srcs), srcs))
1109 copylist.append((tfn(pat, dest, srcs), srcs))
1110 if not copylist:
1110 if not copylist:
1111 raise util.Abort(_('no files to copy'))
1111 raise util.Abort(_('no files to copy'))
1112
1112
1113 for targetpath, srcs in copylist:
1113 for targetpath, srcs in copylist:
1114 for origsrc, abssrc, relsrc, exact in srcs:
1114 for origsrc, abssrc, relsrc, exact in srcs:
1115 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1115 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1116
1116
1117 if errors:
1117 if errors:
1118 ui.warn(_('(consider using --after)\n'))
1118 ui.warn(_('(consider using --after)\n'))
1119 return errors, copied
1119 return errors, copied
1120
1120
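# Editor's sketch (hypothetical helper, not part of Mercurial): the striplen
# arithmetic targetpathfn() applies when the source pattern is a directory and
# the destination directory already exists -- the source directory is re-rooted
# under dest, keeping its last path component. Uses the os module already used
# throughout this file.
def example_targetpath(srcdir, dest, srcfile):
    striplen = len(os.path.split(srcdir)[0])   # length of the source dir's parent
    if striplen:
        striplen += len(os.sep)
    return os.path.join(dest, srcfile[striplen:])
# example_targetpath('a/b', 'dst', 'a/b/c.txt') -> 'dst/b/c.txt'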
1121 def copy(ui, repo, *pats, **opts):
1121 def copy(ui, repo, *pats, **opts):
1122 """mark files as copied for the next commit
1122 """mark files as copied for the next commit
1123
1123
1124 Mark dest as having copies of source files. If dest is a
1124 Mark dest as having copies of source files. If dest is a
1125 directory, copies are put in that directory. If dest is a file,
1125 directory, copies are put in that directory. If dest is a file,
1126 there can only be one source.
1126 there can only be one source.
1127
1127
1128 By default, this command copies the contents of files as they
1128 By default, this command copies the contents of files as they
1129 stand in the working directory. If invoked with --after, the
1129 stand in the working directory. If invoked with --after, the
1130 operation is recorded, but no copying is performed.
1130 operation is recorded, but no copying is performed.
1131
1131
1132 This command takes effect in the next commit.
1132 This command takes effect in the next commit.
1133
1133
1134 NOTE: This command should be treated as experimental. While it
1134 NOTE: This command should be treated as experimental. While it
1135 should properly record copied files, this information is not yet
1135 should properly record copied files, this information is not yet
1136 fully used by merge, nor fully reported by log.
1136 fully used by merge, nor fully reported by log.
1137 """
1137 """
1138 wlock = repo.wlock(0)
1138 wlock = repo.wlock(0)
1139 errs, copied = docopy(ui, repo, pats, opts, wlock)
1139 errs, copied = docopy(ui, repo, pats, opts, wlock)
1140 return errs
1140 return errs
1141
1141
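# Illustrative usage (editor's note, file names are placeholders):
#
#   hg copy foo.c bar.c          # copy foo.c to bar.c and record the copy
#   hg copy --after foo.c bar.c  # only record a copy already made on disk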
1142 def debugancestor(ui, index, rev1, rev2):
1142 def debugancestor(ui, index, rev1, rev2):
1143 """find the ancestor revision of two revisions in a given index"""
1143 """find the ancestor revision of two revisions in a given index"""
1144 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1144 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1145 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1145 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1146 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1146 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1147
1147
1148 def debugcomplete(ui, cmd='', **opts):
1148 def debugcomplete(ui, cmd='', **opts):
1149 """returns the completion list associated with the given command"""
1149 """returns the completion list associated with the given command"""
1150
1150
1151 if opts['options']:
1151 if opts['options']:
1152 options = []
1152 options = []
1153 otables = [globalopts]
1153 otables = [globalopts]
1154 if cmd:
1154 if cmd:
1155 aliases, entry = findcmd(ui, cmd)
1155 aliases, entry = findcmd(ui, cmd)
1156 otables.append(entry[1])
1156 otables.append(entry[1])
1157 for t in otables:
1157 for t in otables:
1158 for o in t:
1158 for o in t:
1159 if o[0]:
1159 if o[0]:
1160 options.append('-%s' % o[0])
1160 options.append('-%s' % o[0])
1161 options.append('--%s' % o[1])
1161 options.append('--%s' % o[1])
1162 ui.write("%s\n" % "\n".join(options))
1162 ui.write("%s\n" % "\n".join(options))
1163 return
1163 return
1164
1164
1165 clist = findpossible(ui, cmd).keys()
1165 clist = findpossible(ui, cmd).keys()
1166 clist.sort()
1166 clist.sort()
1167 ui.write("%s\n" % "\n".join(clist))
1167 ui.write("%s\n" % "\n".join(clist))
1168
1168
1169 def debugrebuildstate(ui, repo, rev=None):
1169 def debugrebuildstate(ui, repo, rev=None):
1170 """rebuild the dirstate as it would look like for the given revision"""
1170 """rebuild the dirstate as it would look like for the given revision"""
1171 if not rev:
1171 if not rev:
1172 rev = repo.changelog.tip()
1172 rev = repo.changelog.tip()
1173 else:
1173 else:
1174 rev = repo.lookup(rev)
1174 rev = repo.lookup(rev)
1175 change = repo.changelog.read(rev)
1175 change = repo.changelog.read(rev)
1176 n = change[0]
1176 n = change[0]
1177 files = repo.manifest.read(n)
1177 files = repo.manifest.read(n)
1178 wlock = repo.wlock()
1178 wlock = repo.wlock()
1179 repo.dirstate.rebuild(rev, files)
1179 repo.dirstate.rebuild(rev, files)
1180
1180
1181 def debugcheckstate(ui, repo):
1181 def debugcheckstate(ui, repo):
1182 """validate the correctness of the current dirstate"""
1182 """validate the correctness of the current dirstate"""
1183 parent1, parent2 = repo.dirstate.parents()
1183 parent1, parent2 = repo.dirstate.parents()
1184 repo.dirstate.read()
1184 repo.dirstate.read()
1185 dc = repo.dirstate.map
1185 dc = repo.dirstate.map
1186 keys = dc.keys()
1186 keys = dc.keys()
1187 keys.sort()
1187 keys.sort()
1188 m1n = repo.changelog.read(parent1)[0]
1188 m1n = repo.changelog.read(parent1)[0]
1189 m2n = repo.changelog.read(parent2)[0]
1189 m2n = repo.changelog.read(parent2)[0]
1190 m1 = repo.manifest.read(m1n)
1190 m1 = repo.manifest.read(m1n)
1191 m2 = repo.manifest.read(m2n)
1191 m2 = repo.manifest.read(m2n)
1192 errors = 0
1192 errors = 0
1193 for f in dc:
1193 for f in dc:
1194 state = repo.dirstate.state(f)
1194 state = repo.dirstate.state(f)
1195 if state in "nr" and f not in m1:
1195 if state in "nr" and f not in m1:
1196 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1196 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1197 errors += 1
1197 errors += 1
1198 if state in "a" and f in m1:
1198 if state in "a" and f in m1:
1199 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1199 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1200 errors += 1
1200 errors += 1
1201 if state in "m" and f not in m1 and f not in m2:
1201 if state in "m" and f not in m1 and f not in m2:
1202 ui.warn(_("%s in state %s, but not in either manifest\n") %
1202 ui.warn(_("%s in state %s, but not in either manifest\n") %
1203 (f, state))
1203 (f, state))
1204 errors += 1
1204 errors += 1
1205 for f in m1:
1205 for f in m1:
1206 state = repo.dirstate.state(f)
1206 state = repo.dirstate.state(f)
1207 if state not in "nrm":
1207 if state not in "nrm":
1208 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1208 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1209 errors += 1
1209 errors += 1
1210 if errors:
1210 if errors:
1211 error = _(".hg/dirstate inconsistent with current parent's manifest")
1211 error = _(".hg/dirstate inconsistent with current parent's manifest")
1212 raise util.Abort(error)
1212 raise util.Abort(error)
1213
1213
1214 def debugconfig(ui, repo, *values):
1214 def debugconfig(ui, repo, *values):
1215 """show combined config settings from all hgrc files
1215 """show combined config settings from all hgrc files
1216
1216
1217 With no args, print names and values of all config items.
1217 With no args, print names and values of all config items.
1218
1218
1219 With one arg of the form section.name, print just the value of
1219 With one arg of the form section.name, print just the value of
1220 that config item.
1220 that config item.
1221
1221
1222 With multiple args, print names and values of all config items
1222 With multiple args, print names and values of all config items
1223 with matching section names."""
1223 with matching section names."""
1224
1224
1225 if values:
1225 if values:
1226 if len([v for v in values if '.' in v]) > 1:
1226 if len([v for v in values if '.' in v]) > 1:
1227 raise util.Abort(_('only one config item permitted'))
1227 raise util.Abort(_('only one config item permitted'))
1228 for section, name, value in ui.walkconfig():
1228 for section, name, value in ui.walkconfig():
1229 sectname = section + '.' + name
1229 sectname = section + '.' + name
1230 if values:
1230 if values:
1231 for v in values:
1231 for v in values:
1232 if v == section:
1232 if v == section:
1233 ui.write('%s=%s\n' % (sectname, value))
1233 ui.write('%s=%s\n' % (sectname, value))
1234 elif v == sectname:
1234 elif v == sectname:
1235 ui.write(value, '\n')
1235 ui.write(value, '\n')
1236 else:
1236 else:
1237 ui.write('%s=%s\n' % (sectname, value))
1237 ui.write('%s=%s\n' % (sectname, value))
1238
1238
1239 def debugsetparents(ui, repo, rev1, rev2=None):
1239 def debugsetparents(ui, repo, rev1, rev2=None):
1240 """manually set the parents of the current working directory
1240 """manually set the parents of the current working directory
1241
1241
1242 This is useful for writing repository conversion tools, but should
1242 This is useful for writing repository conversion tools, but should
1243 be used with care.
1243 be used with care.
1244 """
1244 """
1245
1245
1246 if not rev2:
1246 if not rev2:
1247 rev2 = hex(nullid)
1247 rev2 = hex(nullid)
1248
1248
1249 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1249 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1250
1250
1251 def debugstate(ui, repo):
1251 def debugstate(ui, repo):
1252 """show the contents of the current dirstate"""
1252 """show the contents of the current dirstate"""
1253 repo.dirstate.read()
1253 repo.dirstate.read()
1254 dc = repo.dirstate.map
1254 dc = repo.dirstate.map
1255 keys = dc.keys()
1255 keys = dc.keys()
1256 keys.sort()
1256 keys.sort()
1257 for file_ in keys:
1257 for file_ in keys:
1258 ui.write("%c %3o %10d %s %s\n"
1258 ui.write("%c %3o %10d %s %s\n"
1259 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1259 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1260 time.strftime("%x %X",
1260 time.strftime("%x %X",
1261 time.localtime(dc[file_][3])), file_))
1261 time.localtime(dc[file_][3])), file_))
1262 for f in repo.dirstate.copies:
1262 for f in repo.dirstate.copies:
1263 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1263 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1264
1264
1265 def debugdata(ui, file_, rev):
1265 def debugdata(ui, file_, rev):
1266 """dump the contents of an data file revision"""
1266 """dump the contents of an data file revision"""
1267 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1267 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1268 file_[:-2] + ".i", file_, 0)
1268 file_[:-2] + ".i", file_, 0)
1269 try:
1269 try:
1270 ui.write(r.revision(r.lookup(rev)))
1270 ui.write(r.revision(r.lookup(rev)))
1271 except KeyError:
1271 except KeyError:
1272 raise util.Abort(_('invalid revision identifier %s') % rev)
1272 raise util.Abort(_('invalid revision identifier %s') % rev)
1273
1273
1274 def debugindex(ui, file_):
1274 def debugindex(ui, file_):
1275 """dump the contents of an index file"""
1275 """dump the contents of an index file"""
1276 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1276 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1277 ui.write(" rev offset length base linkrev" +
1277 ui.write(" rev offset length base linkrev" +
1278 " nodeid p1 p2\n")
1278 " nodeid p1 p2\n")
1279 for i in range(r.count()):
1279 for i in range(r.count()):
1280 node = r.node(i)
1280 node = r.node(i)
1281 pp = r.parents(node)
1281 pp = r.parents(node)
1282 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1282 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1283 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1283 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1284 short(node), short(pp[0]), short(pp[1])))
1284 short(node), short(pp[0]), short(pp[1])))
1285
1285
1286 def debugindexdot(ui, file_):
1286 def debugindexdot(ui, file_):
1287 """dump an index DAG as a .dot file"""
1287 """dump an index DAG as a .dot file"""
1288 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1288 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1289 ui.write("digraph G {\n")
1289 ui.write("digraph G {\n")
1290 for i in range(r.count()):
1290 for i in range(r.count()):
1291 node = r.node(i)
1291 node = r.node(i)
1292 pp = r.parents(node)
1292 pp = r.parents(node)
1293 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1293 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1294 if pp[1] != nullid:
1294 if pp[1] != nullid:
1295 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1295 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1296 ui.write("}\n")
1296 ui.write("}\n")
1297
1297
1298 def debugrename(ui, repo, file, rev=None):
1298 def debugrename(ui, repo, file, rev=None):
1299 """dump rename information"""
1299 """dump rename information"""
1300 r = repo.file(relpath(repo, [file])[0])
1300 r = repo.file(relpath(repo, [file])[0])
1301 if rev:
1301 if rev:
1302 try:
1302 try:
1303 # assume all revision numbers are for changesets
1303 # assume all revision numbers are for changesets
1304 n = repo.lookup(rev)
1304 n = repo.lookup(rev)
1305 change = repo.changelog.read(n)
1305 change = repo.changelog.read(n)
1306 m = repo.manifest.read(change[0])
1306 m = repo.manifest.read(change[0])
1307 n = m[relpath(repo, [file])[0]]
1307 n = m[relpath(repo, [file])[0]]
1308 except (hg.RepoError, KeyError):
1308 except (hg.RepoError, KeyError):
1309 n = r.lookup(rev)
1309 n = r.lookup(rev)
1310 else:
1310 else:
1311 n = r.tip()
1311 n = r.tip()
1312 m = r.renamed(n)
1312 m = r.renamed(n)
1313 if m:
1313 if m:
1314 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1314 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1315 else:
1315 else:
1316 ui.write(_("not renamed\n"))
1316 ui.write(_("not renamed\n"))
1317
1317
1318 def debugwalk(ui, repo, *pats, **opts):
1318 def debugwalk(ui, repo, *pats, **opts):
1319 """show how files match on given patterns"""
1319 """show how files match on given patterns"""
1320 items = list(cmdutil.walk(repo, pats, opts))
1320 items = list(cmdutil.walk(repo, pats, opts))
1321 if not items:
1321 if not items:
1322 return
1322 return
1323 fmt = '%%s %%-%ds %%-%ds %%s' % (
1323 fmt = '%%s %%-%ds %%-%ds %%s' % (
1324 max([len(abs) for (src, abs, rel, exact) in items]),
1324 max([len(abs) for (src, abs, rel, exact) in items]),
1325 max([len(rel) for (src, abs, rel, exact) in items]))
1325 max([len(rel) for (src, abs, rel, exact) in items]))
1326 for src, abs, rel, exact in items:
1326 for src, abs, rel, exact in items:
1327 line = fmt % (src, abs, rel, exact and 'exact' or '')
1327 line = fmt % (src, abs, rel, exact and 'exact' or '')
1328 ui.write("%s\n" % line.rstrip())
1328 ui.write("%s\n" % line.rstrip())
1329
1329
1330 def diff(ui, repo, *pats, **opts):
1330 def diff(ui, repo, *pats, **opts):
1331 """diff repository (or selected files)
1331 """diff repository (or selected files)
1332
1332
1333 Show differences between revisions for the specified files.
1333 Show differences between revisions for the specified files.
1334
1334
1335 Differences between files are shown using the unified diff format.
1335 Differences between files are shown using the unified diff format.
1336
1336
1337 When two revision arguments are given, then changes are shown
1337 When two revision arguments are given, then changes are shown
1338 between those revisions. If only one revision is specified then
1338 between those revisions. If only one revision is specified then
1339 that revision is compared to the working directory, and, when no
1339 that revision is compared to the working directory, and, when no
1340 revisions are specified, the working directory files are compared
1340 revisions are specified, the working directory files are compared
1341 to its parent.
1341 to its parent.
1342
1342
1343 Without the -a option, diff will avoid generating diffs of files
1343 Without the -a option, diff will avoid generating diffs of files
1344 it detects as binary. With -a, diff will generate a diff anyway,
1344 it detects as binary. With -a, diff will generate a diff anyway,
1345 probably with undesirable results.
1345 probably with undesirable results.
1346 """
1346 """
1347 node1, node2 = revpair(ui, repo, opts['rev'])
1347 node1, node2 = revpair(ui, repo, opts['rev'])
1348
1348
1349 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1349 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1350
1350
1351 patch.diff(repo, node1, node2, fns, match=matchfn,
1351 patch.diff(repo, node1, node2, fns, match=matchfn,
1352 opts=patch.diffopts(ui, opts))
1352 opts=patch.diffopts(ui, opts))
1353
1353
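# Illustrative usage (editor's note; revisions and paths are placeholders):
#
#   hg diff                           # working directory vs. its parent
#   hg diff --rev REV FILE            # one revision vs. the working directory
#   hg diff --rev REV1 --rev REV2     # changes between two revisions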
1354 def export(ui, repo, *changesets, **opts):
1354 def export(ui, repo, *changesets, **opts):
1355 """dump the header and diffs for one or more changesets
1355 """dump the header and diffs for one or more changesets
1356
1356
1357 Print the changeset header and diffs for one or more revisions.
1357 Print the changeset header and diffs for one or more revisions.
1358
1358
1359 The information shown in the changeset header is: author,
1359 The information shown in the changeset header is: author,
1360 changeset hash, parent and commit comment.
1360 changeset hash, parent and commit comment.
1361
1361
1362 Output may be to a file, in which case the name of the file is
1362 Output may be to a file, in which case the name of the file is
1363 given using a format string. The formatting rules are as follows:
1363 given using a format string. The formatting rules are as follows:
1364
1364
1365 %% literal "%" character
1365 %% literal "%" character
1366 %H changeset hash (40 bytes of hexadecimal)
1366 %H changeset hash (40 bytes of hexadecimal)
1367 %N number of patches being generated
1367 %N number of patches being generated
1368 %R changeset revision number
1368 %R changeset revision number
1369 %b basename of the exporting repository
1369 %b basename of the exporting repository
1370 %h short-form changeset hash (12 bytes of hexadecimal)
1370 %h short-form changeset hash (12 bytes of hexadecimal)
1371 %n zero-padded sequence number, starting at 1
1371 %n zero-padded sequence number, starting at 1
1372 %r zero-padded changeset revision number
1372 %r zero-padded changeset revision number
1373
1373
1374 Without the -a option, export will avoid generating diffs of files
1374 Without the -a option, export will avoid generating diffs of files
1375 it detects as binary. With -a, export will generate a diff anyway,
1375 it detects as binary. With -a, export will generate a diff anyway,
1376 probably with undesirable results.
1376 probably with undesirable results.
1377
1377
1378 With the --switch-parent option, the diff will be against the second
1378 With the --switch-parent option, the diff will be against the second
1379 parent. This can be useful for reviewing a merge.
1379 parent. This can be useful for reviewing a merge.
1380 """
1380 """
1381 if not changesets:
1381 if not changesets:
1382 raise util.Abort(_("export requires at least one changeset"))
1382 raise util.Abort(_("export requires at least one changeset"))
1383 revs = list(revrange(ui, repo, changesets))
1383 revs = list(revrange(ui, repo, changesets))
1384 if len(revs) > 1:
1384 if len(revs) > 1:
1385 ui.note(_('exporting patches:\n'))
1385 ui.note(_('exporting patches:\n'))
1386 else:
1386 else:
1387 ui.note(_('exporting patch:\n'))
1387 ui.note(_('exporting patch:\n'))
1388 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1388 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1389 switch_parent=opts['switch_parent'],
1389 switch_parent=opts['switch_parent'],
1390 opts=patch.diffopts(ui, opts))
1390 opts=patch.diffopts(ui, opts))
1391
1391
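# Illustrative usage (editor's note): the format keys come from the docstring
# above; the output file name shown is a made-up example.
#
#   hg export REV                                # print the patch to stdout
#   hg export --output '%R-%h.patch' REV1 REV2   # e.g. 12-0123456789ab.patch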
1392 def forget(ui, repo, *pats, **opts):
1392 def forget(ui, repo, *pats, **opts):
1393 """don't add the specified files on the next commit (DEPRECATED)
1393 """don't add the specified files on the next commit (DEPRECATED)
1394
1394
1395 (DEPRECATED)
1395 (DEPRECATED)
1396 Undo an 'hg add' scheduled for the next commit.
1396 Undo an 'hg add' scheduled for the next commit.
1397
1397
1398 This command is now deprecated and will be removed in a future
1398 This command is now deprecated and will be removed in a future
1399 release. Please use revert instead.
1399 release. Please use revert instead.
1400 """
1400 """
1401 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1401 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1402 forget = []
1402 forget = []
1403 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1403 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1404 if repo.dirstate.state(abs) == 'a':
1404 if repo.dirstate.state(abs) == 'a':
1405 forget.append(abs)
1405 forget.append(abs)
1406 if ui.verbose or not exact:
1406 if ui.verbose or not exact:
1407 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1407 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1408 repo.forget(forget)
1408 repo.forget(forget)
1409
1409
1410 def grep(ui, repo, pattern, *pats, **opts):
1410 def grep(ui, repo, pattern, *pats, **opts):
1411 """search for a pattern in specified files and revisions
1411 """search for a pattern in specified files and revisions
1412
1412
1413 Search revisions of files for a regular expression.
1413 Search revisions of files for a regular expression.
1414
1414
1415 This command behaves differently from Unix grep. It only accepts
1415 This command behaves differently from Unix grep. It only accepts
1416 Python/Perl regexps. It searches repository history, not the
1416 Python/Perl regexps. It searches repository history, not the
1417 working directory. It always prints the revision number in which
1417 working directory. It always prints the revision number in which
1418 a match appears.
1418 a match appears.
1419
1419
1420 By default, grep only prints output for the first revision of a
1420 By default, grep only prints output for the first revision of a
1421 file in which it finds a match. To get it to print every revision
1421 file in which it finds a match. To get it to print every revision
1422 that contains a change in match status ("-" for a match that
1422 that contains a change in match status ("-" for a match that
1423 becomes a non-match, or "+" for a non-match that becomes a match),
1423 becomes a non-match, or "+" for a non-match that becomes a match),
1424 use the --all flag.
1424 use the --all flag.
1425 """
1425 """
1426 reflags = 0
1426 reflags = 0
1427 if opts['ignore_case']:
1427 if opts['ignore_case']:
1428 reflags |= re.I
1428 reflags |= re.I
1429 regexp = re.compile(pattern, reflags)
1429 regexp = re.compile(pattern, reflags)
1430 sep, eol = ':', '\n'
1430 sep, eol = ':', '\n'
1431 if opts['print0']:
1431 if opts['print0']:
1432 sep = eol = '\0'
1432 sep = eol = '\0'
1433
1433
1434 fcache = {}
1434 fcache = {}
1435 def getfile(fn):
1435 def getfile(fn):
1436 if fn not in fcache:
1436 if fn not in fcache:
1437 fcache[fn] = repo.file(fn)
1437 fcache[fn] = repo.file(fn)
1438 return fcache[fn]
1438 return fcache[fn]
1439
1439
1440 def matchlines(body):
1440 def matchlines(body):
1441 begin = 0
1441 begin = 0
1442 linenum = 0
1442 linenum = 0
1443 while True:
1443 while True:
1444 match = regexp.search(body, begin)
1444 match = regexp.search(body, begin)
1445 if not match:
1445 if not match:
1446 break
1446 break
1447 mstart, mend = match.span()
1447 mstart, mend = match.span()
1448 linenum += body.count('\n', begin, mstart) + 1
1448 linenum += body.count('\n', begin, mstart) + 1
1449 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1449 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1450 lend = body.find('\n', mend)
1450 lend = body.find('\n', mend)
1451 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1451 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1452 begin = lend + 1
1452 begin = lend + 1
1453
1453
1454 class linestate(object):
1454 class linestate(object):
1455 def __init__(self, line, linenum, colstart, colend):
1455 def __init__(self, line, linenum, colstart, colend):
1456 self.line = line
1456 self.line = line
1457 self.linenum = linenum
1457 self.linenum = linenum
1458 self.colstart = colstart
1458 self.colstart = colstart
1459 self.colend = colend
1459 self.colend = colend
1460
1460
1461 def __eq__(self, other):
1461 def __eq__(self, other):
1462 return self.line == other.line
1462 return self.line == other.line
1463
1463
1464 matches = {}
1464 matches = {}
1465 copies = {}
1465 copies = {}
1466 def grepbody(fn, rev, body):
1466 def grepbody(fn, rev, body):
1467 matches[rev].setdefault(fn, [])
1467 matches[rev].setdefault(fn, [])
1468 m = matches[rev][fn]
1468 m = matches[rev][fn]
1469 for lnum, cstart, cend, line in matchlines(body):
1469 for lnum, cstart, cend, line in matchlines(body):
1470 s = linestate(line, lnum, cstart, cend)
1470 s = linestate(line, lnum, cstart, cend)
1471 m.append(s)
1471 m.append(s)
1472
1472
1473 def difflinestates(a, b):
1473 def difflinestates(a, b):
1474 sm = difflib.SequenceMatcher(None, a, b)
1474 sm = difflib.SequenceMatcher(None, a, b)
1475 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1475 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1476 if tag == 'insert':
1476 if tag == 'insert':
1477 for i in range(blo, bhi):
1477 for i in range(blo, bhi):
1478 yield ('+', b[i])
1478 yield ('+', b[i])
1479 elif tag == 'delete':
1479 elif tag == 'delete':
1480 for i in range(alo, ahi):
1480 for i in range(alo, ahi):
1481 yield ('-', a[i])
1481 yield ('-', a[i])
1482 elif tag == 'replace':
1482 elif tag == 'replace':
1483 for i in range(alo, ahi):
1483 for i in range(alo, ahi):
1484 yield ('-', a[i])
1484 yield ('-', a[i])
1485 for i in range(blo, bhi):
1485 for i in range(blo, bhi):
1486 yield ('+', b[i])
1486 yield ('+', b[i])
1487
1487
1488 prev = {}
1488 prev = {}
1489 ucache = {}
1489 ucache = {}
1490 def display(fn, rev, states, prevstates):
1490 def display(fn, rev, states, prevstates):
1491 counts = {'-': 0, '+': 0}
1491 counts = {'-': 0, '+': 0}
1492 filerevmatches = {}
1492 filerevmatches = {}
1493 if incrementing or not opts['all']:
1493 if incrementing or not opts['all']:
1494 a, b = prevstates, states
1494 a, b = prevstates, states
1495 else:
1495 else:
1496 a, b = states, prevstates
1496 a, b = states, prevstates
1497 for change, l in difflinestates(a, b):
1497 for change, l in difflinestates(a, b):
1498 if incrementing or not opts['all']:
1498 if incrementing or not opts['all']:
1499 r = rev
1499 r = rev
1500 else:
1500 else:
1501 r = prev[fn]
1501 r = prev[fn]
1502 cols = [fn, str(r)]
1502 cols = [fn, str(r)]
1503 if opts['line_number']:
1503 if opts['line_number']:
1504 cols.append(str(l.linenum))
1504 cols.append(str(l.linenum))
1505 if opts['all']:
1505 if opts['all']:
1506 cols.append(change)
1506 cols.append(change)
1507 if opts['user']:
1507 if opts['user']:
1508 cols.append(trimuser(ui, getchange(r)[1], rev,
1508 cols.append(trimuser(ui, getchange(r)[1], rev,
1509 ucache))
1509 ucache))
1510 if opts['files_with_matches']:
1510 if opts['files_with_matches']:
1511 c = (fn, rev)
1511 c = (fn, rev)
1512 if c in filerevmatches:
1512 if c in filerevmatches:
1513 continue
1513 continue
1514 filerevmatches[c] = 1
1514 filerevmatches[c] = 1
1515 else:
1515 else:
1516 cols.append(l.line)
1516 cols.append(l.line)
1517 ui.write(sep.join(cols), eol)
1517 ui.write(sep.join(cols), eol)
1518 counts[change] += 1
1518 counts[change] += 1
1519 return counts['+'], counts['-']
1519 return counts['+'], counts['-']
1520
1520
1521 fstate = {}
1521 fstate = {}
1522 skip = {}
1522 skip = {}
1523 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1523 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1524 count = 0
1524 count = 0
1525 incrementing = False
1525 incrementing = False
1526 follow = opts.get('follow')
1526 follow = opts.get('follow')
1527 for st, rev, fns in changeiter:
1527 for st, rev, fns in changeiter:
1528 if st == 'window':
1528 if st == 'window':
1529 incrementing = rev
1529 incrementing = rev
1530 matches.clear()
1530 matches.clear()
1531 elif st == 'add':
1531 elif st == 'add':
1532 change = repo.changelog.read(repo.lookup(str(rev)))
1532 change = repo.changelog.read(repo.lookup(str(rev)))
1533 mf = repo.manifest.read(change[0])
1533 mf = repo.manifest.read(change[0])
1534 matches[rev] = {}
1534 matches[rev] = {}
1535 for fn in fns:
1535 for fn in fns:
1536 if fn in skip:
1536 if fn in skip:
1537 continue
1537 continue
1538 fstate.setdefault(fn, {})
1538 fstate.setdefault(fn, {})
1539 try:
1539 try:
1540 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1540 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1541 if follow:
1541 if follow:
1542 copied = getfile(fn).renamed(mf[fn])
1542 copied = getfile(fn).renamed(mf[fn])
1543 if copied:
1543 if copied:
1544 copies.setdefault(rev, {})[fn] = copied[0]
1544 copies.setdefault(rev, {})[fn] = copied[0]
1545 except KeyError:
1545 except KeyError:
1546 pass
1546 pass
1547 elif st == 'iter':
1547 elif st == 'iter':
1548 states = matches[rev].items()
1548 states = matches[rev].items()
1549 states.sort()
1549 states.sort()
1550 for fn, m in states:
1550 for fn, m in states:
1551 copy = copies.get(rev, {}).get(fn)
1551 copy = copies.get(rev, {}).get(fn)
1552 if fn in skip:
1552 if fn in skip:
1553 if copy:
1553 if copy:
1554 skip[copy] = True
1554 skip[copy] = True
1555 continue
1555 continue
1556 if incrementing or not opts['all'] or fstate[fn]:
1556 if incrementing or not opts['all'] or fstate[fn]:
1557 pos, neg = display(fn, rev, m, fstate[fn])
1557 pos, neg = display(fn, rev, m, fstate[fn])
1558 count += pos + neg
1558 count += pos + neg
1559 if pos and not opts['all']:
1559 if pos and not opts['all']:
1560 skip[fn] = True
1560 skip[fn] = True
1561 if copy:
1561 if copy:
1562 skip[copy] = True
1562 skip[copy] = True
1563 fstate[fn] = m
1563 fstate[fn] = m
1564 if copy:
1564 if copy:
1565 fstate[copy] = m
1565 fstate[copy] = m
1566 prev[fn] = rev
1566 prev[fn] = rev
1567
1567
1568 if not incrementing:
1568 if not incrementing:
1569 fstate = fstate.items()
1569 fstate = fstate.items()
1570 fstate.sort()
1570 fstate.sort()
1571 for fn, state in fstate:
1571 for fn, state in fstate:
1572 if fn in skip:
1572 if fn in skip:
1573 continue
1573 continue
1574 if fn not in copies.get(prev[fn], {}):
1574 if fn not in copies.get(prev[fn], {}):
1575 display(fn, rev, {}, state)
1575 display(fn, rev, {}, state)
1576 return (count == 0 and 1) or 0
1576 return (count == 0 and 1) or 0
1577
1577
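# Illustrative usage (editor's note; long option names follow the opts keys
# used above, pattern and file are placeholders):
#
#   hg grep PATTERN FILE                       # first revision matching PATTERN
#   hg grep --all --line-number PATTERN FILE   # every +/- change in match status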
1578 def heads(ui, repo, **opts):
1578 def heads(ui, repo, **opts):
1579 """show current repository heads
1579 """show current repository heads
1580
1580
1581 Show all repository head changesets.
1581 Show all repository head changesets.
1582
1582
1583 Repository "heads" are changesets that don't have children
1583 Repository "heads" are changesets that don't have children
1584 changesets. They are where development generally takes place and
1584 changesets. They are where development generally takes place and
1585 are the usual targets for update and merge operations.
1585 are the usual targets for update and merge operations.
1586 """
1586 """
1587 if opts['rev']:
1587 if opts['rev']:
1588 heads = repo.heads(repo.lookup(opts['rev']))
1588 heads = repo.heads(repo.lookup(opts['rev']))
1589 else:
1589 else:
1590 heads = repo.heads()
1590 heads = repo.heads()
1591 br = None
1591 br = None
1592 if opts['branches']:
1592 if opts['branches']:
1593 br = repo.branchlookup(heads)
1593 br = repo.branchlookup(heads)
1594 displayer = show_changeset(ui, repo, opts)
1594 displayer = show_changeset(ui, repo, opts)
1595 for n in heads:
1595 for n in heads:
1596 displayer.show(changenode=n, brinfo=br)
1596 displayer.show(changenode=n, brinfo=br)
1597
1597
1598 def identify(ui, repo):
1598 def identify(ui, repo):
1599 """print information about the working copy
1599 """print information about the working copy
1600
1600
1601 Print a short summary of the current state of the repo.
1601 Print a short summary of the current state of the repo.
1602
1602
1603 This summary identifies the repository state using one or two parent
1603 This summary identifies the repository state using one or two parent
1604 hash identifiers, followed by a "+" if there are uncommitted changes
1604 hash identifiers, followed by a "+" if there are uncommitted changes
1605 in the working directory, followed by a list of tags for this revision.
1605 in the working directory, followed by a list of tags for this revision.
1606 """
1606 """
1607 parents = [p for p in repo.dirstate.parents() if p != nullid]
1607 parents = [p for p in repo.dirstate.parents() if p != nullid]
1608 if not parents:
1608 if not parents:
1609 ui.write(_("unknown\n"))
1609 ui.write(_("unknown\n"))
1610 return
1610 return
1611
1611
1612 hexfunc = ui.debugflag and hex or short
1612 hexfunc = ui.debugflag and hex or short
1613 modified, added, removed, deleted = repo.status()[:4]
1613 modified, added, removed, deleted = repo.status()[:4]
1614 output = ["%s%s" %
1614 output = ["%s%s" %
1615 ('+'.join([hexfunc(parent) for parent in parents]),
1615 ('+'.join([hexfunc(parent) for parent in parents]),
1616 (modified or added or removed or deleted) and "+" or "")]
1616 (modified or added or removed or deleted) and "+" or "")]
1617
1617
1618 if not ui.quiet:
1618 if not ui.quiet:
1619 # multiple tags for a single parent separated by '/'
1619 # multiple tags for a single parent separated by '/'
1620 parenttags = ['/'.join(tags)
1620 parenttags = ['/'.join(tags)
1621 for tags in map(repo.nodetags, parents) if tags]
1621 for tags in map(repo.nodetags, parents) if tags]
1622 # tags for multiple parents separated by ' + '
1622 # tags for multiple parents separated by ' + '
1623 if parenttags:
1623 if parenttags:
1624 output.append(' + '.join(parenttags))
1624 output.append(' + '.join(parenttags))
1625
1625
1626 ui.write("%s\n" % ' '.join(output))
1626 ui.write("%s\n" % ' '.join(output))
1627
1627
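# Illustrative output (editor's note; the hash and tag are made up): a parent
# hash, a "+" if the working directory has uncommitted changes, then tags.
#
#   $ hg identify
#   0123456789ab+ tip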
1628 def import_(ui, repo, patch1, *patches, **opts):
1628 def import_(ui, repo, patch1, *patches, **opts):
1629 """import an ordered set of patches
1629 """import an ordered set of patches
1630
1630
1631 Import a list of patches and commit them individually.
1631 Import a list of patches and commit them individually.
1632
1632
1633 If there are outstanding changes in the working directory, import
1633 If there are outstanding changes in the working directory, import
1634 will abort unless given the -f flag.
1634 will abort unless given the -f flag.
1635
1635
1636 You can import a patch straight from a mail message. Even patches
1636 You can import a patch straight from a mail message. Even patches
1637 as attachments work (the body part must be of type text/plain or
1637 as attachments work (the body part must be of type text/plain or
1638 text/x-patch to be used). The From and Subject headers of the email
1638 text/x-patch to be used). The From and Subject headers of the email
1639 message are used as the default committer and commit message. All
1639 message are used as the default committer and commit message. All
1640 text/plain body parts before the first diff are added to the commit
1640 text/plain body parts before the first diff are added to the commit
1641 message.
1641 message.
1642
1642
1643 If the imported patch was generated by hg export, the user and
1643 If the imported patch was generated by hg export, the user and
1644 description from the patch override values from the message headers and
1644 description from the patch override values from the message headers and
1645 body. Values given on the command line with -m and -u override these.
1645 body. Values given on the command line with -m and -u override these.
1646
1646
1647 To read a patch from standard input, use patch name "-".
1647 To read a patch from standard input, use patch name "-".
1648 """
1648 """
1649 patches = (patch1,) + patches
1649 patches = (patch1,) + patches
1650
1650
1651 if not opts['force']:
1651 if not opts['force']:
1652 bail_if_changed(repo)
1652 bail_if_changed(repo)
1653
1653
1654 d = opts["base"]
1654 d = opts["base"]
1655 strip = opts["strip"]
1655 strip = opts["strip"]
1656
1656
1657 wlock = repo.wlock()
1657 wlock = repo.wlock()
1658 lock = repo.lock()
1658 lock = repo.lock()
1659
1659
1660 for p in patches:
1660 for p in patches:
1661 pf = os.path.join(d, p)
1661 pf = os.path.join(d, p)
1662
1662
1663 if pf == '-':
1663 if pf == '-':
1664 ui.status(_("applying patch from stdin\n"))
1664 ui.status(_("applying patch from stdin\n"))
1665 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1665 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1666 else:
1666 else:
1667 ui.status(_("applying %s\n") % p)
1667 ui.status(_("applying %s\n") % p)
1668 tmpname, message, user, date = patch.extract(ui, file(pf))
1668 tmpname, message, user, date = patch.extract(ui, file(pf))
1669
1669
1670 if tmpname is None:
1670 if tmpname is None:
1671 raise util.Abort(_('no diffs found'))
1671 raise util.Abort(_('no diffs found'))
1672
1672
1673 try:
1673 try:
1674 if opts['message']:
1674 if opts['message']:
1675 # pickup the cmdline msg
1675 # pickup the cmdline msg
1676 message = opts['message']
1676 message = opts['message']
1677 elif message:
1677 elif message:
1678 # pickup the patch msg
1678 # pickup the patch msg
1679 message = message.strip()
1679 message = message.strip()
1680 else:
1680 else:
1681 # launch the editor
1681 # launch the editor
1682 message = None
1682 message = None
1683 ui.debug(_('message:\n%s\n') % message)
1683 ui.debug(_('message:\n%s\n') % message)
1684
1684
1685 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1685 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1686 files = patch.updatedir(ui, repo, files, wlock=wlock)
1686 files = patch.updatedir(ui, repo, files, wlock=wlock)
1687 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1687 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1688 finally:
1688 finally:
1689 os.unlink(tmpname)
1689 os.unlink(tmpname)
1690
1690
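# Illustrative usage (editor's note; long option names follow the opts keys
# used above, file names are placeholders):
#
#   hg import fix.patch               # apply and commit one patch
#   hg import --force fix.patch       # even with outstanding local changes
#   hg export REV | hg import -       # read the patch from standard input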
1691 def incoming(ui, repo, source="default", **opts):
1691 def incoming(ui, repo, source="default", **opts):
1692 """show new changesets found in source
1692 """show new changesets found in source
1693
1693
1694 Show new changesets found in the specified path/URL or the default
1694 Show new changesets found in the specified path/URL or the default
1695 pull location. These are the changesets that would be pulled if a pull
1695 pull location. These are the changesets that would be pulled if a pull
1696 was requested.
1696 was requested.
1697
1697
1698 For a remote repository, using --bundle avoids downloading the changesets
1698 For a remote repository, using --bundle avoids downloading the changesets
1699 twice if incoming is followed by a pull.
1699 twice if incoming is followed by a pull.
1700
1700
1701 See pull for valid source format details.
1701 See pull for valid source format details.
1702 """
1702 """
1703 source = ui.expandpath(source)
1703 source = ui.expandpath(source)
1704 setremoteconfig(ui, opts)
1704 setremoteconfig(ui, opts)
1705
1705
1706 other = hg.repository(ui, source)
1706 other = hg.repository(ui, source)
1707 incoming = repo.findincoming(other, force=opts["force"])
1707 incoming = repo.findincoming(other, force=opts["force"])
1708 if not incoming:
1708 if not incoming:
1709 ui.status(_("no changes found\n"))
1709 ui.status(_("no changes found\n"))
1710 return
1710 return
1711
1711
1712 cleanup = None
1712 cleanup = None
1713 try:
1713 try:
1714 fname = opts["bundle"]
1714 fname = opts["bundle"]
1715 if fname or not other.local():
1715 if fname or not other.local():
1716 # create a bundle (uncompressed if other repo is not local)
1716 # create a bundle (uncompressed if other repo is not local)
1717 cg = other.changegroup(incoming, "incoming")
1717 cg = other.changegroup(incoming, "incoming")
1718 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1718 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1719 # keep written bundle?
1719 # keep written bundle?
1720 if opts["bundle"]:
1720 if opts["bundle"]:
1721 cleanup = None
1721 cleanup = None
1722 if not other.local():
1722 if not other.local():
1723 # use the created uncompressed bundlerepo
1723 # use the created uncompressed bundlerepo
1724 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1724 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1725
1725
1726 revs = None
1726 revs = None
1727 if opts['rev']:
1727 if opts['rev']:
1728 revs = [other.lookup(rev) for rev in opts['rev']]
1728 revs = [other.lookup(rev) for rev in opts['rev']]
1729 o = other.changelog.nodesbetween(incoming, revs)[0]
1729 o = other.changelog.nodesbetween(incoming, revs)[0]
1730 if opts['newest_first']:
1730 if opts['newest_first']:
1731 o.reverse()
1731 o.reverse()
1732 displayer = show_changeset(ui, other, opts)
1732 displayer = show_changeset(ui, other, opts)
1733 for n in o:
1733 for n in o:
1734 parents = [p for p in other.changelog.parents(n) if p != nullid]
1734 parents = [p for p in other.changelog.parents(n) if p != nullid]
1735 if opts['no_merges'] and len(parents) == 2:
1735 if opts['no_merges'] and len(parents) == 2:
1736 continue
1736 continue
1737 displayer.show(changenode=n)
1737 displayer.show(changenode=n)
1738 if opts['patch']:
1738 if opts['patch']:
1739 prev = (parents and parents[0]) or nullid
1739 prev = (parents and parents[0]) or nullid
1740 patch.diff(other, prev, n, fp=repo.ui)
1740 patch.diff(other, prev, n, fp=repo.ui)
1741 ui.write("\n")
1741 ui.write("\n")
1742 finally:
1742 finally:
1743 if hasattr(other, 'close'):
1743 if hasattr(other, 'close'):
1744 other.close()
1744 other.close()
1745 if cleanup:
1745 if cleanup:
1746 os.unlink(cleanup)
1746 os.unlink(cleanup)
1747
1747
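# A minimal usage sketch of the --bundle workflow documented above; the URL
# and bundle file name are hypothetical, not part of the original source:
#
#   hg incoming --bundle incoming.hg http://example.com/repo
#   hg unbundle incoming.hg    # apply the saved bundle later, no second download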
1748 def init(ui, dest=".", **opts):
1748 def init(ui, dest=".", **opts):
1749 """create a new repository in the given directory
1749 """create a new repository in the given directory
1750
1750
1751 Initialize a new repository in the given directory. If the given
1751 Initialize a new repository in the given directory. If the given
1752 directory does not exist, it is created.
1752 directory does not exist, it is created.
1753
1753
1754 If no directory is given, the current directory is used.
1754 If no directory is given, the current directory is used.
1755
1755
1756 It is possible to specify an ssh:// URL as the destination.
1756 It is possible to specify an ssh:// URL as the destination.
1757 Look at the help text for the pull command for important details
1757 Look at the help text for the pull command for important details
1758 about ssh:// URLs.
1758 about ssh:// URLs.
1759 """
1759 """
1760 setremoteconfig(ui, opts)
1760 setremoteconfig(ui, opts)
1761 hg.repository(ui, dest, create=1)
1761 hg.repository(ui, dest, create=1)
1762
1762
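# Hedged examples of creating repositories locally and over ssh, as the
# docstring above allows (directory name and host are hypothetical):
#
#   hg init project                              # creates ./project/.hg
#   hg init ssh://user@example.com/hg/project    # create on a remote host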
1763 def locate(ui, repo, *pats, **opts):
1763 def locate(ui, repo, *pats, **opts):
1764 """locate files matching specific patterns
1764 """locate files matching specific patterns
1765
1765
1766 Print all files under Mercurial control whose names match the
1766 Print all files under Mercurial control whose names match the
1767 given patterns.
1767 given patterns.
1768
1768
1769 This command searches the current directory and its
1769 This command searches the current directory and its
1770 subdirectories. To search an entire repository, move to the root
1770 subdirectories. To search an entire repository, move to the root
1771 of the repository.
1771 of the repository.
1772
1772
1773 If no patterns are given to match, this command prints all file
1773 If no patterns are given to match, this command prints all file
1774 names.
1774 names.
1775
1775
1776 If you want to feed the output of this command into the "xargs"
1776 If you want to feed the output of this command into the "xargs"
1777 command, use the "-0" option to both this command and "xargs".
1777 command, use the "-0" option to both this command and "xargs".
1778 This will avoid the problem of "xargs" treating single filenames
1778 This will avoid the problem of "xargs" treating single filenames
1779 that contain white space as multiple filenames.
1779 that contain white space as multiple filenames.
1780 """
1780 """
1781 end = opts['print0'] and '\0' or '\n'
1781 end = opts['print0'] and '\0' or '\n'
1782 rev = opts['rev']
1782 rev = opts['rev']
1783 if rev:
1783 if rev:
1784 node = repo.lookup(rev)
1784 node = repo.lookup(rev)
1785 else:
1785 else:
1786 node = None
1786 node = None
1787
1787
1788 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1788 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1789 head='(?:.*/|)'):
1789 head='(?:.*/|)'):
1790 if not node and repo.dirstate.state(abs) == '?':
1790 if not node and repo.dirstate.state(abs) == '?':
1791 continue
1791 continue
1792 if opts['fullpath']:
1792 if opts['fullpath']:
1793 ui.write(os.path.join(repo.root, abs), end)
1793 ui.write(os.path.join(repo.root, abs), end)
1794 else:
1794 else:
1795 ui.write(((pats and rel) or abs), end)
1795 ui.write(((pats and rel) or abs), end)
1796
1796
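# Usage sketch for the xargs note above (the pattern is hypothetical):
#
#   hg locate -0 '*.py' | xargs -0 grep -l demandload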
1797 def log(ui, repo, *pats, **opts):
1797 def log(ui, repo, *pats, **opts):
1798 """show revision history of entire repository or files
1798 """show revision history of entire repository or files
1799
1799
1800 Print the revision history of the specified files or the entire
1800 Print the revision history of the specified files or the entire
1801 project.
1801 project.
1802
1802
1803 File history is shown without following rename or copy history of
1803 File history is shown without following rename or copy history of
1804 files. Use -f/--follow with a file name to follow history across
1804 files. Use -f/--follow with a file name to follow history across
1805 renames and copies. --follow without a file name will only show
1805 renames and copies. --follow without a file name will only show
1806 ancestors or descendants of the starting revision. --follow-first
1806 ancestors or descendants of the starting revision. --follow-first
1807 only follows the first parent of merge revisions.
1807 only follows the first parent of merge revisions.
1808
1808
1809 If no revision range is specified, the default is tip:0 unless
1809 If no revision range is specified, the default is tip:0 unless
1810 --follow is set, in which case the working directory parent is
1810 --follow is set, in which case the working directory parent is
1811 used as the starting revision.
1811 used as the starting revision.
1812
1812
1813 By default this command outputs: changeset id and hash, tags,
1813 By default this command outputs: changeset id and hash, tags,
1814 non-trivial parents, user, date and time, and a summary for each
1814 non-trivial parents, user, date and time, and a summary for each
1815 commit. When the -v/--verbose switch is used, the list of changed
1815 commit. When the -v/--verbose switch is used, the list of changed
1816 files and the full commit message are shown.
1816 files and the full commit message are shown.
1817 """
1817 """
1818 class dui(object):
1818 class dui(object):
1819 # Implement and delegate some ui protocol. Save hunks of
1819 # Implement and delegate some ui protocol. Save hunks of
1820 # output for later display in the desired order.
1820 # output for later display in the desired order.
1821 def __init__(self, ui):
1821 def __init__(self, ui):
1822 self.ui = ui
1822 self.ui = ui
1823 self.hunk = {}
1823 self.hunk = {}
1824 self.header = {}
1824 self.header = {}
1825 def bump(self, rev):
1825 def bump(self, rev):
1826 self.rev = rev
1826 self.rev = rev
1827 self.hunk[rev] = []
1827 self.hunk[rev] = []
1828 self.header[rev] = []
1828 self.header[rev] = []
1829 def note(self, *args):
1829 def note(self, *args):
1830 if self.verbose:
1830 if self.verbose:
1831 self.write(*args)
1831 self.write(*args)
1832 def status(self, *args):
1832 def status(self, *args):
1833 if not self.quiet:
1833 if not self.quiet:
1834 self.write(*args)
1834 self.write(*args)
1835 def write(self, *args):
1835 def write(self, *args):
1836 self.hunk[self.rev].append(args)
1836 self.hunk[self.rev].append(args)
1837 def write_header(self, *args):
1837 def write_header(self, *args):
1838 self.header[self.rev].append(args)
1838 self.header[self.rev].append(args)
1839 def debug(self, *args):
1839 def debug(self, *args):
1840 if self.debugflag:
1840 if self.debugflag:
1841 self.write(*args)
1841 self.write(*args)
1842 def __getattr__(self, key):
1842 def __getattr__(self, key):
1843 return getattr(self.ui, key)
1843 return getattr(self.ui, key)
1844
1844
1845 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1845 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1846
1846
1847 if opts['limit']:
1847 if opts['limit']:
1848 try:
1848 try:
1849 limit = int(opts['limit'])
1849 limit = int(opts['limit'])
1850 except ValueError:
1850 except ValueError:
1851 raise util.Abort(_('limit must be a positive integer'))
1851 raise util.Abort(_('limit must be a positive integer'))
1852 if limit <= 0: raise util.Abort(_('limit must be positive'))
1852 if limit <= 0: raise util.Abort(_('limit must be positive'))
1853 else:
1853 else:
1854 limit = sys.maxint
1854 limit = sys.maxint
1855 count = 0
1855 count = 0
1856
1856
1857 displayer = show_changeset(ui, repo, opts)
1857 displayer = show_changeset(ui, repo, opts)
1858 for st, rev, fns in changeiter:
1858 for st, rev, fns in changeiter:
1859 if st == 'window':
1859 if st == 'window':
1860 du = dui(ui)
1860 du = dui(ui)
1861 displayer.ui = du
1861 displayer.ui = du
1862 elif st == 'add':
1862 elif st == 'add':
1863 du.bump(rev)
1863 du.bump(rev)
1864 changenode = repo.changelog.node(rev)
1864 changenode = repo.changelog.node(rev)
1865 parents = [p for p in repo.changelog.parents(changenode)
1865 parents = [p for p in repo.changelog.parents(changenode)
1866 if p != nullid]
1866 if p != nullid]
1867 if opts['no_merges'] and len(parents) == 2:
1867 if opts['no_merges'] and len(parents) == 2:
1868 continue
1868 continue
1869 if opts['only_merges'] and len(parents) != 2:
1869 if opts['only_merges'] and len(parents) != 2:
1870 continue
1870 continue
1871
1871
1872 if opts['keyword']:
1872 if opts['keyword']:
1873 changes = getchange(rev)
1873 changes = getchange(rev)
1874 miss = 0
1874 miss = 0
1875 for k in [kw.lower() for kw in opts['keyword']]:
1875 for k in [kw.lower() for kw in opts['keyword']]:
1876 if not (k in changes[1].lower() or
1876 if not (k in changes[1].lower() or
1877 k in changes[4].lower() or
1877 k in changes[4].lower() or
1878 k in " ".join(changes[3][:20]).lower()):
1878 k in " ".join(changes[3][:20]).lower()):
1879 miss = 1
1879 miss = 1
1880 break
1880 break
1881 if miss:
1881 if miss:
1882 continue
1882 continue
1883
1883
1884 br = None
1884 br = None
1885 if opts['branches']:
1885 if opts['branches']:
1886 br = repo.branchlookup([repo.changelog.node(rev)])
1886 br = repo.branchlookup([repo.changelog.node(rev)])
1887
1887
1888 displayer.show(rev, brinfo=br)
1888 displayer.show(rev, brinfo=br)
1889 if opts['patch']:
1889 if opts['patch']:
1890 prev = (parents and parents[0]) or nullid
1890 prev = (parents and parents[0]) or nullid
1891 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1891 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1892 du.write("\n\n")
1892 du.write("\n\n")
1893 elif st == 'iter':
1893 elif st == 'iter':
1894 if count == limit: break
1894 if count == limit: break
1895 if du.header[rev]:
1895 if du.header[rev]:
1896 for args in du.header[rev]:
1896 for args in du.header[rev]:
1897 ui.write_header(*args)
1897 ui.write_header(*args)
1898 if du.hunk[rev]:
1898 if du.hunk[rev]:
1899 count += 1
1899 count += 1
1900 for args in du.hunk[rev]:
1900 for args in du.hunk[rev]:
1901 ui.write(*args)
1901 ui.write(*args)
1902
1902
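# A few illustrative invocations of the options handled above; the file
# name, keyword and revision are hypothetical, and short option forms are
# assumed to match hg's help output:
#
#   hg log -f commands.py     # follow renames/copies of a single file
#   hg log -k bundle -l 10    # up to 10 newest changesets mentioning "bundle"
#   hg log -p -r tip          # show the tip changeset together with its diff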
1903 def manifest(ui, repo, rev=None):
1903 def manifest(ui, repo, rev=None):
1904 """output the latest or given revision of the project manifest
1904 """output the latest or given revision of the project manifest
1905
1905
1906 Print a list of version controlled files for the given revision.
1906 Print a list of version controlled files for the given revision.
1907
1907
1908 The manifest is the list of files being version controlled. If no revision
1908 The manifest is the list of files being version controlled. If no revision
1909 is given then the tip is used.
1909 is given then the tip is used.
1910 """
1910 """
1911 if rev:
1911 if rev:
1912 try:
1912 try:
1913 # assume all revision numbers are for changesets
1913 # assume all revision numbers are for changesets
1914 n = repo.lookup(rev)
1914 n = repo.lookup(rev)
1915 change = repo.changelog.read(n)
1915 change = repo.changelog.read(n)
1916 n = change[0]
1916 n = change[0]
1917 except hg.RepoError:
1917 except hg.RepoError:
1918 n = repo.manifest.lookup(rev)
1918 n = repo.manifest.lookup(rev)
1919 else:
1919 else:
1920 n = repo.manifest.tip()
1920 n = repo.manifest.tip()
1921 m = repo.manifest.read(n)
1921 m = repo.manifest.read(n)
1922 files = m.keys()
1922 files = m.keys()
1923 files.sort()
1923 files.sort()
1924
1924
1925 for f in files:
1925 for f in files:
1926 ui.write("%40s %3s %s\n" % (hex(m[f]),
1926 ui.write("%40s %3s %s\n" % (hex(m[f]),
1927 m.execf(f) and "755" or "644", f))
1927 m.execf(f) and "755" or "644", f))
1928
1928
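# Usage sketch (the revision number is hypothetical); each output line has
# the form "<40-char file node> <644|755> <path>", per the format string above:
#
#   hg manifest          # manifest for the manifest tip
#   hg manifest 1000     # manifest for changeset 1000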
1929 def merge(ui, repo, node=None, force=None, branch=None):
1929 def merge(ui, repo, node=None, force=None, branch=None):
1930 """Merge working directory with another revision
1930 """Merge working directory with another revision
1931
1931
1932 Merge the contents of the current working directory and the
1932 Merge the contents of the current working directory and the
1933 requested revision. Files that changed between either parent are
1933 requested revision. Files that changed between either parent are
1934 marked as changed for the next commit and a commit must be
1934 marked as changed for the next commit and a commit must be
1935 performed before any further updates are allowed.
1935 performed before any further updates are allowed.
1936
1936
1937 If no revision is specified, the working directory's parent is a
1937 If no revision is specified, the working directory's parent is a
1938 head revision, and the repository contains exactly one other head,
1938 head revision, and the repository contains exactly one other head,
1939 the other head is merged with by default. Otherwise, an explicit
1939 the other head is merged with by default. Otherwise, an explicit
1940 revision to merge with must be provided.
1940 revision to merge with must be provided.
1941 """
1941 """
1942
1942
1943 if node or branch:
1943 if node or branch:
1944 node = _lookup(repo, node, branch)
1944 node = _lookup(repo, node, branch)
1945 else:
1945 else:
1946 heads = repo.heads()
1946 heads = repo.heads()
1947 if len(heads) > 2:
1947 if len(heads) > 2:
1948 raise util.Abort(_('repo has %d heads - '
1948 raise util.Abort(_('repo has %d heads - '
1949 'please merge with an explicit rev') %
1949 'please merge with an explicit rev') %
1950 len(heads))
1950 len(heads))
1951 if len(heads) == 1:
1951 if len(heads) == 1:
1952 raise util.Abort(_('there is nothing to merge - '
1952 raise util.Abort(_('there is nothing to merge - '
1953 'use "hg update" instead'))
1953 'use "hg update" instead'))
1954 parent = repo.dirstate.parents()[0]
1954 parent = repo.dirstate.parents()[0]
1955 if parent not in heads:
1955 if parent not in heads:
1956 raise util.Abort(_('working dir not at a head rev - '
1956 raise util.Abort(_('working dir not at a head rev - '
1957 'use "hg update" or merge with an explicit rev'))
1957 'use "hg update" or merge with an explicit rev'))
1958 node = parent == heads[0] and heads[-1] or heads[0]
1958 node = parent == heads[0] and heads[-1] or heads[0]
1959 return hg.merge(repo, node, force=force)
1959 return hg.merge(repo, node, force=force)
1960
1960
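# Usage sketch (the revision number is hypothetical):
#
#   hg merge                # merge with the only other head, if unambiguous
#   hg merge 1234           # merge with an explicit revision
#   hg commit -m 'merge'    # a commit is required before further updates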
1961 def outgoing(ui, repo, dest=None, **opts):
1961 def outgoing(ui, repo, dest=None, **opts):
1962 """show changesets not found in destination
1962 """show changesets not found in destination
1963
1963
1964 Show changesets not found in the specified destination repository or
1964 Show changesets not found in the specified destination repository or
1965 the default push location. These are the changesets that would be pushed
1965 the default push location. These are the changesets that would be pushed
1966 if a push was requested.
1966 if a push was requested.
1967
1967
1968 See pull for valid destination format details.
1968 See pull for valid destination format details.
1969 """
1969 """
1970 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1970 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1971 setremoteconfig(ui, opts)
1971 setremoteconfig(ui, opts)
1972 revs = None
1972 revs = None
1973 if opts['rev']:
1973 if opts['rev']:
1974 revs = [repo.lookup(rev) for rev in opts['rev']]
1974 revs = [repo.lookup(rev) for rev in opts['rev']]
1975
1975
1976 other = hg.repository(ui, dest)
1976 other = hg.repository(ui, dest)
1977 o = repo.findoutgoing(other, force=opts['force'])
1977 o = repo.findoutgoing(other, force=opts['force'])
1978 if not o:
1978 if not o:
1979 ui.status(_("no changes found\n"))
1979 ui.status(_("no changes found\n"))
1980 return
1980 return
1981 o = repo.changelog.nodesbetween(o, revs)[0]
1981 o = repo.changelog.nodesbetween(o, revs)[0]
1982 if opts['newest_first']:
1982 if opts['newest_first']:
1983 o.reverse()
1983 o.reverse()
1984 displayer = show_changeset(ui, repo, opts)
1984 displayer = show_changeset(ui, repo, opts)
1985 for n in o:
1985 for n in o:
1986 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1986 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1987 if opts['no_merges'] and len(parents) == 2:
1987 if opts['no_merges'] and len(parents) == 2:
1988 continue
1988 continue
1989 displayer.show(changenode=n)
1989 displayer.show(changenode=n)
1990 if opts['patch']:
1990 if opts['patch']:
1991 prev = (parents and parents[0]) or nullid
1991 prev = (parents and parents[0]) or nullid
1992 patch.diff(repo, prev, n)
1992 patch.diff(repo, prev, n)
1993 ui.write("\n")
1993 ui.write("\n")
1994
1994
1995 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
1995 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
1996 """show the parents of the working dir or revision
1996 """show the parents of the working dir or revision
1997
1997
1998 Print the working directory's parent revisions.
1998 Print the working directory's parent revisions.
1999 """
1999 """
2000 # legacy
2000 # legacy
2001 if file_ and not rev:
2001 if file_ and not rev:
2002 try:
2002 try:
2003 rev = repo.lookup(file_)
2003 rev = repo.lookup(file_)
2004 file_ = None
2004 file_ = None
2005 except hg.RepoError:
2005 except hg.RepoError:
2006 pass
2006 pass
2007 else:
2007 else:
2008 ui.warn(_("'hg parent REV' is deprecated, "
2008 ui.warn(_("'hg parent REV' is deprecated, "
2009 "please use 'hg parents -r REV instead\n"))
2009 "please use 'hg parents -r REV instead\n"))
2010
2010
2011 if rev:
2011 if rev:
2012 if file_:
2012 if file_:
2013 ctx = repo.filectx(file_, changeid=rev)
2013 ctx = repo.filectx(file_, changeid=rev)
2014 else:
2014 else:
2015 ctx = repo.changectx(rev)
2015 ctx = repo.changectx(rev)
2016 p = [cp.node() for cp in ctx.parents()]
2016 p = [cp.node() for cp in ctx.parents()]
2017 else:
2017 else:
2018 p = repo.dirstate.parents()
2018 p = repo.dirstate.parents()
2019
2019
2020 br = None
2020 br = None
2021 if branches is not None:
2021 if branches is not None:
2022 br = repo.branchlookup(p)
2022 br = repo.branchlookup(p)
2023 displayer = show_changeset(ui, repo, opts)
2023 displayer = show_changeset(ui, repo, opts)
2024 for n in p:
2024 for n in p:
2025 if n != nullid:
2025 if n != nullid:
2026 displayer.show(changenode=n, brinfo=br)
2026 displayer.show(changenode=n, brinfo=br)
2027
2027
2028 def paths(ui, repo, search=None):
2028 def paths(ui, repo, search=None):
2029 """show definition of symbolic path names
2029 """show definition of symbolic path names
2030
2030
2031 Show definition of symbolic path name NAME. If no name is given, show
2031 Show definition of symbolic path name NAME. If no name is given, show
2032 definition of available names.
2032 definition of available names.
2033
2033
2034 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2034 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2035 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2035 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2036 """
2036 """
2037 if search:
2037 if search:
2038 for name, path in ui.configitems("paths"):
2038 for name, path in ui.configitems("paths"):
2039 if name == search:
2039 if name == search:
2040 ui.write("%s\n" % path)
2040 ui.write("%s\n" % path)
2041 return
2041 return
2042 ui.warn(_("not found!\n"))
2042 ui.warn(_("not found!\n"))
2043 return 1
2043 return 1
2044 else:
2044 else:
2045 for name, path in ui.configitems("paths"):
2045 for name, path in ui.configitems("paths"):
2046 ui.write("%s = %s\n" % (name, path))
2046 ui.write("%s = %s\n" % (name, path))
2047
2047
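# A hedged example (names and URLs are hypothetical): with an hgrc containing
#
#   [paths]
#   backup = ssh://user@example.com//srv/hg/repo
#
# "hg paths" prints "backup = ssh://user@example.com//srv/hg/repo",
# "hg paths backup" prints only the URL, and an unknown name warns
# "not found!" and returns 1, as coded above.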
2048 def postincoming(ui, repo, modheads, optupdate):
2048 def postincoming(ui, repo, modheads, optupdate):
2049 if modheads == 0:
2049 if modheads == 0:
2050 return
2050 return
2051 if optupdate:
2051 if optupdate:
2052 if modheads == 1:
2052 if modheads == 1:
2053 return hg.update(repo, repo.changelog.tip()) # update
2053 return hg.update(repo, repo.changelog.tip()) # update
2054 else:
2054 else:
2055 ui.status(_("not updating, since new heads added\n"))
2055 ui.status(_("not updating, since new heads added\n"))
2056 if modheads > 1:
2056 if modheads > 1:
2057 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2057 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2058 else:
2058 else:
2059 ui.status(_("(run 'hg update' to get a working copy)\n"))
2059 ui.status(_("(run 'hg update' to get a working copy)\n"))
2060
2060
2061 def pull(ui, repo, source="default", **opts):
2061 def pull(ui, repo, source="default", **opts):
2062 """pull changes from the specified source
2062 """pull changes from the specified source
2063
2063
2064 Pull changes from a remote repository to a local one.
2064 Pull changes from a remote repository to a local one.
2065
2065
2066 This finds all changes from the repository at the specified path
2066 This finds all changes from the repository at the specified path
2067 or URL and adds them to the local repository. By default, this
2067 or URL and adds them to the local repository. By default, this
2068 does not update the copy of the project in the working directory.
2068 does not update the copy of the project in the working directory.
2069
2069
2070 Valid URLs are of the form:
2070 Valid URLs are of the form:
2071
2071
2072 local/filesystem/path
2072 local/filesystem/path
2073 http://[user@]host[:port]/[path]
2073 http://[user@]host[:port]/[path]
2074 https://[user@]host[:port]/[path]
2074 https://[user@]host[:port]/[path]
2075 ssh://[user@]host[:port]/[path]
2075 ssh://[user@]host[:port]/[path]
2076
2076
2077 Some notes about using SSH with Mercurial:
2077 Some notes about using SSH with Mercurial:
2078 - SSH requires an accessible shell account on the destination machine
2078 - SSH requires an accessible shell account on the destination machine
2079 and a copy of hg in the remote path or specified with remotecmd.
2079 and a copy of hg in the remote path or specified with remotecmd.
2080 - path is relative to the remote user's home directory by default.
2080 - path is relative to the remote user's home directory by default.
2081 Use an extra slash at the start of a path to specify an absolute path:
2081 Use an extra slash at the start of a path to specify an absolute path:
2082 ssh://example.com//tmp/repository
2082 ssh://example.com//tmp/repository
2083 - Mercurial doesn't use its own compression via SSH; the right thing
2083 - Mercurial doesn't use its own compression via SSH; the right thing
2084 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2084 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2085 Host *.mylocalnetwork.example.com
2085 Host *.mylocalnetwork.example.com
2086 Compression off
2086 Compression off
2087 Host *
2087 Host *
2088 Compression on
2088 Compression on
2089 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2089 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2090 with the --ssh command line option.
2090 with the --ssh command line option.
2091 """
2091 """
2092 source = ui.expandpath(source)
2092 source = ui.expandpath(source)
2093 setremoteconfig(ui, opts)
2093 setremoteconfig(ui, opts)
2094
2094
2095 other = hg.repository(ui, source)
2095 other = hg.repository(ui, source)
2096 ui.status(_('pulling from %s\n') % (source))
2096 ui.status(_('pulling from %s\n') % (source))
2097 revs = None
2097 revs = None
2098 if opts['rev'] and not other.local():
2098 if opts['rev'] and not other.local():
2099 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2099 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2100 elif opts['rev']:
2100 elif opts['rev']:
2101 revs = [other.lookup(rev) for rev in opts['rev']]
2101 revs = [other.lookup(rev) for rev in opts['rev']]
2102 modheads = repo.pull(other, heads=revs, force=opts['force'])
2102 modheads = repo.pull(other, heads=revs, force=opts['force'])
2103 return postincoming(ui, repo, modheads, opts['update'])
2103 return postincoming(ui, repo, modheads, opts['update'])
2104
2104
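# Usage sketch tying together the URL forms and options described above;
# host, path and revision are hypothetical:
#
#   hg pull                                              # from the 'default' path
#   hg pull --update ssh://user@example.com//srv/hg/repo # pull, then update
#   hg pull -r 1234 /some/local/repo                     # -r works for local sources only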
2105 def push(ui, repo, dest=None, **opts):
2105 def push(ui, repo, dest=None, **opts):
2106 """push changes to the specified destination
2106 """push changes to the specified destination
2107
2107
2108 Push changes from the local repository to the given destination.
2108 Push changes from the local repository to the given destination.
2109
2109
2110 This is the symmetrical operation for pull. It helps to move
2110 This is the symmetrical operation for pull. It helps to move
2111 changes from the current repository to a different one. If the
2111 changes from the current repository to a different one. If the
2112 destination is local this is identical to a pull in that directory
2112 destination is local this is identical to a pull in that directory
2113 from the current one.
2113 from the current one.
2114
2114
2115 By default, push will refuse to run if it detects the result would
2115 By default, push will refuse to run if it detects the result would
2116 increase the number of remote heads. This generally indicates that
2116 increase the number of remote heads. This generally indicates that
2117 the client has forgotten to sync and merge before pushing.
2117 the client has forgotten to sync and merge before pushing.
2118
2118
2119 Valid URLs are of the form:
2119 Valid URLs are of the form:
2120
2120
2121 local/filesystem/path
2121 local/filesystem/path
2122 ssh://[user@]host[:port]/[path]
2122 ssh://[user@]host[:port]/[path]
2123
2123
2124 Look at the help text for the pull command for important details
2124 Look at the help text for the pull command for important details
2125 about ssh:// URLs.
2125 about ssh:// URLs.
2126
2126
2127 Pushing to http:// and https:// URLs is possible, too, if this
2127 Pushing to http:// and https:// URLs is possible, too, if this
2128 feature is enabled on the remote Mercurial server.
2128 feature is enabled on the remote Mercurial server.
2129 """
2129 """
2130 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2130 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2131 setremoteconfig(ui, opts)
2131 setremoteconfig(ui, opts)
2132
2132
2133 other = hg.repository(ui, dest)
2133 other = hg.repository(ui, dest)
2134 ui.status('pushing to %s\n' % (dest))
2134 ui.status('pushing to %s\n' % (dest))
2135 revs = None
2135 revs = None
2136 if opts['rev']:
2136 if opts['rev']:
2137 revs = [repo.lookup(rev) for rev in opts['rev']]
2137 revs = [repo.lookup(rev) for rev in opts['rev']]
2138 r = repo.push(other, opts['force'], revs=revs)
2138 r = repo.push(other, opts['force'], revs=revs)
2139 return r == 0
2139 return r == 0
2140
2140
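# Usage sketch mirroring pull; destination and revision are hypothetical,
# and short option forms are assumed to match hg's help output:
#
#   hg push                                          # to 'default-push', else 'default'
#   hg push -r 1234 ssh://user@example.com//srv/hg/repo
#   hg push -f      # force, even if it would add new heads on the remote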
2141 def rawcommit(ui, repo, *flist, **rc):
2141 def rawcommit(ui, repo, *flist, **rc):
2142 """raw commit interface (DEPRECATED)
2142 """raw commit interface (DEPRECATED)
2143
2143
2144 (DEPRECATED)
2144 (DEPRECATED)
2145 Low-level commit, for use in helper scripts.
2145 Low-level commit, for use in helper scripts.
2146
2146
2147 This command is not intended to be used by normal users, as it is
2147 This command is not intended to be used by normal users, as it is
2148 primarily useful for importing from other SCMs.
2148 primarily useful for importing from other SCMs.
2149
2149
2150 This command is now deprecated and will be removed in a future
2150 This command is now deprecated and will be removed in a future
2151 release, please use debugsetparents and commit instead.
2151 release, please use debugsetparents and commit instead.
2152 """
2152 """
2153
2153
2154 ui.warn(_("(the rawcommit command is deprecated)\n"))
2154 ui.warn(_("(the rawcommit command is deprecated)\n"))
2155
2155
2156 message = rc['message']
2156 message = rc['message']
2157 if not message and rc['logfile']:
2157 if not message and rc['logfile']:
2158 try:
2158 try:
2159 message = open(rc['logfile']).read()
2159 message = open(rc['logfile']).read()
2160 except IOError:
2160 except IOError:
2161 pass
2161 pass
2162 if not message and not rc['logfile']:
2162 if not message and not rc['logfile']:
2163 raise util.Abort(_("missing commit message"))
2163 raise util.Abort(_("missing commit message"))
2164
2164
2165 files = relpath(repo, list(flist))
2165 files = relpath(repo, list(flist))
2166 if rc['files']:
2166 if rc['files']:
2167 files += open(rc['files']).read().splitlines()
2167 files += open(rc['files']).read().splitlines()
2168
2168
2169 rc['parent'] = map(repo.lookup, rc['parent'])
2169 rc['parent'] = map(repo.lookup, rc['parent'])
2170
2170
2171 try:
2171 try:
2172 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2172 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2173 except ValueError, inst:
2173 except ValueError, inst:
2174 raise util.Abort(str(inst))
2174 raise util.Abort(str(inst))
2175
2175
2176 def recover(ui, repo):
2176 def recover(ui, repo):
2177 """roll back an interrupted transaction
2177 """roll back an interrupted transaction
2178
2178
2179 Recover from an interrupted commit or pull.
2179 Recover from an interrupted commit or pull.
2180
2180
2181 This command tries to fix the repository status after an interrupted
2181 This command tries to fix the repository status after an interrupted
2182 operation. It should only be necessary when Mercurial suggests it.
2182 operation. It should only be necessary when Mercurial suggests it.
2183 """
2183 """
2184 if repo.recover():
2184 if repo.recover():
2185 return hg.verify(repo)
2185 return hg.verify(repo)
2186 return 1
2186 return 1
2187
2187
2188 def remove(ui, repo, *pats, **opts):
2188 def remove(ui, repo, *pats, **opts):
2189 """remove the specified files on the next commit
2189 """remove the specified files on the next commit
2190
2190
2191 Schedule the indicated files for removal from the repository.
2191 Schedule the indicated files for removal from the repository.
2192
2192
2193 This command schedules the files to be removed at the next commit.
2193 This command schedules the files to be removed at the next commit.
2194 This only removes files from the current branch, not from the
2194 This only removes files from the current branch, not from the
2195 entire project history. If the files still exist in the working
2195 entire project history. If the files still exist in the working
2196 directory, they will be deleted from it. If invoked with --after,
2196 directory, they will be deleted from it. If invoked with --after,
2197 files that have been manually deleted are marked as removed.
2197 files that have been manually deleted are marked as removed.
2198
2198
2199 Modified files and added files are not removed by default. To
2199 Modified files and added files are not removed by default. To
2200 remove them, use the -f/--force option.
2200 remove them, use the -f/--force option.
2201 """
2201 """
2202 names = []
2202 names = []
2203 if not opts['after'] and not pats:
2203 if not opts['after'] and not pats:
2204 raise util.Abort(_('no files specified'))
2204 raise util.Abort(_('no files specified'))
2205 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2205 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2206 exact = dict.fromkeys(files)
2206 exact = dict.fromkeys(files)
2207 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2207 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2208 modified, added, removed, deleted, unknown = mardu
2208 modified, added, removed, deleted, unknown = mardu
2209 remove, forget = [], []
2209 remove, forget = [], []
2210 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2210 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2211 reason = None
2211 reason = None
2212 if abs not in deleted and opts['after']:
2212 if abs not in deleted and opts['after']:
2213 reason = _('is still present')
2213 reason = _('is still present')
2214 elif abs in modified and not opts['force']:
2214 elif abs in modified and not opts['force']:
2215 reason = _('is modified (use -f to force removal)')
2215 reason = _('is modified (use -f to force removal)')
2216 elif abs in added:
2216 elif abs in added:
2217 if opts['force']:
2217 if opts['force']:
2218 forget.append(abs)
2218 forget.append(abs)
2219 continue
2219 continue
2220 reason = _('has been marked for add (use -f to force removal)')
2220 reason = _('has been marked for add (use -f to force removal)')
2221 elif abs in unknown:
2221 elif abs in unknown:
2222 reason = _('is not managed')
2222 reason = _('is not managed')
2223 elif abs in removed:
2223 elif abs in removed:
2224 continue
2224 continue
2225 if reason:
2225 if reason:
2226 if exact:
2226 if exact:
2227 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2227 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2228 else:
2228 else:
2229 if ui.verbose or not exact:
2229 if ui.verbose or not exact:
2230 ui.status(_('removing %s\n') % rel)
2230 ui.status(_('removing %s\n') % rel)
2231 remove.append(abs)
2231 remove.append(abs)
2232 repo.forget(forget)
2232 repo.forget(forget)
2233 repo.remove(remove, unlink=not opts['after'])
2233 repo.remove(remove, unlink=not opts['after'])
2234
2234
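# Usage sketch for the --after and -f behaviour described above (file
# names are hypothetical):
#
#   hg remove foo.c                          # delete foo.c and schedule removal
#   rm bar.c && hg remove --after bar.c      # record an already-done deletion
#   hg remove -f modified.c                  # force removal of a modified file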
2235 def rename(ui, repo, *pats, **opts):
2235 def rename(ui, repo, *pats, **opts):
2236 """rename files; equivalent of copy + remove
2236 """rename files; equivalent of copy + remove
2237
2237
2238 Mark dest as copies of sources; mark sources for deletion. If
2238 Mark dest as copies of sources; mark sources for deletion. If
2239 dest is a directory, copies are put in that directory. If dest is
2239 dest is a directory, copies are put in that directory. If dest is
2240 a file, there can only be one source.
2240 a file, there can only be one source.
2241
2241
2242 By default, this command copies the contents of files as they
2242 By default, this command copies the contents of files as they
2243 stand in the working directory. If invoked with --after, the
2243 stand in the working directory. If invoked with --after, the
2244 operation is recorded, but no copying is performed.
2244 operation is recorded, but no copying is performed.
2245
2245
2246 This command takes effect in the next commit.
2246 This command takes effect in the next commit.
2247
2247
2248 NOTE: This command should be treated as experimental. While it
2248 NOTE: This command should be treated as experimental. While it
2249 should properly record renamed files, this information is not yet
2249 should properly record renamed files, this information is not yet
2250 fully used by merge, nor fully reported by log.
2250 fully used by merge, nor fully reported by log.
2251 """
2251 """
2252 wlock = repo.wlock(0)
2252 wlock = repo.wlock(0)
2253 errs, copied = docopy(ui, repo, pats, opts, wlock)
2253 errs, copied = docopy(ui, repo, pats, opts, wlock)
2254 names = []
2254 names = []
2255 for abs, rel, exact in copied:
2255 for abs, rel, exact in copied:
2256 if ui.verbose or not exact:
2256 if ui.verbose or not exact:
2257 ui.status(_('removing %s\n') % rel)
2257 ui.status(_('removing %s\n') % rel)
2258 names.append(abs)
2258 names.append(abs)
2259 if not opts.get('dry_run'):
2259 if not opts.get('dry_run'):
2260 repo.remove(names, True, wlock)
2260 repo.remove(names, True, wlock)
2261 return errs
2261 return errs
2262
2262
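# Usage sketch for the copy+remove behaviour above (file names are
# hypothetical):
#
#   hg rename old.c new.c                                      # record and perform the rename
#   mv util.h compat.h && hg rename --after util.h compat.h    # record it afterwards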
2263 def revert(ui, repo, *pats, **opts):
2263 def revert(ui, repo, *pats, **opts):
2264 """revert files or dirs to their states as of some revision
2264 """revert files or dirs to their states as of some revision
2265
2265
2266 With no revision specified, revert the named files or directories
2266 With no revision specified, revert the named files or directories
2267 to the contents they had in the parent of the working directory.
2267 to the contents they had in the parent of the working directory.
2268 This restores the contents of the affected files to an unmodified
2268 This restores the contents of the affected files to an unmodified
2269 state. If the working directory has two parents, you must
2269 state. If the working directory has two parents, you must
2270 explicitly specify the revision to revert to.
2270 explicitly specify the revision to revert to.
2271
2271
2272 Modified files are saved with a .orig suffix before reverting.
2272 Modified files are saved with a .orig suffix before reverting.
2273 To disable these backups, use --no-backup.
2273 To disable these backups, use --no-backup.
2274
2274
2275 Using the -r option, revert the given files or directories to their
2275 Using the -r option, revert the given files or directories to their
2276 contents as of a specific revision. This can be helpful to "roll
2276 contents as of a specific revision. This can be helpful to "roll
2277 back" some or all of a change that should not have been committed.
2277 back" some or all of a change that should not have been committed.
2278
2278
2279 Revert modifies the working directory. It does not commit any
2279 Revert modifies the working directory. It does not commit any
2280 changes, or change the parent of the working directory. If you
2280 changes, or change the parent of the working directory. If you
2281 revert to a revision other than the parent of the working
2281 revert to a revision other than the parent of the working
2282 directory, the reverted files will thus appear modified
2282 directory, the reverted files will thus appear modified
2283 afterwards.
2283 afterwards.
2284
2284
2285 If a file has been deleted, it is recreated. If the executable
2285 If a file has been deleted, it is recreated. If the executable
2286 mode of a file was changed, it is reset.
2286 mode of a file was changed, it is reset.
2287
2287
2288 If names are given, all files matching the names are reverted.
2288 If names are given, all files matching the names are reverted.
2289
2289
2290 If no arguments are given, no files are reverted.
2290 If no arguments are given, no files are reverted.
2291 """
2291 """
2292
2292
2293 if not pats and not opts['all']:
2293 if not pats and not opts['all']:
2294 raise util.Abort(_('no files or directories specified; '
2294 raise util.Abort(_('no files or directories specified; '
2295 'use --all to revert the whole repo'))
2295 'use --all to revert the whole repo'))
2296
2296
2297 parent, p2 = repo.dirstate.parents()
2297 parent, p2 = repo.dirstate.parents()
2298 if opts['rev']:
2298 if opts['rev']:
2299 node = repo.lookup(opts['rev'])
2299 node = repo.lookup(opts['rev'])
2300 elif p2 != nullid:
2300 elif p2 != nullid:
2301 raise util.Abort(_('working dir has two parents; '
2301 raise util.Abort(_('working dir has two parents; '
2302 'you must specify the revision to revert to'))
2302 'you must specify the revision to revert to'))
2303 else:
2303 else:
2304 node = parent
2304 node = parent
2305 mf = repo.manifest.read(repo.changelog.read(node)[0])
2305 mf = repo.manifest.read(repo.changelog.read(node)[0])
2306 if node == parent:
2306 if node == parent:
2307 pmf = mf
2307 pmf = mf
2308 else:
2308 else:
2309 pmf = None
2309 pmf = None
2310
2310
2311 wlock = repo.wlock()
2311 wlock = repo.wlock()
2312
2312
2313 # need all matching names in dirstate and manifest of target rev,
2313 # need all matching names in dirstate and manifest of target rev,
2314 # so have to walk both. do not print errors if files exist in one
2314 # so have to walk both. do not print errors if files exist in one
2315 # but not other.
2315 # but not other.
2316
2316
2317 names = {}
2317 names = {}
2318 target_only = {}
2318 target_only = {}
2319
2319
2320 # walk dirstate.
2320 # walk dirstate.
2321
2321
2322 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2322 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2323 badmatch=mf.has_key):
2323 badmatch=mf.has_key):
2324 names[abs] = (rel, exact)
2324 names[abs] = (rel, exact)
2325 if src == 'b':
2325 if src == 'b':
2326 target_only[abs] = True
2326 target_only[abs] = True
2327
2327
2328 # walk target manifest.
2328 # walk target manifest.
2329
2329
2330 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2330 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2331 badmatch=names.has_key):
2331 badmatch=names.has_key):
2332 if abs in names: continue
2332 if abs in names: continue
2333 names[abs] = (rel, exact)
2333 names[abs] = (rel, exact)
2334 target_only[abs] = True
2334 target_only[abs] = True
2335
2335
2336 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2336 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2337 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2337 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2338
2338
2339 revert = ([], _('reverting %s\n'))
2339 revert = ([], _('reverting %s\n'))
2340 add = ([], _('adding %s\n'))
2340 add = ([], _('adding %s\n'))
2341 remove = ([], _('removing %s\n'))
2341 remove = ([], _('removing %s\n'))
2342 forget = ([], _('forgetting %s\n'))
2342 forget = ([], _('forgetting %s\n'))
2343 undelete = ([], _('undeleting %s\n'))
2343 undelete = ([], _('undeleting %s\n'))
2344 update = {}
2344 update = {}
2345
2345
2346 disptable = (
2346 disptable = (
2347 # dispatch table:
2347 # dispatch table:
2348 # file state
2348 # file state
2349 # action if in target manifest
2349 # action if in target manifest
2350 # action if not in target manifest
2350 # action if not in target manifest
2351 # make backup if in target manifest
2351 # make backup if in target manifest
2352 # make backup if not in target manifest
2352 # make backup if not in target manifest
2353 (modified, revert, remove, True, True),
2353 (modified, revert, remove, True, True),
2354 (added, revert, forget, True, False),
2354 (added, revert, forget, True, False),
2355 (removed, undelete, None, False, False),
2355 (removed, undelete, None, False, False),
2356 (deleted, revert, remove, False, False),
2356 (deleted, revert, remove, False, False),
2357 (unknown, add, None, True, False),
2357 (unknown, add, None, True, False),
2358 (target_only, add, None, False, False),
2358 (target_only, add, None, False, False),
2359 )
2359 )
2360
2360
2361 entries = names.items()
2361 entries = names.items()
2362 entries.sort()
2362 entries.sort()
2363
2363
2364 for abs, (rel, exact) in entries:
2364 for abs, (rel, exact) in entries:
2365 mfentry = mf.get(abs)
2365 mfentry = mf.get(abs)
2366 def handle(xlist, dobackup):
2366 def handle(xlist, dobackup):
2367 xlist[0].append(abs)
2367 xlist[0].append(abs)
2368 update[abs] = 1
2368 update[abs] = 1
2369 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2369 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2370 bakname = "%s.orig" % rel
2370 bakname = "%s.orig" % rel
2371 ui.note(_('saving current version of %s as %s\n') %
2371 ui.note(_('saving current version of %s as %s\n') %
2372 (rel, bakname))
2372 (rel, bakname))
2373 if not opts.get('dry_run'):
2373 if not opts.get('dry_run'):
2374 shutil.copyfile(rel, bakname)
2374 shutil.copyfile(rel, bakname)
2375 shutil.copymode(rel, bakname)
2375 shutil.copymode(rel, bakname)
2376 if ui.verbose or not exact:
2376 if ui.verbose or not exact:
2377 ui.status(xlist[1] % rel)
2377 ui.status(xlist[1] % rel)
2378 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2378 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2379 if abs not in table: continue
2379 if abs not in table: continue
2380 # file has changed in dirstate
2380 # file has changed in dirstate
2381 if mfentry:
2381 if mfentry:
2382 handle(hitlist, backuphit)
2382 handle(hitlist, backuphit)
2383 elif misslist is not None:
2383 elif misslist is not None:
2384 handle(misslist, backupmiss)
2384 handle(misslist, backupmiss)
2385 else:
2385 else:
2386 if exact: ui.warn(_('file not managed: %s\n') % rel)
2386 if exact: ui.warn(_('file not managed: %s\n') % rel)
2387 break
2387 break
2388 else:
2388 else:
2389 # file has not changed in dirstate
2389 # file has not changed in dirstate
2390 if node == parent:
2390 if node == parent:
2391 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2391 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2392 continue
2392 continue
2393 if pmf is None:
2393 if pmf is None:
2394 # only need parent manifest in this unlikely case,
2394 # only need parent manifest in this unlikely case,
2395 # so do not read by default
2395 # so do not read by default
2396 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2396 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2397 if abs in pmf:
2397 if abs in pmf:
2398 if mfentry:
2398 if mfentry:
2399 # if version of file is same in parent and target
2399 # if version of file is same in parent and target
2400 # manifests, do nothing
2400 # manifests, do nothing
2401 if pmf[abs] != mfentry:
2401 if pmf[abs] != mfentry:
2402 handle(revert, False)
2402 handle(revert, False)
2403 else:
2403 else:
2404 handle(remove, False)
2404 handle(remove, False)
2405
2405
2406 if not opts.get('dry_run'):
2406 if not opts.get('dry_run'):
2407 repo.dirstate.forget(forget[0])
2407 repo.dirstate.forget(forget[0])
2408 r = hg.revert(repo, node, update.has_key, wlock)
2408 r = hg.revert(repo, node, update.has_key, wlock)
2409 repo.dirstate.update(add[0], 'a')
2409 repo.dirstate.update(add[0], 'a')
2410 repo.dirstate.update(undelete[0], 'n')
2410 repo.dirstate.update(undelete[0], 'n')
2411 repo.dirstate.update(remove[0], 'r')
2411 repo.dirstate.update(remove[0], 'r')
2412 return r
2412 return r
2413
2413
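# Usage sketch for the behaviour described in the docstring above (file
# name and revision are hypothetical):
#
#   hg revert foo.c               # back to the working dir's parent; keeps foo.c.orig
#   hg revert -r 1234 foo.c       # contents as of revision 1234
#   hg revert --all --no-backup   # revert everything, skip .orig backups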
2414 def rollback(ui, repo):
2414 def rollback(ui, repo):
2415 """roll back the last transaction in this repository
2415 """roll back the last transaction in this repository
2416
2416
2417 Roll back the last transaction in this repository, restoring the
2417 Roll back the last transaction in this repository, restoring the
2418 project to its state prior to the transaction.
2418 project to its state prior to the transaction.
2419
2419
2420 Transactions are used to encapsulate the effects of all commands
2420 Transactions are used to encapsulate the effects of all commands
2421 that create new changesets or propagate existing changesets into a
2421 that create new changesets or propagate existing changesets into a
2422 repository. For example, the following commands are transactional,
2422 repository. For example, the following commands are transactional,
2423 and their effects can be rolled back:
2423 and their effects can be rolled back:
2424
2424
2425 commit
2425 commit
2426 import
2426 import
2427 pull
2427 pull
2428 push (with this repository as destination)
2428 push (with this repository as destination)
2429 unbundle
2429 unbundle
2430
2430
2431 This command should be used with care. There is only one level of
2431 This command should be used with care. There is only one level of
2432 rollback, and there is no way to undo a rollback.
2432 rollback, and there is no way to undo a rollback.
2433
2433
2434 This command is not intended for use on public repositories. Once
2434 This command is not intended for use on public repositories. Once
2435 changes are visible for pull by other users, rolling a transaction
2435 changes are visible for pull by other users, rolling a transaction
2436 back locally is ineffective (someone else may already have pulled
2436 back locally is ineffective (someone else may already have pulled
2437 the changes). Furthermore, a race is possible with readers of the
2437 the changes). Furthermore, a race is possible with readers of the
2438 repository; for example an in-progress pull from the repository
2438 repository; for example an in-progress pull from the repository
2439 may fail if a rollback is performed.
2439 may fail if a rollback is performed.
2440 """
2440 """
2441 repo.rollback()
2441 repo.rollback()
2442
2442
2443 def root(ui, repo):
2443 def root(ui, repo):
2444 """print the root (top) of the current working dir
2444 """print the root (top) of the current working dir
2445
2445
2446 Print the root directory of the current repository.
2446 Print the root directory of the current repository.
2447 """
2447 """
2448 ui.write(repo.root + "\n")
2448 ui.write(repo.root + "\n")
2449
2449
2450 def serve(ui, repo, **opts):
2450 def serve(ui, repo, **opts):
2451 """export the repository via HTTP
2451 """export the repository via HTTP
2452
2452
2453 Start a local HTTP repository browser and pull server.
2453 Start a local HTTP repository browser and pull server.
2454
2454
2455 By default, the server logs accesses to stdout and errors to
2455 By default, the server logs accesses to stdout and errors to
2456 stderr. Use the "-A" and "-E" options to log to files.
2456 stderr. Use the "-A" and "-E" options to log to files.
2457 """
2457 """
2458
2458
2459 if opts["stdio"]:
2459 if opts["stdio"]:
2460 if repo is None:
2460 if repo is None:
2461 raise hg.RepoError(_('no repo found'))
2461 raise hg.RepoError(_("There is no Mercurial repository here"
2462 " (.hg not found)"))
2462 s = sshserver.sshserver(ui, repo)
2463 s = sshserver.sshserver(ui, repo)
2463 s.serve_forever()
2464 s.serve_forever()
2464
2465
2465 optlist = ("name templates style address port ipv6"
2466 optlist = ("name templates style address port ipv6"
2466 " accesslog errorlog webdir_conf")
2467 " accesslog errorlog webdir_conf")
2467 for o in optlist.split():
2468 for o in optlist.split():
2468 if opts[o]:
2469 if opts[o]:
2469 ui.setconfig("web", o, opts[o])
2470 ui.setconfig("web", o, opts[o])
2470
2471
2471 if repo is None and not ui.config("web", "webdir_conf"):
2472 if repo is None and not ui.config("web", "webdir_conf"):
2472 raise hg.RepoError(_('no repo found'))
2473 raise hg.RepoError(_("There is no Mercurial repository here"
2474 " (.hg not found)"))
2473
2475
2474 if opts['daemon'] and not opts['daemon_pipefds']:
2476 if opts['daemon'] and not opts['daemon_pipefds']:
2475 rfd, wfd = os.pipe()
2477 rfd, wfd = os.pipe()
2476 args = sys.argv[:]
2478 args = sys.argv[:]
2477 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2479 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2478 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2480 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2479 args[0], args)
2481 args[0], args)
2480 os.close(wfd)
2482 os.close(wfd)
2481 os.read(rfd, 1)
2483 os.read(rfd, 1)
2482 os._exit(0)
2484 os._exit(0)
2483
2485
2484 try:
2486 try:
2485 httpd = hgweb.server.create_server(ui, repo)
2487 httpd = hgweb.server.create_server(ui, repo)
2486 except socket.error, inst:
2488 except socket.error, inst:
2487 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2489 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2488
2490
2489 if ui.verbose:
2491 if ui.verbose:
2490 addr, port = httpd.socket.getsockname()
2492 addr, port = httpd.socket.getsockname()
2491 if addr == '0.0.0.0':
2493 if addr == '0.0.0.0':
2492 addr = socket.gethostname()
2494 addr = socket.gethostname()
2493 else:
2495 else:
2494 try:
2496 try:
2495 addr = socket.gethostbyaddr(addr)[0]
2497 addr = socket.gethostbyaddr(addr)[0]
2496 except socket.error:
2498 except socket.error:
2497 pass
2499 pass
2498 if port != 80:
2500 if port != 80:
2499 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2501 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2500 else:
2502 else:
2501 ui.status(_('listening at http://%s/\n') % addr)
2503 ui.status(_('listening at http://%s/\n') % addr)
2502
2504
2503 if opts['pid_file']:
2505 if opts['pid_file']:
2504 fp = open(opts['pid_file'], 'w')
2506 fp = open(opts['pid_file'], 'w')
2505 fp.write(str(os.getpid()) + '\n')
2507 fp.write(str(os.getpid()) + '\n')
2506 fp.close()
2508 fp.close()
2507
2509
2508 if opts['daemon_pipefds']:
2510 if opts['daemon_pipefds']:
2509 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2511 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2510 os.close(rfd)
2512 os.close(rfd)
2511 os.write(wfd, 'y')
2513 os.write(wfd, 'y')
2512 os.close(wfd)
2514 os.close(wfd)
2513 sys.stdout.flush()
2515 sys.stdout.flush()
2514 sys.stderr.flush()
2516 sys.stderr.flush()
2515 fd = os.open(util.nulldev, os.O_RDWR)
2517 fd = os.open(util.nulldev, os.O_RDWR)
2516 if fd != 0: os.dup2(fd, 0)
2518 if fd != 0: os.dup2(fd, 0)
2517 if fd != 1: os.dup2(fd, 1)
2519 if fd != 1: os.dup2(fd, 1)
2518 if fd != 2: os.dup2(fd, 2)
2520 if fd != 2: os.dup2(fd, 2)
2519 if fd not in (0, 1, 2): os.close(fd)
2521 if fd not in (0, 1, 2): os.close(fd)
2520
2522
2521 httpd.serve_forever()
2523 httpd.serve_forever()
2522
2524
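# Usage sketch for running the server in the background with logging; the
# port and file names are hypothetical, and the short option forms are
# assumed to match hg's help output:
#
#   hg serve -p 8000 -d --pid-file hg.pid -A access.log -E error.log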
2523 def status(ui, repo, *pats, **opts):
2525 def status(ui, repo, *pats, **opts):
2524 """show changed files in the working directory
2526 """show changed files in the working directory
2525
2527
2526 Show status of files in the repository. If names are given, only
2528 Show status of files in the repository. If names are given, only
2527 files that match are shown. Files that are clean or ignored are
2529 files that match are shown. Files that are clean or ignored are
2528 not listed unless -c (clean), -i (ignored) or -A is given.
2530 not listed unless -c (clean), -i (ignored) or -A is given.
2529
2531
2530 The codes used to show the status of files are:
2532 The codes used to show the status of files are:
2531 M = modified
2533 M = modified
2532 A = added
2534 A = added
2533 R = removed
2535 R = removed
2534 C = clean
2536 C = clean
2535 ! = deleted, but still tracked
2537 ! = deleted, but still tracked
2536 ? = not tracked
2538 ? = not tracked
2537 I = ignored (not shown by default)
2539 I = ignored (not shown by default)
2538 = the previously added file was copied from here
2540 = the previously added file was copied from here
2539 """
2541 """
2540
2542
2541 all = opts['all']
2543 all = opts['all']
2542
2544
2543 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2545 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2544 cwd = (pats and repo.getcwd()) or ''
2546 cwd = (pats and repo.getcwd()) or ''
2545 modified, added, removed, deleted, unknown, ignored, clean = [
2547 modified, added, removed, deleted, unknown, ignored, clean = [
2546 [util.pathto(cwd, x) for x in n]
2548 [util.pathto(cwd, x) for x in n]
2547 for n in repo.status(files=files, match=matchfn,
2549 for n in repo.status(files=files, match=matchfn,
2548 list_ignored=all or opts['ignored'],
2550 list_ignored=all or opts['ignored'],
2549 list_clean=all or opts['clean'])]
2551 list_clean=all or opts['clean'])]
2550
2552
2551 changetypes = (('modified', 'M', modified),
2553 changetypes = (('modified', 'M', modified),
2552 ('added', 'A', added),
2554 ('added', 'A', added),
2553 ('removed', 'R', removed),
2555 ('removed', 'R', removed),
2554 ('deleted', '!', deleted),
2556 ('deleted', '!', deleted),
2555 ('unknown', '?', unknown),
2557 ('unknown', '?', unknown),
2556 ('ignored', 'I', ignored))
2558 ('ignored', 'I', ignored))
2557
2559
2558 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2560 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2559
2561
2560 end = opts['print0'] and '\0' or '\n'
2562 end = opts['print0'] and '\0' or '\n'
2561
2563
2562 for opt, char, changes in ([ct for ct in explicit_changetypes
2564 for opt, char, changes in ([ct for ct in explicit_changetypes
2563 if all or opts[ct[0]]]
2565 if all or opts[ct[0]]]
2564 or changetypes):
2566 or changetypes):
2565 if opts['no_status']:
2567 if opts['no_status']:
2566 format = "%%s%s" % end
2568 format = "%%s%s" % end
2567 else:
2569 else:
2568 format = "%s %%s%s" % (char, end)
2570 format = "%s %%s%s" % (char, end)
2569
2571
2570 for f in changes:
2572 for f in changes:
2571 ui.write(format % f)
2573 ui.write(format % f)
2572 if ((all or opts.get('copies')) and not opts.get('no_status')
2574 if ((all or opts.get('copies')) and not opts.get('no_status')
2573 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2575 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2574 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2576 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2575
2577
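# Illustrative invocations of the status codes and options handled above
# (long option forms correspond to the opts keys used in the code):
#
#   hg status                                # M/A/R/!/? lines for changed files
#   hg status -A                             # also list clean (C) and ignored (I) files
#   hg status --copies                       # print copy sources under added files
#   hg status --no-status --print0 | xargs -0 ls -l   # NUL-separated names only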
2576 def tag(ui, repo, name, rev_=None, **opts):
2578 def tag(ui, repo, name, rev_=None, **opts):
2577 """add a tag for the current tip or a given revision
2579 """add a tag for the current tip or a given revision
2578
2580
2579 Name a particular revision using <name>.
2581 Name a particular revision using <name>.
2580
2582
2581 Tags are used to name particular revisions of the repository and are
2583 Tags are used to name particular revisions of the repository and are
2582 very useful to compare different revisions, to go back to significant
2584 very useful to compare different revisions, to go back to significant
2583 earlier versions or to mark branch points as releases, etc.
2585 earlier versions or to mark branch points as releases, etc.
2584
2586
2585 If no revision is given, the parent of the working directory is used.
2587 If no revision is given, the parent of the working directory is used.
2586
2588
2587 To facilitate version control, distribution, and merging of tags,
2589 To facilitate version control, distribution, and merging of tags,
2588 they are stored as a file named ".hgtags" which is managed
2590 they are stored as a file named ".hgtags" which is managed
2589 similarly to other project files and can be hand-edited if
2591 similarly to other project files and can be hand-edited if
2590 necessary. The file '.hg/localtags' is used for local tags (not
2592 necessary. The file '.hg/localtags' is used for local tags (not
2591 shared among repositories).
2593 shared among repositories).
2592 """
2594 """
2593 if name in ['tip', '.']:
2595 if name in ['tip', '.']:
2594 raise util.Abort(_("the name '%s' is reserved") % name)
2596 raise util.Abort(_("the name '%s' is reserved") % name)
2595 if rev_ is not None:
2597 if rev_ is not None:
2596 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2598 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2597 "please use 'hg tag [-r REV] NAME' instead\n"))
2599 "please use 'hg tag [-r REV] NAME' instead\n"))
2598 if opts['rev']:
2600 if opts['rev']:
2599 raise util.Abort(_("use only one form to specify the revision"))
2601 raise util.Abort(_("use only one form to specify the revision"))
2600 if opts['rev']:
2602 if opts['rev']:
2601 rev_ = opts['rev']
2603 rev_ = opts['rev']
2602 if rev_:
2604 if rev_:
2603 r = repo.lookup(rev_)
2605 r = repo.lookup(rev_)
2604 else:
2606 else:
2605 p1, p2 = repo.dirstate.parents()
2607 p1, p2 = repo.dirstate.parents()
2606 if p1 == nullid:
2608 if p1 == nullid:
2607 raise util.Abort(_('no revision to tag'))
2609 raise util.Abort(_('no revision to tag'))
2608 if p2 != nullid:
2610 if p2 != nullid:
2609 raise util.Abort(_('outstanding uncommitted merges'))
2611 raise util.Abort(_('outstanding uncommitted merges'))
2610 r = p1
2612 r = p1
2611
2613
2612 message = opts['message']
2614 message = opts['message']
2613 if not message:
2615 if not message:
2614 message = _('Added tag %s for changeset %s') % (name, short(r))
2616 message = _('Added tag %s for changeset %s') % (name, short(r))
2615
2617
2616 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2618 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2617
2619
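# Sketch of the on-disk tag format (an assumption spelled out for
# illustration; the format is defined elsewhere in Mercurial, not in this
# function): repo.tag() records the tag by appending a line of the form
# "<40-digit hex changeset id> <tag name>" to .hgtags (or to .hg/localtags
# when --local is given) and, for regular tags, committing that change with
# the message built above, e.g.:
#
#   0123456789abcdef0123456789abcdef01234567 v1.0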
2618 def tags(ui, repo):
2620 def tags(ui, repo):
2619 """list repository tags
2621 """list repository tags
2620
2622
2621 List the repository tags.
2623 List the repository tags.
2622
2624
2623 This lists both regular and local tags.
2625 This lists both regular and local tags.
2624 """
2626 """
2625
2627
2626 l = repo.tagslist()
2628 l = repo.tagslist()
2627 l.reverse()
2629 l.reverse()
2628 hexfunc = ui.debugflag and hex or short
2630 hexfunc = ui.debugflag and hex or short
2629 for t, n in l:
2631 for t, n in l:
2630 try:
2632 try:
2631 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2633 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2632 except KeyError:
2634 except KeyError:
2633 r = " ?:?"
2635 r = " ?:?"
2634 if ui.quiet:
2636 if ui.quiet:
2635 ui.write("%s\n" % t)
2637 ui.write("%s\n" % t)
2636 else:
2638 else:
2637 ui.write("%-30s %s\n" % (t, r))
2639 ui.write("%-30s %s\n" % (t, r))
2638
2640
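# Note added for clarity: "cond and a or b", as in "hexfunc = ui.debugflag
# and hex or short" above, is the pre-Python-2.5 spelling of a conditional
# expression used throughout this module (see also "end = opts['print0'] and
# '\0' or '\n'" in status()).  It evaluates to a when cond is true and to b
# otherwise, and is safe here because a is always truthy:
#
#   >>> True and 'hex' or 'short'
#   'hex'
#   >>> False and 'hex' or 'short'
#   'short'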
2639 def tip(ui, repo, **opts):
2641 def tip(ui, repo, **opts):
2640 """show the tip revision
2642 """show the tip revision
2641
2643
2642 Show the tip revision.
2644 Show the tip revision.
2643 """
2645 """
2644 n = repo.changelog.tip()
2646 n = repo.changelog.tip()
2645 br = None
2647 br = None
2646 if opts['branches']:
2648 if opts['branches']:
2647 br = repo.branchlookup([n])
2649 br = repo.branchlookup([n])
2648 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2650 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2649 if opts['patch']:
2651 if opts['patch']:
2650 patch.diff(repo, repo.changelog.parents(n)[0], n)
2652 patch.diff(repo, repo.changelog.parents(n)[0], n)
2651
2653
2652 def unbundle(ui, repo, fname, **opts):
2654 def unbundle(ui, repo, fname, **opts):
2653 """apply a changegroup file
2655 """apply a changegroup file
2654
2656
2655 Apply a compressed changegroup file generated by the bundle
2657 Apply a compressed changegroup file generated by the bundle
2656 command.
2658 command.
2657 """
2659 """
2658 f = urllib.urlopen(fname)
2660 f = urllib.urlopen(fname)
2659
2661
2660 header = f.read(6)
2662 header = f.read(6)
2661 if not header.startswith("HG"):
2663 if not header.startswith("HG"):
2662 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2664 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2663 elif not header.startswith("HG10"):
2665 elif not header.startswith("HG10"):
2664 raise util.Abort(_("%s: unknown bundle version") % fname)
2666 raise util.Abort(_("%s: unknown bundle version") % fname)
2665 elif header == "HG10BZ":
2667 elif header == "HG10BZ":
2666 def generator(f):
2668 def generator(f):
2667 zd = bz2.BZ2Decompressor()
2669 zd = bz2.BZ2Decompressor()
2668 zd.decompress("BZ")
2670 zd.decompress("BZ")
2669 for chunk in f:
2671 for chunk in f:
2670 yield zd.decompress(chunk)
2672 yield zd.decompress(chunk)
2671 elif header == "HG10UN":
2673 elif header == "HG10UN":
2672 def generator(f):
2674 def generator(f):
2673 for chunk in f:
2675 for chunk in f:
2674 yield chunk
2676 yield chunk
2675 else:
2677 else:
2676 raise util.Abort(_("%s: unknown bundle compression type")
2678 raise util.Abort(_("%s: unknown bundle compression type")
2677 % fname)
2679 % fname)
2678 gen = generator(util.filechunkiter(f, 4096))
2680 gen = generator(util.filechunkiter(f, 4096))
2679 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2681 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2680 'bundle:' + fname)
2682 'bundle:' + fname)
2681 return postincoming(ui, repo, modheads, opts['update'])
2683 return postincoming(ui, repo, modheads, opts['update'])
2682
2684
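# Illustrative sketch (added for clarity, not part of the original source):
# why the HG10BZ branch above feeds the literal string "BZ" to the
# decompressor before the file contents.  The 6-byte header read by
# unbundle() already consumed the "BZ" that begins a bzip2 stream, so it has
# to be replayed, e.g. (rest_of_file is a hypothetical byte string holding
# everything after the header):
#
#   import bz2
#   zd = bz2.BZ2Decompressor()
#   zd.decompress("BZ")          # replay the magic swallowed by the header
#   data = zd.decompress(rest_of_file)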
2683 def undo(ui, repo):
2685 def undo(ui, repo):
2684 """undo the last commit or pull (DEPRECATED)
2686 """undo the last commit or pull (DEPRECATED)
2685
2687
2686 (DEPRECATED)
2688 (DEPRECATED)
2687 This command is now deprecated and will be removed in a future
2689 This command is now deprecated and will be removed in a future
2688 release. Please use the rollback command instead. For usage
2690 release. Please use the rollback command instead. For usage
2689 instructions, see the rollback command.
2691 instructions, see the rollback command.
2690 """
2692 """
2691 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2693 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2692 repo.rollback()
2694 repo.rollback()
2693
2695
2694 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2696 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2695 branch=None):
2697 branch=None):
2696 """update or merge working directory
2698 """update or merge working directory
2697
2699
2698 Update the working directory to the specified revision.
2700 Update the working directory to the specified revision.
2699
2701
2700 If there are no outstanding changes in the working directory and
2702 If there are no outstanding changes in the working directory and
2701 there is a linear relationship between the current version and the
2703 there is a linear relationship between the current version and the
2702 requested version, the working directory is updated to the requested version.
2704 requested version, the working directory is updated to the requested version.
2703
2705
2704 To merge the working directory with another revision, use the
2706 To merge the working directory with another revision, use the
2705 merge command.
2707 merge command.
2706
2708
2707 By default, update will refuse to run if doing so would require
2709 By default, update will refuse to run if doing so would require
2708 merging or discarding local changes.
2710 merging or discarding local changes.
2709 """
2711 """
2710 node = _lookup(repo, node, branch)
2712 node = _lookup(repo, node, branch)
2711 if merge:
2713 if merge:
2712 ui.warn(_('(the -m/--merge option is deprecated; '
2714 ui.warn(_('(the -m/--merge option is deprecated; '
2713 'use the merge command instead)\n'))
2715 'use the merge command instead)\n'))
2714 return hg.merge(repo, node, force=force)
2716 return hg.merge(repo, node, force=force)
2715 elif clean:
2717 elif clean:
2716 return hg.clean(repo, node)
2718 return hg.clean(repo, node)
2717 else:
2719 else:
2718 return hg.update(repo, node)
2720 return hg.update(repo, node)
2719
2721
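# Usage sketch (added for illustration; it simply restates the dispatch in
# update() above):
#   hg update [REV]      -> hg.update(repo, node)  refuses to merge or discard
#   hg update -C [REV]   -> hg.clean(repo, node)   overwrite local changes
#   hg update -m [REV]   -> hg.merge(repo, node)   deprecated; use "hg merge"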
2720 def _lookup(repo, node, branch=None):
2722 def _lookup(repo, node, branch=None):
2721 if branch:
2723 if branch:
2722 br = repo.branchlookup(branch=branch)
2724 br = repo.branchlookup(branch=branch)
2723 found = []
2725 found = []
2724 for x in br:
2726 for x in br:
2725 if branch in br[x]:
2727 if branch in br[x]:
2726 found.append(x)
2728 found.append(x)
2727 if len(found) > 1:
2729 if len(found) > 1:
2728 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2730 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2729 for x in found:
2731 for x in found:
2730 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2732 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2731 raise util.Abort("")
2733 raise util.Abort("")
2732 if len(found) == 1:
2734 if len(found) == 1:
2733 node = found[0]
2735 node = found[0]
2734 repo.ui.warn(_("Using head %s for branch %s\n")
2736 repo.ui.warn(_("Using head %s for branch %s\n")
2735 % (short(node), branch))
2737 % (short(node), branch))
2736 else:
2738 else:
2737 raise util.Abort(_("branch %s not found") % branch)
2739 raise util.Abort(_("branch %s not found") % branch)
2738 else:
2740 else:
2739 node = node and repo.lookup(node) or repo.changelog.tip()
2741 node = node and repo.lookup(node) or repo.changelog.tip()
2740 return node
2742 return node
2741
2743
2742 def verify(ui, repo):
2744 def verify(ui, repo):
2743 """verify the integrity of the repository
2745 """verify the integrity of the repository
2744
2746
2745 Verify the integrity of the current repository.
2747 Verify the integrity of the current repository.
2746
2748
2747 This will perform an extensive check of the repository's
2749 This will perform an extensive check of the repository's
2748 integrity, validating the hashes and checksums of each entry in
2750 integrity, validating the hashes and checksums of each entry in
2749 the changelog, manifest, and tracked files, as well as the
2751 the changelog, manifest, and tracked files, as well as the
2750 integrity of their crosslinks and indices.
2752 integrity of their crosslinks and indices.
2751 """
2753 """
2752 return hg.verify(repo)
2754 return hg.verify(repo)
2753
2755
2754 # Command options and aliases are listed here, alphabetically
2756 # Command options and aliases are listed here, alphabetically
2755
2757
2756 table = {
2758 table = {
2757 "^add":
2759 "^add":
2758 (add,
2760 (add,
2759 [('I', 'include', [], _('include names matching the given patterns')),
2761 [('I', 'include', [], _('include names matching the given patterns')),
2760 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2762 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2761 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2763 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2762 _('hg add [OPTION]... [FILE]...')),
2764 _('hg add [OPTION]... [FILE]...')),
2763 "addremove":
2765 "addremove":
2764 (addremove,
2766 (addremove,
2765 [('I', 'include', [], _('include names matching the given patterns')),
2767 [('I', 'include', [], _('include names matching the given patterns')),
2766 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2768 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2767 ('n', 'dry-run', None,
2769 ('n', 'dry-run', None,
2768 _('do not perform actions, just print output')),
2770 _('do not perform actions, just print output')),
2769 ('s', 'similarity', '',
2771 ('s', 'similarity', '',
2770 _('guess renamed files by similarity (0<=s<=1)'))],
2772 _('guess renamed files by similarity (0<=s<=1)'))],
2771 _('hg addremove [OPTION]... [FILE]...')),
2773 _('hg addremove [OPTION]... [FILE]...')),
2772 "^annotate":
2774 "^annotate":
2773 (annotate,
2775 (annotate,
2774 [('r', 'rev', '', _('annotate the specified revision')),
2776 [('r', 'rev', '', _('annotate the specified revision')),
2775 ('a', 'text', None, _('treat all files as text')),
2777 ('a', 'text', None, _('treat all files as text')),
2776 ('u', 'user', None, _('list the author')),
2778 ('u', 'user', None, _('list the author')),
2777 ('d', 'date', None, _('list the date')),
2779 ('d', 'date', None, _('list the date')),
2778 ('n', 'number', None, _('list the revision number (default)')),
2780 ('n', 'number', None, _('list the revision number (default)')),
2779 ('c', 'changeset', None, _('list the changeset')),
2781 ('c', 'changeset', None, _('list the changeset')),
2780 ('I', 'include', [], _('include names matching the given patterns')),
2782 ('I', 'include', [], _('include names matching the given patterns')),
2781 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2783 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2782 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2784 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2783 "archive":
2785 "archive":
2784 (archive,
2786 (archive,
2785 [('', 'no-decode', None, _('do not pass files through decoders')),
2787 [('', 'no-decode', None, _('do not pass files through decoders')),
2786 ('p', 'prefix', '', _('directory prefix for files in archive')),
2788 ('p', 'prefix', '', _('directory prefix for files in archive')),
2787 ('r', 'rev', '', _('revision to distribute')),
2789 ('r', 'rev', '', _('revision to distribute')),
2788 ('t', 'type', '', _('type of distribution to create')),
2790 ('t', 'type', '', _('type of distribution to create')),
2789 ('I', 'include', [], _('include names matching the given patterns')),
2791 ('I', 'include', [], _('include names matching the given patterns')),
2790 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2792 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2791 _('hg archive [OPTION]... DEST')),
2793 _('hg archive [OPTION]... DEST')),
2792 "backout":
2794 "backout":
2793 (backout,
2795 (backout,
2794 [('', 'merge', None,
2796 [('', 'merge', None,
2795 _('merge with old dirstate parent after backout')),
2797 _('merge with old dirstate parent after backout')),
2796 ('m', 'message', '', _('use <text> as commit message')),
2798 ('m', 'message', '', _('use <text> as commit message')),
2797 ('l', 'logfile', '', _('read commit message from <file>')),
2799 ('l', 'logfile', '', _('read commit message from <file>')),
2798 ('d', 'date', '', _('record datecode as commit date')),
2800 ('d', 'date', '', _('record datecode as commit date')),
2799 ('', 'parent', '', _('parent to choose when backing out merge')),
2801 ('', 'parent', '', _('parent to choose when backing out merge')),
2800 ('u', 'user', '', _('record user as committer')),
2802 ('u', 'user', '', _('record user as committer')),
2801 ('I', 'include', [], _('include names matching the given patterns')),
2803 ('I', 'include', [], _('include names matching the given patterns')),
2802 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2804 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2803 _('hg backout [OPTION]... REV')),
2805 _('hg backout [OPTION]... REV')),
2804 "bundle":
2806 "bundle":
2805 (bundle,
2807 (bundle,
2806 [('f', 'force', None,
2808 [('f', 'force', None,
2807 _('run even when remote repository is unrelated'))],
2809 _('run even when remote repository is unrelated'))],
2808 _('hg bundle FILE DEST')),
2810 _('hg bundle FILE DEST')),
2809 "cat":
2811 "cat":
2810 (cat,
2812 (cat,
2811 [('o', 'output', '', _('print output to file with formatted name')),
2813 [('o', 'output', '', _('print output to file with formatted name')),
2812 ('r', 'rev', '', _('print the given revision')),
2814 ('r', 'rev', '', _('print the given revision')),
2813 ('I', 'include', [], _('include names matching the given patterns')),
2815 ('I', 'include', [], _('include names matching the given patterns')),
2814 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2816 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2815 _('hg cat [OPTION]... FILE...')),
2817 _('hg cat [OPTION]... FILE...')),
2816 "^clone":
2818 "^clone":
2817 (clone,
2819 (clone,
2818 [('U', 'noupdate', None, _('do not update the new working directory')),
2820 [('U', 'noupdate', None, _('do not update the new working directory')),
2819 ('r', 'rev', [],
2821 ('r', 'rev', [],
2820 _('a changeset you would like to have after cloning')),
2822 _('a changeset you would like to have after cloning')),
2821 ('', 'pull', None, _('use pull protocol to copy metadata')),
2823 ('', 'pull', None, _('use pull protocol to copy metadata')),
2822 ('', 'uncompressed', None,
2824 ('', 'uncompressed', None,
2823 _('use uncompressed transfer (fast over LAN)')),
2825 _('use uncompressed transfer (fast over LAN)')),
2824 ('e', 'ssh', '', _('specify ssh command to use')),
2826 ('e', 'ssh', '', _('specify ssh command to use')),
2825 ('', 'remotecmd', '',
2827 ('', 'remotecmd', '',
2826 _('specify hg command to run on the remote side'))],
2828 _('specify hg command to run on the remote side'))],
2827 _('hg clone [OPTION]... SOURCE [DEST]')),
2829 _('hg clone [OPTION]... SOURCE [DEST]')),
2828 "^commit|ci":
2830 "^commit|ci":
2829 (commit,
2831 (commit,
2830 [('A', 'addremove', None,
2832 [('A', 'addremove', None,
2831 _('mark new/missing files as added/removed before committing')),
2833 _('mark new/missing files as added/removed before committing')),
2832 ('m', 'message', '', _('use <text> as commit message')),
2834 ('m', 'message', '', _('use <text> as commit message')),
2833 ('l', 'logfile', '', _('read the commit message from <file>')),
2835 ('l', 'logfile', '', _('read the commit message from <file>')),
2834 ('d', 'date', '', _('record datecode as commit date')),
2836 ('d', 'date', '', _('record datecode as commit date')),
2835 ('u', 'user', '', _('record user as committer')),
2837 ('u', 'user', '', _('record user as committer')),
2836 ('I', 'include', [], _('include names matching the given patterns')),
2838 ('I', 'include', [], _('include names matching the given patterns')),
2837 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2839 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2838 _('hg commit [OPTION]... [FILE]...')),
2840 _('hg commit [OPTION]... [FILE]...')),
2839 "copy|cp":
2841 "copy|cp":
2840 (copy,
2842 (copy,
2841 [('A', 'after', None, _('record a copy that has already occurred')),
2843 [('A', 'after', None, _('record a copy that has already occurred')),
2842 ('f', 'force', None,
2844 ('f', 'force', None,
2843 _('forcibly copy over an existing managed file')),
2845 _('forcibly copy over an existing managed file')),
2844 ('I', 'include', [], _('include names matching the given patterns')),
2846 ('I', 'include', [], _('include names matching the given patterns')),
2845 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2847 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2846 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2848 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2847 _('hg copy [OPTION]... [SOURCE]... DEST')),
2849 _('hg copy [OPTION]... [SOURCE]... DEST')),
2848 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2850 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2849 "debugcomplete":
2851 "debugcomplete":
2850 (debugcomplete,
2852 (debugcomplete,
2851 [('o', 'options', None, _('show the command options'))],
2853 [('o', 'options', None, _('show the command options'))],
2852 _('debugcomplete [-o] CMD')),
2854 _('debugcomplete [-o] CMD')),
2853 "debugrebuildstate":
2855 "debugrebuildstate":
2854 (debugrebuildstate,
2856 (debugrebuildstate,
2855 [('r', 'rev', '', _('revision to rebuild to'))],
2857 [('r', 'rev', '', _('revision to rebuild to'))],
2856 _('debugrebuildstate [-r REV] [REV]')),
2858 _('debugrebuildstate [-r REV] [REV]')),
2857 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2859 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2858 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2860 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2859 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2861 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2860 "debugstate": (debugstate, [], _('debugstate')),
2862 "debugstate": (debugstate, [], _('debugstate')),
2861 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2863 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2862 "debugindex": (debugindex, [], _('debugindex FILE')),
2864 "debugindex": (debugindex, [], _('debugindex FILE')),
2863 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2865 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2864 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2866 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2865 "debugwalk":
2867 "debugwalk":
2866 (debugwalk,
2868 (debugwalk,
2867 [('I', 'include', [], _('include names matching the given patterns')),
2869 [('I', 'include', [], _('include names matching the given patterns')),
2868 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2870 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2869 _('debugwalk [OPTION]... [FILE]...')),
2871 _('debugwalk [OPTION]... [FILE]...')),
2870 "^diff":
2872 "^diff":
2871 (diff,
2873 (diff,
2872 [('r', 'rev', [], _('revision')),
2874 [('r', 'rev', [], _('revision')),
2873 ('a', 'text', None, _('treat all files as text')),
2875 ('a', 'text', None, _('treat all files as text')),
2874 ('p', 'show-function', None,
2876 ('p', 'show-function', None,
2875 _('show which function each change is in')),
2877 _('show which function each change is in')),
2876 ('g', 'git', None, _('use git extended diff format')),
2878 ('g', 'git', None, _('use git extended diff format')),
2877 ('w', 'ignore-all-space', None,
2879 ('w', 'ignore-all-space', None,
2878 _('ignore white space when comparing lines')),
2880 _('ignore white space when comparing lines')),
2879 ('b', 'ignore-space-change', None,
2881 ('b', 'ignore-space-change', None,
2880 _('ignore changes in the amount of white space')),
2882 _('ignore changes in the amount of white space')),
2881 ('B', 'ignore-blank-lines', None,
2883 ('B', 'ignore-blank-lines', None,
2882 _('ignore changes whose lines are all blank')),
2884 _('ignore changes whose lines are all blank')),
2883 ('I', 'include', [], _('include names matching the given patterns')),
2885 ('I', 'include', [], _('include names matching the given patterns')),
2884 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2886 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2885 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2887 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2886 "^export":
2888 "^export":
2887 (export,
2889 (export,
2888 [('o', 'output', '', _('print output to file with formatted name')),
2890 [('o', 'output', '', _('print output to file with formatted name')),
2889 ('a', 'text', None, _('treat all files as text')),
2891 ('a', 'text', None, _('treat all files as text')),
2890 ('g', 'git', None, _('use git extended diff format')),
2892 ('g', 'git', None, _('use git extended diff format')),
2891 ('', 'switch-parent', None, _('diff against the second parent'))],
2893 ('', 'switch-parent', None, _('diff against the second parent'))],
2892 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2894 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2893 "debugforget|forget":
2895 "debugforget|forget":
2894 (forget,
2896 (forget,
2895 [('I', 'include', [], _('include names matching the given patterns')),
2897 [('I', 'include', [], _('include names matching the given patterns')),
2896 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2898 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2897 _('hg forget [OPTION]... FILE...')),
2899 _('hg forget [OPTION]... FILE...')),
2898 "grep":
2900 "grep":
2899 (grep,
2901 (grep,
2900 [('0', 'print0', None, _('end fields with NUL')),
2902 [('0', 'print0', None, _('end fields with NUL')),
2901 ('', 'all', None, _('print all revisions that match')),
2903 ('', 'all', None, _('print all revisions that match')),
2902 ('f', 'follow', None,
2904 ('f', 'follow', None,
2903 _('follow changeset history, or file history across copies and renames')),
2905 _('follow changeset history, or file history across copies and renames')),
2904 ('i', 'ignore-case', None, _('ignore case when matching')),
2906 ('i', 'ignore-case', None, _('ignore case when matching')),
2905 ('l', 'files-with-matches', None,
2907 ('l', 'files-with-matches', None,
2906 _('print only filenames and revs that match')),
2908 _('print only filenames and revs that match')),
2907 ('n', 'line-number', None, _('print matching line numbers')),
2909 ('n', 'line-number', None, _('print matching line numbers')),
2908 ('r', 'rev', [], _('search in given revision range')),
2910 ('r', 'rev', [], _('search in given revision range')),
2909 ('u', 'user', None, _('print user who committed change')),
2911 ('u', 'user', None, _('print user who committed change')),
2910 ('I', 'include', [], _('include names matching the given patterns')),
2912 ('I', 'include', [], _('include names matching the given patterns')),
2911 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2913 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2912 _('hg grep [OPTION]... PATTERN [FILE]...')),
2914 _('hg grep [OPTION]... PATTERN [FILE]...')),
2913 "heads":
2915 "heads":
2914 (heads,
2916 (heads,
2915 [('b', 'branches', None, _('show branches')),
2917 [('b', 'branches', None, _('show branches')),
2916 ('', 'style', '', _('display using template map file')),
2918 ('', 'style', '', _('display using template map file')),
2917 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2919 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2918 ('', 'template', '', _('display with template'))],
2920 ('', 'template', '', _('display with template'))],
2919 _('hg heads [-b] [-r <rev>]')),
2921 _('hg heads [-b] [-r <rev>]')),
2920 "help": (help_, [], _('hg help [COMMAND]')),
2922 "help": (help_, [], _('hg help [COMMAND]')),
2921 "identify|id": (identify, [], _('hg identify')),
2923 "identify|id": (identify, [], _('hg identify')),
2922 "import|patch":
2924 "import|patch":
2923 (import_,
2925 (import_,
2924 [('p', 'strip', 1,
2926 [('p', 'strip', 1,
2925 _('directory strip option for patch. This has the same\n'
2927 _('directory strip option for patch. This has the same\n'
2926 'meaning as the corresponding patch option')),
2928 'meaning as the corresponding patch option')),
2927 ('m', 'message', '', _('use <text> as commit message')),
2929 ('m', 'message', '', _('use <text> as commit message')),
2928 ('b', 'base', '', _('base path')),
2930 ('b', 'base', '', _('base path')),
2929 ('f', 'force', None,
2931 ('f', 'force', None,
2930 _('skip check for outstanding uncommitted changes'))],
2932 _('skip check for outstanding uncommitted changes'))],
2931 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2933 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2932 "incoming|in": (incoming,
2934 "incoming|in": (incoming,
2933 [('M', 'no-merges', None, _('do not show merges')),
2935 [('M', 'no-merges', None, _('do not show merges')),
2934 ('f', 'force', None,
2936 ('f', 'force', None,
2935 _('run even when remote repository is unrelated')),
2937 _('run even when remote repository is unrelated')),
2936 ('', 'style', '', _('display using template map file')),
2938 ('', 'style', '', _('display using template map file')),
2937 ('n', 'newest-first', None, _('show newest record first')),
2939 ('n', 'newest-first', None, _('show newest record first')),
2938 ('', 'bundle', '', _('file to store the bundles into')),
2940 ('', 'bundle', '', _('file to store the bundles into')),
2939 ('p', 'patch', None, _('show patch')),
2941 ('p', 'patch', None, _('show patch')),
2940 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2942 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2941 ('', 'template', '', _('display with template')),
2943 ('', 'template', '', _('display with template')),
2942 ('e', 'ssh', '', _('specify ssh command to use')),
2944 ('e', 'ssh', '', _('specify ssh command to use')),
2943 ('', 'remotecmd', '',
2945 ('', 'remotecmd', '',
2944 _('specify hg command to run on the remote side'))],
2946 _('specify hg command to run on the remote side'))],
2945 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2947 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2946 ' [--bundle FILENAME] [SOURCE]')),
2948 ' [--bundle FILENAME] [SOURCE]')),
2947 "^init":
2949 "^init":
2948 (init,
2950 (init,
2949 [('e', 'ssh', '', _('specify ssh command to use')),
2951 [('e', 'ssh', '', _('specify ssh command to use')),
2950 ('', 'remotecmd', '',
2952 ('', 'remotecmd', '',
2951 _('specify hg command to run on the remote side'))],
2953 _('specify hg command to run on the remote side'))],
2952 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2954 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2953 "locate":
2955 "locate":
2954 (locate,
2956 (locate,
2955 [('r', 'rev', '', _('search the repository as it stood at rev')),
2957 [('r', 'rev', '', _('search the repository as it stood at rev')),
2956 ('0', 'print0', None,
2958 ('0', 'print0', None,
2957 _('end filenames with NUL, for use with xargs')),
2959 _('end filenames with NUL, for use with xargs')),
2958 ('f', 'fullpath', None,
2960 ('f', 'fullpath', None,
2959 _('print complete paths from the filesystem root')),
2961 _('print complete paths from the filesystem root')),
2960 ('I', 'include', [], _('include names matching the given patterns')),
2962 ('I', 'include', [], _('include names matching the given patterns')),
2961 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2963 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2962 _('hg locate [OPTION]... [PATTERN]...')),
2964 _('hg locate [OPTION]... [PATTERN]...')),
2963 "^log|history":
2965 "^log|history":
2964 (log,
2966 (log,
2965 [('b', 'branches', None, _('show branches')),
2967 [('b', 'branches', None, _('show branches')),
2966 ('f', 'follow', None,
2968 ('f', 'follow', None,
2967 _('follow changeset history, or file history across copies and renames')),
2969 _('follow changeset history, or file history across copies and renames')),
2968 ('', 'follow-first', None,
2970 ('', 'follow-first', None,
2969 _('only follow the first parent of merge changesets')),
2971 _('only follow the first parent of merge changesets')),
2970 ('k', 'keyword', [], _('search for a keyword')),
2972 ('k', 'keyword', [], _('search for a keyword')),
2971 ('l', 'limit', '', _('limit number of changes displayed')),
2973 ('l', 'limit', '', _('limit number of changes displayed')),
2972 ('r', 'rev', [], _('show the specified revision or range')),
2974 ('r', 'rev', [], _('show the specified revision or range')),
2973 ('M', 'no-merges', None, _('do not show merges')),
2975 ('M', 'no-merges', None, _('do not show merges')),
2974 ('', 'style', '', _('display using template map file')),
2976 ('', 'style', '', _('display using template map file')),
2975 ('m', 'only-merges', None, _('show only merges')),
2977 ('m', 'only-merges', None, _('show only merges')),
2976 ('p', 'patch', None, _('show patch')),
2978 ('p', 'patch', None, _('show patch')),
2977 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2979 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2978 ('', 'template', '', _('display with template')),
2980 ('', 'template', '', _('display with template')),
2979 ('I', 'include', [], _('include names matching the given patterns')),
2981 ('I', 'include', [], _('include names matching the given patterns')),
2980 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2982 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2981 _('hg log [OPTION]... [FILE]')),
2983 _('hg log [OPTION]... [FILE]')),
2982 "manifest": (manifest, [], _('hg manifest [REV]')),
2984 "manifest": (manifest, [], _('hg manifest [REV]')),
2983 "merge":
2985 "merge":
2984 (merge,
2986 (merge,
2985 [('b', 'branch', '', _('merge with head of a specific branch')),
2987 [('b', 'branch', '', _('merge with head of a specific branch')),
2986 ('f', 'force', None, _('force a merge with outstanding changes'))],
2988 ('f', 'force', None, _('force a merge with outstanding changes'))],
2987 _('hg merge [-b TAG] [-f] [REV]')),
2989 _('hg merge [-b TAG] [-f] [REV]')),
2988 "outgoing|out": (outgoing,
2990 "outgoing|out": (outgoing,
2989 [('M', 'no-merges', None, _('do not show merges')),
2991 [('M', 'no-merges', None, _('do not show merges')),
2990 ('f', 'force', None,
2992 ('f', 'force', None,
2991 _('run even when remote repository is unrelated')),
2993 _('run even when remote repository is unrelated')),
2992 ('p', 'patch', None, _('show patch')),
2994 ('p', 'patch', None, _('show patch')),
2993 ('', 'style', '', _('display using template map file')),
2995 ('', 'style', '', _('display using template map file')),
2994 ('r', 'rev', [], _('a specific revision you would like to push')),
2996 ('r', 'rev', [], _('a specific revision you would like to push')),
2995 ('n', 'newest-first', None, _('show newest record first')),
2997 ('n', 'newest-first', None, _('show newest record first')),
2996 ('', 'template', '', _('display with template')),
2998 ('', 'template', '', _('display with template')),
2997 ('e', 'ssh', '', _('specify ssh command to use')),
2999 ('e', 'ssh', '', _('specify ssh command to use')),
2998 ('', 'remotecmd', '',
3000 ('', 'remotecmd', '',
2999 _('specify hg command to run on the remote side'))],
3001 _('specify hg command to run on the remote side'))],
3000 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3002 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3001 "^parents":
3003 "^parents":
3002 (parents,
3004 (parents,
3003 [('b', 'branches', None, _('show branches')),
3005 [('b', 'branches', None, _('show branches')),
3004 ('r', 'rev', '', _('show parents from the specified rev')),
3006 ('r', 'rev', '', _('show parents from the specified rev')),
3005 ('', 'style', '', _('display using template map file')),
3007 ('', 'style', '', _('display using template map file')),
3006 ('', 'template', '', _('display with template'))],
3008 ('', 'template', '', _('display with template'))],
3007 _('hg parents [-b] [-r REV] [FILE]')),
3009 _('hg parents [-b] [-r REV] [FILE]')),
3008 "paths": (paths, [], _('hg paths [NAME]')),
3010 "paths": (paths, [], _('hg paths [NAME]')),
3009 "^pull":
3011 "^pull":
3010 (pull,
3012 (pull,
3011 [('u', 'update', None,
3013 [('u', 'update', None,
3012 _('update the working directory to tip after pull')),
3014 _('update the working directory to tip after pull')),
3013 ('e', 'ssh', '', _('specify ssh command to use')),
3015 ('e', 'ssh', '', _('specify ssh command to use')),
3014 ('f', 'force', None,
3016 ('f', 'force', None,
3015 _('run even when remote repository is unrelated')),
3017 _('run even when remote repository is unrelated')),
3016 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3018 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3017 ('', 'remotecmd', '',
3019 ('', 'remotecmd', '',
3018 _('specify hg command to run on the remote side'))],
3020 _('specify hg command to run on the remote side'))],
3019 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3021 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3020 "^push":
3022 "^push":
3021 (push,
3023 (push,
3022 [('f', 'force', None, _('force push')),
3024 [('f', 'force', None, _('force push')),
3023 ('e', 'ssh', '', _('specify ssh command to use')),
3025 ('e', 'ssh', '', _('specify ssh command to use')),
3024 ('r', 'rev', [], _('a specific revision you would like to push')),
3026 ('r', 'rev', [], _('a specific revision you would like to push')),
3025 ('', 'remotecmd', '',
3027 ('', 'remotecmd', '',
3026 _('specify hg command to run on the remote side'))],
3028 _('specify hg command to run on the remote side'))],
3027 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3029 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3028 "debugrawcommit|rawcommit":
3030 "debugrawcommit|rawcommit":
3029 (rawcommit,
3031 (rawcommit,
3030 [('p', 'parent', [], _('parent')),
3032 [('p', 'parent', [], _('parent')),
3031 ('d', 'date', '', _('date code')),
3033 ('d', 'date', '', _('date code')),
3032 ('u', 'user', '', _('user')),
3034 ('u', 'user', '', _('user')),
3033 ('F', 'files', '', _('file list')),
3035 ('F', 'files', '', _('file list')),
3034 ('m', 'message', '', _('commit message')),
3036 ('m', 'message', '', _('commit message')),
3035 ('l', 'logfile', '', _('commit message file'))],
3037 ('l', 'logfile', '', _('commit message file'))],
3036 _('hg debugrawcommit [OPTION]... [FILE]...')),
3038 _('hg debugrawcommit [OPTION]... [FILE]...')),
3037 "recover": (recover, [], _('hg recover')),
3039 "recover": (recover, [], _('hg recover')),
3038 "^remove|rm":
3040 "^remove|rm":
3039 (remove,
3041 (remove,
3040 [('A', 'after', None, _('record remove that has already occurred')),
3042 [('A', 'after', None, _('record remove that has already occurred')),
3041 ('f', 'force', None, _('remove file even if modified')),
3043 ('f', 'force', None, _('remove file even if modified')),
3042 ('I', 'include', [], _('include names matching the given patterns')),
3044 ('I', 'include', [], _('include names matching the given patterns')),
3043 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3045 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3044 _('hg remove [OPTION]... FILE...')),
3046 _('hg remove [OPTION]... FILE...')),
3045 "rename|mv":
3047 "rename|mv":
3046 (rename,
3048 (rename,
3047 [('A', 'after', None, _('record a rename that has already occurred')),
3049 [('A', 'after', None, _('record a rename that has already occurred')),
3048 ('f', 'force', None,
3050 ('f', 'force', None,
3049 _('forcibly copy over an existing managed file')),
3051 _('forcibly copy over an existing managed file')),
3050 ('I', 'include', [], _('include names matching the given patterns')),
3052 ('I', 'include', [], _('include names matching the given patterns')),
3051 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3053 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3052 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3054 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3053 _('hg rename [OPTION]... SOURCE... DEST')),
3055 _('hg rename [OPTION]... SOURCE... DEST')),
3054 "^revert":
3056 "^revert":
3055 (revert,
3057 (revert,
3056 [('a', 'all', None, _('revert all changes when no arguments given')),
3058 [('a', 'all', None, _('revert all changes when no arguments given')),
3057 ('r', 'rev', '', _('revision to revert to')),
3059 ('r', 'rev', '', _('revision to revert to')),
3058 ('', 'no-backup', None, _('do not save backup copies of files')),
3060 ('', 'no-backup', None, _('do not save backup copies of files')),
3059 ('I', 'include', [], _('include names matching given patterns')),
3061 ('I', 'include', [], _('include names matching given patterns')),
3060 ('X', 'exclude', [], _('exclude names matching given patterns')),
3062 ('X', 'exclude', [], _('exclude names matching given patterns')),
3061 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3063 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3062 _('hg revert [-r REV] [NAME]...')),
3064 _('hg revert [-r REV] [NAME]...')),
3063 "rollback": (rollback, [], _('hg rollback')),
3065 "rollback": (rollback, [], _('hg rollback')),
3064 "root": (root, [], _('hg root')),
3066 "root": (root, [], _('hg root')),
3065 "^serve":
3067 "^serve":
3066 (serve,
3068 (serve,
3067 [('A', 'accesslog', '', _('name of access log file to write to')),
3069 [('A', 'accesslog', '', _('name of access log file to write to')),
3068 ('d', 'daemon', None, _('run server in background')),
3070 ('d', 'daemon', None, _('run server in background')),
3069 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3071 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3070 ('E', 'errorlog', '', _('name of error log file to write to')),
3072 ('E', 'errorlog', '', _('name of error log file to write to')),
3071 ('p', 'port', 0, _('port to use (default: 8000)')),
3073 ('p', 'port', 0, _('port to use (default: 8000)')),
3072 ('a', 'address', '', _('address to use')),
3074 ('a', 'address', '', _('address to use')),
3073 ('n', 'name', '',
3075 ('n', 'name', '',
3074 _('name to show in web pages (default: working dir)')),
3076 _('name to show in web pages (default: working dir)')),
3075 ('', 'webdir-conf', '', _('name of the webdir config file'
3077 ('', 'webdir-conf', '', _('name of the webdir config file'
3076 ' (serve more than one repo)')),
3078 ' (serve more than one repo)')),
3077 ('', 'pid-file', '', _('name of file to write process ID to')),
3079 ('', 'pid-file', '', _('name of file to write process ID to')),
3078 ('', 'stdio', None, _('for remote clients')),
3080 ('', 'stdio', None, _('for remote clients')),
3079 ('t', 'templates', '', _('web templates to use')),
3081 ('t', 'templates', '', _('web templates to use')),
3080 ('', 'style', '', _('template style to use')),
3082 ('', 'style', '', _('template style to use')),
3081 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3083 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3082 _('hg serve [OPTION]...')),
3084 _('hg serve [OPTION]...')),
3083 "^status|st":
3085 "^status|st":
3084 (status,
3086 (status,
3085 [('A', 'all', None, _('show status of all files')),
3087 [('A', 'all', None, _('show status of all files')),
3086 ('m', 'modified', None, _('show only modified files')),
3088 ('m', 'modified', None, _('show only modified files')),
3087 ('a', 'added', None, _('show only added files')),
3089 ('a', 'added', None, _('show only added files')),
3088 ('r', 'removed', None, _('show only removed files')),
3090 ('r', 'removed', None, _('show only removed files')),
3089 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3091 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3090 ('c', 'clean', None, _('show only files without changes')),
3092 ('c', 'clean', None, _('show only files without changes')),
3091 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3093 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3092 ('i', 'ignored', None, _('show ignored files')),
3094 ('i', 'ignored', None, _('show ignored files')),
3093 ('n', 'no-status', None, _('hide status prefix')),
3095 ('n', 'no-status', None, _('hide status prefix')),
3094 ('C', 'copies', None, _('show source of copied files')),
3096 ('C', 'copies', None, _('show source of copied files')),
3095 ('0', 'print0', None,
3097 ('0', 'print0', None,
3096 _('end filenames with NUL, for use with xargs')),
3098 _('end filenames with NUL, for use with xargs')),
3097 ('I', 'include', [], _('include names matching the given patterns')),
3099 ('I', 'include', [], _('include names matching the given patterns')),
3098 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3100 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3099 _('hg status [OPTION]... [FILE]...')),
3101 _('hg status [OPTION]... [FILE]...')),
3100 "tag":
3102 "tag":
3101 (tag,
3103 (tag,
3102 [('l', 'local', None, _('make the tag local')),
3104 [('l', 'local', None, _('make the tag local')),
3103 ('m', 'message', '', _('message for tag commit log entry')),
3105 ('m', 'message', '', _('message for tag commit log entry')),
3104 ('d', 'date', '', _('record datecode as commit date')),
3106 ('d', 'date', '', _('record datecode as commit date')),
3105 ('u', 'user', '', _('record user as committer')),
3107 ('u', 'user', '', _('record user as committer')),
3106 ('r', 'rev', '', _('revision to tag'))],
3108 ('r', 'rev', '', _('revision to tag'))],
3107 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3109 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3108 "tags": (tags, [], _('hg tags')),
3110 "tags": (tags, [], _('hg tags')),
3109 "tip":
3111 "tip":
3110 (tip,
3112 (tip,
3111 [('b', 'branches', None, _('show branches')),
3113 [('b', 'branches', None, _('show branches')),
3112 ('', 'style', '', _('display using template map file')),
3114 ('', 'style', '', _('display using template map file')),
3113 ('p', 'patch', None, _('show patch')),
3115 ('p', 'patch', None, _('show patch')),
3114 ('', 'template', '', _('display with template'))],
3116 ('', 'template', '', _('display with template'))],
3115 _('hg tip [-b] [-p]')),
3117 _('hg tip [-b] [-p]')),
3116 "unbundle":
3118 "unbundle":
3117 (unbundle,
3119 (unbundle,
3118 [('u', 'update', None,
3120 [('u', 'update', None,
3119 _('update the working directory to tip after unbundle'))],
3121 _('update the working directory to tip after unbundle'))],
3120 _('hg unbundle [-u] FILE')),
3122 _('hg unbundle [-u] FILE')),
3121 "debugundo|undo": (undo, [], _('hg undo')),
3123 "debugundo|undo": (undo, [], _('hg undo')),
3122 "^update|up|checkout|co":
3124 "^update|up|checkout|co":
3123 (update,
3125 (update,
3124 [('b', 'branch', '', _('checkout the head of a specific branch')),
3126 [('b', 'branch', '', _('checkout the head of a specific branch')),
3125 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3127 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3126 ('C', 'clean', None, _('overwrite locally modified files')),
3128 ('C', 'clean', None, _('overwrite locally modified files')),
3127 ('f', 'force', None, _('force a merge with outstanding changes'))],
3129 ('f', 'force', None, _('force a merge with outstanding changes'))],
3128 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3130 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3129 "verify": (verify, [], _('hg verify')),
3131 "verify": (verify, [], _('hg verify')),
3130 "version": (show_version, [], _('hg version')),
3132 "version": (show_version, [], _('hg version')),
3131 }
3133 }
3132
3134
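# Structural sketch (an assumption spelled out for illustration): each entry
# in the table above maps "name|alias..." to a tuple of
# (function, option list, usage synopsis), where every option is
# (short flag, long flag, default value, help text) and a leading "^" marks
# a command shown in the short help listing.  A hypothetical entry would be:
#
#   "hello|hi":
#       (hello,
#        [('g', 'greeting', 'Hello', _('greeting to use'))],
#        _('hg hello [-g TEXT] [NAME]')),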
3133 globalopts = [
3135 globalopts = [
3134 ('R', 'repository', '',
3136 ('R', 'repository', '',
3135 _('repository root directory or symbolic path name')),
3137 _('repository root directory or symbolic path name')),
3136 ('', 'cwd', '', _('change working directory')),
3138 ('', 'cwd', '', _('change working directory')),
3137 ('y', 'noninteractive', None,
3139 ('y', 'noninteractive', None,
3138 _('do not prompt, assume \'yes\' for any required answers')),
3140 _('do not prompt, assume \'yes\' for any required answers')),
3139 ('q', 'quiet', None, _('suppress output')),
3141 ('q', 'quiet', None, _('suppress output')),
3140 ('v', 'verbose', None, _('enable additional output')),
3142 ('v', 'verbose', None, _('enable additional output')),
3141 ('', 'config', [], _('set/override config option')),
3143 ('', 'config', [], _('set/override config option')),
3142 ('', 'debug', None, _('enable debugging output')),
3144 ('', 'debug', None, _('enable debugging output')),
3143 ('', 'debugger', None, _('start debugger')),
3145 ('', 'debugger', None, _('start debugger')),
3144 ('', 'lsprof', None, _('print improved command execution profile')),
3146 ('', 'lsprof', None, _('print improved command execution profile')),
3145 ('', 'traceback', None, _('print traceback on exception')),
3147 ('', 'traceback', None, _('print traceback on exception')),
3146 ('', 'time', None, _('time how long the command takes')),
3148 ('', 'time', None, _('time how long the command takes')),
3147 ('', 'profile', None, _('print command execution profile')),
3149 ('', 'profile', None, _('print command execution profile')),
3148 ('', 'version', None, _('output version information and exit')),
3150 ('', 'version', None, _('output version information and exit')),
3149 ('h', 'help', None, _('display help and exit')),
3151 ('h', 'help', None, _('display help and exit')),
3150 ]
3152 ]
3151
3153
3152 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3154 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3153 " debugindex debugindexdot")
3155 " debugindex debugindexdot")
3154 optionalrepo = ("paths serve debugconfig")
3156 optionalrepo = ("paths serve debugconfig")
3155
3157
3156 def findpossible(ui, cmd):
3158 def findpossible(ui, cmd):
3157 """
3159 """
3158 Return cmd -> (aliases, command table entry)
3160 Return cmd -> (aliases, command table entry)
3159 for each matching command.
3161 for each matching command.
3160 Return debug commands (or their aliases) only if no normal command matches.
3162 Return debug commands (or their aliases) only if no normal command matches.
3161 """
3163 """
3162 choice = {}
3164 choice = {}
3163 debugchoice = {}
3165 debugchoice = {}
3164 for e in table.keys():
3166 for e in table.keys():
3165 aliases = e.lstrip("^").split("|")
3167 aliases = e.lstrip("^").split("|")
3166 found = None
3168 found = None
3167 if cmd in aliases:
3169 if cmd in aliases:
3168 found = cmd
3170 found = cmd
3169 elif not ui.config("ui", "strict"):
3171 elif not ui.config("ui", "strict"):
3170 for a in aliases:
3172 for a in aliases:
3171 if a.startswith(cmd):
3173 if a.startswith(cmd):
3172 found = a
3174 found = a
3173 break
3175 break
3174 if found is not None:
3176 if found is not None:
3175 if aliases[0].startswith("debug"):
3177 if aliases[0].startswith("debug"):
3176 debugchoice[found] = (aliases, table[e])
3178 debugchoice[found] = (aliases, table[e])
3177 else:
3179 else:
3178 choice[found] = (aliases, table[e])
3180 choice[found] = (aliases, table[e])
3179
3181
3180 if not choice and debugchoice:
3182 if not choice and debugchoice:
3181 choice = debugchoice
3183 choice = debugchoice
3182
3184
3183 return choice
3185 return choice
3184
3186
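# Illustrative sketch (hypothetical results, not from the source): how
# findpossible() resolves abbreviations against the table above.  An exact
# alias such as 'st' is matched directly, a unique prefix such as 'stat'
# resolves to 'status', and an ambiguous prefix returns every candidate so
# that findcmd() can raise AmbiguousCommand:
#
#   findpossible(ui, 'stat')  -> {'status': (['status', 'st'], <entry>)}
#   findpossible(ui, 're')    -> {'recover': ..., 'remove': ...,
#                                 'rename': ..., 'revert': ...}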
3185 def findcmd(ui, cmd):
3187 def findcmd(ui, cmd):
3186 """Return (aliases, command table entry) for command string."""
3188 """Return (aliases, command table entry) for command string."""
3187 choice = findpossible(ui, cmd)
3189 choice = findpossible(ui, cmd)
3188
3190
3189 if choice.has_key(cmd):
3191 if choice.has_key(cmd):
3190 return choice[cmd]
3192 return choice[cmd]
3191
3193
3192 if len(choice) > 1:
3194 if len(choice) > 1:
3193 clist = choice.keys()
3195 clist = choice.keys()
3194 clist.sort()
3196 clist.sort()
3195 raise AmbiguousCommand(cmd, clist)
3197 raise AmbiguousCommand(cmd, clist)
3196
3198
3197 if choice:
3199 if choice:
3198 return choice.values()[0]
3200 return choice.values()[0]
3199
3201
3200 raise UnknownCommand(cmd)
3202 raise UnknownCommand(cmd)
3201
3203
3202 def catchterm(*args):
3204 def catchterm(*args):
3203 raise util.SignalInterrupt
3205 raise util.SignalInterrupt
3204
3206
3205 def run():
3207 def run():
3206 sys.exit(dispatch(sys.argv[1:]))
3208 sys.exit(dispatch(sys.argv[1:]))
3207
3209
3208 class ParseError(Exception):
3210 class ParseError(Exception):
3209 """Exception raised on errors in parsing the command line."""
3211 """Exception raised on errors in parsing the command line."""
3210
3212
3211 def parse(ui, args):
3213 def parse(ui, args):
3212 options = {}
3214 options = {}
3213 cmdoptions = {}
3215 cmdoptions = {}
3214
3216
3215 try:
3217 try:
3216 args = fancyopts.fancyopts(args, globalopts, options)
3218 args = fancyopts.fancyopts(args, globalopts, options)
3217 except fancyopts.getopt.GetoptError, inst:
3219 except fancyopts.getopt.GetoptError, inst:
3218 raise ParseError(None, inst)
3220 raise ParseError(None, inst)
3219
3221
3220 if args:
3222 if args:
3221 cmd, args = args[0], args[1:]
3223 cmd, args = args[0], args[1:]
3222 aliases, i = findcmd(ui, cmd)
3224 aliases, i = findcmd(ui, cmd)
3223 cmd = aliases[0]
3225 cmd = aliases[0]
3224 defaults = ui.config("defaults", cmd)
3226 defaults = ui.config("defaults", cmd)
3225 if defaults:
3227 if defaults:
3226 args = shlex.split(defaults) + args
3228 args = shlex.split(defaults) + args
3227 c = list(i[1])
3229 c = list(i[1])
3228 else:
3230 else:
3229 cmd = None
3231 cmd = None
3230 c = []
3232 c = []
3231
3233
3232 # combine global options into local
3234 # combine global options into local
3233 for o in globalopts:
3235 for o in globalopts:
3234 c.append((o[0], o[1], options[o[1]], o[3]))
3236 c.append((o[0], o[1], options[o[1]], o[3]))
3235
3237
3236 try:
3238 try:
3237 args = fancyopts.fancyopts(args, c, cmdoptions)
3239 args = fancyopts.fancyopts(args, c, cmdoptions)
3238 except fancyopts.getopt.GetoptError, inst:
3240 except fancyopts.getopt.GetoptError, inst:
3239 raise ParseError(cmd, inst)
3241 raise ParseError(cmd, inst)
3240
3242
3241 # separate global options back out
3243 # separate global options back out
3242 for o in globalopts:
3244 for o in globalopts:
3243 n = o[1]
3245 n = o[1]
3244 options[n] = cmdoptions[n]
3246 options[n] = cmdoptions[n]
3245 del cmdoptions[n]
3247 del cmdoptions[n]
3246
3248
3247 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3249 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3248
3250
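# Illustrative sketch (hypothetical configuration, not from the source):
# parse() above prepends per-command defaults from the [defaults] section of
# hgrc, split shell-style, before the user's arguments.  With
#
#   [defaults]
#   log = -v --limit 5
#
# "hg log -r tip" is parsed as if the command line had been
# "hg log -v --limit 5 -r tip", i.e. shlex.split('-v --limit 5') + ['-r', 'tip'].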
3249 external = {}
3251 external = {}
3250
3252
3251 def findext(name):
3253 def findext(name):
3252 '''return module with given extension name'''
3254 '''return module with given extension name'''
3253 try:
3255 try:
3254 return sys.modules[external[name]]
3256 return sys.modules[external[name]]
3255 except KeyError:
3257 except KeyError:
3256 for k, v in external.iteritems():
3258 for k, v in external.iteritems():
3257 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3259 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3258 return sys.modules[v]
3260 return sys.modules[v]
3259 raise KeyError(name)
3261 raise KeyError(name)
3260
3262
3261 def load_extensions(ui):
3263 def load_extensions(ui):
3262 added = []
3264 added = []
3263 for ext_name, load_from_name in ui.extensions():
3265 for ext_name, load_from_name in ui.extensions():
3264 if ext_name in external:
3266 if ext_name in external:
3265 continue
3267 continue
3266 try:
3268 try:
3267 if load_from_name:
3269 if load_from_name:
3268 # the module will be loaded in sys.modules
3270 # the module will be loaded in sys.modules
3269 # choose a unique name so that it doesn't
3271 # choose a unique name so that it doesn't
3270 # conflict with other modules
3272 # conflict with other modules
3271 module_name = "hgext_%s" % ext_name.replace('.', '_')
3273 module_name = "hgext_%s" % ext_name.replace('.', '_')
3272 mod = imp.load_source(module_name, load_from_name)
3274 mod = imp.load_source(module_name, load_from_name)
3273 else:
3275 else:
3274 def importh(name):
3276 def importh(name):
3275 mod = __import__(name)
3277 mod = __import__(name)
3276 components = name.split('.')
3278 components = name.split('.')
3277 for comp in components[1:]:
3279 for comp in components[1:]:
3278 mod = getattr(mod, comp)
3280 mod = getattr(mod, comp)
3279 return mod
3281 return mod
3280 try:
3282 try:
3281 mod = importh("hgext.%s" % ext_name)
3283 mod = importh("hgext.%s" % ext_name)
3282 except ImportError:
3284 except ImportError:
3283 mod = importh(ext_name)
3285 mod = importh(ext_name)
3284 external[ext_name] = mod.__name__
3286 external[ext_name] = mod.__name__
3285 added.append((mod, ext_name))
3287 added.append((mod, ext_name))
3286 except (util.SignalInterrupt, KeyboardInterrupt):
3288 except (util.SignalInterrupt, KeyboardInterrupt):
3287 raise
3289 raise
3288 except Exception, inst:
3290 except Exception, inst:
3289 ui.warn(_("*** failed to import extension %s: %s\n") %
3291 ui.warn(_("*** failed to import extension %s: %s\n") %
3290 (ext_name, inst))
3292 (ext_name, inst))
3291 if ui.print_exc():
3293 if ui.print_exc():
3292 return 1
3294 return 1
3293
3295
3294 for mod, name in added:
3296 for mod, name in added:
3295 uisetup = getattr(mod, 'uisetup', None)
3297 uisetup = getattr(mod, 'uisetup', None)
3296 if uisetup:
3298 if uisetup:
3297 uisetup(ui)
3299 uisetup(ui)
3298 cmdtable = getattr(mod, 'cmdtable', {})
3300 cmdtable = getattr(mod, 'cmdtable', {})
3299 for t in cmdtable:
3301 for t in cmdtable:
3300 if t in table:
3302 if t in table:
3301 ui.warn(_("module %s overrides %s\n") % (name, t))
3303 ui.warn(_("module %s overrides %s\n") % (name, t))
3302 table.update(cmdtable)
3304 table.update(cmdtable)
3303
3305
3304 def dispatch(args):
3306 def dispatch(args):
3305 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3307 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3306 num = getattr(signal, name, None)
3308 num = getattr(signal, name, None)
3307 if num: signal.signal(num, catchterm)
3309 if num: signal.signal(num, catchterm)
3308
3310
3309 try:
3311 try:
3310 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3312 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3311 except util.Abort, inst:
3313 except util.Abort, inst:
3312 sys.stderr.write(_("abort: %s\n") % inst)
3314 sys.stderr.write(_("abort: %s\n") % inst)
3313 return -1
3315 return -1
3314
3316
3315 load_extensions(u)
3317 load_extensions(u)
3316 u.addreadhook(load_extensions)
3318 u.addreadhook(load_extensions)
3317
3319
3318 try:
3320 try:
3319 cmd, func, args, options, cmdoptions = parse(u, args)
3321 cmd, func, args, options, cmdoptions = parse(u, args)
3320 if options["time"]:
3322 if options["time"]:
3321 def get_times():
3323 def get_times():
3322 t = os.times()
3324 t = os.times()
3323 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3325 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3324 t = (t[0], t[1], t[2], t[3], time.clock())
3326 t = (t[0], t[1], t[2], t[3], time.clock())
3325 return t
3327 return t
3326 s = get_times()
3328 s = get_times()
3327 def print_time():
3329 def print_time():
3328 t = get_times()
3330 t = get_times()
3329 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3331 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3330 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3332 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3331 atexit.register(print_time)
3333 atexit.register(print_time)
3332
3334
3333 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3335 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3334 not options["noninteractive"], options["traceback"],
3336 not options["noninteractive"], options["traceback"],
3335 options["config"])
3337 options["config"])
3336
3338
3337 # enter the debugger before command execution
3339 # enter the debugger before command execution
3338 if options['debugger']:
3340 if options['debugger']:
3339 pdb.set_trace()
3341 pdb.set_trace()
3340
3342
3341 try:
3343 try:
3342 if options['cwd']:
3344 if options['cwd']:
3343 try:
3345 try:
3344 os.chdir(options['cwd'])
3346 os.chdir(options['cwd'])
3345 except OSError, inst:
3347 except OSError, inst:
3346 raise util.Abort('%s: %s' %
3348 raise util.Abort('%s: %s' %
3347 (options['cwd'], inst.strerror))
3349 (options['cwd'], inst.strerror))
3348
3350
3349 path = u.expandpath(options["repository"]) or ""
3351 path = u.expandpath(options["repository"]) or ""
3350 repo = path and hg.repository(u, path=path) or None
3352 repo = path and hg.repository(u, path=path) or None
3351
3353
3352 if options['help']:
3354 if options['help']:
3353 return help_(u, cmd, options['version'])
3355 return help_(u, cmd, options['version'])
3354 elif options['version']:
3356 elif options['version']:
3355 return show_version(u)
3357 return show_version(u)
3356 elif not cmd:
3358 elif not cmd:
3357 return help_(u, 'shortlist')
3359 return help_(u, 'shortlist')
3358
3360
3359 if cmd not in norepo.split():
3361 if cmd not in norepo.split():
3360 try:
3362 try:
3361 if not repo:
3363 if not repo:
3362 repo = hg.repository(u, path=path)
3364 repo = hg.repository(u, path=path)
3363 u = repo.ui
3365 u = repo.ui
3364 for name in external.itervalues():
3366 for name in external.itervalues():
3365 mod = sys.modules[name]
3367 mod = sys.modules[name]
3366 if hasattr(mod, 'reposetup'):
3368 if hasattr(mod, 'reposetup'):
3367 mod.reposetup(u, repo)
3369 mod.reposetup(u, repo)
3368 hg.repo_setup_hooks.append(mod.reposetup)
3370 hg.repo_setup_hooks.append(mod.reposetup)
3369 except hg.RepoError:
3371 except hg.RepoError:
3370 if cmd not in optionalrepo.split():
3372 if cmd not in optionalrepo.split():
3371 raise
3373 raise
3372 d = lambda: func(u, repo, *args, **cmdoptions)
3374 d = lambda: func(u, repo, *args, **cmdoptions)
3373 else:
3375 else:
3374 d = lambda: func(u, *args, **cmdoptions)
3376 d = lambda: func(u, *args, **cmdoptions)
3375
3377
3376 # reupdate the options, repo/.hg/hgrc may have changed them
3378 # reupdate the options, repo/.hg/hgrc may have changed them
3377 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3379 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3378 not options["noninteractive"], options["traceback"],
3380 not options["noninteractive"], options["traceback"],
3379 options["config"])
3381 options["config"])
3380
3382
3381 try:
3383 try:
3382 if options['profile']:
3384 if options['profile']:
3383 import hotshot, hotshot.stats
3385 import hotshot, hotshot.stats
3384 prof = hotshot.Profile("hg.prof")
3386 prof = hotshot.Profile("hg.prof")
3385 try:
3387 try:
3386 try:
3388 try:
3387 return prof.runcall(d)
3389 return prof.runcall(d)
3388 except:
3390 except:
3389 try:
3391 try:
3390 u.warn(_('exception raised - generating '
3392 u.warn(_('exception raised - generating '
3391 'profile anyway\n'))
3393 'profile anyway\n'))
3392 except:
3394 except:
3393 pass
3395 pass
3394 raise
3396 raise
3395 finally:
3397 finally:
3396 prof.close()
3398 prof.close()
3397 stats = hotshot.stats.load("hg.prof")
3399 stats = hotshot.stats.load("hg.prof")
3398 stats.strip_dirs()
3400 stats.strip_dirs()
3399 stats.sort_stats('time', 'calls')
3401 stats.sort_stats('time', 'calls')
3400 stats.print_stats(40)
3402 stats.print_stats(40)
3401 elif options['lsprof']:
3403 elif options['lsprof']:
3402 try:
3404 try:
3403 from mercurial import lsprof
3405 from mercurial import lsprof
3404 except ImportError:
3406 except ImportError:
3405 raise util.Abort(_(
3407 raise util.Abort(_(
3406 'lsprof not available - install from '
3408 'lsprof not available - install from '
3407 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3409 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3408 p = lsprof.Profiler()
3410 p = lsprof.Profiler()
3409 p.enable(subcalls=True)
3411 p.enable(subcalls=True)
3410 try:
3412 try:
3411 return d()
3413 return d()
3412 finally:
3414 finally:
3413 p.disable()
3415 p.disable()
3414 stats = lsprof.Stats(p.getstats())
3416 stats = lsprof.Stats(p.getstats())
3415 stats.sort()
3417 stats.sort()
3416 stats.pprint(top=10, file=sys.stderr, climit=5)
3418 stats.pprint(top=10, file=sys.stderr, climit=5)
3417 else:
3419 else:
3418 return d()
3420 return d()
3419 finally:
3421 finally:
3420 u.flush()
3422 u.flush()
3421 except:
3423 except:
3422 # enter the debugger when we hit an exception
3424 # enter the debugger when we hit an exception
3423 if options['debugger']:
3425 if options['debugger']:
3424 pdb.post_mortem(sys.exc_info()[2])
3426 pdb.post_mortem(sys.exc_info()[2])
3425 u.print_exc()
3427 u.print_exc()
3426 raise
3428 raise
3427 except ParseError, inst:
3429 except ParseError, inst:
3428 if inst.args[0]:
3430 if inst.args[0]:
3429 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3431 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3430 help_(u, inst.args[0])
3432 help_(u, inst.args[0])
3431 else:
3433 else:
3432 u.warn(_("hg: %s\n") % inst.args[1])
3434 u.warn(_("hg: %s\n") % inst.args[1])
3433 help_(u, 'shortlist')
3435 help_(u, 'shortlist')
3434 except AmbiguousCommand, inst:
3436 except AmbiguousCommand, inst:
3435 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3437 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3436 (inst.args[0], " ".join(inst.args[1])))
3438 (inst.args[0], " ".join(inst.args[1])))
3437 except UnknownCommand, inst:
3439 except UnknownCommand, inst:
3438 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3440 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3439 help_(u, 'shortlist')
3441 help_(u, 'shortlist')
3440 except hg.RepoError, inst:
3442 except hg.RepoError, inst:
3441 u.warn(_("abort: %s!\n") % inst)
3443 u.warn(_("abort: %s!\n") % inst)
3442 except lock.LockHeld, inst:
3444 except lock.LockHeld, inst:
3443 if inst.errno == errno.ETIMEDOUT:
3445 if inst.errno == errno.ETIMEDOUT:
3444 reason = _('timed out waiting for lock held by %s') % inst.locker
3446 reason = _('timed out waiting for lock held by %s') % inst.locker
3445 else:
3447 else:
3446 reason = _('lock held by %s') % inst.locker
3448 reason = _('lock held by %s') % inst.locker
3447 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3449 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3448 except lock.LockUnavailable, inst:
3450 except lock.LockUnavailable, inst:
3449 u.warn(_("abort: could not lock %s: %s\n") %
3451 u.warn(_("abort: could not lock %s: %s\n") %
3450 (inst.desc or inst.filename, inst.strerror))
3452 (inst.desc or inst.filename, inst.strerror))
3451 except revlog.RevlogError, inst:
3453 except revlog.RevlogError, inst:
3452 u.warn(_("abort: %s!\n") % inst)
3454 u.warn(_("abort: %s!\n") % inst)
3453 except util.SignalInterrupt:
3455 except util.SignalInterrupt:
3454 u.warn(_("killed!\n"))
3456 u.warn(_("killed!\n"))
3455 except KeyboardInterrupt:
3457 except KeyboardInterrupt:
3456 try:
3458 try:
3457 u.warn(_("interrupted!\n"))
3459 u.warn(_("interrupted!\n"))
3458 except IOError, inst:
3460 except IOError, inst:
3459 if inst.errno == errno.EPIPE:
3461 if inst.errno == errno.EPIPE:
3460 if u.debugflag:
3462 if u.debugflag:
3461 u.warn(_("\nbroken pipe\n"))
3463 u.warn(_("\nbroken pipe\n"))
3462 else:
3464 else:
3463 raise
3465 raise
3464 except IOError, inst:
3466 except IOError, inst:
3465 if hasattr(inst, "code"):
3467 if hasattr(inst, "code"):
3466 u.warn(_("abort: %s\n") % inst)
3468 u.warn(_("abort: %s\n") % inst)
3467 elif hasattr(inst, "reason"):
3469 elif hasattr(inst, "reason"):
3468 u.warn(_("abort: error: %s\n") % inst.reason[1])
3470 u.warn(_("abort: error: %s\n") % inst.reason[1])
3469 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3471 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3470 if u.debugflag:
3472 if u.debugflag:
3471 u.warn(_("broken pipe\n"))
3473 u.warn(_("broken pipe\n"))
3472 elif getattr(inst, "strerror", None):
3474 elif getattr(inst, "strerror", None):
3473 if getattr(inst, "filename", None):
3475 if getattr(inst, "filename", None):
3474 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3476 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3475 else:
3477 else:
3476 u.warn(_("abort: %s\n") % inst.strerror)
3478 u.warn(_("abort: %s\n") % inst.strerror)
3477 else:
3479 else:
3478 raise
3480 raise
3479 except OSError, inst:
3481 except OSError, inst:
3480 if hasattr(inst, "filename"):
3482 if hasattr(inst, "filename"):
3481 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3483 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3482 else:
3484 else:
3483 u.warn(_("abort: %s\n") % inst.strerror)
3485 u.warn(_("abort: %s\n") % inst.strerror)
3484 except util.Abort, inst:
3486 except util.Abort, inst:
3485 u.warn(_("abort: %s\n") % inst)
3487 u.warn(_("abort: %s\n") % inst)
3486 except TypeError, inst:
3488 except TypeError, inst:
3487 # was this an argument error?
3489 # was this an argument error?
3488 tb = traceback.extract_tb(sys.exc_info()[2])
3490 tb = traceback.extract_tb(sys.exc_info()[2])
3489 if len(tb) > 2: # no
3491 if len(tb) > 2: # no
3490 raise
3492 raise
3491 u.debug(inst, "\n")
3493 u.debug(inst, "\n")
3492 u.warn(_("%s: invalid arguments\n") % cmd)
3494 u.warn(_("%s: invalid arguments\n") % cmd)
3493 help_(u, cmd)
3495 help_(u, cmd)
3494 except SystemExit, inst:
3496 except SystemExit, inst:
3495 # Commands shouldn't sys.exit directly, but give a return code.
3497 # Commands shouldn't sys.exit directly, but give a return code.
3496 # Just in case, catch this and pass the exit code to the caller.
3498 # Just in case, catch this and pass the exit code to the caller.
3497 return inst.code
3499 return inst.code
3498 except:
3500 except:
3499 u.warn(_("** unknown exception encountered, details follow\n"))
3501 u.warn(_("** unknown exception encountered, details follow\n"))
3500 u.warn(_("** report bug details to "
3502 u.warn(_("** report bug details to "
3501 "http://www.selenic.com/mercurial/bts\n"))
3503 "http://www.selenic.com/mercurial/bts\n"))
3502 u.warn(_("** or mercurial@selenic.com\n"))
3504 u.warn(_("** or mercurial@selenic.com\n"))
3503 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3505 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3504 % version.get_version())
3506 % version.get_version())
3505 raise
3507 raise
3506
3508
3507 return -1
3509 return -1
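
As an aside, the importh() helper inside load_extensions() above works around the fact that __import__('a.b.c') returns the top-level package rather than the requested submodule, so the remaining dotted components have to be resolved with getattr. Below is a minimal standalone sketch of the same idiom; import_dotted is an illustrative name and not part of Mercurial:

    import sys

    def import_dotted(name):
        # __import__("a.b.c") returns the package "a", not "a.b.c",
        # so walk the remaining dotted components with getattr.
        mod = __import__(name)
        for comp in name.split('.')[1:]:
            mod = getattr(mod, comp)
        return mod

    # resolves the same module object that sys.modules holds for the full name
    assert import_dotted('os.path') is sys.modules['os.path']

This is also why load_extensions() tries importh("hgext.%s" % ext_name) first and only falls back to the bare extension name when that import fails.
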
@@ -1,224 +1,225 @@
1 # hgweb/server.py - The standalone hg web server.
1 # hgweb/server.py - The standalone hg web server.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from mercurial.demandload import demandload
9 from mercurial.demandload import demandload
10 import os, sys, errno
10 import os, sys, errno
11 demandload(globals(), "urllib BaseHTTPServer socket SocketServer")
11 demandload(globals(), "urllib BaseHTTPServer socket SocketServer")
12 demandload(globals(), "mercurial:ui,hg,util,templater")
12 demandload(globals(), "mercurial:ui,hg,util,templater")
13 demandload(globals(), "hgweb_mod:hgweb hgwebdir_mod:hgwebdir request:wsgiapplication")
13 demandload(globals(), "hgweb_mod:hgweb hgwebdir_mod:hgwebdir request:wsgiapplication")
14 from mercurial.i18n import gettext as _
14 from mercurial.i18n import gettext as _
15
15
16 def _splitURI(uri):
16 def _splitURI(uri):
17 """ Return path and query splited from uri
17 """ Return path and query splited from uri
18
18
19 Just like CGI environment, the path is unquoted, the query is
19 Just like CGI environment, the path is unquoted, the query is
20 not.
20 not.
21 """
21 """
22 if '?' in uri:
22 if '?' in uri:
23 path, query = uri.split('?', 1)
23 path, query = uri.split('?', 1)
24 else:
24 else:
25 path, query = uri, ''
25 path, query = uri, ''
26 return urllib.unquote(path), query
26 return urllib.unquote(path), query
27
27
28 class _error_logger(object):
28 class _error_logger(object):
29 def __init__(self, handler):
29 def __init__(self, handler):
30 self.handler = handler
30 self.handler = handler
31 def flush(self):
31 def flush(self):
32 pass
32 pass
33 def write(self, str):
33 def write(self, str):
34 self.writelines(str.split('\n'))
34 self.writelines(str.split('\n'))
35 def writelines(self, seq):
35 def writelines(self, seq):
36 for msg in seq:
36 for msg in seq:
37 self.handler.log_error("HG error: %s", msg)
37 self.handler.log_error("HG error: %s", msg)
38
38
39 class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
39 class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
40 def __init__(self, *args, **kargs):
40 def __init__(self, *args, **kargs):
41 self.protocol_version = 'HTTP/1.1'
41 self.protocol_version = 'HTTP/1.1'
42 BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
42 BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
43
43
44 def log_error(self, format, *args):
44 def log_error(self, format, *args):
45 errorlog = self.server.errorlog
45 errorlog = self.server.errorlog
46 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
46 errorlog.write("%s - - [%s] %s\n" % (self.address_string(),
47 self.log_date_time_string(),
47 self.log_date_time_string(),
48 format % args))
48 format % args))
49
49
50 def log_message(self, format, *args):
50 def log_message(self, format, *args):
51 accesslog = self.server.accesslog
51 accesslog = self.server.accesslog
52 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
52 accesslog.write("%s - - [%s] %s\n" % (self.address_string(),
53 self.log_date_time_string(),
53 self.log_date_time_string(),
54 format % args))
54 format % args))
55
55
56 def do_POST(self):
56 def do_POST(self):
57 try:
57 try:
58 self.do_hgweb()
58 self.do_hgweb()
59 except socket.error, inst:
59 except socket.error, inst:
60 if inst[0] != errno.EPIPE:
60 if inst[0] != errno.EPIPE:
61 raise
61 raise
62
62
63 def do_GET(self):
63 def do_GET(self):
64 self.do_POST()
64 self.do_POST()
65
65
66 def do_hgweb(self):
66 def do_hgweb(self):
67 path_info, query = _splitURI(self.path)
67 path_info, query = _splitURI(self.path)
68
68
69 env = {}
69 env = {}
70 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
70 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
71 env['REQUEST_METHOD'] = self.command
71 env['REQUEST_METHOD'] = self.command
72 env['SERVER_NAME'] = self.server.server_name
72 env['SERVER_NAME'] = self.server.server_name
73 env['SERVER_PORT'] = str(self.server.server_port)
73 env['SERVER_PORT'] = str(self.server.server_port)
74 env['REQUEST_URI'] = "/"
74 env['REQUEST_URI'] = "/"
75 env['PATH_INFO'] = path_info
75 env['PATH_INFO'] = path_info
76 if query:
76 if query:
77 env['QUERY_STRING'] = query
77 env['QUERY_STRING'] = query
78 host = self.address_string()
78 host = self.address_string()
79 if host != self.client_address[0]:
79 if host != self.client_address[0]:
80 env['REMOTE_HOST'] = host
80 env['REMOTE_HOST'] = host
81 env['REMOTE_ADDR'] = self.client_address[0]
81 env['REMOTE_ADDR'] = self.client_address[0]
82
82
83 if self.headers.typeheader is None:
83 if self.headers.typeheader is None:
84 env['CONTENT_TYPE'] = self.headers.type
84 env['CONTENT_TYPE'] = self.headers.type
85 else:
85 else:
86 env['CONTENT_TYPE'] = self.headers.typeheader
86 env['CONTENT_TYPE'] = self.headers.typeheader
87 length = self.headers.getheader('content-length')
87 length = self.headers.getheader('content-length')
88 if length:
88 if length:
89 env['CONTENT_LENGTH'] = length
89 env['CONTENT_LENGTH'] = length
90 for header in [h for h in self.headers.keys() \
90 for header in [h for h in self.headers.keys() \
91 if h not in ('content-type', 'content-length')]:
91 if h not in ('content-type', 'content-length')]:
92 hkey = 'HTTP_' + header.replace('-', '_').upper()
92 hkey = 'HTTP_' + header.replace('-', '_').upper()
93 hval = self.headers.getheader(header)
93 hval = self.headers.getheader(header)
94 hval = hval.replace('\n', '').strip()
94 hval = hval.replace('\n', '').strip()
95 if hval:
95 if hval:
96 env[hkey] = hval
96 env[hkey] = hval
97 env['SERVER_PROTOCOL'] = self.request_version
97 env['SERVER_PROTOCOL'] = self.request_version
98 env['wsgi.version'] = (1, 0)
98 env['wsgi.version'] = (1, 0)
99 env['wsgi.url_scheme'] = 'http'
99 env['wsgi.url_scheme'] = 'http'
100 env['wsgi.input'] = self.rfile
100 env['wsgi.input'] = self.rfile
101 env['wsgi.errors'] = _error_logger(self)
101 env['wsgi.errors'] = _error_logger(self)
102 env['wsgi.multithread'] = isinstance(self.server,
102 env['wsgi.multithread'] = isinstance(self.server,
103 SocketServer.ThreadingMixIn)
103 SocketServer.ThreadingMixIn)
104 env['wsgi.multiprocess'] = isinstance(self.server,
104 env['wsgi.multiprocess'] = isinstance(self.server,
105 SocketServer.ForkingMixIn)
105 SocketServer.ForkingMixIn)
106 env['wsgi.run_once'] = 0
106 env['wsgi.run_once'] = 0
107
107
108 self.close_connection = True
108 self.close_connection = True
109 self.saved_status = None
109 self.saved_status = None
110 self.saved_headers = []
110 self.saved_headers = []
111 self.sent_headers = False
111 self.sent_headers = False
112 self.length = None
112 self.length = None
113 req = self.server.reqmaker(env, self._start_response)
113 req = self.server.reqmaker(env, self._start_response)
114 for data in req:
114 for data in req:
115 if data:
115 if data:
116 self._write(data)
116 self._write(data)
117
117
118 def send_headers(self):
118 def send_headers(self):
119 if not self.saved_status:
119 if not self.saved_status:
120 raise AssertionError("Sending headers before start_response() called")
120 raise AssertionError("Sending headers before start_response() called")
121 saved_status = self.saved_status.split(None, 1)
121 saved_status = self.saved_status.split(None, 1)
122 saved_status[0] = int(saved_status[0])
122 saved_status[0] = int(saved_status[0])
123 self.send_response(*saved_status)
123 self.send_response(*saved_status)
124 should_close = True
124 should_close = True
125 for h in self.saved_headers:
125 for h in self.saved_headers:
126 self.send_header(*h)
126 self.send_header(*h)
127 if h[0].lower() == 'content-length':
127 if h[0].lower() == 'content-length':
128 should_close = False
128 should_close = False
129 self.length = int(h[1])
129 self.length = int(h[1])
130 # The value of the Connection header is a list of case-insensitive
130 # The value of the Connection header is a list of case-insensitive
131 # tokens separated by commas and optional whitespace.
131 # tokens separated by commas and optional whitespace.
132 if 'close' in [token.strip().lower() for token in
132 if 'close' in [token.strip().lower() for token in
133 self.headers.get('connection', '').split(',')]:
133 self.headers.get('connection', '').split(',')]:
134 should_close = True
134 should_close = True
135 if should_close:
135 if should_close:
136 self.send_header('Connection', 'close')
136 self.send_header('Connection', 'close')
137 self.close_connection = should_close
137 self.close_connection = should_close
138 self.end_headers()
138 self.end_headers()
139 self.sent_headers = True
139 self.sent_headers = True
140
140
141 def _start_response(self, http_status, headers, exc_info=None):
141 def _start_response(self, http_status, headers, exc_info=None):
142 code, msg = http_status.split(None, 1)
142 code, msg = http_status.split(None, 1)
143 code = int(code)
143 code = int(code)
144 self.saved_status = http_status
144 self.saved_status = http_status
145 bad_headers = ('connection', 'transfer-encoding')
145 bad_headers = ('connection', 'transfer-encoding')
146 self.saved_headers = [ h for h in headers \
146 self.saved_headers = [ h for h in headers \
147 if h[0].lower() not in bad_headers ]
147 if h[0].lower() not in bad_headers ]
148 return self._write
148 return self._write
149
149
150 def _write(self, data):
150 def _write(self, data):
151 if not self.saved_status:
151 if not self.saved_status:
152 raise AssertionError("data written before start_response() called")
152 raise AssertionError("data written before start_response() called")
153 elif not self.sent_headers:
153 elif not self.sent_headers:
154 self.send_headers()
154 self.send_headers()
155 if self.length is not None:
155 if self.length is not None:
156 if len(data) > self.length:
156 if len(data) > self.length:
157 raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
157 raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
158 self.length = self.length - len(data)
158 self.length = self.length - len(data)
159 self.wfile.write(data)
159 self.wfile.write(data)
160 self.wfile.flush()
160 self.wfile.flush()
161
161
162 def create_server(ui, repo):
162 def create_server(ui, repo):
163 use_threads = True
163 use_threads = True
164
164
165 def openlog(opt, default):
165 def openlog(opt, default):
166 if opt and opt != '-':
166 if opt and opt != '-':
167 return open(opt, 'w')
167 return open(opt, 'w')
168 return default
168 return default
169
169
170 address = ui.config("web", "address", "")
170 address = ui.config("web", "address", "")
171 port = int(ui.config("web", "port", 8000))
171 port = int(ui.config("web", "port", 8000))
172 use_ipv6 = ui.configbool("web", "ipv6")
172 use_ipv6 = ui.configbool("web", "ipv6")
173 webdir_conf = ui.config("web", "webdir_conf")
173 webdir_conf = ui.config("web", "webdir_conf")
174 accesslog = openlog(ui.config("web", "accesslog", "-"), sys.stdout)
174 accesslog = openlog(ui.config("web", "accesslog", "-"), sys.stdout)
175 errorlog = openlog(ui.config("web", "errorlog", "-"), sys.stderr)
175 errorlog = openlog(ui.config("web", "errorlog", "-"), sys.stderr)
176
176
177 if use_threads:
177 if use_threads:
178 try:
178 try:
179 from threading import activeCount
179 from threading import activeCount
180 except ImportError:
180 except ImportError:
181 use_threads = False
181 use_threads = False
182
182
183 if use_threads:
183 if use_threads:
184 _mixin = SocketServer.ThreadingMixIn
184 _mixin = SocketServer.ThreadingMixIn
185 else:
185 else:
186 if hasattr(os, "fork"):
186 if hasattr(os, "fork"):
187 _mixin = SocketServer.ForkingMixIn
187 _mixin = SocketServer.ForkingMixIn
188 else:
188 else:
189 class _mixin: pass
189 class _mixin: pass
190
190
191 class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
191 class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
192 def __init__(self, *args, **kargs):
192 def __init__(self, *args, **kargs):
193 BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
193 BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
194 self.accesslog = accesslog
194 self.accesslog = accesslog
195 self.errorlog = errorlog
195 self.errorlog = errorlog
196 self.repo = repo
196 self.repo = repo
197 self.webdir_conf = webdir_conf
197 self.webdir_conf = webdir_conf
198 self.webdirmaker = hgwebdir
198 self.webdirmaker = hgwebdir
199 self.repoviewmaker = hgweb
199 self.repoviewmaker = hgweb
200 self.reqmaker = wsgiapplication(self.make_handler)
200 self.reqmaker = wsgiapplication(self.make_handler)
201 self.daemon_threads = True
201 self.daemon_threads = True
202
202
203 def make_handler(self):
203 def make_handler(self):
204 if self.webdir_conf:
204 if self.webdir_conf:
205 hgwebobj = self.webdirmaker(self.webdir_conf)
205 hgwebobj = self.webdirmaker(self.webdir_conf)
206 elif self.repo is not None:
206 elif self.repo is not None:
207 hgwebobj = self.repoviewmaker(repo.__class__(repo.ui,
207 hgwebobj = self.repoviewmaker(repo.__class__(repo.ui,
208 repo.origroot))
208 repo.origroot))
209 else:
209 else:
210 raise hg.RepoError(_('no repo found'))
210 raise hg.RepoError(_("There is no Mercurial repository here"
211 " (.hg not found)"))
211 return hgwebobj
212 return hgwebobj
212
213
213 class IPv6HTTPServer(MercurialHTTPServer):
214 class IPv6HTTPServer(MercurialHTTPServer):
214 address_family = getattr(socket, 'AF_INET6', None)
215 address_family = getattr(socket, 'AF_INET6', None)
215
216
216 def __init__(self, *args, **kwargs):
217 def __init__(self, *args, **kwargs):
217 if self.address_family is None:
218 if self.address_family is None:
218 raise hg.RepoError(_('IPv6 not available on this system'))
219 raise hg.RepoError(_('IPv6 not available on this system'))
219 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
220 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
220
221
221 if use_ipv6:
222 if use_ipv6:
222 return IPv6HTTPServer((address, port), _hgwebhandler)
223 return IPv6HTTPServer((address, port), _hgwebhandler)
223 else:
224 else:
224 return MercurialHTTPServer((address, port), _hgwebhandler)
225 return MercurialHTTPServer((address, port), _hgwebhandler)
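
For context, the handler above is a small hand-rolled WSGI gateway: do_hgweb() builds an environ dict, hands _start_response to the application produced by self.server.reqmaker, and streams whatever that application yields through _write(). The sketch below shows just that calling convention, outside of any HTTP server; hello_app and run_app are illustrative names and not part of hgweb:

    def hello_app(environ, start_response):
        # a trivial WSGI-style application: status, headers, then an iterable body
        body = 'hello ' + environ.get('PATH_INFO', '/')
        start_response('200 OK', [('Content-Type', 'text/plain'),
                                  ('Content-Length', str(len(body)))])
        return [body]

    def run_app(app, path):
        # mirrors do_hgweb(): build an environ, pass start_response, iterate
        out = []
        def start_response(status, headers, exc_info=None):
            out.append((status, headers))
            return out.append        # the write() callable, as in _start_response
        for data in app({'REQUEST_METHOD': 'GET', 'PATH_INFO': path},
                        start_response):
            if data:
                out.append(data)
        return out

    print(run_app(hello_app, '/repo'))

The real handler additionally buffers the status and headers until the first body chunk is written, so that send_headers() can decide whether a Content-Length was supplied and whether to send Connection: close.
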
@@ -1,1751 +1,1752 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("no repo found"))
30 raise repo.RepoError(_("There is no Mercurial repository"
31 " here (.hg not found)"))
31 path = p
32 path = p
32 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
33
34
34 if not os.path.isdir(self.path):
35 if not os.path.isdir(self.path):
35 if create:
36 if create:
36 if not os.path.exists(path):
37 if not os.path.exists(path):
37 os.mkdir(path)
38 os.mkdir(path)
38 os.mkdir(self.path)
39 os.mkdir(self.path)
39 os.mkdir(self.join("data"))
40 os.mkdir(self.join("data"))
40 else:
41 else:
41 raise repo.RepoError(_("repository %s not found") % path)
42 raise repo.RepoError(_("repository %s not found") % path)
42 elif create:
43 elif create:
43 raise repo.RepoError(_("repository %s already exists") % path)
44 raise repo.RepoError(_("repository %s already exists") % path)
44
45
45 self.root = os.path.abspath(path)
46 self.root = os.path.abspath(path)
46 self.origroot = path
47 self.origroot = path
47 self.ui = ui.ui(parentui=parentui)
48 self.ui = ui.ui(parentui=parentui)
48 self.opener = util.opener(self.path)
49 self.opener = util.opener(self.path)
49 self.wopener = util.opener(self.root)
50 self.wopener = util.opener(self.root)
50
51
51 try:
52 try:
52 self.ui.readconfig(self.join("hgrc"), self.root)
53 self.ui.readconfig(self.join("hgrc"), self.root)
53 except IOError:
54 except IOError:
54 pass
55 pass
55
56
56 v = self.ui.revlogopts
57 v = self.ui.revlogopts
57 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
58 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
59 fl = v.get('flags', None)
60 fl = v.get('flags', None)
60 flags = 0
61 flags = 0
61 if fl != None:
62 if fl != None:
62 for x in fl.split():
63 for x in fl.split():
63 flags |= revlog.flagstr(x)
64 flags |= revlog.flagstr(x)
64 elif self.revlogv1:
65 elif self.revlogv1:
65 flags = revlog.REVLOG_DEFAULT_FLAGS
66 flags = revlog.REVLOG_DEFAULT_FLAGS
66
67
67 v = self.revlogversion | flags
68 v = self.revlogversion | flags
68 self.manifest = manifest.manifest(self.opener, v)
69 self.manifest = manifest.manifest(self.opener, v)
69 self.changelog = changelog.changelog(self.opener, v)
70 self.changelog = changelog.changelog(self.opener, v)
70
71
71 # the changelog might not have the inline index flag
72 # the changelog might not have the inline index flag
72 # on. If the format of the changelog is the same as found in
73 # on. If the format of the changelog is the same as found in
73 # .hgrc, apply any flags found in the .hgrc as well.
74 # .hgrc, apply any flags found in the .hgrc as well.
74 # Otherwise, just use the version from the changelog
75 # Otherwise, just use the version from the changelog
75 v = self.changelog.version
76 v = self.changelog.version
76 if v == self.revlogversion:
77 if v == self.revlogversion:
77 v |= flags
78 v |= flags
78 self.revlogversion = v
79 self.revlogversion = v
79
80
80 self.tagscache = None
81 self.tagscache = None
81 self.nodetagscache = None
82 self.nodetagscache = None
82 self.encodepats = None
83 self.encodepats = None
83 self.decodepats = None
84 self.decodepats = None
84 self.transhandle = None
85 self.transhandle = None
85
86
86 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
87 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
87
88
88 def url(self):
89 def url(self):
89 return 'file:' + self.root
90 return 'file:' + self.root
90
91
91 def hook(self, name, throw=False, **args):
92 def hook(self, name, throw=False, **args):
92 def callhook(hname, funcname):
93 def callhook(hname, funcname):
93 '''call python hook. hook is callable object, looked up as
94 '''call python hook. hook is callable object, looked up as
94 name in python module. if callable returns "true", hook
95 name in python module. if callable returns "true", hook
95 fails, else passes. if hook raises exception, treated as
96 fails, else passes. if hook raises exception, treated as
96 hook failure. exception propagates if throw is "true".
97 hook failure. exception propagates if throw is "true".
97
98
98 reason for "true" meaning "hook failed" is so that
99 reason for "true" meaning "hook failed" is so that
99 unmodified commands (e.g. mercurial.commands.update) can
100 unmodified commands (e.g. mercurial.commands.update) can
100 be run as hooks without wrappers to convert return values.'''
101 be run as hooks without wrappers to convert return values.'''
101
102
102 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
103 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
103 d = funcname.rfind('.')
104 d = funcname.rfind('.')
104 if d == -1:
105 if d == -1:
105 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
106 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
106 % (hname, funcname))
107 % (hname, funcname))
107 modname = funcname[:d]
108 modname = funcname[:d]
108 try:
109 try:
109 obj = __import__(modname)
110 obj = __import__(modname)
110 except ImportError:
111 except ImportError:
111 try:
112 try:
112 # extensions are loaded with hgext_ prefix
113 # extensions are loaded with hgext_ prefix
113 obj = __import__("hgext_%s" % modname)
114 obj = __import__("hgext_%s" % modname)
114 except ImportError:
115 except ImportError:
115 raise util.Abort(_('%s hook is invalid '
116 raise util.Abort(_('%s hook is invalid '
116 '(import of "%s" failed)') %
117 '(import of "%s" failed)') %
117 (hname, modname))
118 (hname, modname))
118 try:
119 try:
119 for p in funcname.split('.')[1:]:
120 for p in funcname.split('.')[1:]:
120 obj = getattr(obj, p)
121 obj = getattr(obj, p)
121 except AttributeError, err:
122 except AttributeError, err:
122 raise util.Abort(_('%s hook is invalid '
123 raise util.Abort(_('%s hook is invalid '
123 '("%s" is not defined)') %
124 '("%s" is not defined)') %
124 (hname, funcname))
125 (hname, funcname))
125 if not callable(obj):
126 if not callable(obj):
126 raise util.Abort(_('%s hook is invalid '
127 raise util.Abort(_('%s hook is invalid '
127 '("%s" is not callable)') %
128 '("%s" is not callable)') %
128 (hname, funcname))
129 (hname, funcname))
129 try:
130 try:
130 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
131 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
131 except (KeyboardInterrupt, util.SignalInterrupt):
132 except (KeyboardInterrupt, util.SignalInterrupt):
132 raise
133 raise
133 except Exception, exc:
134 except Exception, exc:
134 if isinstance(exc, util.Abort):
135 if isinstance(exc, util.Abort):
135 self.ui.warn(_('error: %s hook failed: %s\n') %
136 self.ui.warn(_('error: %s hook failed: %s\n') %
136 (hname, exc.args[0]))
137 (hname, exc.args[0]))
137 else:
138 else:
138 self.ui.warn(_('error: %s hook raised an exception: '
139 self.ui.warn(_('error: %s hook raised an exception: '
139 '%s\n') % (hname, exc))
140 '%s\n') % (hname, exc))
140 if throw:
141 if throw:
141 raise
142 raise
142 self.ui.print_exc()
143 self.ui.print_exc()
143 return True
144 return True
144 if r:
145 if r:
145 if throw:
146 if throw:
146 raise util.Abort(_('%s hook failed') % hname)
147 raise util.Abort(_('%s hook failed') % hname)
147 self.ui.warn(_('warning: %s hook failed\n') % hname)
148 self.ui.warn(_('warning: %s hook failed\n') % hname)
148 return r
149 return r
149
150
150 def runhook(name, cmd):
151 def runhook(name, cmd):
151 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
152 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
152 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
153 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
153 r = util.system(cmd, environ=env, cwd=self.root)
154 r = util.system(cmd, environ=env, cwd=self.root)
154 if r:
155 if r:
155 desc, r = util.explain_exit(r)
156 desc, r = util.explain_exit(r)
156 if throw:
157 if throw:
157 raise util.Abort(_('%s hook %s') % (name, desc))
158 raise util.Abort(_('%s hook %s') % (name, desc))
158 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
159 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
159 return r
160 return r
160
161
161 r = False
162 r = False
162 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
163 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
163 if hname.split(".", 1)[0] == name and cmd]
164 if hname.split(".", 1)[0] == name and cmd]
164 hooks.sort()
165 hooks.sort()
165 for hname, cmd in hooks:
166 for hname, cmd in hooks:
166 if cmd.startswith('python:'):
167 if cmd.startswith('python:'):
167 r = callhook(hname, cmd[7:].strip()) or r
168 r = callhook(hname, cmd[7:].strip()) or r
168 else:
169 else:
169 r = runhook(hname, cmd) or r
170 r = runhook(hname, cmd) or r
170 return r
171 return r
171
172
172 tag_disallowed = ':\r\n'
173 tag_disallowed = ':\r\n'
173
174
174 def tag(self, name, node, message, local, user, date):
175 def tag(self, name, node, message, local, user, date):
175 '''tag a revision with a symbolic name.
176 '''tag a revision with a symbolic name.
176
177
177 if local is True, the tag is stored in a per-repository file.
178 if local is True, the tag is stored in a per-repository file.
178 otherwise, it is stored in the .hgtags file, and a new
179 otherwise, it is stored in the .hgtags file, and a new
179 changeset is committed with the change.
180 changeset is committed with the change.
180
181
181 keyword arguments:
182 keyword arguments:
182
183
183 local: whether to store tag in non-version-controlled file
184 local: whether to store tag in non-version-controlled file
184 (default False)
185 (default False)
185
186
186 message: commit message to use if committing
187 message: commit message to use if committing
187
188
188 user: name of user to use if committing
189 user: name of user to use if committing
189
190
190 date: date tuple to use if committing'''
191 date: date tuple to use if committing'''
191
192
192 for c in self.tag_disallowed:
193 for c in self.tag_disallowed:
193 if c in name:
194 if c in name:
194 raise util.Abort(_('%r cannot be used in a tag name') % c)
195 raise util.Abort(_('%r cannot be used in a tag name') % c)
195
196
196 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
197 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
197
198
198 if local:
199 if local:
199 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
200 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
200 self.hook('tag', node=hex(node), tag=name, local=local)
201 self.hook('tag', node=hex(node), tag=name, local=local)
201 return
202 return
202
203
203 for x in self.status()[:5]:
204 for x in self.status()[:5]:
204 if '.hgtags' in x:
205 if '.hgtags' in x:
205 raise util.Abort(_('working copy of .hgtags is changed '
206 raise util.Abort(_('working copy of .hgtags is changed '
206 '(please commit .hgtags manually)'))
207 '(please commit .hgtags manually)'))
207
208
208 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
209 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
209 if self.dirstate.state('.hgtags') == '?':
210 if self.dirstate.state('.hgtags') == '?':
210 self.add(['.hgtags'])
211 self.add(['.hgtags'])
211
212
212 self.commit(['.hgtags'], message, user, date)
213 self.commit(['.hgtags'], message, user, date)
213 self.hook('tag', node=hex(node), tag=name, local=local)
214 self.hook('tag', node=hex(node), tag=name, local=local)
214
215
215 def tags(self):
216 def tags(self):
216 '''return a mapping of tag to node'''
217 '''return a mapping of tag to node'''
217 if not self.tagscache:
218 if not self.tagscache:
218 self.tagscache = {}
219 self.tagscache = {}
219
220
220 def parsetag(line, context):
221 def parsetag(line, context):
221 if not line:
222 if not line:
222 return
223 return
223 s = line.split(" ", 1)
224 s = line.split(" ", 1)
224 if len(s) != 2:
225 if len(s) != 2:
225 self.ui.warn(_("%s: cannot parse entry\n") % context)
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
226 return
227 return
227 node, key = s
228 node, key = s
228 key = key.strip()
229 key = key.strip()
229 try:
230 try:
230 bin_n = bin(node)
231 bin_n = bin(node)
231 except TypeError:
232 except TypeError:
232 self.ui.warn(_("%s: node '%s' is not well formed\n") %
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
233 (context, node))
234 (context, node))
234 return
235 return
235 if bin_n not in self.changelog.nodemap:
236 if bin_n not in self.changelog.nodemap:
236 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
237 (context, key))
238 (context, key))
238 return
239 return
239 self.tagscache[key] = bin_n
240 self.tagscache[key] = bin_n
240
241
241 # read the tags file from each head, ending with the tip,
242 # read the tags file from each head, ending with the tip,
242 # and add each tag found to the map, with "newer" ones
243 # and add each tag found to the map, with "newer" ones
243 # taking precedence
244 # taking precedence
244 heads = self.heads()
245 heads = self.heads()
245 heads.reverse()
246 heads.reverse()
246 fl = self.file(".hgtags")
247 fl = self.file(".hgtags")
247 for node in heads:
248 for node in heads:
248 change = self.changelog.read(node)
249 change = self.changelog.read(node)
249 rev = self.changelog.rev(node)
250 rev = self.changelog.rev(node)
250 fn, ff = self.manifest.find(change[0], '.hgtags')
251 fn, ff = self.manifest.find(change[0], '.hgtags')
251 if fn is None: continue
252 if fn is None: continue
252 count = 0
253 count = 0
253 for l in fl.read(fn).splitlines():
254 for l in fl.read(fn).splitlines():
254 count += 1
255 count += 1
255 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
256 (rev, short(node), count))
257 (rev, short(node), count))
257 try:
258 try:
258 f = self.opener("localtags")
259 f = self.opener("localtags")
259 count = 0
260 count = 0
260 for l in f:
261 for l in f:
261 count += 1
262 count += 1
262 parsetag(l, _("localtags, line %d") % count)
263 parsetag(l, _("localtags, line %d") % count)
263 except IOError:
264 except IOError:
264 pass
265 pass
265
266
266 self.tagscache['tip'] = self.changelog.tip()
267 self.tagscache['tip'] = self.changelog.tip()
267
268
268 return self.tagscache
269 return self.tagscache
269
270
270 def tagslist(self):
271 def tagslist(self):
271 '''return a list of tags ordered by revision'''
272 '''return a list of tags ordered by revision'''
272 l = []
273 l = []
273 for t, n in self.tags().items():
274 for t, n in self.tags().items():
274 try:
275 try:
275 r = self.changelog.rev(n)
276 r = self.changelog.rev(n)
276 except:
277 except:
277 r = -2 # sort to the beginning of the list if unknown
278 r = -2 # sort to the beginning of the list if unknown
278 l.append((r, t, n))
279 l.append((r, t, n))
279 l.sort()
280 l.sort()
280 return [(t, n) for r, t, n in l]
281 return [(t, n) for r, t, n in l]
281
282
282 def nodetags(self, node):
283 def nodetags(self, node):
283 '''return the tags associated with a node'''
284 '''return the tags associated with a node'''
284 if not self.nodetagscache:
285 if not self.nodetagscache:
285 self.nodetagscache = {}
286 self.nodetagscache = {}
286 for t, n in self.tags().items():
287 for t, n in self.tags().items():
287 self.nodetagscache.setdefault(n, []).append(t)
288 self.nodetagscache.setdefault(n, []).append(t)
288 return self.nodetagscache.get(node, [])
289 return self.nodetagscache.get(node, [])
289
290
290 def lookup(self, key):
291 def lookup(self, key):
291 try:
292 try:
292 return self.tags()[key]
293 return self.tags()[key]
293 except KeyError:
294 except KeyError:
294 if key == '.':
295 if key == '.':
295 key = self.dirstate.parents()[0]
296 key = self.dirstate.parents()[0]
296 if key == nullid:
297 if key == nullid:
297 raise repo.RepoError(_("no revision checked out"))
298 raise repo.RepoError(_("no revision checked out"))
298 try:
299 try:
299 return self.changelog.lookup(key)
300 return self.changelog.lookup(key)
300 except:
301 except:
301 raise repo.RepoError(_("unknown revision '%s'") % key)
302 raise repo.RepoError(_("unknown revision '%s'") % key)
302
303
303 def dev(self):
304 def dev(self):
304 return os.lstat(self.path).st_dev
305 return os.lstat(self.path).st_dev
305
306
306 def local(self):
307 def local(self):
307 return True
308 return True
308
309
309 def join(self, f):
310 def join(self, f):
310 return os.path.join(self.path, f)
311 return os.path.join(self.path, f)
311
312
312 def wjoin(self, f):
313 def wjoin(self, f):
313 return os.path.join(self.root, f)
314 return os.path.join(self.root, f)
314
315
315 def file(self, f):
316 def file(self, f):
316 if f[0] == '/':
317 if f[0] == '/':
317 f = f[1:]
318 f = f[1:]
318 return filelog.filelog(self.opener, f, self.revlogversion)
319 return filelog.filelog(self.opener, f, self.revlogversion)
319
320
320 def changectx(self, changeid):
321 def changectx(self, changeid):
321 return context.changectx(self, changeid)
322 return context.changectx(self, changeid)
322
323
323 def filectx(self, path, changeid=None, fileid=None):
324 def filectx(self, path, changeid=None, fileid=None):
324 """changeid can be a changeset revision, node, or tag.
325 """changeid can be a changeset revision, node, or tag.
325 fileid can be a file revision or node."""
326 fileid can be a file revision or node."""
326 return context.filectx(self, path, changeid, fileid)
327 return context.filectx(self, path, changeid, fileid)
327
328
328 def getcwd(self):
329 def getcwd(self):
329 return self.dirstate.getcwd()
330 return self.dirstate.getcwd()
330
331
331 def wfile(self, f, mode='r'):
332 def wfile(self, f, mode='r'):
332 return self.wopener(f, mode)
333 return self.wopener(f, mode)
333
334
334 def wread(self, filename):
335 def wread(self, filename):
335 if self.encodepats == None:
336 if self.encodepats == None:
336 l = []
337 l = []
337 for pat, cmd in self.ui.configitems("encode"):
338 for pat, cmd in self.ui.configitems("encode"):
338 mf = util.matcher(self.root, "", [pat], [], [])[1]
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
339 l.append((mf, cmd))
340 l.append((mf, cmd))
340 self.encodepats = l
341 self.encodepats = l
341
342
342 data = self.wopener(filename, 'r').read()
343 data = self.wopener(filename, 'r').read()
343
344
344 for mf, cmd in self.encodepats:
345 for mf, cmd in self.encodepats:
345 if mf(filename):
346 if mf(filename):
346 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
347 data = util.filter(data, cmd)
348 data = util.filter(data, cmd)
348 break
349 break
349
350
350 return data
351 return data
351
352
352 def wwrite(self, filename, data, fd=None):
353 def wwrite(self, filename, data, fd=None):
353 if self.decodepats == None:
354 if self.decodepats == None:
354 l = []
355 l = []
355 for pat, cmd in self.ui.configitems("decode"):
356 for pat, cmd in self.ui.configitems("decode"):
356 mf = util.matcher(self.root, "", [pat], [], [])[1]
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
357 l.append((mf, cmd))
358 l.append((mf, cmd))
358 self.decodepats = l
359 self.decodepats = l
359
360
360 for mf, cmd in self.decodepats:
361 for mf, cmd in self.decodepats:
361 if mf(filename):
362 if mf(filename):
362 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
363 data = util.filter(data, cmd)
364 data = util.filter(data, cmd)
364 break
365 break
365
366
366 if fd:
367 if fd:
367 return fd.write(data)
368 return fd.write(data)
368 return self.wopener(filename, 'w').write(data)
369 return self.wopener(filename, 'w').write(data)
369
370
370 def transaction(self):
371 def transaction(self):
371 tr = self.transhandle
372 tr = self.transhandle
372 if tr != None and tr.running():
373 if tr != None and tr.running():
373 return tr.nest()
374 return tr.nest()
374
375
375 # save dirstate for rollback
376 # save dirstate for rollback
376 try:
377 try:
377 ds = self.opener("dirstate").read()
378 ds = self.opener("dirstate").read()
378 except IOError:
379 except IOError:
379 ds = ""
380 ds = ""
380 self.opener("journal.dirstate", "w").write(ds)
381 self.opener("journal.dirstate", "w").write(ds)
381
382
382 tr = transaction.transaction(self.ui.warn, self.opener,
383 tr = transaction.transaction(self.ui.warn, self.opener,
383 self.join("journal"),
384 self.join("journal"),
384 aftertrans(self.path))
385 aftertrans(self.path))
385 self.transhandle = tr
386 self.transhandle = tr
386 return tr
387 return tr
387
388
388 def recover(self):
389 def recover(self):
389 l = self.lock()
390 l = self.lock()
390 if os.path.exists(self.join("journal")):
391 if os.path.exists(self.join("journal")):
391 self.ui.status(_("rolling back interrupted transaction\n"))
392 self.ui.status(_("rolling back interrupted transaction\n"))
392 transaction.rollback(self.opener, self.join("journal"))
393 transaction.rollback(self.opener, self.join("journal"))
393 self.reload()
394 self.reload()
394 return True
395 return True
395 else:
396 else:
396 self.ui.warn(_("no interrupted transaction available\n"))
397 self.ui.warn(_("no interrupted transaction available\n"))
397 return False
398 return False
398
399
399 def rollback(self, wlock=None):
400 def rollback(self, wlock=None):
400 if not wlock:
401 if not wlock:
401 wlock = self.wlock()
402 wlock = self.wlock()
402 l = self.lock()
403 l = self.lock()
403 if os.path.exists(self.join("undo")):
404 if os.path.exists(self.join("undo")):
404 self.ui.status(_("rolling back last transaction\n"))
405 self.ui.status(_("rolling back last transaction\n"))
405 transaction.rollback(self.opener, self.join("undo"))
406 transaction.rollback(self.opener, self.join("undo"))
406 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
407 self.reload()
408 self.reload()
408 self.wreload()
409 self.wreload()
409 else:
410 else:
410 self.ui.warn(_("no rollback information available\n"))
411 self.ui.warn(_("no rollback information available\n"))
411
412
412 def wreload(self):
413 def wreload(self):
413 self.dirstate.read()
414 self.dirstate.read()
414
415
415 def reload(self):
416 def reload(self):
416 self.changelog.load()
417 self.changelog.load()
417 self.manifest.load()
418 self.manifest.load()
418 self.tagscache = None
419 self.tagscache = None
419 self.nodetagscache = None
420 self.nodetagscache = None
420
421
421 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
422 desc=None):
423 desc=None):
423 try:
424 try:
424 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
425 except lock.LockHeld, inst:
426 except lock.LockHeld, inst:
426 if not wait:
427 if not wait:
427 raise
428 raise
428 self.ui.warn(_("waiting for lock on %s held by %s\n") %
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
429 (desc, inst.args[0]))
430 (desc, inst.args[0]))
430 # default to 600 seconds timeout
431 # default to 600 seconds timeout
431 l = lock.lock(self.join(lockname),
432 l = lock.lock(self.join(lockname),
432 int(self.ui.config("ui", "timeout") or 600),
433 int(self.ui.config("ui", "timeout") or 600),
433 releasefn, desc=desc)
434 releasefn, desc=desc)
434 if acquirefn:
435 if acquirefn:
435 acquirefn()
436 acquirefn()
436 return l
437 return l
437
438
438 def lock(self, wait=1):
439 def lock(self, wait=1):
439 return self.do_lock("lock", wait, acquirefn=self.reload,
440 return self.do_lock("lock", wait, acquirefn=self.reload,
440 desc=_('repository %s') % self.origroot)
441 desc=_('repository %s') % self.origroot)
441
442
442 def wlock(self, wait=1):
443 def wlock(self, wait=1):
443 return self.do_lock("wlock", wait, self.dirstate.write,
444 return self.do_lock("wlock", wait, self.dirstate.write,
444 self.wreload,
445 self.wreload,
445 desc=_('working directory of %s') % self.origroot)
446 desc=_('working directory of %s') % self.origroot)
446
447
447 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
448 "determine whether a new filenode is needed"
449 "determine whether a new filenode is needed"
449 fp1 = manifest1.get(filename, nullid)
450 fp1 = manifest1.get(filename, nullid)
450 fp2 = manifest2.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
451
452
452 if fp2 != nullid:
453 if fp2 != nullid:
453 # is one parent an ancestor of the other?
454 # is one parent an ancestor of the other?
454 fpa = filelog.ancestor(fp1, fp2)
455 fpa = filelog.ancestor(fp1, fp2)
455 if fpa == fp1:
456 if fpa == fp1:
456 fp1, fp2 = fp2, nullid
457 fp1, fp2 = fp2, nullid
457 elif fpa == fp2:
458 elif fpa == fp2:
458 fp2 = nullid
459 fp2 = nullid
459
460
460 # is the file unmodified from the parent? report existing entry
461 # is the file unmodified from the parent? report existing entry
461 if fp2 == nullid and text == filelog.read(fp1):
462 if fp2 == nullid and text == filelog.read(fp1):
462 return (fp1, None, None)
463 return (fp1, None, None)
463
464
464 return (None, fp1, fp2)
465 return (None, fp1, fp2)
465
466
466 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
467 orig_parent = self.dirstate.parents()[0] or nullid
468 orig_parent = self.dirstate.parents()[0] or nullid
468 p1 = p1 or self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
469 p2 = p2 or self.dirstate.parents()[1] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
470 c1 = self.changelog.read(p1)
471 c1 = self.changelog.read(p1)
471 c2 = self.changelog.read(p2)
472 c2 = self.changelog.read(p2)
472 m1 = self.manifest.read(c1[0]).copy()
473 m1 = self.manifest.read(c1[0]).copy()
473 m2 = self.manifest.read(c2[0])
474 m2 = self.manifest.read(c2[0])
474 changed = []
475 changed = []
475
476
476 if orig_parent == p1:
477 if orig_parent == p1:
477 update_dirstate = 1
478 update_dirstate = 1
478 else:
479 else:
479 update_dirstate = 0
480 update_dirstate = 0
480
481
481 if not wlock:
482 if not wlock:
482 wlock = self.wlock()
483 wlock = self.wlock()
483 l = self.lock()
484 l = self.lock()
484 tr = self.transaction()
485 tr = self.transaction()
485 linkrev = self.changelog.count()
486 linkrev = self.changelog.count()
486 for f in files:
487 for f in files:
487 try:
488 try:
488 t = self.wread(f)
489 t = self.wread(f)
489 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
490 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
490 r = self.file(f)
491 r = self.file(f)
491
492
492 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
493 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
493 if entry:
494 if entry:
494 m1[f] = entry
495 m1[f] = entry
495 continue
496 continue
496
497
497 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
498 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
498 changed.append(f)
499 changed.append(f)
499 if update_dirstate:
500 if update_dirstate:
500 self.dirstate.update([f], "n")
501 self.dirstate.update([f], "n")
501 except IOError:
502 except IOError:
502 try:
503 try:
503 del m1[f]
504 del m1[f]
504 if update_dirstate:
505 if update_dirstate:
505 self.dirstate.forget([f])
506 self.dirstate.forget([f])
506 except:
507 except:
507 # deleted from p2?
508 # deleted from p2?
508 pass
509 pass
509
510
510 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
511 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
511 user = user or self.ui.username()
512 user = user or self.ui.username()
512 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
513 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
513 tr.close()
514 tr.close()
514 if update_dirstate:
515 if update_dirstate:
515 self.dirstate.setparents(n, nullid)
516 self.dirstate.setparents(n, nullid)
516
517
517 def commit(self, files=None, text="", user=None, date=None,
518 def commit(self, files=None, text="", user=None, date=None,
518 match=util.always, force=False, lock=None, wlock=None,
519 match=util.always, force=False, lock=None, wlock=None,
519 force_editor=False):
520 force_editor=False):
520 commit = []
521 commit = []
521 remove = []
522 remove = []
522 changed = []
523 changed = []
523
524
524 if files:
525 if files:
525 for f in files:
526 for f in files:
526 s = self.dirstate.state(f)
527 s = self.dirstate.state(f)
527 if s in 'nmai':
528 if s in 'nmai':
528 commit.append(f)
529 commit.append(f)
529 elif s == 'r':
530 elif s == 'r':
530 remove.append(f)
531 remove.append(f)
531 else:
532 else:
532 self.ui.warn(_("%s not tracked!\n") % f)
533 self.ui.warn(_("%s not tracked!\n") % f)
533 else:
534 else:
534 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
535 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
535 commit = modified + added
536 commit = modified + added
536 remove = removed
537 remove = removed
537
538
538 p1, p2 = self.dirstate.parents()
539 p1, p2 = self.dirstate.parents()
539 c1 = self.changelog.read(p1)
540 c1 = self.changelog.read(p1)
540 c2 = self.changelog.read(p2)
541 c2 = self.changelog.read(p2)
541 m1 = self.manifest.read(c1[0]).copy()
542 m1 = self.manifest.read(c1[0]).copy()
542 m2 = self.manifest.read(c2[0])
543 m2 = self.manifest.read(c2[0])
543
544
544 if not commit and not remove and not force and p2 == nullid:
545 if not commit and not remove and not force and p2 == nullid:
545 self.ui.status(_("nothing changed\n"))
546 self.ui.status(_("nothing changed\n"))
546 return None
547 return None
547
548
548 xp1 = hex(p1)
549 xp1 = hex(p1)
549 if p2 == nullid: xp2 = ''
550 if p2 == nullid: xp2 = ''
550 else: xp2 = hex(p2)
551 else: xp2 = hex(p2)
551
552
552 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
553 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
553
554
554 if not wlock:
555 if not wlock:
555 wlock = self.wlock()
556 wlock = self.wlock()
556 if not lock:
557 if not lock:
557 lock = self.lock()
558 lock = self.lock()
558 tr = self.transaction()
559 tr = self.transaction()
559
560
560 # check in files
561 # check in files
561 new = {}
562 new = {}
562 linkrev = self.changelog.count()
563 linkrev = self.changelog.count()
563 commit.sort()
564 commit.sort()
564 for f in commit:
565 for f in commit:
565 self.ui.note(f + "\n")
566 self.ui.note(f + "\n")
566 try:
567 try:
567 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
568 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
568 t = self.wread(f)
569 t = self.wread(f)
569 except IOError:
570 except IOError:
570 self.ui.warn(_("trouble committing %s!\n") % f)
571 self.ui.warn(_("trouble committing %s!\n") % f)
571 raise
572 raise
572
573
573 r = self.file(f)
574 r = self.file(f)
574
575
575 meta = {}
576 meta = {}
576 cp = self.dirstate.copied(f)
577 cp = self.dirstate.copied(f)
577 if cp:
578 if cp:
578 meta["copy"] = cp
579 meta["copy"] = cp
579 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
580 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
580 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
581 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
581 fp1, fp2 = nullid, nullid
582 fp1, fp2 = nullid, nullid
582 else:
583 else:
583 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
584 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
584 if entry:
585 if entry:
585 new[f] = entry
586 new[f] = entry
586 continue
587 continue
587
588
588 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
589 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
589 # remember what we've added so that we can later calculate
590 # remember what we've added so that we can later calculate
590 # the files to pull from a set of changesets
591 # the files to pull from a set of changesets
591 changed.append(f)
592 changed.append(f)
592
593
593 # update manifest
594 # update manifest
594 m1.update(new)
595 m1.update(new)
595 for f in remove:
596 for f in remove:
596 if f in m1:
597 if f in m1:
597 del m1[f]
598 del m1[f]
598 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
599 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
599 (new, remove))
600 (new, remove))
600
601
601 # add changeset
602 # add changeset
602 new = new.keys()
603 new = new.keys()
603 new.sort()
604 new.sort()
604
605
605 user = user or self.ui.username()
606 user = user or self.ui.username()
606 if not text or force_editor:
607 if not text or force_editor:
607 edittext = []
608 edittext = []
608 if text:
609 if text:
609 edittext.append(text)
610 edittext.append(text)
610 edittext.append("")
611 edittext.append("")
611 if p2 != nullid:
612 if p2 != nullid:
612 edittext.append("HG: branch merge")
613 edittext.append("HG: branch merge")
613 edittext.extend(["HG: changed %s" % f for f in changed])
614 edittext.extend(["HG: changed %s" % f for f in changed])
614 edittext.extend(["HG: removed %s" % f for f in remove])
615 edittext.extend(["HG: removed %s" % f for f in remove])
615 if not changed and not remove:
616 if not changed and not remove:
616 edittext.append("HG: no files changed")
617 edittext.append("HG: no files changed")
617 edittext.append("")
618 edittext.append("")
618 # run editor in the repository root
619 # run editor in the repository root
619 olddir = os.getcwd()
620 olddir = os.getcwd()
620 os.chdir(self.root)
621 os.chdir(self.root)
621 text = self.ui.edit("\n".join(edittext), user)
622 text = self.ui.edit("\n".join(edittext), user)
622 os.chdir(olddir)
623 os.chdir(olddir)
623
624
624 lines = [line.rstrip() for line in text.rstrip().splitlines()]
625 lines = [line.rstrip() for line in text.rstrip().splitlines()]
625 while lines and not lines[0]:
626 while lines and not lines[0]:
626 del lines[0]
627 del lines[0]
627 if not lines:
628 if not lines:
628 return None
629 return None
629 text = '\n'.join(lines)
630 text = '\n'.join(lines)
630 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
631 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
631 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
632 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
632 parent2=xp2)
633 parent2=xp2)
633 tr.close()
634 tr.close()
634
635
635 self.dirstate.setparents(n)
636 self.dirstate.setparents(n)
636 self.dirstate.update(new, "n")
637 self.dirstate.update(new, "n")
637 self.dirstate.forget(remove)
638 self.dirstate.forget(remove)
638
639
639 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
640 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
640 return n
641 return n
641
642
642 def walk(self, node=None, files=[], match=util.always, badmatch=None):
643 def walk(self, node=None, files=[], match=util.always, badmatch=None):
643 if node:
644 if node:
644 fdict = dict.fromkeys(files)
645 fdict = dict.fromkeys(files)
645 for fn in self.manifest.read(self.changelog.read(node)[0]):
646 for fn in self.manifest.read(self.changelog.read(node)[0]):
646 for ffn in fdict:
647 for ffn in fdict:
647 # match if the file is the exact name or a directory
648 # match if the file is the exact name or a directory
648 if ffn == fn or fn.startswith("%s/" % ffn):
649 if ffn == fn or fn.startswith("%s/" % ffn):
649 del fdict[ffn]
650 del fdict[ffn]
650 break
651 break
651 if match(fn):
652 if match(fn):
652 yield 'm', fn
653 yield 'm', fn
653 for fn in fdict:
654 for fn in fdict:
654 if badmatch and badmatch(fn):
655 if badmatch and badmatch(fn):
655 if match(fn):
656 if match(fn):
656 yield 'b', fn
657 yield 'b', fn
657 else:
658 else:
658 self.ui.warn(_('%s: No such file in rev %s\n') % (
659 self.ui.warn(_('%s: No such file in rev %s\n') % (
659 util.pathto(self.getcwd(), fn), short(node)))
660 util.pathto(self.getcwd(), fn), short(node)))
660 else:
661 else:
661 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
662 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
662 yield src, fn
663 yield src, fn
663
664
664 def status(self, node1=None, node2=None, files=[], match=util.always,
665 def status(self, node1=None, node2=None, files=[], match=util.always,
665 wlock=None, list_ignored=False, list_clean=False):
666 wlock=None, list_ignored=False, list_clean=False):
666 """return status of files between two nodes or node and working directory
667 """return status of files between two nodes or node and working directory
667
668
668 If node1 is None, use the first dirstate parent instead.
669 If node1 is None, use the first dirstate parent instead.
669 If node2 is None, compare node1 with working directory.
670 If node2 is None, compare node1 with working directory.
670 """
671 """
671
672
672 def fcmp(fn, mf):
673 def fcmp(fn, mf):
673 t1 = self.wread(fn)
674 t1 = self.wread(fn)
674 return self.file(fn).cmp(mf.get(fn, nullid), t1)
675 return self.file(fn).cmp(mf.get(fn, nullid), t1)
675
676
676 def mfmatches(node):
677 def mfmatches(node):
677 change = self.changelog.read(node)
678 change = self.changelog.read(node)
678 mf = dict(self.manifest.read(change[0]))
679 mf = dict(self.manifest.read(change[0]))
679 for fn in mf.keys():
680 for fn in mf.keys():
680 if not match(fn):
681 if not match(fn):
681 del mf[fn]
682 del mf[fn]
682 return mf
683 return mf
683
684
684 modified, added, removed, deleted, unknown = [], [], [], [], []
685 modified, added, removed, deleted, unknown = [], [], [], [], []
685 ignored, clean = [], []
686 ignored, clean = [], []
686
687
687 compareworking = False
688 compareworking = False
688 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
689 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
689 compareworking = True
690 compareworking = True
690
691
691 if not compareworking:
692 if not compareworking:
692 # read the manifest from node1 before the manifest from node2,
693 # read the manifest from node1 before the manifest from node2,
693 # so that we'll hit the manifest cache if we're going through
694 # so that we'll hit the manifest cache if we're going through
694 # all the revisions in parent->child order.
695 # all the revisions in parent->child order.
695 mf1 = mfmatches(node1)
696 mf1 = mfmatches(node1)
696
697
697 # are we comparing the working directory?
698 # are we comparing the working directory?
698 if not node2:
699 if not node2:
699 if not wlock:
700 if not wlock:
700 try:
701 try:
701 wlock = self.wlock(wait=0)
702 wlock = self.wlock(wait=0)
702 except lock.LockException:
703 except lock.LockException:
703 wlock = None
704 wlock = None
704 (lookup, modified, added, removed, deleted, unknown,
705 (lookup, modified, added, removed, deleted, unknown,
705 ignored, clean) = self.dirstate.status(files, match,
706 ignored, clean) = self.dirstate.status(files, match,
706 list_ignored, list_clean)
707 list_ignored, list_clean)
707
708
708 # are we comparing working dir against its parent?
709 # are we comparing working dir against its parent?
709 if compareworking:
710 if compareworking:
710 if lookup:
711 if lookup:
711 # do a full compare of any files that might have changed
712 # do a full compare of any files that might have changed
712 mf2 = mfmatches(self.dirstate.parents()[0])
713 mf2 = mfmatches(self.dirstate.parents()[0])
713 for f in lookup:
714 for f in lookup:
714 if fcmp(f, mf2):
715 if fcmp(f, mf2):
715 modified.append(f)
716 modified.append(f)
716 else:
717 else:
717 clean.append(f)
718 clean.append(f)
718 if wlock is not None:
719 if wlock is not None:
719 self.dirstate.update([f], "n")
720 self.dirstate.update([f], "n")
720 else:
721 else:
721 # we are comparing working dir against non-parent
722 # we are comparing working dir against non-parent
722 # generate a pseudo-manifest for the working dir
723 # generate a pseudo-manifest for the working dir
723 mf2 = mfmatches(self.dirstate.parents()[0])
724 mf2 = mfmatches(self.dirstate.parents()[0])
724 for f in lookup + modified + added:
725 for f in lookup + modified + added:
725 mf2[f] = ""
726 mf2[f] = ""
726 for f in removed:
727 for f in removed:
727 if f in mf2:
728 if f in mf2:
728 del mf2[f]
729 del mf2[f]
729 else:
730 else:
730 # we are comparing two revisions
731 # we are comparing two revisions
731 mf2 = mfmatches(node2)
732 mf2 = mfmatches(node2)
732
733
733 if not compareworking:
734 if not compareworking:
734 # flush lists from dirstate before comparing manifests
735 # flush lists from dirstate before comparing manifests
735 modified, added, clean = [], [], []
736 modified, added, clean = [], [], []
736
737
737 # make sure to sort the files so we talk to the disk in a
738 # make sure to sort the files so we talk to the disk in a
738 # reasonable order
739 # reasonable order
739 mf2keys = mf2.keys()
740 mf2keys = mf2.keys()
740 mf2keys.sort()
741 mf2keys.sort()
741 for fn in mf2keys:
742 for fn in mf2keys:
742 if mf1.has_key(fn):
743 if mf1.has_key(fn):
743 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
744 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
744 modified.append(fn)
745 modified.append(fn)
745 elif list_clean:
746 elif list_clean:
746 clean.append(fn)
747 clean.append(fn)
747 del mf1[fn]
748 del mf1[fn]
748 else:
749 else:
749 added.append(fn)
750 added.append(fn)
750
751
751 removed = mf1.keys()
752 removed = mf1.keys()
752
753
753 # sort and return results:
754 # sort and return results:
754 for l in modified, added, removed, deleted, unknown, ignored, clean:
755 for l in modified, added, removed, deleted, unknown, ignored, clean:
755 l.sort()
756 l.sort()
756 return (modified, added, removed, deleted, unknown, ignored, clean)
757 return (modified, added, removed, deleted, unknown, ignored, clean)
757
758
758 def add(self, list, wlock=None):
759 def add(self, list, wlock=None):
759 if not wlock:
760 if not wlock:
760 wlock = self.wlock()
761 wlock = self.wlock()
761 for f in list:
762 for f in list:
762 p = self.wjoin(f)
763 p = self.wjoin(f)
763 if not os.path.exists(p):
764 if not os.path.exists(p):
764 self.ui.warn(_("%s does not exist!\n") % f)
765 self.ui.warn(_("%s does not exist!\n") % f)
765 elif not os.path.isfile(p):
766 elif not os.path.isfile(p):
766 self.ui.warn(_("%s not added: only files supported currently\n")
767 self.ui.warn(_("%s not added: only files supported currently\n")
767 % f)
768 % f)
768 elif self.dirstate.state(f) in 'an':
769 elif self.dirstate.state(f) in 'an':
769 self.ui.warn(_("%s already tracked!\n") % f)
770 self.ui.warn(_("%s already tracked!\n") % f)
770 else:
771 else:
771 self.dirstate.update([f], "a")
772 self.dirstate.update([f], "a")
772
773
773 def forget(self, list, wlock=None):
774 def forget(self, list, wlock=None):
774 if not wlock:
775 if not wlock:
775 wlock = self.wlock()
776 wlock = self.wlock()
776 for f in list:
777 for f in list:
777 if self.dirstate.state(f) not in 'ai':
778 if self.dirstate.state(f) not in 'ai':
778 self.ui.warn(_("%s not added!\n") % f)
779 self.ui.warn(_("%s not added!\n") % f)
779 else:
780 else:
780 self.dirstate.forget([f])
781 self.dirstate.forget([f])
781
782
782 def remove(self, list, unlink=False, wlock=None):
783 def remove(self, list, unlink=False, wlock=None):
783 if unlink:
784 if unlink:
784 for f in list:
785 for f in list:
785 try:
786 try:
786 util.unlink(self.wjoin(f))
787 util.unlink(self.wjoin(f))
787 except OSError, inst:
788 except OSError, inst:
788 if inst.errno != errno.ENOENT:
789 if inst.errno != errno.ENOENT:
789 raise
790 raise
790 if not wlock:
791 if not wlock:
791 wlock = self.wlock()
792 wlock = self.wlock()
792 for f in list:
793 for f in list:
793 p = self.wjoin(f)
794 p = self.wjoin(f)
794 if os.path.exists(p):
795 if os.path.exists(p):
795 self.ui.warn(_("%s still exists!\n") % f)
796 self.ui.warn(_("%s still exists!\n") % f)
796 elif self.dirstate.state(f) == 'a':
797 elif self.dirstate.state(f) == 'a':
797 self.dirstate.forget([f])
798 self.dirstate.forget([f])
798 elif f not in self.dirstate:
799 elif f not in self.dirstate:
799 self.ui.warn(_("%s not tracked!\n") % f)
800 self.ui.warn(_("%s not tracked!\n") % f)
800 else:
801 else:
801 self.dirstate.update([f], "r")
802 self.dirstate.update([f], "r")
802
803
803 def undelete(self, list, wlock=None):
804 def undelete(self, list, wlock=None):
804 p = self.dirstate.parents()[0]
805 p = self.dirstate.parents()[0]
805 mn = self.changelog.read(p)[0]
806 mn = self.changelog.read(p)[0]
806 m = self.manifest.read(mn)
807 m = self.manifest.read(mn)
807 if not wlock:
808 if not wlock:
808 wlock = self.wlock()
809 wlock = self.wlock()
809 for f in list:
810 for f in list:
810 if self.dirstate.state(f) not in "r":
811 if self.dirstate.state(f) not in "r":
811 self.ui.warn("%s not removed!\n" % f)
812 self.ui.warn("%s not removed!\n" % f)
812 else:
813 else:
813 t = self.file(f).read(m[f])
814 t = self.file(f).read(m[f])
814 self.wwrite(f, t)
815 self.wwrite(f, t)
815 util.set_exec(self.wjoin(f), m.execf(f))
816 util.set_exec(self.wjoin(f), m.execf(f))
816 self.dirstate.update([f], "n")
817 self.dirstate.update([f], "n")
817
818
818 def copy(self, source, dest, wlock=None):
819 def copy(self, source, dest, wlock=None):
819 p = self.wjoin(dest)
820 p = self.wjoin(dest)
820 if not os.path.exists(p):
821 if not os.path.exists(p):
821 self.ui.warn(_("%s does not exist!\n") % dest)
822 self.ui.warn(_("%s does not exist!\n") % dest)
822 elif not os.path.isfile(p):
823 elif not os.path.isfile(p):
823 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
824 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
824 else:
825 else:
825 if not wlock:
826 if not wlock:
826 wlock = self.wlock()
827 wlock = self.wlock()
827 if self.dirstate.state(dest) == '?':
828 if self.dirstate.state(dest) == '?':
828 self.dirstate.update([dest], "a")
829 self.dirstate.update([dest], "a")
829 self.dirstate.copy(source, dest)
830 self.dirstate.copy(source, dest)
830
831
831 def heads(self, start=None):
832 def heads(self, start=None):
832 heads = self.changelog.heads(start)
833 heads = self.changelog.heads(start)
833 # sort the output in rev descending order
834 # sort the output in rev descending order
834 heads = [(-self.changelog.rev(h), h) for h in heads]
835 heads = [(-self.changelog.rev(h), h) for h in heads]
835 heads.sort()
836 heads.sort()
836 return [n for (r, n) in heads]
837 return [n for (r, n) in heads]
837
838
838 # branchlookup returns a dict giving a list of branches for
839 # branchlookup returns a dict giving a list of branches for
839 # each head. A branch is defined as the tag of a node or
840 # each head. A branch is defined as the tag of a node or
840 # the branch of the node's parents. If a node has multiple
841 # the branch of the node's parents. If a node has multiple
841 # branch tags, tags are eliminated if they are visible from other
842 # branch tags, tags are eliminated if they are visible from other
842 # branch tags.
843 # branch tags.
843 #
844 #
844 # So, for this graph: a->b->c->d->e
845 # So, for this graph: a->b->c->d->e
845 # \ /
846 # \ /
846 # aa -----/
847 # aa -----/
847 # a has tag 2.6.12
848 # a has tag 2.6.12
848 # d has tag 2.6.13
849 # d has tag 2.6.13
849 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
850 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
850 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
851 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
851 # from the list.
852 # from the list.
852 #
853 #
853 # It is possible that more than one head will have the same branch tag.
854 # It is possible that more than one head will have the same branch tag.
854 # callers need to check the result for multiple heads under the same
855 # callers need to check the result for multiple heads under the same
855 # branch tag if that is a problem for them (ie checkout of a specific
856 # branch tag if that is a problem for them (ie checkout of a specific
856 # branch).
857 # branch).
857 #
858 #
858 # passing in a specific branch will limit the depth of the search
859 # passing in a specific branch will limit the depth of the search
859 # through the parents. It won't limit the branches returned in the
860 # through the parents. It won't limit the branches returned in the
860 # result though.
861 # result though.
861 def branchlookup(self, heads=None, branch=None):
862 def branchlookup(self, heads=None, branch=None):
862 if not heads:
863 if not heads:
863 heads = self.heads()
864 heads = self.heads()
864 headt = [ h for h in heads ]
865 headt = [ h for h in heads ]
865 chlog = self.changelog
866 chlog = self.changelog
866 branches = {}
867 branches = {}
867 merges = []
868 merges = []
868 seenmerge = {}
869 seenmerge = {}
869
870
870 # traverse the tree once for each head, recording in the branches
871 # traverse the tree once for each head, recording in the branches
871 # dict which tags are visible from this head. The branches
872 # dict which tags are visible from this head. The branches
872 # dict also records which tags are visible from each tag
873 # dict also records which tags are visible from each tag
873 # while we traverse.
874 # while we traverse.
874 while headt or merges:
875 while headt or merges:
875 if merges:
876 if merges:
876 n, found = merges.pop()
877 n, found = merges.pop()
877 visit = [n]
878 visit = [n]
878 else:
879 else:
879 h = headt.pop()
880 h = headt.pop()
880 visit = [h]
881 visit = [h]
881 found = [h]
882 found = [h]
882 seen = {}
883 seen = {}
883 while visit:
884 while visit:
884 n = visit.pop()
885 n = visit.pop()
885 if n in seen:
886 if n in seen:
886 continue
887 continue
887 pp = chlog.parents(n)
888 pp = chlog.parents(n)
888 tags = self.nodetags(n)
889 tags = self.nodetags(n)
889 if tags:
890 if tags:
890 for x in tags:
891 for x in tags:
891 if x == 'tip':
892 if x == 'tip':
892 continue
893 continue
893 for f in found:
894 for f in found:
894 branches.setdefault(f, {})[n] = 1
895 branches.setdefault(f, {})[n] = 1
895 branches.setdefault(n, {})[n] = 1
896 branches.setdefault(n, {})[n] = 1
896 break
897 break
897 if n not in found:
898 if n not in found:
898 found.append(n)
899 found.append(n)
899 if branch in tags:
900 if branch in tags:
900 continue
901 continue
901 seen[n] = 1
902 seen[n] = 1
902 if pp[1] != nullid and n not in seenmerge:
903 if pp[1] != nullid and n not in seenmerge:
903 merges.append((pp[1], [x for x in found]))
904 merges.append((pp[1], [x for x in found]))
904 seenmerge[n] = 1
905 seenmerge[n] = 1
905 if pp[0] != nullid:
906 if pp[0] != nullid:
906 visit.append(pp[0])
907 visit.append(pp[0])
907 # traverse the branches dict, eliminating branch tags from each
908 # traverse the branches dict, eliminating branch tags from each
908 # head that are visible from another branch tag for that head.
909 # head that are visible from another branch tag for that head.
909 out = {}
910 out = {}
910 viscache = {}
911 viscache = {}
911 for h in heads:
912 for h in heads:
912 def visible(node):
913 def visible(node):
913 if node in viscache:
914 if node in viscache:
914 return viscache[node]
915 return viscache[node]
915 ret = {}
916 ret = {}
916 visit = [node]
917 visit = [node]
917 while visit:
918 while visit:
918 x = visit.pop()
919 x = visit.pop()
919 if x in viscache:
920 if x in viscache:
920 ret.update(viscache[x])
921 ret.update(viscache[x])
921 elif x not in ret:
922 elif x not in ret:
922 ret[x] = 1
923 ret[x] = 1
923 if x in branches:
924 if x in branches:
924 visit[len(visit):] = branches[x].keys()
925 visit[len(visit):] = branches[x].keys()
925 viscache[node] = ret
926 viscache[node] = ret
926 return ret
927 return ret
927 if h not in branches:
928 if h not in branches:
928 continue
929 continue
929 # O(n^2), but somewhat limited. This only searches the
930 # O(n^2), but somewhat limited. This only searches the
930 # tags visible from a specific head, not all the tags in the
931 # tags visible from a specific head, not all the tags in the
931 # whole repo.
932 # whole repo.
932 for b in branches[h]:
933 for b in branches[h]:
933 vis = False
934 vis = False
934 for bb in branches[h].keys():
935 for bb in branches[h].keys():
935 if b != bb:
936 if b != bb:
936 if b in visible(bb):
937 if b in visible(bb):
937 vis = True
938 vis = True
938 break
939 break
939 if not vis:
940 if not vis:
940 l = out.setdefault(h, [])
941 l = out.setdefault(h, [])
941 l[len(l):] = self.nodetags(b)
942 l[len(l):] = self.nodetags(b)
942 return out
943 return out
943
944
944 def branches(self, nodes):
945 def branches(self, nodes):
945 if not nodes:
946 if not nodes:
946 nodes = [self.changelog.tip()]
947 nodes = [self.changelog.tip()]
947 b = []
948 b = []
948 for n in nodes:
949 for n in nodes:
949 t = n
950 t = n
950 while 1:
951 while 1:
951 p = self.changelog.parents(n)
952 p = self.changelog.parents(n)
952 if p[1] != nullid or p[0] == nullid:
953 if p[1] != nullid or p[0] == nullid:
953 b.append((t, n, p[0], p[1]))
954 b.append((t, n, p[0], p[1]))
954 break
955 break
955 n = p[0]
956 n = p[0]
956 return b
957 return b
957
958
958 def between(self, pairs):
959 def between(self, pairs):
959 r = []
960 r = []
960
961
961 for top, bottom in pairs:
962 for top, bottom in pairs:
962 n, l, i = top, [], 0
963 n, l, i = top, [], 0
963 f = 1
964 f = 1
964
965
965 while n != bottom:
966 while n != bottom:
966 p = self.changelog.parents(n)[0]
967 p = self.changelog.parents(n)[0]
967 if i == f:
968 if i == f:
968 l.append(n)
969 l.append(n)
969 f = f * 2
970 f = f * 2
970 n = p
971 n = p
971 i += 1
972 i += 1
972
973
973 r.append(l)
974 r.append(l)
974
975
975 return r
976 return r
976
977
977 def findincoming(self, remote, base=None, heads=None, force=False):
978 def findincoming(self, remote, base=None, heads=None, force=False):
978 """Return list of roots of the subsets of missing nodes from remote
979 """Return list of roots of the subsets of missing nodes from remote
979
980
980 If base dict is specified, assume that these nodes and their parents
981 If base dict is specified, assume that these nodes and their parents
981 exist on the remote side and that no child of a node of base exists
982 exist on the remote side and that no child of a node of base exists
982 in both remote and self.
983 in both remote and self.
983 Furthermore base will be updated to include the nodes that exists
984 Furthermore base will be updated to include the nodes that exists
984 in self and remote but no children exists in self and remote.
985 in self and remote but no children exists in self and remote.
985 If a list of heads is specified, return only nodes which are heads
986 If a list of heads is specified, return only nodes which are heads
986 or ancestors of these heads.
987 or ancestors of these heads.
987
988
988 All the ancestors of base are in self and in remote.
989 All the ancestors of base are in self and in remote.
989 All the descendants of the list returned are missing in self.
990 All the descendants of the list returned are missing in self.
990 (and so we know that the rest of the nodes are missing in remote, see
991 (and so we know that the rest of the nodes are missing in remote, see
991 outgoing)
992 outgoing)
992 """
993 """
993 m = self.changelog.nodemap
994 m = self.changelog.nodemap
994 search = []
995 search = []
995 fetch = {}
996 fetch = {}
996 seen = {}
997 seen = {}
997 seenbranch = {}
998 seenbranch = {}
998 if base == None:
999 if base == None:
999 base = {}
1000 base = {}
1000
1001
1001 if not heads:
1002 if not heads:
1002 heads = remote.heads()
1003 heads = remote.heads()
1003
1004
1004 if self.changelog.tip() == nullid:
1005 if self.changelog.tip() == nullid:
1005 base[nullid] = 1
1006 base[nullid] = 1
1006 if heads != [nullid]:
1007 if heads != [nullid]:
1007 return [nullid]
1008 return [nullid]
1008 return []
1009 return []
1009
1010
1010 # assume we're closer to the tip than the root
1011 # assume we're closer to the tip than the root
1011 # and start by examining the heads
1012 # and start by examining the heads
1012 self.ui.status(_("searching for changes\n"))
1013 self.ui.status(_("searching for changes\n"))
1013
1014
1014 unknown = []
1015 unknown = []
1015 for h in heads:
1016 for h in heads:
1016 if h not in m:
1017 if h not in m:
1017 unknown.append(h)
1018 unknown.append(h)
1018 else:
1019 else:
1019 base[h] = 1
1020 base[h] = 1
1020
1021
1021 if not unknown:
1022 if not unknown:
1022 return []
1023 return []
1023
1024
1024 req = dict.fromkeys(unknown)
1025 req = dict.fromkeys(unknown)
1025 reqcnt = 0
1026 reqcnt = 0
1026
1027
1027 # search through remote branches
1028 # search through remote branches
1028 # a 'branch' here is a linear segment of history, with four parts:
1029 # a 'branch' here is a linear segment of history, with four parts:
1029 # head, root, first parent, second parent
1030 # head, root, first parent, second parent
1030 # (a branch always has two parents (or none) by definition)
1031 # (a branch always has two parents (or none) by definition)
1031 unknown = remote.branches(unknown)
1032 unknown = remote.branches(unknown)
1032 while unknown:
1033 while unknown:
1033 r = []
1034 r = []
1034 while unknown:
1035 while unknown:
1035 n = unknown.pop(0)
1036 n = unknown.pop(0)
1036 if n[0] in seen:
1037 if n[0] in seen:
1037 continue
1038 continue
1038
1039
1039 self.ui.debug(_("examining %s:%s\n")
1040 self.ui.debug(_("examining %s:%s\n")
1040 % (short(n[0]), short(n[1])))
1041 % (short(n[0]), short(n[1])))
1041 if n[0] == nullid: # found the end of the branch
1042 if n[0] == nullid: # found the end of the branch
1042 pass
1043 pass
1043 elif n in seenbranch:
1044 elif n in seenbranch:
1044 self.ui.debug(_("branch already found\n"))
1045 self.ui.debug(_("branch already found\n"))
1045 continue
1046 continue
1046 elif n[1] and n[1] in m: # do we know the base?
1047 elif n[1] and n[1] in m: # do we know the base?
1047 self.ui.debug(_("found incomplete branch %s:%s\n")
1048 self.ui.debug(_("found incomplete branch %s:%s\n")
1048 % (short(n[0]), short(n[1])))
1049 % (short(n[0]), short(n[1])))
1049 search.append(n) # schedule branch range for scanning
1050 search.append(n) # schedule branch range for scanning
1050 seenbranch[n] = 1
1051 seenbranch[n] = 1
1051 else:
1052 else:
1052 if n[1] not in seen and n[1] not in fetch:
1053 if n[1] not in seen and n[1] not in fetch:
1053 if n[2] in m and n[3] in m:
1054 if n[2] in m and n[3] in m:
1054 self.ui.debug(_("found new changeset %s\n") %
1055 self.ui.debug(_("found new changeset %s\n") %
1055 short(n[1]))
1056 short(n[1]))
1056 fetch[n[1]] = 1 # earliest unknown
1057 fetch[n[1]] = 1 # earliest unknown
1057 for p in n[2:4]:
1058 for p in n[2:4]:
1058 if p in m:
1059 if p in m:
1059 base[p] = 1 # latest known
1060 base[p] = 1 # latest known
1060
1061
1061 for p in n[2:4]:
1062 for p in n[2:4]:
1062 if p not in req and p not in m:
1063 if p not in req and p not in m:
1063 r.append(p)
1064 r.append(p)
1064 req[p] = 1
1065 req[p] = 1
1065 seen[n[0]] = 1
1066 seen[n[0]] = 1
1066
1067
1067 if r:
1068 if r:
1068 reqcnt += 1
1069 reqcnt += 1
1069 self.ui.debug(_("request %d: %s\n") %
1070 self.ui.debug(_("request %d: %s\n") %
1070 (reqcnt, " ".join(map(short, r))))
1071 (reqcnt, " ".join(map(short, r))))
1071 for p in range(0, len(r), 10):
1072 for p in range(0, len(r), 10):
1072 for b in remote.branches(r[p:p+10]):
1073 for b in remote.branches(r[p:p+10]):
1073 self.ui.debug(_("received %s:%s\n") %
1074 self.ui.debug(_("received %s:%s\n") %
1074 (short(b[0]), short(b[1])))
1075 (short(b[0]), short(b[1])))
1075 unknown.append(b)
1076 unknown.append(b)
1076
1077
1077 # do binary search on the branches we found
1078 # do binary search on the branches we found
1078 while search:
1079 while search:
1079 n = search.pop(0)
1080 n = search.pop(0)
1080 reqcnt += 1
1081 reqcnt += 1
1081 l = remote.between([(n[0], n[1])])[0]
1082 l = remote.between([(n[0], n[1])])[0]
1082 l.append(n[1])
1083 l.append(n[1])
1083 p = n[0]
1084 p = n[0]
1084 f = 1
1085 f = 1
1085 for i in l:
1086 for i in l:
1086 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1087 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1087 if i in m:
1088 if i in m:
1088 if f <= 2:
1089 if f <= 2:
1089 self.ui.debug(_("found new branch changeset %s\n") %
1090 self.ui.debug(_("found new branch changeset %s\n") %
1090 short(p))
1091 short(p))
1091 fetch[p] = 1
1092 fetch[p] = 1
1092 base[i] = 1
1093 base[i] = 1
1093 else:
1094 else:
1094 self.ui.debug(_("narrowed branch search to %s:%s\n")
1095 self.ui.debug(_("narrowed branch search to %s:%s\n")
1095 % (short(p), short(i)))
1096 % (short(p), short(i)))
1096 search.append((p, i))
1097 search.append((p, i))
1097 break
1098 break
1098 p, f = i, f * 2
1099 p, f = i, f * 2
1099
1100
1100 # sanity check our fetch list
1101 # sanity check our fetch list
1101 for f in fetch.keys():
1102 for f in fetch.keys():
1102 if f in m:
1103 if f in m:
1103 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1104 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1104
1105
1105 if base.keys() == [nullid]:
1106 if base.keys() == [nullid]:
1106 if force:
1107 if force:
1107 self.ui.warn(_("warning: repository is unrelated\n"))
1108 self.ui.warn(_("warning: repository is unrelated\n"))
1108 else:
1109 else:
1109 raise util.Abort(_("repository is unrelated"))
1110 raise util.Abort(_("repository is unrelated"))
1110
1111
1111 self.ui.debug(_("found new changesets starting at ") +
1112 self.ui.debug(_("found new changesets starting at ") +
1112 " ".join([short(f) for f in fetch]) + "\n")
1113 " ".join([short(f) for f in fetch]) + "\n")
1113
1114
1114 self.ui.debug(_("%d total queries\n") % reqcnt)
1115 self.ui.debug(_("%d total queries\n") % reqcnt)
1115
1116
1116 return fetch.keys()
1117 return fetch.keys()
1117
1118
1118 def findoutgoing(self, remote, base=None, heads=None, force=False):
1119 def findoutgoing(self, remote, base=None, heads=None, force=False):
1119 """Return list of nodes that are roots of subsets not in remote
1120 """Return list of nodes that are roots of subsets not in remote
1120
1121
1121 If base dict is specified, assume that these nodes and their parents
1122 If base dict is specified, assume that these nodes and their parents
1122 exist on the remote side.
1123 exist on the remote side.
1123 If a list of heads is specified, return only nodes which are heads
1124 If a list of heads is specified, return only nodes which are heads
1124 or ancestors of these heads, and return a second element which
1125 or ancestors of these heads, and return a second element which
1125 contains all remote heads which get new children.
1126 contains all remote heads which get new children.
1126 """
1127 """
1127 if base == None:
1128 if base == None:
1128 base = {}
1129 base = {}
1129 self.findincoming(remote, base, heads, force=force)
1130 self.findincoming(remote, base, heads, force=force)
1130
1131
1131 self.ui.debug(_("common changesets up to ")
1132 self.ui.debug(_("common changesets up to ")
1132 + " ".join(map(short, base.keys())) + "\n")
1133 + " ".join(map(short, base.keys())) + "\n")
1133
1134
1134 remain = dict.fromkeys(self.changelog.nodemap)
1135 remain = dict.fromkeys(self.changelog.nodemap)
1135
1136
1136 # prune everything remote has from the tree
1137 # prune everything remote has from the tree
1137 del remain[nullid]
1138 del remain[nullid]
1138 remove = base.keys()
1139 remove = base.keys()
1139 while remove:
1140 while remove:
1140 n = remove.pop(0)
1141 n = remove.pop(0)
1141 if n in remain:
1142 if n in remain:
1142 del remain[n]
1143 del remain[n]
1143 for p in self.changelog.parents(n):
1144 for p in self.changelog.parents(n):
1144 remove.append(p)
1145 remove.append(p)
1145
1146
1146 # find every node whose parents have been pruned
1147 # find every node whose parents have been pruned
1147 subset = []
1148 subset = []
1148 # find every remote head that will get new children
1149 # find every remote head that will get new children
1149 updated_heads = {}
1150 updated_heads = {}
1150 for n in remain:
1151 for n in remain:
1151 p1, p2 = self.changelog.parents(n)
1152 p1, p2 = self.changelog.parents(n)
1152 if p1 not in remain and p2 not in remain:
1153 if p1 not in remain and p2 not in remain:
1153 subset.append(n)
1154 subset.append(n)
1154 if heads:
1155 if heads:
1155 if p1 in heads:
1156 if p1 in heads:
1156 updated_heads[p1] = True
1157 updated_heads[p1] = True
1157 if p2 in heads:
1158 if p2 in heads:
1158 updated_heads[p2] = True
1159 updated_heads[p2] = True
1159
1160
1160 # this is the set of all roots we have to push
1161 # this is the set of all roots we have to push
1161 if heads:
1162 if heads:
1162 return subset, updated_heads.keys()
1163 return subset, updated_heads.keys()
1163 else:
1164 else:
1164 return subset
1165 return subset
1165
1166
1166 def pull(self, remote, heads=None, force=False, lock=None):
1167 def pull(self, remote, heads=None, force=False, lock=None):
1167 mylock = False
1168 mylock = False
1168 if not lock:
1169 if not lock:
1169 lock = self.lock()
1170 lock = self.lock()
1170 mylock = True
1171 mylock = True
1171
1172
1172 try:
1173 try:
1173 fetch = self.findincoming(remote, force=force)
1174 fetch = self.findincoming(remote, force=force)
1174 if fetch == [nullid]:
1175 if fetch == [nullid]:
1175 self.ui.status(_("requesting all changes\n"))
1176 self.ui.status(_("requesting all changes\n"))
1176
1177
1177 if not fetch:
1178 if not fetch:
1178 self.ui.status(_("no changes found\n"))
1179 self.ui.status(_("no changes found\n"))
1179 return 0
1180 return 0
1180
1181
1181 if heads is None:
1182 if heads is None:
1182 cg = remote.changegroup(fetch, 'pull')
1183 cg = remote.changegroup(fetch, 'pull')
1183 else:
1184 else:
1184 cg = remote.changegroupsubset(fetch, heads, 'pull')
1185 cg = remote.changegroupsubset(fetch, heads, 'pull')
1185 return self.addchangegroup(cg, 'pull', remote.url())
1186 return self.addchangegroup(cg, 'pull', remote.url())
1186 finally:
1187 finally:
1187 if mylock:
1188 if mylock:
1188 lock.release()
1189 lock.release()
1189
1190
1190 def push(self, remote, force=False, revs=None):
1191 def push(self, remote, force=False, revs=None):
1191 # there are two ways to push to remote repo:
1192 # there are two ways to push to remote repo:
1192 #
1193 #
1193 # addchangegroup assumes local user can lock remote
1194 # addchangegroup assumes local user can lock remote
1194 # repo (local filesystem, old ssh servers).
1195 # repo (local filesystem, old ssh servers).
1195 #
1196 #
1196 # unbundle assumes local user cannot lock remote repo (new ssh
1197 # unbundle assumes local user cannot lock remote repo (new ssh
1197 # servers, http servers).
1198 # servers, http servers).
1198
1199
1199 if remote.capable('unbundle'):
1200 if remote.capable('unbundle'):
1200 return self.push_unbundle(remote, force, revs)
1201 return self.push_unbundle(remote, force, revs)
1201 return self.push_addchangegroup(remote, force, revs)
1202 return self.push_addchangegroup(remote, force, revs)
1202
1203
1203 def prepush(self, remote, force, revs):
1204 def prepush(self, remote, force, revs):
1204 base = {}
1205 base = {}
1205 remote_heads = remote.heads()
1206 remote_heads = remote.heads()
1206 inc = self.findincoming(remote, base, remote_heads, force=force)
1207 inc = self.findincoming(remote, base, remote_heads, force=force)
1207 if not force and inc:
1208 if not force and inc:
1208 self.ui.warn(_("abort: unsynced remote changes!\n"))
1209 self.ui.warn(_("abort: unsynced remote changes!\n"))
1209 self.ui.status(_("(did you forget to sync?"
1210 self.ui.status(_("(did you forget to sync?"
1210 " use push -f to force)\n"))
1211 " use push -f to force)\n"))
1211 return None, 1
1212 return None, 1
1212
1213
1213 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1214 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1214 if revs is not None:
1215 if revs is not None:
1215 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1216 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1216 else:
1217 else:
1217 bases, heads = update, self.changelog.heads()
1218 bases, heads = update, self.changelog.heads()
1218
1219
1219 if not bases:
1220 if not bases:
1220 self.ui.status(_("no changes found\n"))
1221 self.ui.status(_("no changes found\n"))
1221 return None, 1
1222 return None, 1
1222 elif not force:
1223 elif not force:
1223 # FIXME we don't properly detect creation of new heads
1224 # FIXME we don't properly detect creation of new heads
1224 # in the push -r case, assume the user knows what he's doing
1225 # in the push -r case, assume the user knows what he's doing
1225 if not revs and len(remote_heads) < len(heads) \
1226 if not revs and len(remote_heads) < len(heads) \
1226 and remote_heads != [nullid]:
1227 and remote_heads != [nullid]:
1227 self.ui.warn(_("abort: push creates new remote branches!\n"))
1228 self.ui.warn(_("abort: push creates new remote branches!\n"))
1228 self.ui.status(_("(did you forget to merge?"
1229 self.ui.status(_("(did you forget to merge?"
1229 " use push -f to force)\n"))
1230 " use push -f to force)\n"))
1230 return None, 1
1231 return None, 1
1231
1232
1232 if revs is None:
1233 if revs is None:
1233 cg = self.changegroup(update, 'push')
1234 cg = self.changegroup(update, 'push')
1234 else:
1235 else:
1235 cg = self.changegroupsubset(update, revs, 'push')
1236 cg = self.changegroupsubset(update, revs, 'push')
1236 return cg, remote_heads
1237 return cg, remote_heads
1237
1238
1238 def push_addchangegroup(self, remote, force, revs):
1239 def push_addchangegroup(self, remote, force, revs):
1239 lock = remote.lock()
1240 lock = remote.lock()
1240
1241
1241 ret = self.prepush(remote, force, revs)
1242 ret = self.prepush(remote, force, revs)
1242 if ret[0] is not None:
1243 if ret[0] is not None:
1243 cg, remote_heads = ret
1244 cg, remote_heads = ret
1244 return remote.addchangegroup(cg, 'push', self.url())
1245 return remote.addchangegroup(cg, 'push', self.url())
1245 return ret[1]
1246 return ret[1]
1246
1247
1247 def push_unbundle(self, remote, force, revs):
1248 def push_unbundle(self, remote, force, revs):
1248 # local repo finds heads on server, finds out what revs it
1249 # local repo finds heads on server, finds out what revs it
1249 # must push. once revs transferred, if server finds it has
1250 # must push. once revs transferred, if server finds it has
1250 # different heads (someone else won commit/push race), server
1251 # different heads (someone else won commit/push race), server
1251 # aborts.
1252 # aborts.
1252
1253
1253 ret = self.prepush(remote, force, revs)
1254 ret = self.prepush(remote, force, revs)
1254 if ret[0] is not None:
1255 if ret[0] is not None:
1255 cg, remote_heads = ret
1256 cg, remote_heads = ret
1256 if force: remote_heads = ['force']
1257 if force: remote_heads = ['force']
1257 return remote.unbundle(cg, remote_heads, 'push')
1258 return remote.unbundle(cg, remote_heads, 'push')
1258 return ret[1]
1259 return ret[1]
1259
1260
1260 def changegroupsubset(self, bases, heads, source):
1261 def changegroupsubset(self, bases, heads, source):
1261 """This function generates a changegroup consisting of all the nodes
1262 """This function generates a changegroup consisting of all the nodes
1262 that are descendents of any of the bases, and ancestors of any of
1263 that are descendents of any of the bases, and ancestors of any of
1263 the heads.
1264 the heads.
1264
1265
1265 It is fairly complex as determining which filenodes and which
1266 It is fairly complex as determining which filenodes and which
1266 manifest nodes need to be included for the changeset to be complete
1267 manifest nodes need to be included for the changeset to be complete
1267 is non-trivial.
1268 is non-trivial.
1268
1269
1269 Another wrinkle is doing the reverse, figuring out which changeset in
1270 Another wrinkle is doing the reverse, figuring out which changeset in
1270 the changegroup a particular filenode or manifestnode belongs to."""
1271 the changegroup a particular filenode or manifestnode belongs to."""
1271
1272
1272 self.hook('preoutgoing', throw=True, source=source)
1273 self.hook('preoutgoing', throw=True, source=source)
1273
1274
1274 # Set up some initial variables
1275 # Set up some initial variables
1275 # Make it easy to refer to self.changelog
1276 # Make it easy to refer to self.changelog
1276 cl = self.changelog
1277 cl = self.changelog
1277 # msng is short for missing - compute the list of changesets in this
1278 # msng is short for missing - compute the list of changesets in this
1278 # changegroup.
1279 # changegroup.
1279 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1280 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1280 # Some bases may turn out to be superfluous, and some heads may be
1281 # Some bases may turn out to be superfluous, and some heads may be
1281 # too. nodesbetween will return the minimal set of bases and heads
1282 # too. nodesbetween will return the minimal set of bases and heads
1282 # necessary to re-create the changegroup.
1283 # necessary to re-create the changegroup.
1283
1284
1284 # Known heads are the list of heads that it is assumed the recipient
1285 # Known heads are the list of heads that it is assumed the recipient
1285 # of this changegroup will know about.
1286 # of this changegroup will know about.
1286 knownheads = {}
1287 knownheads = {}
1287 # We assume that all parents of bases are known heads.
1288 # We assume that all parents of bases are known heads.
1288 for n in bases:
1289 for n in bases:
1289 for p in cl.parents(n):
1290 for p in cl.parents(n):
1290 if p != nullid:
1291 if p != nullid:
1291 knownheads[p] = 1
1292 knownheads[p] = 1
1292 knownheads = knownheads.keys()
1293 knownheads = knownheads.keys()
1293 if knownheads:
1294 if knownheads:
1294 # Now that we know what heads are known, we can compute which
1295 # Now that we know what heads are known, we can compute which
1295 # changesets are known. The recipient must know about all
1296 # changesets are known. The recipient must know about all
1296 # changesets required to reach the known heads from the null
1297 # changesets required to reach the known heads from the null
1297 # changeset.
1298 # changeset.
1298 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1299 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1299 junk = None
1300 junk = None
1300 # Transform the list into an ersatz set.
1301 # Transform the list into an ersatz set.
1301 has_cl_set = dict.fromkeys(has_cl_set)
1302 has_cl_set = dict.fromkeys(has_cl_set)
1302 else:
1303 else:
1303 # If there were no known heads, the recipient cannot be assumed to
1304 # If there were no known heads, the recipient cannot be assumed to
1304 # know about any changesets.
1305 # know about any changesets.
1305 has_cl_set = {}
1306 has_cl_set = {}
1306
1307
1307 # Make it easy to refer to self.manifest
1308 # Make it easy to refer to self.manifest
1308 mnfst = self.manifest
1309 mnfst = self.manifest
1309 # We don't know which manifests are missing yet
1310 # We don't know which manifests are missing yet
1310 msng_mnfst_set = {}
1311 msng_mnfst_set = {}
1311 # Nor do we know which filenodes are missing.
1312 # Nor do we know which filenodes are missing.
1312 msng_filenode_set = {}
1313 msng_filenode_set = {}
1313
1314
1314 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1315 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1315 junk = None
1316 junk = None
1316
1317
1317 # A changeset always belongs to itself, so the changenode lookup
1318 # A changeset always belongs to itself, so the changenode lookup
1318 # function for a changenode is identity.
1319 # function for a changenode is identity.
1319 def identity(x):
1320 def identity(x):
1320 return x
1321 return x
1321
1322
1322 # A function generating function. Sets up an environment for the
1323 # A function generating function. Sets up an environment for the
1323 # inner function.
1324 # inner function.
1324 def cmp_by_rev_func(revlog):
1325 def cmp_by_rev_func(revlog):
1325 # Compare two nodes by their revision number in the environment's
1326 # Compare two nodes by their revision number in the environment's
1326 # revision history. Since the revision number both represents the
1327 # revision history. Since the revision number both represents the
1327 # most efficient order to read the nodes in, and represents a
1328 # most efficient order to read the nodes in, and represents a
1328 # topological sorting of the nodes, this function is often useful.
1329 # topological sorting of the nodes, this function is often useful.
1329 def cmp_by_rev(a, b):
1330 def cmp_by_rev(a, b):
1330 return cmp(revlog.rev(a), revlog.rev(b))
1331 return cmp(revlog.rev(a), revlog.rev(b))
1331 return cmp_by_rev
1332 return cmp_by_rev
1332
1333
1333 # If we determine that a particular file or manifest node must be a
1334 # If we determine that a particular file or manifest node must be a
1334 # node that the recipient of the changegroup will already have, we can
1335 # node that the recipient of the changegroup will already have, we can
1335 # also assume the recipient will have all the parents. This function
1336 # also assume the recipient will have all the parents. This function
1336 # prunes them from the set of missing nodes.
1337 # prunes them from the set of missing nodes.
1337 def prune_parents(revlog, hasset, msngset):
1338 def prune_parents(revlog, hasset, msngset):
1338 haslst = hasset.keys()
1339 haslst = hasset.keys()
1339 haslst.sort(cmp_by_rev_func(revlog))
1340 haslst.sort(cmp_by_rev_func(revlog))
1340 for node in haslst:
1341 for node in haslst:
1341 parentlst = [p for p in revlog.parents(node) if p != nullid]
1342 parentlst = [p for p in revlog.parents(node) if p != nullid]
1342 while parentlst:
1343 while parentlst:
1343 n = parentlst.pop()
1344 n = parentlst.pop()
1344 if n not in hasset:
1345 if n not in hasset:
1345 hasset[n] = 1
1346 hasset[n] = 1
1346 p = [p for p in revlog.parents(n) if p != nullid]
1347 p = [p for p in revlog.parents(n) if p != nullid]
1347 parentlst.extend(p)
1348 parentlst.extend(p)
1348 for n in hasset:
1349 for n in hasset:
1349 msngset.pop(n, None)
1350 msngset.pop(n, None)
1350
1351
1351 # This is a function generating function used to set up an environment
1352 # This is a function generating function used to set up an environment
1352 # for the inner function to execute in.
1353 # for the inner function to execute in.
1353 def manifest_and_file_collector(changedfileset):
1354 def manifest_and_file_collector(changedfileset):
1354 # This is an information gathering function that gathers
1355 # This is an information gathering function that gathers
1355 # information from each changeset node that goes out as part of
1356 # information from each changeset node that goes out as part of
1356 # the changegroup. The information gathered is a list of which
1357 # the changegroup. The information gathered is a list of which
1357 # manifest nodes are potentially required (the recipient may
1358 # manifest nodes are potentially required (the recipient may
1358 # already have them) and total list of all files which were
1359 # already have them) and total list of all files which were
1359 # changed in any changeset in the changegroup.
1360 # changed in any changeset in the changegroup.
1360 #
1361 #
1361 # We also remember the first changenode we saw any manifest
1362 # We also remember the first changenode we saw any manifest
1362 # referenced by so we can later determine which changenode 'owns'
1363 # referenced by so we can later determine which changenode 'owns'
1363 # the manifest.
1364 # the manifest.
1364 def collect_manifests_and_files(clnode):
1365 def collect_manifests_and_files(clnode):
1365 c = cl.read(clnode)
1366 c = cl.read(clnode)
1366 for f in c[3]:
1367 for f in c[3]:
1367 # This is to make sure we only have one instance of each
1368 # This is to make sure we only have one instance of each
1368 # filename string for each filename.
1369 # filename string for each filename.
1369 changedfileset.setdefault(f, f)
1370 changedfileset.setdefault(f, f)
1370 msng_mnfst_set.setdefault(c[0], clnode)
1371 msng_mnfst_set.setdefault(c[0], clnode)
1371 return collect_manifests_and_files
1372 return collect_manifests_and_files
1372
1373
1373 # Figure out which manifest nodes (of the ones we think might be part
1374 # Figure out which manifest nodes (of the ones we think might be part
1374 # of the changegroup) the recipient must know about and remove them
1375 # of the changegroup) the recipient must know about and remove them
1375 # from the changegroup.
1376 # from the changegroup.
1376 def prune_manifests():
1377 def prune_manifests():
1377 has_mnfst_set = {}
1378 has_mnfst_set = {}
1378 for n in msng_mnfst_set:
1379 for n in msng_mnfst_set:
1379 # If a 'missing' manifest thinks it belongs to a changenode
1380 # If a 'missing' manifest thinks it belongs to a changenode
1380 # the recipient is assumed to have, obviously the recipient
1381 # the recipient is assumed to have, obviously the recipient
1381 # must have that manifest.
1382 # must have that manifest.
1382 linknode = cl.node(mnfst.linkrev(n))
1383 linknode = cl.node(mnfst.linkrev(n))
1383 if linknode in has_cl_set:
1384 if linknode in has_cl_set:
1384 has_mnfst_set[n] = 1
1385 has_mnfst_set[n] = 1
1385 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1386 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1386
1387
1387 # Use the information collected in collect_manifests_and_files to say
1388 # Use the information collected in collect_manifests_and_files to say
1388 # which changenode any manifestnode belongs to.
1389 # which changenode any manifestnode belongs to.
1389 def lookup_manifest_link(mnfstnode):
1390 def lookup_manifest_link(mnfstnode):
1390 return msng_mnfst_set[mnfstnode]
1391 return msng_mnfst_set[mnfstnode]
1391
1392
        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to. It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, let's
        # remove all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
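        #
        # The stream gengroup() yields is laid out as: the changelog group,
        # then the manifest group, then for each changed file a chunk naming
        # the file followed by that file's group, and finally an empty chunk
        # to signal that no more groups follow.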
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())

    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])

        def identity(x):
            return x

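        # Yield the nodes of the given revlog whose linked changeset falls
        # inside the outgoing revision set computed above.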
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

            if nodes:
                self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())

    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.
        returns number of heads modified or added + 1."""
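        # The incoming stream mirrors the layout produced by the generators
        # above: a changeset group, a manifest group, then one named group
        # per changed file until an empty chunk is read.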

        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not
        # see an inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.opener, self.changelog.version)

            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.opener, self.changelog.version)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in range(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        return newheads - oldheads + 1


    def stream_in(self, remote):
        fp = remote.stream_out()
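        # The stream is parsed as follows (see the reads below): a status
        # line ("0" means OK), a "<file count> <byte count>" line, then for
        # each file a "<name>\0<size>" line followed by exactly <size> bytes
        # of raw data written straight into the store.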
        resp = int(fp.readline())
        if resp != 0:
            raise util.Abort(_('operation forbidden by server'))
        self.ui.status(_('streaming all changes\n'))
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        start = time.time()
        for i in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            size = int(size)
            self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
            ofp = self.opener(name, 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))
        self.reload()
        return len(self.heads()) + 1

    def clone(self, remote, heads=[], stream=False):
        '''clone remote repository.

        keyword arguments:
        heads: list of revs to clone (forces use of pull)
        stream: use streaming clone if possible'''

        # now, all clients that can request uncompressed clones can
        # read repo formats supported by all servers that can serve
        # them.

        # if revlog format changes, client will have to check version
        # and format flags on "stream" capability, and use
        # uncompressed only if compatible.

        if stream and not heads and remote.capable('stream'):
            return self.stream_in(remote)
        return self.pull(remote, heads)

# used to avoid circular references so destructors work
def aftertrans(base):
    p = base
    def a():
        util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
        util.rename(os.path.join(p, "journal.dirstate"),
                    os.path.join(p, "undo.dirstate"))
    return a
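
# A rough usage sketch for aftertrans() (illustrative; presumably it is
# handed to the transaction machinery as its completion callback):
#     after = aftertrans("/path/to/repo/.hg")     # hypothetical path
#     # ... transaction runs and writes "journal" / "journal.dirstate" ...
#     after()   # journal -> undo, journal.dirstate -> undo.dirstate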

def instance(ui, path, create):
    return localrepository(ui, util.drop_scheme('file', path), create)

def islocal(path):
    return True
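
# Illustrative sketch of the chunk framing the generators above rely on via
# changegroup.genchunk()/closechunk()/getchunk(): a minimal stand-alone
# model, assuming the historical format of a 4-byte big-endian length that
# counts the prefix itself, with a bare zero meaning "end of group".  The
# sketch_* names are made up for this example.

import struct

def sketch_genchunk(data):
    # frame a payload: total length (payload plus 4-byte header), then data
    return struct.pack(">l", len(data) + 4) + data

def sketch_closechunk():
    # a zero-length header terminates the current group
    return struct.pack(">l", 0)

def sketch_getchunk(fp):
    # read one chunk back from a file-like object; "" marks a group boundary
    header = fp.read(4)
    if not header:
        return ""
    length = struct.unpack(">l", header)[0]
    if length <= 4:
        return ""
    return fp.read(length - 4)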