Add '.' as a shortcut revision name for the working directory parent.
Brendan Cully
r2789:e3564699 default
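For illustration only (not part of the commit's diff): with this change, '.' can be given wherever a revision name is accepted and names the working directory's parent, for example:

    hg log -r .            # show the changeset the working directory is based on
    hg diff -r . -r tip    # compare the working directory parent with tip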
@@ -1,3680 +1,3680 @@
# commands.py - command processing for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb")
demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
demandload(globals(), "fnmatch mdiff random signal tempfile time")
demandload(globals(), "traceback errno socket version struct atexit sets bz2")
demandload(globals(), "archival cStringIO changegroup email.Parser")
demandload(globals(), "hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted, unknown = repo.changes()
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def filterfiles(filters, files):
    l = [x for x in files if x in filters]

    for t in filters:
        if t and t[-1] != "/":
            t += "/"
        l += [x for x in files if x.startswith(t)]
    return l

def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def logmessage(**opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def matchpats(repo, pats=[], opts={}, head=''):
    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)

def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    exact = dict(zip(files, files))
    def walk():
        for src, fn in repo.walk(node=node, files=files, match=matchfn,
                                 badmatch=badmatch):
            yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
    return files, matchfn, walk()

def walk(repo, pats, opts, node=None, head='', badmatch=None):
    files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
    for r in results:
        yield r

def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


    files, matchfn, anypats = matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if repo.changelog.count() == 0:
        return [], False, matchfn

    if follow:
        p = repo.dirstate.parents()[0]
        if p == nullid:
            ui.warn(_('No working directory revision; defaulting to tip\n'))
            start = 'tip'
        else:
            start = repo.changelog.rev(p)
        defrange = '%s:0' % start
    else:
        defrange = 'tip:0'
    revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
    wanted = {}
    slowpath = anypats
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield rev for which we have the changelog, it can
                    # happen while doing "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        class followfilter:
            def __init__(self, onlyfirst=False):
                self.startrev = -1
                self.roots = []
                self.onlyfirst = onlyfirst

            def match(self, rev):
                def realparents(rev):
                    if self.onlyfirst:
                        return repo.changelog.parentrevs(rev)[0:1]
                    else:
                        return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))

                if self.startrev == -1:
                    self.startrev = rev
                    return True

                if rev > self.startrev:
                    # forward: all descendants
                    if not self.roots:
                        self.roots.append(self.startrev)
                    for parent in realparents(rev):
                        if parent in self.roots:
                            self.roots.append(rev)
                            return True
                else:
                    # backwards: all parents
                    if not self.roots:
                        self.roots.extend(realparents(self.startrev))
                    if rev in self.roots:
                        self.roots.remove(rev)
                        self.roots.extend(realparents(rev))
                        return True

                return False

        if follow and not files:
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if rev not in wanted:
                    return False
                return ff.match(rev)
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn

revrangesep = ':'

def revfix(repo, val, defval):
    '''turn user-level id of changeset into rev number.
    user-level id can be tag, changeset, rev number, or negative rev
    number relative to number of revs (-1 is tip, etc).'''
    if not val:
        return defval
    try:
        num = int(val)
        if str(num) != val:
            raise ValueError
        if num < 0:
            num += repo.changelog.count()
        if num < 0:
            num = 0
        elif num >= repo.changelog.count():
            raise ValueError
    except ValueError:
        try:
            num = repo.changelog.rev(repo.lookup(val))
        except KeyError:
            raise util.Abort(_('invalid revision identifier %s'), val)
    return num

def revpair(ui, repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''
    if not revs:
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        start = revs[0]
        if revrangesep in start:
            start, end = start.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, start, None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    if end is not None: end = repo.lookup(str(end))
    return repo.lookup(str(start)), end

def revrange(ui, repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    seen = {}
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)

def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])

def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)

def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists"), filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        if compress:
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
        fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)

def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    if not node1:
        node1 = repo.dirstate.parents()[0]
    # reading the data for node1 early allows it to play nicely
    # with repo.changes and the revlog cache.
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        _date2 = util.datestr(change[2])
        def date2(f):
            return _date2
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        tz = util.makedate()[1]
        _date2 = util.datestr()
        def date2(f):
            try:
                return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
            except OSError, err:
                if err.errno != errno.ENOENT: raise
                return _date2
        def read(f):
            return repo.wread(f)

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    ignorewsamount = opts.get('ignore_space_change') or \
                     diffopts['ignorewsamount']
    ignoreblanklines = opts.get('ignore_blank_lines') or \
                       diffopts['ignoreblanklines']

    all = modified + added + removed
    all.sort()
    for f in all:
        to = None
        tn = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        if f not in removed:
            tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))

def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])

        parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
                   for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        if self.ui.verbose:
            self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
        else:
            self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.changes(log.parents(changenode)[0], changenode)
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")

def show_changeset(ui, repo, opts):
    '''show one changeset. uses template or regular display. caller
    can pass in 'style' and 'template' options in opts.'''

    tmpl = opts.get('template')
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl: tmpl = templater.parsestring(tmpl)
    mapfile = opts.get('style') or ui.config('ui', 'style')
    if tmpl or mapfile:
        if mapfile:
            if not os.path.isfile(mapfile):
                mapname = templater.templatepath('map-cmdline.' + mapfile)
                if not mapname: mapname = templater.templatepath(mapfile)
                if mapname: mapfile = mapname
        try:
            t = templater.changeset_templater(ui, repo, mapfile)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo)

def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def helpcmd(name):
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = findcmd(name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helpext(name):
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        try:
            helpcmd(name)
        except UnknownCommand:
            helpext(name)

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

779 def add(ui, repo, *pats, **opts):
779 def add(ui, repo, *pats, **opts):
780 """add the specified files on the next commit
780 """add the specified files on the next commit
781
781
782 Schedule files to be version controlled and added to the repository.
782 Schedule files to be version controlled and added to the repository.
783
783
784 The files will be added to the repository at the next commit.
784 The files will be added to the repository at the next commit.
785
785
786 If no names are given, add all files in the repository.
786 If no names are given, add all files in the repository.
787 """
787 """
788
788
789 names = []
789 names = []
790 for src, abs, rel, exact in walk(repo, pats, opts):
790 for src, abs, rel, exact in walk(repo, pats, opts):
791 if exact:
791 if exact:
792 if ui.verbose:
792 if ui.verbose:
793 ui.status(_('adding %s\n') % rel)
793 ui.status(_('adding %s\n') % rel)
794 names.append(abs)
794 names.append(abs)
795 elif repo.dirstate.state(abs) == '?':
795 elif repo.dirstate.state(abs) == '?':
796 ui.status(_('adding %s\n') % rel)
796 ui.status(_('adding %s\n') % rel)
797 names.append(abs)
797 names.append(abs)
798 if not opts.get('dry_run'):
798 if not opts.get('dry_run'):
799 repo.add(names)
799 repo.add(names)
800
800
801 def addremove(ui, repo, *pats, **opts):
801 def addremove(ui, repo, *pats, **opts):
802 """add all new files, delete all missing files (DEPRECATED)
802 """add all new files, delete all missing files (DEPRECATED)
803
803
804 (DEPRECATED)
804 (DEPRECATED)
805 Add all new files and remove all missing files from the repository.
805 Add all new files and remove all missing files from the repository.
806
806
807 New files are ignored if they match any of the patterns in .hgignore. As
807 New files are ignored if they match any of the patterns in .hgignore. As
808 with add, these changes take effect at the next commit.
808 with add, these changes take effect at the next commit.
809
809
810 This command is now deprecated and will be removed in a future
810 This command is now deprecated and will be removed in a future
811 release. Please use add and remove --after instead.
811 release. Please use add and remove --after instead.
812 """
812 """
813 ui.warn(_('(the addremove command is deprecated; use add and remove '
813 ui.warn(_('(the addremove command is deprecated; use add and remove '
814 '--after instead)\n'))
814 '--after instead)\n'))
815 return addremove_lock(ui, repo, pats, opts)
815 return addremove_lock(ui, repo, pats, opts)
816
816
817 def addremove_lock(ui, repo, pats, opts, wlock=None):
817 def addremove_lock(ui, repo, pats, opts, wlock=None):
818 add, remove = [], []
818 add, remove = [], []
819 for src, abs, rel, exact in walk(repo, pats, opts):
819 for src, abs, rel, exact in walk(repo, pats, opts):
820 if src == 'f' and repo.dirstate.state(abs) == '?':
820 if src == 'f' and repo.dirstate.state(abs) == '?':
821 add.append(abs)
821 add.append(abs)
822 if ui.verbose or not exact:
822 if ui.verbose or not exact:
823 ui.status(_('adding %s\n') % ((pats and rel) or abs))
823 ui.status(_('adding %s\n') % ((pats and rel) or abs))
824 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
824 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
825 remove.append(abs)
825 remove.append(abs)
826 if ui.verbose or not exact:
826 if ui.verbose or not exact:
827 ui.status(_('removing %s\n') % ((pats and rel) or abs))
827 ui.status(_('removing %s\n') % ((pats and rel) or abs))
828 if not opts.get('dry_run'):
828 if not opts.get('dry_run'):
829 repo.add(add, wlock=wlock)
829 repo.add(add, wlock=wlock)
830 repo.remove(remove, wlock=wlock)
830 repo.remove(remove, wlock=wlock)
831
831
832 def annotate(ui, repo, *pats, **opts):
832 def annotate(ui, repo, *pats, **opts):
833 """show changeset information per file line
833 """show changeset information per file line
834
834
835 List changes in files, showing the revision id responsible for each line
835 List changes in files, showing the revision id responsible for each line
836
836
837 This command is useful to discover who did a change or when a change took
837 This command is useful to discover who did a change or when a change took
838 place.
838 place.
839
839
840 Without the -a option, annotate will avoid processing files it
840 Without the -a option, annotate will avoid processing files it
841 detects as binary. With -a, annotate will generate an annotation
841 detects as binary. With -a, annotate will generate an annotation
842 anyway, probably with undesirable results.
842 anyway, probably with undesirable results.
843 """
843 """
844 def getnode(rev):
844 def getnode(rev):
845 return short(repo.changelog.node(rev))
845 return short(repo.changelog.node(rev))
846
846
847 ucache = {}
847 ucache = {}
848 def getname(rev):
848 def getname(rev):
849 try:
849 try:
850 return ucache[rev]
850 return ucache[rev]
851 except:
851 except:
852 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
852 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
853 ucache[rev] = u
853 ucache[rev] = u
854 return u
854 return u
855
855
856 dcache = {}
856 dcache = {}
857 def getdate(rev):
857 def getdate(rev):
858 datestr = dcache.get(rev)
858 datestr = dcache.get(rev)
859 if datestr is None:
859 if datestr is None:
860 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
860 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
861 return datestr
861 return datestr
862
862
863 if not pats:
863 if not pats:
864 raise util.Abort(_('at least one file name or pattern required'))
864 raise util.Abort(_('at least one file name or pattern required'))
865
865
866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
867 ['date', getdate]]
867 ['date', getdate]]
868 if not opts['user'] and not opts['changeset'] and not opts['date']:
868 if not opts['user'] and not opts['changeset'] and not opts['date']:
869 opts['number'] = 1
869 opts['number'] = 1
870
870
871 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
871 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
872
872
873 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
873 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
874 fctx = ctx.filectx(abs)
874 fctx = ctx.filectx(abs)
875 if not opts['text'] and util.binary(fctx.data()):
875 if not opts['text'] and util.binary(fctx.data()):
876 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
876 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
877 continue
877 continue
878
878
879 lines = fctx.annotate()
879 lines = fctx.annotate()
880 pieces = []
880 pieces = []
881
881
882 for o, f in opmap:
882 for o, f in opmap:
883 if opts[o]:
883 if opts[o]:
884 l = [f(n) for n, dummy in lines]
884 l = [f(n) for n, dummy in lines]
885 if l:
885 if l:
886 m = max(map(len, l))
886 m = max(map(len, l))
887 pieces.append(["%*s" % (m, x) for x in l])
887 pieces.append(["%*s" % (m, x) for x in l])
888
888
889 if pieces:
890 for p, l in zip(zip(*pieces), lines):
891 ui.write("%s: %s" % (" ".join(p), l[1]))
892
893 def archive(ui, repo, dest, **opts):
894 '''create unversioned archive of a repository revision
895
896 By default, the revision used is the parent of the working
897 directory; use "-r" to specify a different revision.
898
899 To specify the type of archive to create, use "-t". Valid
900 types are:
901
902 "files" (default): a directory full of files
903 "tar": tar archive, uncompressed
904 "tbz2": tar archive, compressed using bzip2
905 "tgz": tar archive, compressed using gzip
906 "uzip": zip archive, uncompressed
907 "zip": zip archive, compressed using deflate
908
909 The exact name of the destination archive or directory is given
910 using a format string; see "hg help export" for details.
911
912 Each member added to an archive file has a directory prefix
913 prepended. Use "-p" to specify a format string for the prefix.
914 The default is the basename of the archive, with suffixes removed.
915 '''
916
917 if opts['rev']:
918 node = repo.lookup(opts['rev'])
919 else:
920 node, p2 = repo.dirstate.parents()
921 if p2 != nullid:
922 raise util.Abort(_('uncommitted merge - please provide a '
923 'specific revision'))
924
925 dest = make_filename(repo, dest, node)
926 if os.path.realpath(dest) == repo.root:
927 raise util.Abort(_('repository root cannot be destination'))
928 dummy, matchfn, dummy = matchpats(repo, [], opts)
929 kind = opts.get('type') or 'files'
930 prefix = opts['prefix']
931 if dest == '-':
932 if kind == 'files':
933 raise util.Abort(_('cannot archive plain files to stdout'))
934 dest = sys.stdout
935 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
936 prefix = make_filename(repo, prefix, node)
937 archival.archive(repo, dest, node, kind, not opts['no_decode'],
938 matchfn, prefix)
939
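# Editor's note (illustrative sketch, not part of commands.py): typical
# invocations of the archive command documented above, using only options
# named in its docstring; paths and revision numbers are placeholders.
#   hg archive ../snapshot                  # "files" archive of the working
#                                           # directory's parent revision
#   hg archive -t tgz -r 1000 ../snap.tgz   # gzipped tarball of revision 1000
#   hg archive -t zip -p myproj-%h out.zip  # zip whose member prefix is built
#                                           # from the short changeset hash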
940 def backout(ui, repo, rev, **opts):
941 '''reverse effect of earlier changeset
942
943 Commit the backed out changes as a new changeset. The new
944 changeset is a child of the backed out changeset.
945
946 If you back out a changeset other than the tip, a new head is
947 created. This head is the parent of the working directory. If
948 you back out an old changeset, your working directory will appear
949 old after the backout. You should merge the backout changeset
950 with another head.
951
952 The --merge option remembers the parent of the working directory
953 before starting the backout, then merges the new head with that
954 changeset afterwards. This saves you from doing the merge by
955 hand. The result of this merge is not committed, as for a normal
956 merge.'''
957
958 bail_if_changed(repo)
959 op1, op2 = repo.dirstate.parents()
960 if op2 != nullid:
961 raise util.Abort(_('outstanding uncommitted merge'))
962 node = repo.lookup(rev)
963 p1, p2 = repo.changelog.parents(node)
964 if p1 == nullid:
965 raise util.Abort(_('cannot back out a change with no parents'))
966 if p2 != nullid:
967 if not opts['parent']:
968 raise util.Abort(_('cannot back out a merge changeset without '
969 '--parent'))
970 p = repo.lookup(opts['parent'])
971 if p not in (p1, p2):
972 raise util.Abort(_('%s is not a parent of %s' %
973 (short(p), short(node))))
974 parent = p
975 else:
976 if opts['parent']:
977 raise util.Abort(_('cannot use --parent on non-merge changeset'))
978 parent = p1
979 repo.update(node, force=True, show_stats=False)
980 revert_opts = opts.copy()
981 revert_opts['rev'] = hex(parent)
982 revert(ui, repo, **revert_opts)
983 commit_opts = opts.copy()
984 commit_opts['addremove'] = False
985 if not commit_opts['message'] and not commit_opts['logfile']:
986 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
987 commit_opts['force_editor'] = True
988 commit(ui, repo, **commit_opts)
989 def nice(node):
990 return '%d:%s' % (repo.changelog.rev(node), short(node))
991 ui.status(_('changeset %s backs out changeset %s\n') %
992 (nice(repo.changelog.tip()), nice(node)))
993 if op1 != node:
994 if opts['merge']:
995 ui.status(_('merging with changeset %s\n') % nice(op1))
996 doupdate(ui, repo, hex(op1), **opts)
997 else:
998 ui.status(_('the backout changeset is a new head - '
999 'do not forget to merge\n'))
1000 ui.status(_('(use "backout --merge" '
1001 'if you want to auto-merge)\n'))
1002
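# Editor's note (illustrative, not part of commands.py): per the docstring
# above, backing out a non-tip changeset leaves a new head that still has to
# be merged; --merge performs that merge automatically. REV is a placeholder.
#   hg backout REV           # commit a changeset that reverses REV
#   hg backout --merge REV   # same, then merge the result with the previous
#                            # working directory parent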
1003 def bundle(ui, repo, fname, dest=None, **opts):
1004 """create a changegroup file
1005
1006 Generate a compressed changegroup file collecting all changesets
1007 not found in the other repository.
1008
1009 This file can then be transferred using conventional means and
1010 applied to another repository with the unbundle command. This is
1011 useful when native push and pull are not available or when
1012 exporting an entire repository is undesirable. The standard file
1013 extension is ".hg".
1014
1015 Unlike import/export, this exactly preserves all changeset
1016 contents including permissions, rename data, and revision history.
1017 """
1018 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1019 other = hg.repository(ui, dest)
1020 o = repo.findoutgoing(other, force=opts['force'])
1021 cg = repo.changegroup(o, 'bundle')
1022 write_bundle(cg, fname)
1023
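# Editor's note (illustrative, not part of commands.py): a bundle/unbundle
# round trip as described in the docstring above; the destination path is a
# placeholder and defaults to the 'default-push' or 'default' path.
#   hg bundle changes.hg /path/to/other/repo
#   (transfer changes.hg by any conventional means)
#   hg unbundle changes.hg    # run inside the receiving repository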
1024 def cat(ui, repo, file1, *pats, **opts):
1025 """output the latest or given revisions of files
1026
1027 Print the specified files as they were at the given revision.
1028 If no revision is given then the tip is used.
1029
1030 Output may be to a file, in which case the name of the file is
1031 given using a format string. The formatting rules are the same as
1032 for the export command, with the following additions:
1033
1034 %s basename of file being printed
1035 %d dirname of file being printed, or '.' if in repo root
1036 %p root-relative path name of file being printed
1037 """
1038 ctx = repo.changectx(opts['rev'] or "-1")
1039 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
1040 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
1041 fp.write(ctx.filectx(abs).data())
1042
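# Editor's note (illustrative, not part of commands.py): using the output
# format string described above; '%s' expands to the basename of each file
# printed, so this writes one output file per input file. The -o/--output
# spelling is assumed here, since the command table is outside this excerpt.
#   hg cat -r 1000 -o '%s.r1000' hgweb.py README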
1043 def clone(ui, source, dest=None, **opts):
1044 """make a copy of an existing repository
1045
1046 Create a copy of an existing repository in a new directory.
1047
1048 If no destination directory name is specified, it defaults to the
1049 basename of the source.
1050
1051 The location of the source is added to the new repository's
1052 .hg/hgrc file, as the default to be used for future pulls.
1053
1054 For efficiency, hardlinks are used for cloning whenever the source
1055 and destination are on the same filesystem (note this applies only
1056 to the repository data, not to the checked out files). Some
1057 filesystems, such as AFS, implement hardlinking incorrectly, but
1058 do not report errors. In these cases, use the --pull option to
1059 avoid hardlinking.
1060
1061 You can safely clone repositories and checked out files using full
1062 hardlinks with
1063
1064 $ cp -al REPO REPOCLONE
1065
1066 which is the fastest way to clone. However, the operation is not
1067 atomic (making sure REPO is not modified during the operation is
1068 up to you) and you have to make sure your editor breaks hardlinks
1069 (Emacs and most Linux Kernel tools do so).
1070
1071 If you use the -r option to clone up to a specific revision, no
1072 subsequent revisions will be present in the cloned repository.
1073 This option implies --pull, even on local repositories.
1074
1075 See pull for valid source format details.
1076
1077 It is possible to specify an ssh:// URL as the destination, but no
1078 .hg/hgrc will be created on the remote side. Look at the help text
1079 for the pull command for important details about ssh:// URLs.
1080 """
1081 setremoteconfig(ui, opts)
1082 hg.clone(ui, ui.expandpath(source), dest,
1083 pull=opts['pull'],
1084 stream=opts['uncompressed'],
1085 rev=opts['rev'],
1086 update=not opts['noupdate'])
1087
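# Editor's note (illustrative, not part of commands.py): cloning up to a
# specific revision as described above; -r implies --pull even for a local
# source, so no hardlinks are made in that case. Paths are placeholders.
#   hg clone -r 1000 /path/to/src /path/to/dst
#   hg clone --pull /afs/volume/src dst   # avoid hardlinks on suspect filesystems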
1088 def commit(ui, repo, *pats, **opts):
1089 """commit the specified files or all outstanding changes
1090
1091 Commit changes to the given files into the repository.
1092
1093 If a list of files is omitted, all changes reported by "hg status"
1094 will be committed.
1095
1096 If no commit message is specified, the editor configured in your hgrc
1097 or in the EDITOR environment variable is started to enter a message.
1098 """
1099 message = logmessage(**opts)
1100
1101 if opts['addremove']:
1102 addremove_lock(ui, repo, pats, opts)
1103 fns, match, anypats = matchpats(repo, pats, opts)
1104 if pats:
1105 modified, added, removed, deleted, unknown = (
1106 repo.changes(files=fns, match=match))
1107 files = modified + added + removed
1108 else:
1109 files = []
1110 try:
1111 repo.commit(files, message, opts['user'], opts['date'], match,
1112 force_editor=opts.get('force_editor'))
1113 except ValueError, inst:
1114 raise util.Abort(str(inst))
1115
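# Editor's note (illustrative, not part of commands.py): committing a subset
# of files with an explicit message; with no file arguments every change
# reported by "hg status" is committed, and with no message the configured
# editor is started (see logmessage above). The -m spelling is the usual hg
# short form, assumed here since the command table is outside this excerpt.
#   hg commit -m 'fix overflow in foo()' foo.py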
1116 def docopy(ui, repo, pats, opts, wlock):
1117 # called with the repo lock held
1118 cwd = repo.getcwd()
1119 errors = 0
1120 copied = []
1121 targets = {}
1122
1123 def okaytocopy(abs, rel, exact):
1124 reasons = {'?': _('is not managed'),
1125 'a': _('has been marked for add'),
1126 'r': _('has been marked for remove')}
1127 state = repo.dirstate.state(abs)
1128 reason = reasons.get(state)
1129 if reason:
1130 if state == 'a':
1131 origsrc = repo.dirstate.copied(abs)
1132 if origsrc is not None:
1133 return origsrc
1134 if exact:
1135 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1136 else:
1137 return abs
1138
1139 def copy(origsrc, abssrc, relsrc, target, exact):
1140 abstarget = util.canonpath(repo.root, cwd, target)
1141 reltarget = util.pathto(cwd, abstarget)
1142 prevsrc = targets.get(abstarget)
1143 if prevsrc is not None:
1144 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1145 (reltarget, abssrc, prevsrc))
1146 return
1147 if (not opts['after'] and os.path.exists(reltarget) or
1148 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1149 if not opts['force']:
1150 ui.warn(_('%s: not overwriting - file exists\n') %
1151 reltarget)
1152 return
1153 if not opts['after'] and not opts.get('dry_run'):
1154 os.unlink(reltarget)
1155 if opts['after']:
1156 if not os.path.exists(reltarget):
1157 return
1158 else:
1159 targetdir = os.path.dirname(reltarget) or '.'
1160 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1161 os.makedirs(targetdir)
1162 try:
1163 restore = repo.dirstate.state(abstarget) == 'r'
1164 if restore and not opts.get('dry_run'):
1165 repo.undelete([abstarget], wlock)
1166 try:
1167 if not opts.get('dry_run'):
1168 shutil.copyfile(relsrc, reltarget)
1169 shutil.copymode(relsrc, reltarget)
1170 restore = False
1171 finally:
1172 if restore:
1173 repo.remove([abstarget], wlock)
1174 except shutil.Error, inst:
1175 raise util.Abort(str(inst))
1176 except IOError, inst:
1177 if inst.errno == errno.ENOENT:
1178 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1179 else:
1180 ui.warn(_('%s: cannot copy - %s\n') %
1181 (relsrc, inst.strerror))
1182 errors += 1
1183 return
1184 if ui.verbose or not exact:
1185 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1186 targets[abstarget] = abssrc
1187 if abstarget != origsrc and not opts.get('dry_run'):
1188 repo.copy(origsrc, abstarget, wlock)
1189 copied.append((abssrc, relsrc, exact))
1190
1191 def targetpathfn(pat, dest, srcs):
1192 if os.path.isdir(pat):
1193 abspfx = util.canonpath(repo.root, cwd, pat)
1194 if destdirexists:
1195 striplen = len(os.path.split(abspfx)[0])
1196 else:
1197 striplen = len(abspfx)
1198 if striplen:
1199 striplen += len(os.sep)
1200 res = lambda p: os.path.join(dest, p[striplen:])
1201 elif destdirexists:
1202 res = lambda p: os.path.join(dest, os.path.basename(p))
1203 else:
1204 res = lambda p: dest
1205 return res
1206
1207 def targetpathafterfn(pat, dest, srcs):
1208 if util.patkind(pat, None)[0]:
1209 # a mercurial pattern
1210 res = lambda p: os.path.join(dest, os.path.basename(p))
1211 else:
1212 abspfx = util.canonpath(repo.root, cwd, pat)
1213 if len(abspfx) < len(srcs[0][0]):
1214 # A directory. Either the target path contains the last
1215 # component of the source path or it does not.
1216 def evalpath(striplen):
1217 score = 0
1218 for s in srcs:
1219 t = os.path.join(dest, s[0][striplen:])
1220 if os.path.exists(t):
1221 score += 1
1222 return score
1223
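# Editor's note: evalpath() scores a candidate strip length by counting how
# many source paths would land on an already-existing path under dest once
# that prefix is stripped; the code below compares stripping the whole
# pattern prefix against keeping its last component and picks whichever
# interpretation matches more existing targets.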
1224 striplen = len(abspfx)
1225 if striplen:
1226 striplen += len(os.sep)
1227 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1228 score = evalpath(striplen)
1229 striplen1 = len(os.path.split(abspfx)[0])
1230 if striplen1:
1231 striplen1 += len(os.sep)
1232 if evalpath(striplen1) > score:
1233 striplen = striplen1
1234 res = lambda p: os.path.join(dest, p[striplen:])
1235 else:
1236 # a file
1237 if destdirexists:
1238 res = lambda p: os.path.join(dest, os.path.basename(p))
1239 else:
1240 res = lambda p: dest
1241 return res
1242
1243
1244 pats = list(pats)
1245 if not pats:
1246 raise util.Abort(_('no source or destination specified'))
1247 if len(pats) == 1:
1248 raise util.Abort(_('no destination specified'))
1249 dest = pats.pop()
1250 destdirexists = os.path.isdir(dest)
1251 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1252 raise util.Abort(_('with multiple sources, destination must be an '
1253 'existing directory'))
1254 if opts['after']:
1255 tfn = targetpathafterfn
1256 else:
1257 tfn = targetpathfn
1258 copylist = []
1259 for pat in pats:
1260 srcs = []
1261 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1262 origsrc = okaytocopy(abssrc, relsrc, exact)
1263 if origsrc:
1264 srcs.append((origsrc, abssrc, relsrc, exact))
1265 if not srcs:
1266 continue
1267 copylist.append((tfn(pat, dest, srcs), srcs))
1268 if not copylist:
1269 raise util.Abort(_('no files to copy'))
1270
1271 for targetpath, srcs in copylist:
1272 for origsrc, abssrc, relsrc, exact in srcs:
1273 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1274
1275 if errors:
1276 ui.warn(_('(consider using --after)\n'))
1277 return errors, copied
1278
1279 def copy(ui, repo, *pats, **opts):
1280 """mark files as copied for the next commit
1281
1282 Mark dest as having copies of source files. If dest is a
1283 directory, copies are put in that directory. If dest is a file,
1284 there can only be one source.
1285
1286 By default, this command copies the contents of files as they
1287 stand in the working directory. If invoked with --after, the
1288 operation is recorded, but no copying is performed.
1289
1290 This command takes effect in the next commit.
1291
1292 NOTE: This command should be treated as experimental. While it
1293 should properly record copied files, this information is not yet
1294 fully used by merge, nor fully reported by log.
1295 """
1296 wlock = repo.wlock(0)
1297 errs, copied = docopy(ui, repo, pats, opts, wlock)
1298 return errs
1299
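# Editor's note (illustrative, not part of commands.py): recording a copy
# after the fact, as described in the docstring above; no file data is
# copied, only the copy information for the next commit is recorded.
# File names are placeholders.
#   hg copy --after old-name.py new-name.py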
1300 def debugancestor(ui, index, rev1, rev2):
1301 """find the ancestor revision of two revisions in a given index"""
1302 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1303 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1304 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1305
1306 def debugcomplete(ui, cmd='', **opts):
1307 """returns the completion list associated with the given command"""
1308
1309 if opts['options']:
1310 options = []
1311 otables = [globalopts]
1312 if cmd:
1313 aliases, entry = findcmd(cmd)
1314 otables.append(entry[1])
1315 for t in otables:
1316 for o in t:
1317 if o[0]:
1318 options.append('-%s' % o[0])
1319 options.append('--%s' % o[1])
1320 ui.write("%s\n" % "\n".join(options))
1321 return
1322
1323 clist = findpossible(cmd).keys()
1324 clist.sort()
1325 ui.write("%s\n" % "\n".join(clist))
1326
1327 def debugrebuildstate(ui, repo, rev=None):
1328 """rebuild the dirstate as it would look like for the given revision"""
1328 """rebuild the dirstate as it would look like for the given revision"""
1329 if not rev:
1330 rev = repo.changelog.tip()
1331 else:
1332 rev = repo.lookup(rev)
1333 change = repo.changelog.read(rev)
1334 n = change[0]
1335 files = repo.manifest.readflags(n)
1336 wlock = repo.wlock()
1337 repo.dirstate.rebuild(rev, files.iteritems())
1338
1339 def debugcheckstate(ui, repo):
1340 """validate the correctness of the current dirstate"""
1341 parent1, parent2 = repo.dirstate.parents()
1342 repo.dirstate.read()
1343 dc = repo.dirstate.map
1344 keys = dc.keys()
1345 keys.sort()
1346 m1n = repo.changelog.read(parent1)[0]
1347 m2n = repo.changelog.read(parent2)[0]
1348 m1 = repo.manifest.read(m1n)
1349 m2 = repo.manifest.read(m2n)
1350 errors = 0
1351 for f in dc:
1352 state = repo.dirstate.state(f)
1353 if state in "nr" and f not in m1:
1354 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1355 errors += 1
1356 if state in "a" and f in m1:
1357 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1358 errors += 1
1359 if state in "m" and f not in m1 and f not in m2:
1360 ui.warn(_("%s in state %s, but not in either manifest\n") %
1361 (f, state))
1362 errors += 1
1363 for f in m1:
1364 state = repo.dirstate.state(f)
1365 if state not in "nrm":
1366 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1367 errors += 1
1368 if errors:
1369 error = _(".hg/dirstate inconsistent with current parent's manifest")
1370 raise util.Abort(error)
1371
1372 def debugconfig(ui, repo, *values):
1373 """show combined config settings from all hgrc files
1374
1375 With no args, print names and values of all config items.
1376
1377 With one arg of the form section.name, print just the value of
1378 that config item.
1379
1380 With multiple args, print names and values of all config items
1381 with matching section names."""
1382
1383 if values:
1384 if len([v for v in values if '.' in v]) > 1:
1385 raise util.Abort(_('only one config item permitted'))
1386 for section, name, value in ui.walkconfig():
1387 sectname = section + '.' + name
1388 if values:
1389 for v in values:
1390 if v == section:
1391 ui.write('%s=%s\n' % (sectname, value))
1392 elif v == sectname:
1393 ui.write(value, '\n')
1394 else:
1395 ui.write('%s=%s\n' % (sectname, value))
1396
1397 def debugsetparents(ui, repo, rev1, rev2=None):
1398 """manually set the parents of the current working directory
1399
1400 This is useful for writing repository conversion tools, but should
1401 be used with care.
1402 """
1403
1404 if not rev2:
1405 rev2 = hex(nullid)
1406
1407 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1408
1409 def debugstate(ui, repo):
1410 """show the contents of the current dirstate"""
1411 repo.dirstate.read()
1412 dc = repo.dirstate.map
1413 keys = dc.keys()
1414 keys.sort()
1415 for file_ in keys:
1416 ui.write("%c %3o %10d %s %s\n"
1417 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1418 time.strftime("%x %X",
1419 time.localtime(dc[file_][3])), file_))
1420 for f in repo.dirstate.copies:
1421 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1422
1423 def debugdata(ui, file_, rev):
1424 """dump the contents of an data file revision"""
1424 """dump the contents of an data file revision"""
1425 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1426 file_[:-2] + ".i", file_, 0)
1427 try:
1428 ui.write(r.revision(r.lookup(rev)))
1429 except KeyError:
1430 raise util.Abort(_('invalid revision identifier %s'), rev)
1431
1432 def debugindex(ui, file_):
1433 """dump the contents of an index file"""
1434 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1435 ui.write(" rev offset length base linkrev" +
1436 " nodeid p1 p2\n")
1437 for i in range(r.count()):
1438 node = r.node(i)
1439 pp = r.parents(node)
1440 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1441 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1442 short(node), short(pp[0]), short(pp[1])))
1443
1444 def debugindexdot(ui, file_):
1445 """dump an index DAG as a .dot file"""
1446 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1447 ui.write("digraph G {\n")
1448 for i in range(r.count()):
1449 node = r.node(i)
1450 pp = r.parents(node)
1451 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1452 if pp[1] != nullid:
1453 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1454 ui.write("}\n")
1455
1456 def debugrename(ui, repo, file, rev=None):
1457 """dump rename information"""
1458 r = repo.file(relpath(repo, [file])[0])
1459 if rev:
1460 try:
1461 # assume all revision numbers are for changesets
1462 n = repo.lookup(rev)
1463 change = repo.changelog.read(n)
1464 m = repo.manifest.read(change[0])
1465 n = m[relpath(repo, [file])[0]]
1466 except (hg.RepoError, KeyError):
1467 n = r.lookup(rev)
1468 else:
1469 n = r.tip()
1470 m = r.renamed(n)
1471 if m:
1472 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1473 else:
1474 ui.write(_("not renamed\n"))
1475
1476 def debugwalk(ui, repo, *pats, **opts):
1477 """show how files match on given patterns"""
1478 items = list(walk(repo, pats, opts))
1479 if not items:
1480 return
1481 fmt = '%%s %%-%ds %%-%ds %%s' % (
1482 max([len(abs) for (src, abs, rel, exact) in items]),
1483 max([len(rel) for (src, abs, rel, exact) in items]))
1484 for src, abs, rel, exact in items:
1485 line = fmt % (src, abs, rel, exact and 'exact' or '')
1486 ui.write("%s\n" % line.rstrip())
1487
1488 def diff(ui, repo, *pats, **opts):
1489 """diff repository (or selected files)
1490
1491 Show differences between revisions for the specified files.
1492
1493 Differences between files are shown using the unified diff format.
1494
1495 When two revision arguments are given, then changes are shown
1496 between those revisions. If only one revision is specified then
1497 that revision is compared to the working directory, and, when no
1498 revisions are specified, the working directory files are compared
1499 to its parent.
1500
1501 Without the -a option, diff will avoid generating diffs of files
1502 it detects as binary. With -a, diff will generate a diff anyway,
1503 probably with undesirable results.
1504 """
1505 node1, node2 = revpair(ui, repo, opts['rev'])
1506
1507 fns, matchfn, anypats = matchpats(repo, pats, opts)
1508
1509 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1510 text=opts['text'], opts=opts)
1511
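# Editor's note (illustrative, not part of commands.py): the three forms
# described in the docstring above; revision numbers are placeholders.
#   hg diff                    # working directory against its parent
#   hg diff -r 1000            # working directory against revision 1000
#   hg diff -r 1000 -r 1010    # revision 1000 against revision 1010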
1512 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1513 node = repo.lookup(changeset)
1514 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1515 if opts['switch_parent']:
1516 parents.reverse()
1517 prev = (parents and parents[0]) or nullid
1518 change = repo.changelog.read(node)
1519
1520 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1521 revwidth=revwidth)
1522 if fp != sys.stdout:
1523 ui.note("%s\n" % fp.name)
1524
1525 fp.write("# HG changeset patch\n")
1526 fp.write("# User %s\n" % change[1])
1527 fp.write("# Date %d %d\n" % change[2])
1528 fp.write("# Node ID %s\n" % hex(node))
1529 fp.write("# Parent %s\n" % hex(prev))
1530 if len(parents) > 1:
1531 fp.write("# Parent %s\n" % hex(parents[1]))
1532 fp.write(change[4].rstrip())
1533 fp.write("\n\n")
1534
1535 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1536 if fp != sys.stdout:
1537 fp.close()
1538
1539 def export(ui, repo, *changesets, **opts):
1540 """dump the header and diffs for one or more changesets
1541
1542 Print the changeset header and diffs for one or more revisions.
1543
1544 The information shown in the changeset header is: author,
1545 changeset hash, parent and commit comment.
1546
1547 Output may be to a file, in which case the name of the file is
1548 given using a format string. The formatting rules are as follows:
1549
1550 %% literal "%" character
1551 %H changeset hash (40 hexadecimal digits)
1552 %N number of patches being generated
1553 %R changeset revision number
1554 %b basename of the exporting repository
1555 %h short-form changeset hash (12 hexadecimal digits)
1556 %n zero-padded sequence number, starting at 1
1557 %r zero-padded changeset revision number
1558
1559 Without the -a option, export will avoid generating diffs of files
1560 it detects as binary. With -a, export will generate a diff anyway,
1561 probably with undesirable results.
1562
1563 With the --switch-parent option, the diff will be against the second
1564 parent. It can be useful to review a merge.
1565 """
1566 if not changesets:
1567 raise util.Abort(_("export requires at least one changeset"))
1568 seqno = 0
1569 revs = list(revrange(ui, repo, changesets))
1570 total = len(revs)
1571 revwidth = max(map(len, revs))
1572 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1573 ui.note(msg)
1574 for cset in revs:
1575 seqno += 1
1576 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1577
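# Editor's note (illustrative, not part of commands.py): using the format
# string rules listed above; for revision 1000 whose short hash happened to
# be 0123456789ab (a made-up placeholder), this would write the patch to
# "1000-0123456789ab.patch". The -o/--output spelling is assumed here, since
# the command table is outside this excerpt.
#   hg export -o '%R-%h.patch' 1000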
1578 def forget(ui, repo, *pats, **opts):
1579 """don't add the specified files on the next commit (DEPRECATED)
1580
1581 (DEPRECATED)
1582 Undo an 'hg add' scheduled for the next commit.
1583
1584 This command is now deprecated and will be removed in a future
1585 release. Please use revert instead.
1586 """
1587 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1588 forget = []
1589 for src, abs, rel, exact in walk(repo, pats, opts):
1590 if repo.dirstate.state(abs) == 'a':
1591 forget.append(abs)
1592 if ui.verbose or not exact:
1593 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1594 repo.forget(forget)
1595
1596 def grep(ui, repo, pattern, *pats, **opts):
1597 """search for a pattern in specified files and revisions
1598
1599 Search revisions of files for a regular expression.
1600
1601 This command behaves differently than Unix grep. It only accepts
1602 Python/Perl regexps. It searches repository history, not the
1603 working directory. It always prints the revision number in which
1604 a match appears.
1605
1606 By default, grep only prints output for the first revision of a
1607 file in which it finds a match. To get it to print every revision
1608 that contains a change in match status ("-" for a match that
1609 becomes a non-match, or "+" for a non-match that becomes a match),
1610 use the --all flag.
1611 """
1612 reflags = 0
1613 if opts['ignore_case']:
1614 reflags |= re.I
1615 regexp = re.compile(pattern, reflags)
1616 sep, eol = ':', '\n'
1617 if opts['print0']:
1618 sep = eol = '\0'
1619
1620 fcache = {}
1621 def getfile(fn):
1622 if fn not in fcache:
1623 fcache[fn] = repo.file(fn)
1624 return fcache[fn]
1625
1626 def matchlines(body):
1627 begin = 0
1628 linenum = 0
1629 while True:
1630 match = regexp.search(body, begin)
1631 if not match:
1632 break
1633 mstart, mend = match.span()
1634 linenum += body.count('\n', begin, mstart) + 1
1635 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1636 lend = body.find('\n', mend)
1637 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1638 begin = lend + 1
1639
1640 class linestate(object):
1641 def __init__(self, line, linenum, colstart, colend):
1642 self.line = line
1643 self.linenum = linenum
1644 self.colstart = colstart
1645 self.colend = colend
1646 def __eq__(self, other):
1647 return self.line == other.line
1648 def __hash__(self):
1649 return hash(self.line)
1650
1651 matches = {}
1652 def grepbody(fn, rev, body):
1653 matches[rev].setdefault(fn, {})
1654 m = matches[rev][fn]
1655 for lnum, cstart, cend, line in matchlines(body):
1656 s = linestate(line, lnum, cstart, cend)
1657 m[s] = s
1658
1659 # FIXME: prev isn't used, why ?
1660 prev = {}
1661 ucache = {}
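# Editor's note: display() prints only the symmetric difference between the
# match sets of two neighbouring revisions, i.e. lines whose match status
# changed; `incrementing`, set from the 'window' events in the revision walk
# below, selects which of the two sets is treated as the older one when the
# '+'/'-' change markers are assigned.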
1662 def display(fn, rev, states, prevstates):
1663 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1664 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1665 counts = {'-': 0, '+': 0}
1666 filerevmatches = {}
1667 for l in diff:
1668 if incrementing or not opts['all']:
1669 change = ((l in prevstates) and '-') or '+'
1670 r = rev
1671 else:
1672 change = ((l in states) and '-') or '+'
1673 r = prev[fn]
1674 cols = [fn, str(rev)]
1675 if opts['line_number']:
1676 cols.append(str(l.linenum))
1677 if opts['all']:
1678 cols.append(change)
1679 if opts['user']:
1680 cols.append(trimuser(ui, getchange(rev)[1], rev,
1681 ucache))
1682 if opts['files_with_matches']:
1683 c = (fn, rev)
1684 if c in filerevmatches:
1685 continue
1686 filerevmatches[c] = 1
1687 else:
1688 cols.append(l.line)
1689 ui.write(sep.join(cols), eol)
1690 counts[change] += 1
1691 return counts['+'], counts['-']
1692
1693 fstate = {}
1694 skip = {}
1695 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1696 count = 0
1697 incrementing = False
1698 for st, rev, fns in changeiter:
1699 if st == 'window':
1700 incrementing = rev
1701 matches.clear()
1702 elif st == 'add':
1703 change = repo.changelog.read(repo.lookup(str(rev)))
1704 mf = repo.manifest.read(change[0])
1705 matches[rev] = {}
1706 for fn in fns:
1707 if fn in skip:
1708 continue
1709 fstate.setdefault(fn, {})
1710 try:
1711 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1712 except KeyError:
1713 pass
1714 elif st == 'iter':
1715 states = matches[rev].items()
1716 states.sort()
1717 for fn, m in states:
1718 if fn in skip:
1719 continue
1720 if incrementing or not opts['all'] or fstate[fn]:
1721 pos, neg = display(fn, rev, m, fstate[fn])
1722 count += pos + neg
1723 if pos and not opts['all']:
1724 skip[fn] = True
1725 fstate[fn] = m
1726 prev[fn] = rev
1727
1728 if not incrementing:
1729 fstate = fstate.items()
1730 fstate.sort()
1731 for fn, state in fstate:
1732 if fn in skip:
1733 continue
1734 display(fn, rev, {}, state)
1735 return (count == 0 and 1) or 0
1736
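# Editor's note (illustrative, not part of commands.py): by default only the
# first matching revision of each file is printed; --all shows every revision
# in which a line's match status changed, as described in the docstring above.
# The pattern and file name are placeholders.
#   hg grep "socket" hgweb.py
#   hg grep --all -n "socket" hgweb.py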
1737 def heads(ui, repo, **opts):
1737 def heads(ui, repo, **opts):
1738 """show current repository heads
1738 """show current repository heads
1739
1739
1740 Show all repository head changesets.
1740 Show all repository head changesets.
1741
1741
1742 Repository "heads" are changesets that don't have children
1742 Repository "heads" are changesets that don't have children
1743 changesets. They are where development generally takes place and
1743 changesets. They are where development generally takes place and
1744 are the usual targets for update and merge operations.
1744 are the usual targets for update and merge operations.
1745 """
1745 """
1746 if opts['rev']:
1746 if opts['rev']:
1747 heads = repo.heads(repo.lookup(opts['rev']))
1747 heads = repo.heads(repo.lookup(opts['rev']))
1748 else:
1748 else:
1749 heads = repo.heads()
1749 heads = repo.heads()
1750 br = None
1750 br = None
1751 if opts['branches']:
1751 if opts['branches']:
1752 br = repo.branchlookup(heads)
1752 br = repo.branchlookup(heads)
1753 displayer = show_changeset(ui, repo, opts)
1753 displayer = show_changeset(ui, repo, opts)
1754 for n in heads:
1754 for n in heads:
1755 displayer.show(changenode=n, brinfo=br)
1755 displayer.show(changenode=n, brinfo=br)
1756
1756
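# Illustrative sketch (simplified, not the real changelog code): the docstring
# above defines heads as changesets with no child changesets, i.e. nodes that
# no other node names as a parent.
def _example_find_heads(parentmap):
    # parentmap: dict mapping node -> list of parent nodes
    referenced = {}
    for ps in parentmap.values():
        for p in ps:
            referenced[p] = True
    return [n for n in parentmap if n not in referenced]

# _example_find_heads({'a': [], 'b': ['a'], 'c': ['a']}) -> ['b', 'c'] in some
# order: 'a' is a parent of both 'b' and 'c', so only 'b' and 'c' are heads.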
1757 def identify(ui, repo):
1757 def identify(ui, repo):
1758 """print information about the working copy
1758 """print information about the working copy
1759
1759
1760 Print a short summary of the current state of the repo.
1760 Print a short summary of the current state of the repo.
1761
1761
1762 This summary identifies the repository state using one or two parent
1762 This summary identifies the repository state using one or two parent
1763 hash identifiers, followed by a "+" if there are uncommitted changes
1763 hash identifiers, followed by a "+" if there are uncommitted changes
1764 in the working directory, followed by a list of tags for this revision.
1764 in the working directory, followed by a list of tags for this revision.
1765 """
1765 """
1766 parents = [p for p in repo.dirstate.parents() if p != nullid]
1766 parents = [p for p in repo.dirstate.parents() if p != nullid]
1767 if not parents:
1767 if not parents:
1768 ui.write(_("unknown\n"))
1768 ui.write(_("unknown\n"))
1769 return
1769 return
1770
1770
1771 hexfunc = ui.verbose and hex or short
1771 hexfunc = ui.verbose and hex or short
1772 modified, added, removed, deleted, unknown = repo.changes()
1772 modified, added, removed, deleted, unknown = repo.changes()
1773 output = ["%s%s" %
1773 output = ["%s%s" %
1774 ('+'.join([hexfunc(parent) for parent in parents]),
1774 ('+'.join([hexfunc(parent) for parent in parents]),
1775 (modified or added or removed or deleted) and "+" or "")]
1775 (modified or added or removed or deleted) and "+" or "")]
1776
1776
1777 if not ui.quiet:
1777 if not ui.quiet:
1778 # multiple tags for a single parent separated by '/'
1778 # multiple tags for a single parent separated by '/'
1779 parenttags = ['/'.join(tags)
1779 parenttags = ['/'.join(tags)
1780 for tags in map(repo.nodetags, parents) if tags]
1780 for tags in map(repo.nodetags, parents) if tags]
1781 # tags for multiple parents separated by ' + '
1781 # tags for multiple parents separated by ' + '
1782 if parenttags:
1782 if parenttags:
1783 output.append(' + '.join(parenttags))
1783 output.append(' + '.join(parenttags))
1784
1784
1785 ui.write("%s\n" % ' '.join(output))
1785 ui.write("%s\n" % ' '.join(output))
1786
1786
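# Illustrative sketch (assumed helper, not part of the original file): how the
# identify output assembled above looks -- parent hashes joined with '+', a
# trailing '+' when the working directory has uncommitted changes, then tags.
def _example_ident_string(parent_hashes, dirty, tags=None):
    out = '+'.join(parent_hashes) + (dirty and '+' or '')
    if tags:
        out = out + ' ' + '/'.join(tags)
    return out

# _example_ident_string(['9a3b2c1d0e4f'], True, ['tip']) -> '9a3b2c1d0e4f+ tip'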
1787 def import_(ui, repo, patch1, *patches, **opts):
1787 def import_(ui, repo, patch1, *patches, **opts):
1788 """import an ordered set of patches
1788 """import an ordered set of patches
1789
1789
1790 Import a list of patches and commit them individually.
1790 Import a list of patches and commit them individually.
1791
1791
1792 If there are outstanding changes in the working directory, import
1792 If there are outstanding changes in the working directory, import
1793 will abort unless given the -f flag.
1793 will abort unless given the -f flag.
1794
1794
1795 You can import a patch straight from a mail message. Even patches
1795 You can import a patch straight from a mail message. Even patches
1796 as attachments work (the body part must be of type text/plain,
1796 as attachments work (the body part must be of type text/plain,
1797 text/x-diff or text/x-patch to be used). The From and Subject
1797 text/x-diff or text/x-patch to be used). The From and Subject
1798 headers of the email message are used as the default committer and
1798 headers of the email message are used as the default committer and
1799 commit message. All text/plain body parts before the first diff are
1799 commit message. All text/plain body parts before the first diff are
1800 added to the commit message.
1800 added to the commit message.
1801
1801
1802 If the imported patch was generated by hg export, the user and
1802 If the imported patch was generated by hg export, the user and
1803 description from the patch override values from the message headers
1803 description from the patch override values from the message headers
1804 and body. Values given on the command line with -m and -u override these.
1804 and body. Values given on the command line with -m and -u override these.
1805
1805
1806 To read a patch from standard input, use patch name "-".
1806 To read a patch from standard input, use patch name "-".
1807 """
1807 """
1808 patches = (patch1,) + patches
1808 patches = (patch1,) + patches
1809
1809
1810 if not opts['force']:
1810 if not opts['force']:
1811 bail_if_changed(repo)
1811 bail_if_changed(repo)
1812
1812
1813 d = opts["base"]
1813 d = opts["base"]
1814 strip = opts["strip"]
1814 strip = opts["strip"]
1815
1815
1816 mailre = re.compile(r'(?:From |[\w-]+:)')
1816 mailre = re.compile(r'(?:From |[\w-]+:)')
1817
1817
1818 # attempt to detect the start of a patch
1818 # attempt to detect the start of a patch
1819 # (this heuristic is borrowed from quilt)
1819 # (this heuristic is borrowed from quilt)
1820 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1820 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1821 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1821 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1822 '(---|\*\*\*)[ \t])', re.MULTILINE)
1822 '(---|\*\*\*)[ \t])', re.MULTILINE)
1823
1823
1824 for patch in patches:
1824 for patch in patches:
1825 pf = os.path.join(d, patch)
1825 pf = os.path.join(d, patch)
1826
1826
1827 message = None
1827 message = None
1828 user = None
1828 user = None
1829 date = None
1829 date = None
1830 hgpatch = False
1830 hgpatch = False
1831
1831
1832 p = email.Parser.Parser()
1832 p = email.Parser.Parser()
1833 if pf == '-':
1833 if pf == '-':
1834 msg = p.parse(sys.stdin)
1834 msg = p.parse(sys.stdin)
1835 ui.status(_("applying patch from stdin\n"))
1835 ui.status(_("applying patch from stdin\n"))
1836 else:
1836 else:
1837 msg = p.parse(file(pf))
1837 msg = p.parse(file(pf))
1838 ui.status(_("applying %s\n") % patch)
1838 ui.status(_("applying %s\n") % patch)
1839
1839
1840 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1840 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1841 tmpfp = os.fdopen(fd, 'w')
1841 tmpfp = os.fdopen(fd, 'w')
1842 try:
1842 try:
1843 message = msg['Subject']
1843 message = msg['Subject']
1844 if message:
1844 if message:
1845 message = message.replace('\n\t', ' ')
1845 message = message.replace('\n\t', ' ')
1846 ui.debug('Subject: %s\n' % message)
1846 ui.debug('Subject: %s\n' % message)
1847 user = msg['From']
1847 user = msg['From']
1848 if user:
1848 if user:
1849 ui.debug('From: %s\n' % user)
1849 ui.debug('From: %s\n' % user)
1850 diffs_seen = 0
1850 diffs_seen = 0
1851 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1851 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1852 for part in msg.walk():
1852 for part in msg.walk():
1853 content_type = part.get_content_type()
1853 content_type = part.get_content_type()
1854 ui.debug('Content-Type: %s\n' % content_type)
1854 ui.debug('Content-Type: %s\n' % content_type)
1855 if content_type not in ok_types:
1855 if content_type not in ok_types:
1856 continue
1856 continue
1857 payload = part.get_payload(decode=True)
1857 payload = part.get_payload(decode=True)
1858 m = diffre.search(payload)
1858 m = diffre.search(payload)
1859 if m:
1859 if m:
1860 ui.debug(_('found patch at byte %d\n') % m.start(0))
1860 ui.debug(_('found patch at byte %d\n') % m.start(0))
1861 diffs_seen += 1
1861 diffs_seen += 1
1862 hgpatch = False
1862 hgpatch = False
1863 fp = cStringIO.StringIO()
1863 fp = cStringIO.StringIO()
1864 if message:
1864 if message:
1865 fp.write(message)
1865 fp.write(message)
1866 fp.write('\n')
1866 fp.write('\n')
1867 for line in payload[:m.start(0)].splitlines():
1867 for line in payload[:m.start(0)].splitlines():
1868 if line.startswith('# HG changeset patch'):
1868 if line.startswith('# HG changeset patch'):
1869 ui.debug(_('patch generated by hg export\n'))
1869 ui.debug(_('patch generated by hg export\n'))
1870 hgpatch = True
1870 hgpatch = True
1871 # drop earlier commit message content
1871 # drop earlier commit message content
1872 fp.seek(0)
1872 fp.seek(0)
1873 fp.truncate()
1873 fp.truncate()
1874 elif hgpatch:
1874 elif hgpatch:
1875 if line.startswith('# User '):
1875 if line.startswith('# User '):
1876 user = line[7:]
1876 user = line[7:]
1877 ui.debug('From: %s\n' % user)
1877 ui.debug('From: %s\n' % user)
1878 elif line.startswith("# Date "):
1878 elif line.startswith("# Date "):
1879 date = line[7:]
1879 date = line[7:]
1880 if not line.startswith('# '):
1880 if not line.startswith('# '):
1881 fp.write(line)
1881 fp.write(line)
1882 fp.write('\n')
1882 fp.write('\n')
1883 message = fp.getvalue()
1883 message = fp.getvalue()
1884 if tmpfp:
1884 if tmpfp:
1885 tmpfp.write(payload)
1885 tmpfp.write(payload)
1886 if not payload.endswith('\n'):
1886 if not payload.endswith('\n'):
1887 tmpfp.write('\n')
1887 tmpfp.write('\n')
1888 elif not diffs_seen and message and content_type == 'text/plain':
1888 elif not diffs_seen and message and content_type == 'text/plain':
1889 message += '\n' + payload
1889 message += '\n' + payload
1890
1890
1891 if opts['message']:
1891 if opts['message']:
1892 # pickup the cmdline msg
1892 # pickup the cmdline msg
1893 message = opts['message']
1893 message = opts['message']
1894 elif message:
1894 elif message:
1895 # pickup the patch msg
1895 # pickup the patch msg
1896 message = message.strip()
1896 message = message.strip()
1897 else:
1897 else:
1898 # launch the editor
1898 # launch the editor
1899 message = None
1899 message = None
1900 ui.debug(_('message:\n%s\n') % message)
1900 ui.debug(_('message:\n%s\n') % message)
1901
1901
1902 tmpfp.close()
1902 tmpfp.close()
1903 if not diffs_seen:
1903 if not diffs_seen:
1904 raise util.Abort(_('no diffs found'))
1904 raise util.Abort(_('no diffs found'))
1905
1905
1906 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1906 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1907 if len(files) > 0:
1907 if len(files) > 0:
1908 cfiles = files
1908 cfiles = files
1909 cwd = repo.getcwd()
1909 cwd = repo.getcwd()
1910 if cwd:
1910 if cwd:
1911 cfiles = [util.pathto(cwd, f) for f in files]
1911 cfiles = [util.pathto(cwd, f) for f in files]
1912 addremove_lock(ui, repo, cfiles, {})
1912 addremove_lock(ui, repo, cfiles, {})
1913 repo.commit(files, message, user, date)
1913 repo.commit(files, message, user, date)
1914 finally:
1914 finally:
1915 os.unlink(tmpname)
1915 os.unlink(tmpname)
1916
1916
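# Illustrative sketch, not from the original source: the loop above recognizes
# an "# HG changeset patch" header and pulls the user and date out of the
# "# User " / "# Date " lines that precede the diff.
def _example_parse_hgpatch_header(lines):
    user = date = None
    hgpatch = False
    for line in lines:
        if line.startswith('# HG changeset patch'):
            hgpatch = True
        elif hgpatch and line.startswith('# User '):
            user = line[7:]
        elif hgpatch and line.startswith('# Date '):
            date = line[7:]
        if not line.startswith('# '):
            break
    return user, date

# _example_parse_hgpatch_header(['# HG changeset patch',
#                                '# User someone@example.com',
#                                '# Date 1125044488 25200',
#                                'diff -r 000000000000 foo']) returns
# ('someone@example.com', '1125044488 25200')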
1917 def incoming(ui, repo, source="default", **opts):
1917 def incoming(ui, repo, source="default", **opts):
1918 """show new changesets found in source
1918 """show new changesets found in source
1919
1919
1920 Show new changesets found in the specified path/URL or the default
1920 Show new changesets found in the specified path/URL or the default
1921 pull location. These are the changesets that would be pulled if a pull
1921 pull location. These are the changesets that would be pulled if a pull
1922 was requested.
1922 was requested.
1923
1923
1924 For a remote repository, using --bundle avoids downloading the
1924 For a remote repository, using --bundle avoids downloading the
1925 changesets twice if the incoming command is followed by a pull.
1925 changesets twice if the incoming command is followed by a pull.
1926
1926
1927 See pull for valid source format details.
1927 See pull for valid source format details.
1928 """
1928 """
1929 source = ui.expandpath(source)
1929 source = ui.expandpath(source)
1930 setremoteconfig(ui, opts)
1930 setremoteconfig(ui, opts)
1931
1931
1932 other = hg.repository(ui, source)
1932 other = hg.repository(ui, source)
1933 incoming = repo.findincoming(other, force=opts["force"])
1933 incoming = repo.findincoming(other, force=opts["force"])
1934 if not incoming:
1934 if not incoming:
1935 ui.status(_("no changes found\n"))
1935 ui.status(_("no changes found\n"))
1936 return
1936 return
1937
1937
1938 cleanup = None
1938 cleanup = None
1939 try:
1939 try:
1940 fname = opts["bundle"]
1940 fname = opts["bundle"]
1941 if fname or not other.local():
1941 if fname or not other.local():
1942 # create a bundle (uncompressed if other repo is not local)
1942 # create a bundle (uncompressed if other repo is not local)
1943 cg = other.changegroup(incoming, "incoming")
1943 cg = other.changegroup(incoming, "incoming")
1944 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1944 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1945 # keep written bundle?
1945 # keep written bundle?
1946 if opts["bundle"]:
1946 if opts["bundle"]:
1947 cleanup = None
1947 cleanup = None
1948 if not other.local():
1948 if not other.local():
1949 # use the created uncompressed bundlerepo
1949 # use the created uncompressed bundlerepo
1950 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1950 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1951
1951
1952 revs = None
1952 revs = None
1953 if opts['rev']:
1953 if opts['rev']:
1954 revs = [other.lookup(rev) for rev in opts['rev']]
1954 revs = [other.lookup(rev) for rev in opts['rev']]
1955 o = other.changelog.nodesbetween(incoming, revs)[0]
1955 o = other.changelog.nodesbetween(incoming, revs)[0]
1956 if opts['newest_first']:
1956 if opts['newest_first']:
1957 o.reverse()
1957 o.reverse()
1958 displayer = show_changeset(ui, other, opts)
1958 displayer = show_changeset(ui, other, opts)
1959 for n in o:
1959 for n in o:
1960 parents = [p for p in other.changelog.parents(n) if p != nullid]
1960 parents = [p for p in other.changelog.parents(n) if p != nullid]
1961 if opts['no_merges'] and len(parents) == 2:
1961 if opts['no_merges'] and len(parents) == 2:
1962 continue
1962 continue
1963 displayer.show(changenode=n)
1963 displayer.show(changenode=n)
1964 if opts['patch']:
1964 if opts['patch']:
1965 prev = (parents and parents[0]) or nullid
1965 prev = (parents and parents[0]) or nullid
1966 dodiff(ui, ui, other, prev, n)
1966 dodiff(ui, ui, other, prev, n)
1967 ui.write("\n")
1967 ui.write("\n")
1968 finally:
1968 finally:
1969 if hasattr(other, 'close'):
1969 if hasattr(other, 'close'):
1970 other.close()
1970 other.close()
1971 if cleanup:
1971 if cleanup:
1972 os.unlink(cleanup)
1972 os.unlink(cleanup)
1973
1973
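# Illustrative sketch (assumed callables 'write' and 'use', simplified from
# the try/finally above): the temporary bundle written for a remote incoming
# is deleted afterwards unless the user explicitly asked to keep it with
# --bundle.
def _example_with_temp_bundle(requested_name, write, use):
    cleanup = None
    try:
        fname = write(requested_name)    # returns the file actually written
        if not requested_name:
            cleanup = fname              # temporary file: remove when done
        use(fname)
    finally:
        if cleanup:
            os.unlink(cleanup)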
1974 def init(ui, dest=".", **opts):
1974 def init(ui, dest=".", **opts):
1975 """create a new repository in the given directory
1975 """create a new repository in the given directory
1976
1976
1977 Initialize a new repository in the given directory. If the given
1977 Initialize a new repository in the given directory. If the given
1978 directory does not exist, it is created.
1978 directory does not exist, it is created.
1979
1979
1980 If no directory is given, the current directory is used.
1980 If no directory is given, the current directory is used.
1981
1981
1982 It is possible to specify an ssh:// URL as the destination.
1982 It is possible to specify an ssh:// URL as the destination.
1983 Look at the help text for the pull command for important details
1983 Look at the help text for the pull command for important details
1984 about ssh:// URLs.
1984 about ssh:// URLs.
1985 """
1985 """
1986 setremoteconfig(ui, opts)
1986 setremoteconfig(ui, opts)
1987 hg.repository(ui, dest, create=1)
1987 hg.repository(ui, dest, create=1)
1988
1988
1989 def locate(ui, repo, *pats, **opts):
1989 def locate(ui, repo, *pats, **opts):
1990 """locate files matching specific patterns
1990 """locate files matching specific patterns
1991
1991
1992 Print all files under Mercurial control whose names match the
1992 Print all files under Mercurial control whose names match the
1993 given patterns.
1993 given patterns.
1994
1994
1995 This command searches the current directory and its
1995 This command searches the current directory and its
1996 subdirectories. To search an entire repository, move to the root
1996 subdirectories. To search an entire repository, move to the root
1997 of the repository.
1997 of the repository.
1998
1998
1999 If no patterns are given to match, this command prints all file
1999 If no patterns are given to match, this command prints all file
2000 names.
2000 names.
2001
2001
2002 If you want to feed the output of this command into the "xargs"
2002 If you want to feed the output of this command into the "xargs"
2003 command, use the "-0" option to both this command and "xargs".
2003 command, use the "-0" option to both this command and "xargs".
2004 This will avoid the problem of "xargs" treating single filenames
2004 This will avoid the problem of "xargs" treating single filenames
2005 that contain white space as multiple filenames.
2005 that contain white space as multiple filenames.
2006 """
2006 """
2007 end = opts['print0'] and '\0' or '\n'
2007 end = opts['print0'] and '\0' or '\n'
2008 rev = opts['rev']
2008 rev = opts['rev']
2009 if rev:
2009 if rev:
2010 node = repo.lookup(rev)
2010 node = repo.lookup(rev)
2011 else:
2011 else:
2012 node = None
2012 node = None
2013
2013
2014 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2014 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2015 head='(?:.*/|)'):
2015 head='(?:.*/|)'):
2016 if not node and repo.dirstate.state(abs) == '?':
2016 if not node and repo.dirstate.state(abs) == '?':
2017 continue
2017 continue
2018 if opts['fullpath']:
2018 if opts['fullpath']:
2019 ui.write(os.path.join(repo.root, abs), end)
2019 ui.write(os.path.join(repo.root, abs), end)
2020 else:
2020 else:
2021 ui.write(((pats and rel) or abs), end)
2021 ui.write(((pats and rel) or abs), end)
2022
2022
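# Illustrative sketch, not part of the original module: with --print0 the
# file names printed above are terminated by NUL bytes instead of newlines,
# which is what lets "xargs -0" handle names containing whitespace.
def _example_join_names(names, print0):
    end = print0 and '\0' or '\n'
    return ''.join([n + end for n in names])

# _example_join_names(['a b', 'c'], True) -> 'a b\x00c\x00'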
2023 def log(ui, repo, *pats, **opts):
2023 def log(ui, repo, *pats, **opts):
2024 """show revision history of entire repository or files
2024 """show revision history of entire repository or files
2025
2025
2026 Print the revision history of the specified files or the entire
2026 Print the revision history of the specified files or the entire
2027 project.
2027 project.
2028
2028
2029 File history is shown without following rename or copy history of
2029 File history is shown without following rename or copy history of
2030 files. Use -f/--follow with a file name to follow history across
2030 files. Use -f/--follow with a file name to follow history across
2031 renames and copies. --follow without a file name will only show
2031 renames and copies. --follow without a file name will only show
2032 ancestors or descendants of the starting revision. --follow-first
2032 ancestors or descendants of the starting revision. --follow-first
2033 only follows the first parent of merge revisions.
2033 only follows the first parent of merge revisions.
2034
2034
2035 If no revision range is specified, the default is tip:0 unless
2035 If no revision range is specified, the default is tip:0 unless
2036 --follow is set, in which case the working directory parent is
2036 --follow is set, in which case the working directory parent is
2037 used as the starting revision.
2037 used as the starting revision.
2038
2038
2039 By default this command outputs: changeset id and hash, tags,
2039 By default this command outputs: changeset id and hash, tags,
2040 non-trivial parents, user, date and time, and a summary for each
2040 non-trivial parents, user, date and time, and a summary for each
2041 commit. When the -v/--verbose switch is used, the list of changed
2041 commit. When the -v/--verbose switch is used, the list of changed
2042 files and full commit message is shown.
2042 files and full commit message is shown.
2043 """
2043 """
2044 class dui(object):
2044 class dui(object):
2045 # Implement and delegate some ui protocol. Save hunks of
2045 # Implement and delegate some ui protocol. Save hunks of
2046 # output for later display in the desired order.
2046 # output for later display in the desired order.
2047 def __init__(self, ui):
2047 def __init__(self, ui):
2048 self.ui = ui
2048 self.ui = ui
2049 self.hunk = {}
2049 self.hunk = {}
2050 self.header = {}
2050 self.header = {}
2051 def bump(self, rev):
2051 def bump(self, rev):
2052 self.rev = rev
2052 self.rev = rev
2053 self.hunk[rev] = []
2053 self.hunk[rev] = []
2054 self.header[rev] = []
2054 self.header[rev] = []
2055 def note(self, *args):
2055 def note(self, *args):
2056 if self.verbose:
2056 if self.verbose:
2057 self.write(*args)
2057 self.write(*args)
2058 def status(self, *args):
2058 def status(self, *args):
2059 if not self.quiet:
2059 if not self.quiet:
2060 self.write(*args)
2060 self.write(*args)
2061 def write(self, *args):
2061 def write(self, *args):
2062 self.hunk[self.rev].append(args)
2062 self.hunk[self.rev].append(args)
2063 def write_header(self, *args):
2063 def write_header(self, *args):
2064 self.header[self.rev].append(args)
2064 self.header[self.rev].append(args)
2065 def debug(self, *args):
2065 def debug(self, *args):
2066 if self.debugflag:
2066 if self.debugflag:
2067 self.write(*args)
2067 self.write(*args)
2068 def __getattr__(self, key):
2068 def __getattr__(self, key):
2069 return getattr(self.ui, key)
2069 return getattr(self.ui, key)
2070
2070
2071 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2071 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2072
2072
2073 if opts['limit']:
2073 if opts['limit']:
2074 try:
2074 try:
2075 limit = int(opts['limit'])
2075 limit = int(opts['limit'])
2076 except ValueError:
2076 except ValueError:
2077 raise util.Abort(_('limit must be a positive integer'))
2077 raise util.Abort(_('limit must be a positive integer'))
2078 if limit <= 0: raise util.Abort(_('limit must be positive'))
2078 if limit <= 0: raise util.Abort(_('limit must be positive'))
2079 else:
2079 else:
2080 limit = sys.maxint
2080 limit = sys.maxint
2081 count = 0
2081 count = 0
2082
2082
2083 displayer = show_changeset(ui, repo, opts)
2083 displayer = show_changeset(ui, repo, opts)
2084 for st, rev, fns in changeiter:
2084 for st, rev, fns in changeiter:
2085 if st == 'window':
2085 if st == 'window':
2086 du = dui(ui)
2086 du = dui(ui)
2087 displayer.ui = du
2087 displayer.ui = du
2088 elif st == 'add':
2088 elif st == 'add':
2089 du.bump(rev)
2089 du.bump(rev)
2090 changenode = repo.changelog.node(rev)
2090 changenode = repo.changelog.node(rev)
2091 parents = [p for p in repo.changelog.parents(changenode)
2091 parents = [p for p in repo.changelog.parents(changenode)
2092 if p != nullid]
2092 if p != nullid]
2093 if opts['no_merges'] and len(parents) == 2:
2093 if opts['no_merges'] and len(parents) == 2:
2094 continue
2094 continue
2095 if opts['only_merges'] and len(parents) != 2:
2095 if opts['only_merges'] and len(parents) != 2:
2096 continue
2096 continue
2097
2097
2098 if opts['keyword']:
2098 if opts['keyword']:
2099 changes = getchange(rev)
2099 changes = getchange(rev)
2100 miss = 0
2100 miss = 0
2101 for k in [kw.lower() for kw in opts['keyword']]:
2101 for k in [kw.lower() for kw in opts['keyword']]:
2102 if not (k in changes[1].lower() or
2102 if not (k in changes[1].lower() or
2103 k in changes[4].lower() or
2103 k in changes[4].lower() or
2104 k in " ".join(changes[3][:20]).lower()):
2104 k in " ".join(changes[3][:20]).lower()):
2105 miss = 1
2105 miss = 1
2106 break
2106 break
2107 if miss:
2107 if miss:
2108 continue
2108 continue
2109
2109
2110 br = None
2110 br = None
2111 if opts['branches']:
2111 if opts['branches']:
2112 br = repo.branchlookup([repo.changelog.node(rev)])
2112 br = repo.branchlookup([repo.changelog.node(rev)])
2113
2113
2114 displayer.show(rev, brinfo=br)
2114 displayer.show(rev, brinfo=br)
2115 if opts['patch']:
2115 if opts['patch']:
2116 prev = (parents and parents[0]) or nullid
2116 prev = (parents and parents[0]) or nullid
2117 dodiff(du, du, repo, prev, changenode, match=matchfn)
2117 dodiff(du, du, repo, prev, changenode, match=matchfn)
2118 du.write("\n\n")
2118 du.write("\n\n")
2119 elif st == 'iter':
2119 elif st == 'iter':
2120 if count == limit: break
2120 if count == limit: break
2121 if du.header[rev]:
2121 if du.header[rev]:
2122 for args in du.header[rev]:
2122 for args in du.header[rev]:
2123 ui.write_header(*args)
2123 ui.write_header(*args)
2124 if du.hunk[rev]:
2124 if du.hunk[rev]:
2125 count += 1
2125 count += 1
2126 for args in du.hunk[rev]:
2126 for args in du.hunk[rev]:
2127 ui.write(*args)
2127 ui.write(*args)
2128
2128
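# Illustrative sketch (simplified from the dui class above, not the real ui
# protocol): output is buffered per revision so that a whole window of
# revisions can be replayed in the desired order once it has been walked.
class _ExampleBufferedOutput(object):
    def __init__(self):
        self.hunk = {}
        self.rev = None
    def bump(self, rev):
        self.rev = rev
        self.hunk[rev] = []
    def write(self, text):
        self.hunk[self.rev].append(text)
    def replay(self, revs, writefn):
        for rev in revs:
            for text in self.hunk.get(rev, []):
                writefn(text)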
2129 def manifest(ui, repo, rev=None):
2129 def manifest(ui, repo, rev=None):
2130 """output the latest or given revision of the project manifest
2130 """output the latest or given revision of the project manifest
2131
2131
2132 Print a list of version controlled files for the given revision.
2132 Print a list of version controlled files for the given revision.
2133
2133
2134 The manifest is the list of files being version controlled. If no revision
2134 The manifest is the list of files being version controlled. If no revision
2135 is given then the tip is used.
2135 is given then the tip is used.
2136 """
2136 """
2137 if rev:
2137 if rev:
2138 try:
2138 try:
2139 # assume all revision numbers are for changesets
2139 # assume all revision numbers are for changesets
2140 n = repo.lookup(rev)
2140 n = repo.lookup(rev)
2141 change = repo.changelog.read(n)
2141 change = repo.changelog.read(n)
2142 n = change[0]
2142 n = change[0]
2143 except hg.RepoError:
2143 except hg.RepoError:
2144 n = repo.manifest.lookup(rev)
2144 n = repo.manifest.lookup(rev)
2145 else:
2145 else:
2146 n = repo.manifest.tip()
2146 n = repo.manifest.tip()
2147 m = repo.manifest.read(n)
2147 m = repo.manifest.read(n)
2148 mf = repo.manifest.readflags(n)
2148 mf = repo.manifest.readflags(n)
2149 files = m.keys()
2149 files = m.keys()
2150 files.sort()
2150 files.sort()
2151
2151
2152 for f in files:
2152 for f in files:
2153 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2153 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2154
2154
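# Illustrative sketch, not from the original source: each manifest line
# printed above is a 40-column hex node id, a mode derived from the
# executable flag, and the file name.
def _example_manifest_line(hexnode, is_exec, name):
    return "%40s %3s %s" % (hexnode, is_exec and "755" or "644", name)

# _example_manifest_line('0' * 40, False, 'README') ends with ' 644 README'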
2155 def merge(ui, repo, node=None, **opts):
2155 def merge(ui, repo, node=None, **opts):
2156 """Merge working directory with another revision
2156 """Merge working directory with another revision
2157
2157
2158 Merge the contents of the current working directory and the
2158 Merge the contents of the current working directory and the
2159 requested revision. Files that changed with respect to either parent are
2159 requested revision. Files that changed with respect to either parent are
2160 marked as changed for the next commit and a commit must be
2160 marked as changed for the next commit and a commit must be
2161 performed before any further updates are allowed.
2161 performed before any further updates are allowed.
2162 """
2162 """
2163 return doupdate(ui, repo, node=node, merge=True, **opts)
2163 return doupdate(ui, repo, node=node, merge=True, **opts)
2164
2164
2165 def outgoing(ui, repo, dest=None, **opts):
2165 def outgoing(ui, repo, dest=None, **opts):
2166 """show changesets not found in destination
2166 """show changesets not found in destination
2167
2167
2168 Show changesets not found in the specified destination repository or
2168 Show changesets not found in the specified destination repository or
2169 the default push location. These are the changesets that would be pushed
2169 the default push location. These are the changesets that would be pushed
2170 if a push was requested.
2170 if a push was requested.
2171
2171
2172 See pull for valid destination format details.
2172 See pull for valid destination format details.
2173 """
2173 """
2174 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2174 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2175 setremoteconfig(ui, opts)
2175 setremoteconfig(ui, opts)
2176 revs = None
2176 revs = None
2177 if opts['rev']:
2177 if opts['rev']:
2178 revs = [repo.lookup(rev) for rev in opts['rev']]
2178 revs = [repo.lookup(rev) for rev in opts['rev']]
2179
2179
2180 other = hg.repository(ui, dest)
2180 other = hg.repository(ui, dest)
2181 o = repo.findoutgoing(other, force=opts['force'])
2181 o = repo.findoutgoing(other, force=opts['force'])
2182 if not o:
2182 if not o:
2183 ui.status(_("no changes found\n"))
2183 ui.status(_("no changes found\n"))
2184 return
2184 return
2185 o = repo.changelog.nodesbetween(o, revs)[0]
2185 o = repo.changelog.nodesbetween(o, revs)[0]
2186 if opts['newest_first']:
2186 if opts['newest_first']:
2187 o.reverse()
2187 o.reverse()
2188 displayer = show_changeset(ui, repo, opts)
2188 displayer = show_changeset(ui, repo, opts)
2189 for n in o:
2189 for n in o:
2190 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2190 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2191 if opts['no_merges'] and len(parents) == 2:
2191 if opts['no_merges'] and len(parents) == 2:
2192 continue
2192 continue
2193 displayer.show(changenode=n)
2193 displayer.show(changenode=n)
2194 if opts['patch']:
2194 if opts['patch']:
2195 prev = (parents and parents[0]) or nullid
2195 prev = (parents and parents[0]) or nullid
2196 dodiff(ui, ui, repo, prev, n)
2196 dodiff(ui, ui, repo, prev, n)
2197 ui.write("\n")
2197 ui.write("\n")
2198
2198
2199 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2199 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2200 """show the parents of the working dir or revision
2200 """show the parents of the working dir or revision
2201
2201
2202 Print the working directory's parent revisions.
2202 Print the working directory's parent revisions.
2203 """
2203 """
2204 # legacy
2204 # legacy
2205 if file_ and not rev:
2205 if file_ and not rev:
2206 try:
2206 try:
2207 rev = repo.lookup(file_)
2207 rev = repo.lookup(file_)
2208 file_ = None
2208 file_ = None
2209 except hg.RepoError:
2209 except hg.RepoError:
2210 pass
2210 pass
2211 else:
2211 else:
2212 ui.warn(_("'hg parent REV' is deprecated, "
2212 ui.warn(_("'hg parent REV' is deprecated, "
2213 "please use 'hg parents -r REV instead\n"))
2213 "please use 'hg parents -r REV instead\n"))
2214
2214
2215 if rev:
2215 if rev:
2216 if file_:
2216 if file_:
2217 ctx = repo.filectx(file_, changeid=rev)
2217 ctx = repo.filectx(file_, changeid=rev)
2218 else:
2218 else:
2219 ctx = repo.changectx(rev)
2219 ctx = repo.changectx(rev)
2220 p = [cp.node() for cp in ctx.parents()]
2220 p = [cp.node() for cp in ctx.parents()]
2221 else:
2221 else:
2222 p = repo.dirstate.parents()
2222 p = repo.dirstate.parents()
2223
2223
2224 br = None
2224 br = None
2225 if branches is not None:
2225 if branches is not None:
2226 br = repo.branchlookup(p)
2226 br = repo.branchlookup(p)
2227 displayer = show_changeset(ui, repo, opts)
2227 displayer = show_changeset(ui, repo, opts)
2228 for n in p:
2228 for n in p:
2229 if n != nullid:
2229 if n != nullid:
2230 displayer.show(changenode=n, brinfo=br)
2230 displayer.show(changenode=n, brinfo=br)
2231
2231
2232 def paths(ui, repo, search=None):
2232 def paths(ui, repo, search=None):
2233 """show definition of symbolic path names
2233 """show definition of symbolic path names
2234
2234
2235 Show definition of symbolic path name NAME. If no name is given, show
2235 Show definition of symbolic path name NAME. If no name is given, show
2236 definition of available names.
2236 definition of available names.
2237
2237
2238 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2238 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2239 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2239 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2240 """
2240 """
2241 if search:
2241 if search:
2242 for name, path in ui.configitems("paths"):
2242 for name, path in ui.configitems("paths"):
2243 if name == search:
2243 if name == search:
2244 ui.write("%s\n" % path)
2244 ui.write("%s\n" % path)
2245 return
2245 return
2246 ui.warn(_("not found!\n"))
2246 ui.warn(_("not found!\n"))
2247 return 1
2247 return 1
2248 else:
2248 else:
2249 for name, path in ui.configitems("paths"):
2249 for name, path in ui.configitems("paths"):
2250 ui.write("%s = %s\n" % (name, path))
2250 ui.write("%s = %s\n" % (name, path))
2251
2251
2252 def postincoming(ui, repo, modheads, optupdate):
2252 def postincoming(ui, repo, modheads, optupdate):
2253 if modheads == 0:
2253 if modheads == 0:
2254 return
2254 return
2255 if optupdate:
2255 if optupdate:
2256 if modheads == 1:
2256 if modheads == 1:
2257 return doupdate(ui, repo)
2257 return doupdate(ui, repo)
2258 else:
2258 else:
2259 ui.status(_("not updating, since new heads added\n"))
2259 ui.status(_("not updating, since new heads added\n"))
2260 if modheads > 1:
2260 if modheads > 1:
2261 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2261 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2262 else:
2262 else:
2263 ui.status(_("(run 'hg update' to get a working copy)\n"))
2263 ui.status(_("(run 'hg update' to get a working copy)\n"))
2264
2264
2265 def pull(ui, repo, source="default", **opts):
2265 def pull(ui, repo, source="default", **opts):
2266 """pull changes from the specified source
2266 """pull changes from the specified source
2267
2267
2268 Pull changes from a remote repository to a local one.
2268 Pull changes from a remote repository to a local one.
2269
2269
2270 This finds all changes from the repository at the specified path
2270 This finds all changes from the repository at the specified path
2271 or URL and adds them to the local repository. By default, this
2271 or URL and adds them to the local repository. By default, this
2272 does not update the copy of the project in the working directory.
2272 does not update the copy of the project in the working directory.
2273
2273
2274 Valid URLs are of the form:
2274 Valid URLs are of the form:
2275
2275
2276 local/filesystem/path
2276 local/filesystem/path
2277 http://[user@]host[:port]/[path]
2277 http://[user@]host[:port]/[path]
2278 https://[user@]host[:port]/[path]
2278 https://[user@]host[:port]/[path]
2279 ssh://[user@]host[:port]/[path]
2279 ssh://[user@]host[:port]/[path]
2280
2280
2281 Some notes about using SSH with Mercurial:
2281 Some notes about using SSH with Mercurial:
2282 - SSH requires an accessible shell account on the destination machine
2282 - SSH requires an accessible shell account on the destination machine
2283 and a copy of hg in the remote path or specified with as remotecmd.
2283 and a copy of hg in the remote path or specified with as remotecmd.
2284 - path is relative to the remote user's home directory by default.
2284 - path is relative to the remote user's home directory by default.
2285 Use an extra slash at the start of a path to specify an absolute path:
2285 Use an extra slash at the start of a path to specify an absolute path:
2286 ssh://example.com//tmp/repository
2286 ssh://example.com//tmp/repository
2287 - Mercurial doesn't use its own compression via SSH; the right thing
2287 - Mercurial doesn't use its own compression via SSH; the right thing
2288 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2288 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2289 Host *.mylocalnetwork.example.com
2289 Host *.mylocalnetwork.example.com
2290 Compression off
2290 Compression off
2291 Host *
2291 Host *
2292 Compression on
2292 Compression on
2293 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2293 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2294 with the --ssh command line option.
2294 with the --ssh command line option.
2295 """
2295 """
2296 source = ui.expandpath(source)
2296 source = ui.expandpath(source)
2297 setremoteconfig(ui, opts)
2297 setremoteconfig(ui, opts)
2298
2298
2299 other = hg.repository(ui, source)
2299 other = hg.repository(ui, source)
2300 ui.status(_('pulling from %s\n') % (source))
2300 ui.status(_('pulling from %s\n') % (source))
2301 revs = None
2301 revs = None
2302 if opts['rev'] and not other.local():
2302 if opts['rev'] and not other.local():
2303 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2303 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2304 elif opts['rev']:
2304 elif opts['rev']:
2305 revs = [other.lookup(rev) for rev in opts['rev']]
2305 revs = [other.lookup(rev) for rev in opts['rev']]
2306 modheads = repo.pull(other, heads=revs, force=opts['force'])
2306 modheads = repo.pull(other, heads=revs, force=opts['force'])
2307 return postincoming(ui, repo, modheads, opts['update'])
2307 return postincoming(ui, repo, modheads, opts['update'])
2308
2308
2309 def push(ui, repo, dest=None, **opts):
2309 def push(ui, repo, dest=None, **opts):
2310 """push changes to the specified destination
2310 """push changes to the specified destination
2311
2311
2312 Push changes from the local repository to the given destination.
2312 Push changes from the local repository to the given destination.
2313
2313
2314 This is the symmetrical operation for pull. It helps to move
2314 This is the symmetrical operation for pull. It helps to move
2315 changes from the current repository to a different one. If the
2315 changes from the current repository to a different one. If the
2316 destination is local this is identical to a pull in that directory
2316 destination is local this is identical to a pull in that directory
2317 from the current one.
2317 from the current one.
2318
2318
2319 By default, push will refuse to run if it detects the result would
2319 By default, push will refuse to run if it detects the result would
2320 increase the number of remote heads. This generally indicates the
2320 increase the number of remote heads. This generally indicates the
2321 client has forgotten to sync and merge before pushing.
2321 client has forgotten to sync and merge before pushing.
2322
2322
2323 Valid URLs are of the form:
2323 Valid URLs are of the form:
2324
2324
2325 local/filesystem/path
2325 local/filesystem/path
2326 ssh://[user@]host[:port]/[path]
2326 ssh://[user@]host[:port]/[path]
2327
2327
2328 Look at the help text for the pull command for important details
2328 Look at the help text for the pull command for important details
2329 about ssh:// URLs.
2329 about ssh:// URLs.
2330
2330
2331 Pushing to http:// and https:// URLs is possible, too, if this
2331 Pushing to http:// and https:// URLs is possible, too, if this
2332 feature is enabled on the remote Mercurial server.
2332 feature is enabled on the remote Mercurial server.
2333 """
2333 """
2334 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2334 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2335 setremoteconfig(ui, opts)
2335 setremoteconfig(ui, opts)
2336
2336
2337 other = hg.repository(ui, dest)
2337 other = hg.repository(ui, dest)
2338 ui.status('pushing to %s\n' % (dest))
2338 ui.status('pushing to %s\n' % (dest))
2339 revs = None
2339 revs = None
2340 if opts['rev']:
2340 if opts['rev']:
2341 revs = [repo.lookup(rev) for rev in opts['rev']]
2341 revs = [repo.lookup(rev) for rev in opts['rev']]
2342 r = repo.push(other, opts['force'], revs=revs)
2342 r = repo.push(other, opts['force'], revs=revs)
2343 return r == 0
2343 return r == 0
2344
2344
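# Illustrative sketch (assumed helper, not the real ui.expandpath): push and
# outgoing above pass (dest or 'default-push', dest or 'default') so that an
# explicit destination wins, then the 'default-push' alias, then 'default'.
def _example_pick_path(aliases, dest=None):
    # aliases: symbolic path names, like the [paths] section of an hgrc
    if dest:
        return aliases.get(dest, dest)
    for name in ('default-push', 'default'):
        if name in aliases:
            return aliases[name]
    return 'default'

# _example_pick_path({'default': 'http://example.com/hg'}) returns
# 'http://example.com/hg'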
2345 def rawcommit(ui, repo, *flist, **rc):
2345 def rawcommit(ui, repo, *flist, **rc):
2346 """raw commit interface (DEPRECATED)
2346 """raw commit interface (DEPRECATED)
2347
2347
2348 (DEPRECATED)
2348 (DEPRECATED)
2349 Low-level commit, for use in helper scripts.
2349 Low-level commit, for use in helper scripts.
2350
2350
2351 This command is not intended to be used by normal users, as it is
2351 This command is not intended to be used by normal users, as it is
2352 primarily useful for importing from other SCMs.
2352 primarily useful for importing from other SCMs.
2353
2353
2354 This command is now deprecated and will be removed in a future
2354 This command is now deprecated and will be removed in a future
2355 release, please use debugsetparents and commit instead.
2355 release, please use debugsetparents and commit instead.
2356 """
2356 """
2357
2357
2358 ui.warn(_("(the rawcommit command is deprecated)\n"))
2358 ui.warn(_("(the rawcommit command is deprecated)\n"))
2359
2359
2360 message = rc['message']
2360 message = rc['message']
2361 if not message and rc['logfile']:
2361 if not message and rc['logfile']:
2362 try:
2362 try:
2363 message = open(rc['logfile']).read()
2363 message = open(rc['logfile']).read()
2364 except IOError:
2364 except IOError:
2365 pass
2365 pass
2366 if not message and not rc['logfile']:
2366 if not message and not rc['logfile']:
2367 raise util.Abort(_("missing commit message"))
2367 raise util.Abort(_("missing commit message"))
2368
2368
2369 files = relpath(repo, list(flist))
2369 files = relpath(repo, list(flist))
2370 if rc['files']:
2370 if rc['files']:
2371 files += open(rc['files']).read().splitlines()
2371 files += open(rc['files']).read().splitlines()
2372
2372
2373 rc['parent'] = map(repo.lookup, rc['parent'])
2373 rc['parent'] = map(repo.lookup, rc['parent'])
2374
2374
2375 try:
2375 try:
2376 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2376 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2377 except ValueError, inst:
2377 except ValueError, inst:
2378 raise util.Abort(str(inst))
2378 raise util.Abort(str(inst))
2379
2379
2380 def recover(ui, repo):
2380 def recover(ui, repo):
2381 """roll back an interrupted transaction
2381 """roll back an interrupted transaction
2382
2382
2383 Recover from an interrupted commit or pull.
2383 Recover from an interrupted commit or pull.
2384
2384
2385 This command tries to fix the repository status after an interrupted
2385 This command tries to fix the repository status after an interrupted
2386 operation. It should only be necessary when Mercurial suggests it.
2386 operation. It should only be necessary when Mercurial suggests it.
2387 """
2387 """
2388 if repo.recover():
2388 if repo.recover():
2389 return repo.verify()
2389 return repo.verify()
2390 return 1
2390 return 1
2391
2391
2392 def remove(ui, repo, *pats, **opts):
2392 def remove(ui, repo, *pats, **opts):
2393 """remove the specified files on the next commit
2393 """remove the specified files on the next commit
2394
2394
2395 Schedule the indicated files for removal from the repository.
2395 Schedule the indicated files for removal from the repository.
2396
2396
2397 This command schedules the files to be removed at the next commit.
2397 This command schedules the files to be removed at the next commit.
2398 This only removes files from the current branch, not from the
2398 This only removes files from the current branch, not from the
2399 entire project history. If the files still exist in the working
2399 entire project history. If the files still exist in the working
2400 directory, they will be deleted from it. If invoked with --after,
2400 directory, they will be deleted from it. If invoked with --after,
2401 files that have been manually deleted are marked as removed.
2401 files that have been manually deleted are marked as removed.
2402
2402
2403 Modified files and added files are not removed by default. To
2403 Modified files and added files are not removed by default. To
2404 remove them, use the -f/--force option.
2404 remove them, use the -f/--force option.
2405 """
2405 """
2406 names = []
2406 names = []
2407 if not opts['after'] and not pats:
2407 if not opts['after'] and not pats:
2408 raise util.Abort(_('no files specified'))
2408 raise util.Abort(_('no files specified'))
2409 files, matchfn, anypats = matchpats(repo, pats, opts)
2409 files, matchfn, anypats = matchpats(repo, pats, opts)
2410 exact = dict.fromkeys(files)
2410 exact = dict.fromkeys(files)
2411 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2411 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2412 modified, added, removed, deleted, unknown = mardu
2412 modified, added, removed, deleted, unknown = mardu
2413 remove, forget = [], []
2413 remove, forget = [], []
2414 for src, abs, rel, exact in walk(repo, pats, opts):
2414 for src, abs, rel, exact in walk(repo, pats, opts):
2415 reason = None
2415 reason = None
2416 if abs not in deleted and opts['after']:
2416 if abs not in deleted and opts['after']:
2417 reason = _('is still present')
2417 reason = _('is still present')
2418 elif abs in modified and not opts['force']:
2418 elif abs in modified and not opts['force']:
2419 reason = _('is modified (use -f to force removal)')
2419 reason = _('is modified (use -f to force removal)')
2420 elif abs in added:
2420 elif abs in added:
2421 if opts['force']:
2421 if opts['force']:
2422 forget.append(abs)
2422 forget.append(abs)
2423 continue
2423 continue
2424 reason = _('has been marked for add (use -f to force removal)')
2424 reason = _('has been marked for add (use -f to force removal)')
2425 elif abs in unknown:
2425 elif abs in unknown:
2426 reason = _('is not managed')
2426 reason = _('is not managed')
2427 elif abs in removed:
2427 elif abs in removed:
2428 continue
2428 continue
2429 if reason:
2429 if reason:
2430 if exact:
2430 if exact:
2431 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2431 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2432 else:
2432 else:
2433 if ui.verbose or not exact:
2433 if ui.verbose or not exact:
2434 ui.status(_('removing %s\n') % rel)
2434 ui.status(_('removing %s\n') % rel)
2435 remove.append(abs)
2435 remove.append(abs)
2436 repo.forget(forget)
2436 repo.forget(forget)
2437 repo.remove(remove, unlink=not opts['after'])
2437 repo.remove(remove, unlink=not opts['after'])
2438
2438
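# Illustrative sketch, not part of the original module: remove() above sorts
# each file into one of three outcomes -- really remove it, merely forget a
# pending add, or skip it with a reason (shown as a warning for exact
# matches).  A condensed version of that decision:
def _example_classify_removal(abs, modified, added, deleted, after, force):
    if after and abs not in deleted:
        return 'skip', 'is still present'
    if abs in modified and not force:
        return 'skip', 'is modified (use -f to force removal)'
    if abs in added:
        if force:
            return 'forget', None
        return 'skip', 'has been marked for add (use -f to force removal)'
    return 'remove', None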
2439 def rename(ui, repo, *pats, **opts):
2439 def rename(ui, repo, *pats, **opts):
2440 """rename files; equivalent of copy + remove
2440 """rename files; equivalent of copy + remove
2441
2441
2442 Mark dest as copies of sources; mark sources for deletion. If
2442 Mark dest as copies of sources; mark sources for deletion. If
2443 dest is a directory, copies are put in that directory. If dest is
2443 dest is a directory, copies are put in that directory. If dest is
2444 a file, there can only be one source.
2444 a file, there can only be one source.
2445
2445
2446 By default, this command copies the contents of files as they
2446 By default, this command copies the contents of files as they
2447 stand in the working directory. If invoked with --after, the
2447 stand in the working directory. If invoked with --after, the
2448 operation is recorded, but no copying is performed.
2448 operation is recorded, but no copying is performed.
2449
2449
2450 This command takes effect in the next commit.
2450 This command takes effect in the next commit.
2451
2451
2452 NOTE: This command should be treated as experimental. While it
2452 NOTE: This command should be treated as experimental. While it
2453 should properly record renamed files, this information is not yet
2453 should properly record renamed files, this information is not yet
2454 fully used by merge, nor fully reported by log.
2454 fully used by merge, nor fully reported by log.
2455 """
2455 """
2456 wlock = repo.wlock(0)
2456 wlock = repo.wlock(0)
2457 errs, copied = docopy(ui, repo, pats, opts, wlock)
2457 errs, copied = docopy(ui, repo, pats, opts, wlock)
2458 names = []
2458 names = []
2459 for abs, rel, exact in copied:
2459 for abs, rel, exact in copied:
2460 if ui.verbose or not exact:
2460 if ui.verbose or not exact:
2461 ui.status(_('removing %s\n') % rel)
2461 ui.status(_('removing %s\n') % rel)
2462 names.append(abs)
2462 names.append(abs)
2463 if not opts.get('dry_run'):
2463 if not opts.get('dry_run'):
2464 repo.remove(names, True, wlock)
2464 repo.remove(names, True, wlock)
2465 return errs
2465 return errs
2466
2466
2467 def revert(ui, repo, *pats, **opts):
2467 def revert(ui, repo, *pats, **opts):
2468 """revert files or dirs to their states as of some revision
2468 """revert files or dirs to their states as of some revision
2469
2469
2470 With no revision specified, revert the named files or directories
2470 With no revision specified, revert the named files or directories
2471 to the contents they had in the parent of the working directory.
2471 to the contents they had in the parent of the working directory.
2472 This restores the contents of the affected files to an unmodified
2472 This restores the contents of the affected files to an unmodified
2473 state. If the working directory has two parents, you must
2473 state. If the working directory has two parents, you must
2474 explicitly specify the revision to revert to.
2474 explicitly specify the revision to revert to.
2475
2475
2476 Modified files are saved with a .orig suffix before reverting.
2476 Modified files are saved with a .orig suffix before reverting.
2477 To disable these backups, use --no-backup.
2477 To disable these backups, use --no-backup.
2478
2478
2479 Using the -r option, revert the given files or directories to
2479 Using the -r option, revert the given files or directories to
2480 their contents as of a specific revision. This can be helpful to "roll
2480 their contents as of a specific revision. This can be helpful to "roll
2481 back" some or all of a change that should not have been committed.
2481 back" some or all of a change that should not have been committed.
2482
2482
2483 Revert modifies the working directory. It does not commit any
2483 Revert modifies the working directory. It does not commit any
2484 changes, or change the parent of the working directory. If you
2484 changes, or change the parent of the working directory. If you
2485 revert to a revision other than the parent of the working
2485 revert to a revision other than the parent of the working
2486 directory, the reverted files will thus appear modified
2486 directory, the reverted files will thus appear modified
2487 afterwards.
2487 afterwards.
2488
2488
2489 If a file has been deleted, it is recreated. If the executable
2489 If a file has been deleted, it is recreated. If the executable
2490 mode of a file was changed, it is reset.
2490 mode of a file was changed, it is reset.
2491
2491
2492 If names are given, all files matching the names are reverted.
2492 If names are given, all files matching the names are reverted.
2493
2493
2494 If no arguments are given, all files in the repository are reverted.
2494 If no arguments are given, all files in the repository are reverted.
2495 """
2495 """
2496 parent, p2 = repo.dirstate.parents()
2496 parent, p2 = repo.dirstate.parents()
2497 if opts['rev']:
2497 if opts['rev']:
2498 node = repo.lookup(opts['rev'])
2498 node = repo.lookup(opts['rev'])
2499 elif p2 != nullid:
2499 elif p2 != nullid:
2500 raise util.Abort(_('working dir has two parents; '
2500 raise util.Abort(_('working dir has two parents; '
2501 'you must specify the revision to revert to'))
2501 'you must specify the revision to revert to'))
2502 else:
2502 else:
2503 node = parent
2503 node = parent
2504 mf = repo.manifest.read(repo.changelog.read(node)[0])
2504 mf = repo.manifest.read(repo.changelog.read(node)[0])
2505 if node == parent:
2505 if node == parent:
2506 pmf = mf
2506 pmf = mf
2507 else:
2507 else:
2508 pmf = None
2508 pmf = None
2509
2509
2510 wlock = repo.wlock()
2510 wlock = repo.wlock()
2511
2511
2512 # need all matching names in dirstate and manifest of target rev,
2512 # need all matching names in dirstate and manifest of target rev,
2513 # so have to walk both. do not print errors if files exist in one
2513 # so have to walk both. do not print errors if files exist in one
2514 # but not other.
2514 # but not other.
2515
2515
2516 names = {}
2516 names = {}
2517 target_only = {}
2517 target_only = {}
2518
2518
2519 # walk dirstate.
2519 # walk dirstate.
2520
2520
2521 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2521 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2522 names[abs] = (rel, exact)
2522 names[abs] = (rel, exact)
2523 if src == 'b':
2523 if src == 'b':
2524 target_only[abs] = True
2524 target_only[abs] = True
2525
2525
2526 # walk target manifest.
2526 # walk target manifest.
2527
2527
2528 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2528 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2529 badmatch=names.has_key):
2529 badmatch=names.has_key):
2530 if abs in names: continue
2530 if abs in names: continue
2531 names[abs] = (rel, exact)
2531 names[abs] = (rel, exact)
2532 target_only[abs] = True
2532 target_only[abs] = True
2533
2533
2534 changes = repo.changes(match=names.has_key, wlock=wlock)
2534 changes = repo.changes(match=names.has_key, wlock=wlock)
2535 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2535 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2536
2536
2537 revert = ([], _('reverting %s\n'))
2537 revert = ([], _('reverting %s\n'))
2538 add = ([], _('adding %s\n'))
2538 add = ([], _('adding %s\n'))
2539 remove = ([], _('removing %s\n'))
2539 remove = ([], _('removing %s\n'))
2540 forget = ([], _('forgetting %s\n'))
2540 forget = ([], _('forgetting %s\n'))
2541 undelete = ([], _('undeleting %s\n'))
2541 undelete = ([], _('undeleting %s\n'))
2542 update = {}
2542 update = {}
2543
2543
2544 disptable = (
2544 disptable = (
2545 # dispatch table:
2545 # dispatch table:
2546 # file state
2546 # file state
2547 # action if in target manifest
2547 # action if in target manifest
2548 # action if not in target manifest
2548 # action if not in target manifest
2549 # make backup if in target manifest
2549 # make backup if in target manifest
2550 # make backup if not in target manifest
2550 # make backup if not in target manifest
2551 (modified, revert, remove, True, True),
2551 (modified, revert, remove, True, True),
2552 (added, revert, forget, True, False),
2552 (added, revert, forget, True, False),
2553 (removed, undelete, None, False, False),
2553 (removed, undelete, None, False, False),
2554 (deleted, revert, remove, False, False),
2554 (deleted, revert, remove, False, False),
2555 (unknown, add, None, True, False),
2555 (unknown, add, None, True, False),
2556 (target_only, add, None, False, False),
2556 (target_only, add, None, False, False),
2557 )
2557 )
2558
2558
2559 entries = names.items()
2559 entries = names.items()
2560 entries.sort()
2560 entries.sort()
2561
2561
2562 for abs, (rel, exact) in entries:
2562 for abs, (rel, exact) in entries:
2563 mfentry = mf.get(abs)
2563 mfentry = mf.get(abs)
2564 def handle(xlist, dobackup):
2564 def handle(xlist, dobackup):
2565 xlist[0].append(abs)
2565 xlist[0].append(abs)
2566 update[abs] = 1
2566 update[abs] = 1
2567 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2567 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2568 bakname = "%s.orig" % rel
2568 bakname = "%s.orig" % rel
2569 ui.note(_('saving current version of %s as %s\n') %
2569 ui.note(_('saving current version of %s as %s\n') %
2570 (rel, bakname))
2570 (rel, bakname))
2571 if not opts.get('dry_run'):
2571 if not opts.get('dry_run'):
2572 shutil.copyfile(rel, bakname)
2572 shutil.copyfile(rel, bakname)
2573 shutil.copymode(rel, bakname)
2573 shutil.copymode(rel, bakname)
2574 if ui.verbose or not exact:
2574 if ui.verbose or not exact:
2575 ui.status(xlist[1] % rel)
2575 ui.status(xlist[1] % rel)
2576 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2576 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2577 if abs not in table: continue
2577 if abs not in table: continue
2578 # file has changed in dirstate
2578 # file has changed in dirstate
2579 if mfentry:
2579 if mfentry:
2580 handle(hitlist, backuphit)
2580 handle(hitlist, backuphit)
2581 elif misslist is not None:
2581 elif misslist is not None:
2582 handle(misslist, backupmiss)
2582 handle(misslist, backupmiss)
2583 else:
2583 else:
2584 if exact: ui.warn(_('file not managed: %s\n') % rel)
2584 if exact: ui.warn(_('file not managed: %s\n') % rel)
2585 break
2585 break
2586 else:
2586 else:
2587 # file has not changed in dirstate
2587 # file has not changed in dirstate
2588 if node == parent:
2588 if node == parent:
2589 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2589 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2590 continue
2590 continue
2591 if pmf is None:
2591 if pmf is None:
2592 # only need parent manifest in this unlikely case,
2592 # only need parent manifest in this unlikely case,
2593 # so do not read by default
2593 # so do not read by default
2594 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2594 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2595 if abs in pmf:
2595 if abs in pmf:
2596 if mfentry:
2596 if mfentry:
2597 # if version of file is same in parent and target
2597 # if version of file is same in parent and target
2598 # manifests, do nothing
2598 # manifests, do nothing
2599 if pmf[abs] != mfentry:
2599 if pmf[abs] != mfentry:
2600 handle(revert, False)
2600 handle(revert, False)
2601 else:
2601 else:
2602 handle(remove, False)
2602 handle(remove, False)
2603
2603
2604 if not opts.get('dry_run'):
2604 if not opts.get('dry_run'):
2605 repo.dirstate.forget(forget[0])
2605 repo.dirstate.forget(forget[0])
2606 r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
2606 r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
2607 show_stats=False)
2607 show_stats=False)
2608 repo.dirstate.update(add[0], 'a')
2608 repo.dirstate.update(add[0], 'a')
2609 repo.dirstate.update(undelete[0], 'n')
2609 repo.dirstate.update(undelete[0], 'n')
2610 repo.dirstate.update(remove[0], 'r')
2610 repo.dirstate.update(remove[0], 'r')
2611 return r
2611 return r
2612
2612
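# Editor's note: a minimal standalone sketch (not part of the original source)
# of how the dispatch table in revert() above resolves one file.  The name
# resolve_action and the in_target_manifest flag are illustrative assumptions;
# each table row is (state set, action if in target manifest, action if not,
# backup if in target manifest, backup if not).
def resolve_action(abs, in_target_manifest, disptable):
    for table, hitlist, misslist, backuphit, backupmiss in disptable:
        if abs not in table:
            continue
        if in_target_manifest:
            return hitlist, backuphit
        if misslist is not None:
            return misslist, backupmiss
        return None, False      # e.g. a removed file absent from the target
    return None, False          # file not listed in any state set
# For a modified file that exists in the target manifest this yields
# (revert, True): revert the contents and keep a .orig backup.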
2613 def rollback(ui, repo):
2613 def rollback(ui, repo):
2614 """roll back the last transaction in this repository
2614 """roll back the last transaction in this repository
2615
2615
2616 Roll back the last transaction in this repository, restoring the
2616 Roll back the last transaction in this repository, restoring the
2617 project to its state prior to the transaction.
2617 project to its state prior to the transaction.
2618
2618
2619 Transactions are used to encapsulate the effects of all commands
2619 Transactions are used to encapsulate the effects of all commands
2620 that create new changesets or propagate existing changesets into a
2620 that create new changesets or propagate existing changesets into a
2621 repository. For example, the following commands are transactional,
2621 repository. For example, the following commands are transactional,
2622 and their effects can be rolled back:
2622 and their effects can be rolled back:
2623
2623
2624 commit
2624 commit
2625 import
2625 import
2626 pull
2626 pull
2627 push (with this repository as destination)
2627 push (with this repository as destination)
2628 unbundle
2628 unbundle
2629
2629
2630 This command should be used with care. There is only one level of
2630 This command should be used with care. There is only one level of
2631 rollback, and there is no way to undo a rollback.
2631 rollback, and there is no way to undo a rollback.
2632
2632
2633 This command is not intended for use on public repositories. Once
2633 This command is not intended for use on public repositories. Once
2634 changes are visible for pull by other users, rolling a transaction
2634 changes are visible for pull by other users, rolling a transaction
2635 back locally is ineffective (someone else may already have pulled
2635 back locally is ineffective (someone else may already have pulled
2636 the changes). Furthermore, a race is possible with readers of the
2636 the changes). Furthermore, a race is possible with readers of the
2637 repository; for example an in-progress pull from the repository
2637 repository; for example an in-progress pull from the repository
2638 may fail if a rollback is performed.
2638 may fail if a rollback is performed.
2639 """
2639 """
2640 repo.rollback()
2640 repo.rollback()
2641
2641
2642 def root(ui, repo):
2642 def root(ui, repo):
2643 """print the root (top) of the current working dir
2643 """print the root (top) of the current working dir
2644
2644
2645 Print the root directory of the current repository.
2645 Print the root directory of the current repository.
2646 """
2646 """
2647 ui.write(repo.root + "\n")
2647 ui.write(repo.root + "\n")
2648
2648
2649 def serve(ui, repo, **opts):
2649 def serve(ui, repo, **opts):
2650 """export the repository via HTTP
2650 """export the repository via HTTP
2651
2651
2652 Start a local HTTP repository browser and pull server.
2652 Start a local HTTP repository browser and pull server.
2653
2653
2654 By default, the server logs accesses to stdout and errors to
2654 By default, the server logs accesses to stdout and errors to
2655 stderr. Use the "-A" and "-E" options to log to files.
2655 stderr. Use the "-A" and "-E" options to log to files.
2656 """
2656 """
2657
2657
2658 if opts["stdio"]:
2658 if opts["stdio"]:
2659 if repo is None:
2659 if repo is None:
2660 raise hg.RepoError(_('no repo found'))
2660 raise hg.RepoError(_('no repo found'))
2661 s = sshserver.sshserver(ui, repo)
2661 s = sshserver.sshserver(ui, repo)
2662 s.serve_forever()
2662 s.serve_forever()
2663
2663
2664 optlist = ("name templates style address port ipv6"
2664 optlist = ("name templates style address port ipv6"
2665 " accesslog errorlog webdir_conf")
2665 " accesslog errorlog webdir_conf")
2666 for o in optlist.split():
2666 for o in optlist.split():
2667 if opts[o]:
2667 if opts[o]:
2668 ui.setconfig("web", o, opts[o])
2668 ui.setconfig("web", o, opts[o])
2669
2669
2670 if repo is None and not ui.config("web", "webdir_conf"):
2670 if repo is None and not ui.config("web", "webdir_conf"):
2671 raise hg.RepoError(_('no repo found'))
2671 raise hg.RepoError(_('no repo found'))
2672
2672
2673 if opts['daemon'] and not opts['daemon_pipefds']:
2673 if opts['daemon'] and not opts['daemon_pipefds']:
2674 rfd, wfd = os.pipe()
2674 rfd, wfd = os.pipe()
2675 args = sys.argv[:]
2675 args = sys.argv[:]
2676 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2676 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2677 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2677 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2678 args[0], args)
2678 args[0], args)
2679 os.close(wfd)
2679 os.close(wfd)
2680 os.read(rfd, 1)
2680 os.read(rfd, 1)
2681 os._exit(0)
2681 os._exit(0)
2682
2682
2683 try:
2683 try:
2684 httpd = hgweb.server.create_server(ui, repo)
2684 httpd = hgweb.server.create_server(ui, repo)
2685 except socket.error, inst:
2685 except socket.error, inst:
2686 raise util.Abort(_('cannot start server: ') + inst.args[1])
2686 raise util.Abort(_('cannot start server: ') + inst.args[1])
2687
2687
2688 if ui.verbose:
2688 if ui.verbose:
2689 addr, port = httpd.socket.getsockname()
2689 addr, port = httpd.socket.getsockname()
2690 if addr == '0.0.0.0':
2690 if addr == '0.0.0.0':
2691 addr = socket.gethostname()
2691 addr = socket.gethostname()
2692 else:
2692 else:
2693 try:
2693 try:
2694 addr = socket.gethostbyaddr(addr)[0]
2694 addr = socket.gethostbyaddr(addr)[0]
2695 except socket.error:
2695 except socket.error:
2696 pass
2696 pass
2697 if port != 80:
2697 if port != 80:
2698 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2698 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2699 else:
2699 else:
2700 ui.status(_('listening at http://%s/\n') % addr)
2700 ui.status(_('listening at http://%s/\n') % addr)
2701
2701
2702 if opts['pid_file']:
2702 if opts['pid_file']:
2703 fp = open(opts['pid_file'], 'w')
2703 fp = open(opts['pid_file'], 'w')
2704 fp.write(str(os.getpid()) + '\n')
2704 fp.write(str(os.getpid()) + '\n')
2705 fp.close()
2705 fp.close()
2706
2706
2707 if opts['daemon_pipefds']:
2707 if opts['daemon_pipefds']:
2708 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2708 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2709 os.close(rfd)
2709 os.close(rfd)
2710 os.write(wfd, 'y')
2710 os.write(wfd, 'y')
2711 os.close(wfd)
2711 os.close(wfd)
2712 sys.stdout.flush()
2712 sys.stdout.flush()
2713 sys.stderr.flush()
2713 sys.stderr.flush()
2714 fd = os.open(util.nulldev, os.O_RDWR)
2714 fd = os.open(util.nulldev, os.O_RDWR)
2715 if fd != 0: os.dup2(fd, 0)
2715 if fd != 0: os.dup2(fd, 0)
2716 if fd != 1: os.dup2(fd, 1)
2716 if fd != 1: os.dup2(fd, 1)
2717 if fd != 2: os.dup2(fd, 2)
2717 if fd != 2: os.dup2(fd, 2)
2718 if fd not in (0, 1, 2): os.close(fd)
2718 if fd not in (0, 1, 2): os.close(fd)
2719
2719
2720 httpd.serve_forever()
2720 httpd.serve_forever()
2721
2721
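# Editor's note: a reduced sketch (not part of the original source) of the
# daemon handshake used by serve() above.  The parent re-runs itself with
# --daemon-pipefds and blocks on the read end of a pipe until the detached
# child writes a single byte to say the server is up; only then does the
# parent exit.  The function names are illustrative assumptions.
import os

def spawn_and_wait(argv):
    rfd, wfd = os.pipe()
    args = argv + ['--daemon-pipefds=%d,%d' % (rfd, wfd)]
    os.spawnvp(os.P_NOWAIT, args[0], args)
    os.close(wfd)
    os.read(rfd, 1)               # blocks until the child signals readiness
    os._exit(0)

def signal_ready(daemon_pipefds):
    rfd, wfd = [int(x) for x in daemon_pipefds.split(',')]
    os.close(rfd)
    os.write(wfd, 'y')            # unblocks the waiting parent
    os.close(wfd)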
2722 def status(ui, repo, *pats, **opts):
2722 def status(ui, repo, *pats, **opts):
2723 """show changed files in the working directory
2723 """show changed files in the working directory
2724
2724
2725 Show status of files in the repository. If names are given, only
2725 Show status of files in the repository. If names are given, only
2726 files that match are shown. Files that are clean or ignored are
2726 files that match are shown. Files that are clean or ignored are
2727 not listed unless -c (clean), -i (ignored) or -A is given.
2727 not listed unless -c (clean), -i (ignored) or -A is given.
2728
2728
2729 The codes used to show the status of files are:
2729 The codes used to show the status of files are:
2730 M = modified
2730 M = modified
2731 A = added
2731 A = added
2732 R = removed
2732 R = removed
2733 C = clean
2733 C = clean
2734 ! = deleted, but still tracked
2734 ! = deleted, but still tracked
2735 ? = not tracked
2735 ? = not tracked
2736 I = ignored (not shown by default)
2736 I = ignored (not shown by default)
2737 = the previous added file was copied from here
2737 = the previous added file was copied from here
2738 """
2738 """
2739
2739
2740 all = opts['all']
2740 all = opts['all']
2741
2741
2742 files, matchfn, anypats = matchpats(repo, pats, opts)
2742 files, matchfn, anypats = matchpats(repo, pats, opts)
2743 cwd = (pats and repo.getcwd()) or ''
2743 cwd = (pats and repo.getcwd()) or ''
2744 modified, added, removed, deleted, unknown, ignored, clean = [
2744 modified, added, removed, deleted, unknown, ignored, clean = [
2745 [util.pathto(cwd, x) for x in n]
2745 [util.pathto(cwd, x) for x in n]
2746 for n in repo.status(files=files, match=matchfn,
2746 for n in repo.status(files=files, match=matchfn,
2747 list_ignored=all or opts['ignored'],
2747 list_ignored=all or opts['ignored'],
2748 list_clean=all or opts['clean'])]
2748 list_clean=all or opts['clean'])]
2749
2749
2750 changetypes = (('modified', 'M', modified),
2750 changetypes = (('modified', 'M', modified),
2751 ('added', 'A', added),
2751 ('added', 'A', added),
2752 ('removed', 'R', removed),
2752 ('removed', 'R', removed),
2753 ('deleted', '!', deleted),
2753 ('deleted', '!', deleted),
2754 ('unknown', '?', unknown),
2754 ('unknown', '?', unknown),
2755 ('ignored', 'I', ignored))
2755 ('ignored', 'I', ignored))
2756
2756
2757 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2757 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2758
2758
2759 end = opts['print0'] and '\0' or '\n'
2759 end = opts['print0'] and '\0' or '\n'
2760
2760
2761 for opt, char, changes in ([ct for ct in explicit_changetypes
2761 for opt, char, changes in ([ct for ct in explicit_changetypes
2762 if all or opts[ct[0]]]
2762 if all or opts[ct[0]]]
2763 or changetypes):
2763 or changetypes):
2764 if opts['no_status']:
2764 if opts['no_status']:
2765 format = "%%s%s" % end
2765 format = "%%s%s" % end
2766 else:
2766 else:
2767 format = "%s %%s%s" % (char, end)
2767 format = "%s %%s%s" % (char, end)
2768
2768
2769 for f in changes:
2769 for f in changes:
2770 ui.write(format % f)
2770 ui.write(format % f)
2771 if ((all or opts.get('copies')) and not opts.get('no_status')
2771 if ((all or opts.get('copies')) and not opts.get('no_status')
2772 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2772 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2773 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2773 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2774
2774
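# Editor's note: a tiny illustration (not part of the original source) of the
# doubled-percent format strings built in status() above.  Escaping one %s
# leaves a single placeholder behind for the file name, so each path is
# printed with one further % substitution.
char, end = 'M', '\n'
format = "%s %%s%s" % (char, end)        # -> 'M %s\n'
assert format % 'foo.c' == 'M foo.c\n'
format = "%%s%s" % end                   # -> '%s\n' (with -n/--no-status)
assert format % 'foo.c' == 'foo.c\n'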
2775 def tag(ui, repo, name, rev_=None, **opts):
2775 def tag(ui, repo, name, rev_=None, **opts):
2776 """add a tag for the current tip or a given revision
2776 """add a tag for the current tip or a given revision
2777
2777
2778 Name a particular revision using <name>.
2778 Name a particular revision using <name>.
2779
2779
2780 Tags are used to name particular revisions of the repository and are
2780 Tags are used to name particular revisions of the repository and are
2781 very useful to compare different revisions, to go back to significant
2781 very useful to compare different revisions, to go back to significant
2782 earlier versions, or to mark branch points as releases, etc.
2782 earlier versions, or to mark branch points as releases, etc.
2783
2783
2784 If no revision is given, the parent of the working directory is used.
2784 If no revision is given, the parent of the working directory is used.
2785
2785
2786 To facilitate version control, distribution, and merging of tags,
2786 To facilitate version control, distribution, and merging of tags,
2787 they are stored as a file named ".hgtags" which is managed
2787 they are stored as a file named ".hgtags" which is managed
2788 similarly to other project files and can be hand-edited if
2788 similarly to other project files and can be hand-edited if
2789 necessary. The file '.hg/localtags' is used for local tags (not
2789 necessary. The file '.hg/localtags' is used for local tags (not
2790 shared among repositories).
2790 shared among repositories).
2791 """
2791 """
2792 if name == "tip":
2792 if name in ['tip', '.']:
2793 raise util.Abort(_("the name 'tip' is reserved"))
2793 raise util.Abort(_("the name '%s' is reserved") % name)
2794 if rev_ is not None:
2794 if rev_ is not None:
2795 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2795 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2796 "please use 'hg tag [-r REV] NAME' instead\n"))
2796 "please use 'hg tag [-r REV] NAME' instead\n"))
2797 if opts['rev']:
2797 if opts['rev']:
2798 raise util.Abort(_("use only one form to specify the revision"))
2798 raise util.Abort(_("use only one form to specify the revision"))
2799 if opts['rev']:
2799 if opts['rev']:
2800 rev_ = opts['rev']
2800 rev_ = opts['rev']
2801 if rev_:
2801 if rev_:
2802 r = hex(repo.lookup(rev_))
2802 r = hex(repo.lookup(rev_))
2803 else:
2803 else:
2804 p1, p2 = repo.dirstate.parents()
2804 p1, p2 = repo.dirstate.parents()
2805 if p1 == nullid:
2805 if p1 == nullid:
2806 raise util.Abort(_('no revision to tag'))
2806 raise util.Abort(_('no revision to tag'))
2807 if p2 != nullid:
2807 if p2 != nullid:
2808 raise util.Abort(_('outstanding uncommitted merges'))
2808 raise util.Abort(_('outstanding uncommitted merges'))
2809 r = hex(p1)
2809 r = hex(p1)
2810
2810
2811 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2811 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2812 opts['date'])
2812 opts['date'])
2813
2813
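# Editor's note: an illustrative assumption, not taken from this file -- the
# .hgtags file mentioned in the tag() docstring above conventionally holds one
# tag per line as the full 40-character changeset hex followed by the name.
def hgtags_line(hexnode, name):
    return '%s %s\n' % (hexnode, name)
# e.g. hgtags_line(r, name) would resemble the line repo.tag() records above.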
2814 def tags(ui, repo):
2814 def tags(ui, repo):
2815 """list repository tags
2815 """list repository tags
2816
2816
2817 List the repository tags.
2817 List the repository tags.
2818
2818
2819 This lists both regular and local tags.
2819 This lists both regular and local tags.
2820 """
2820 """
2821
2821
2822 l = repo.tagslist()
2822 l = repo.tagslist()
2823 l.reverse()
2823 l.reverse()
2824 for t, n in l:
2824 for t, n in l:
2825 try:
2825 try:
2826 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2826 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2827 except KeyError:
2827 except KeyError:
2828 r = " ?:?"
2828 r = " ?:?"
2829 if ui.quiet:
2829 if ui.quiet:
2830 ui.write("%s\n" % t)
2830 ui.write("%s\n" % t)
2831 else:
2831 else:
2832 ui.write("%-30s %s\n" % (t, r))
2832 ui.write("%-30s %s\n" % (t, r))
2833
2833
2834 def tip(ui, repo, **opts):
2834 def tip(ui, repo, **opts):
2835 """show the tip revision
2835 """show the tip revision
2836
2836
2837 Show the tip revision.
2837 Show the tip revision.
2838 """
2838 """
2839 n = repo.changelog.tip()
2839 n = repo.changelog.tip()
2840 br = None
2840 br = None
2841 if opts['branches']:
2841 if opts['branches']:
2842 br = repo.branchlookup([n])
2842 br = repo.branchlookup([n])
2843 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2843 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2844 if opts['patch']:
2844 if opts['patch']:
2845 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2845 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2846
2846
2847 def unbundle(ui, repo, fname, **opts):
2847 def unbundle(ui, repo, fname, **opts):
2848 """apply a changegroup file
2848 """apply a changegroup file
2849
2849
2850 Apply a compressed changegroup file generated by the bundle
2850 Apply a compressed changegroup file generated by the bundle
2851 command.
2851 command.
2852 """
2852 """
2853 f = urllib.urlopen(fname)
2853 f = urllib.urlopen(fname)
2854
2854
2855 header = f.read(6)
2855 header = f.read(6)
2856 if not header.startswith("HG"):
2856 if not header.startswith("HG"):
2857 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2857 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2858 elif not header.startswith("HG10"):
2858 elif not header.startswith("HG10"):
2859 raise util.Abort(_("%s: unknown bundle version") % fname)
2859 raise util.Abort(_("%s: unknown bundle version") % fname)
2860 elif header == "HG10BZ":
2860 elif header == "HG10BZ":
2861 def generator(f):
2861 def generator(f):
2862 zd = bz2.BZ2Decompressor()
2862 zd = bz2.BZ2Decompressor()
2863 zd.decompress("BZ")
2863 zd.decompress("BZ")
2864 for chunk in f:
2864 for chunk in f:
2865 yield zd.decompress(chunk)
2865 yield zd.decompress(chunk)
2866 elif header == "HG10UN":
2866 elif header == "HG10UN":
2867 def generator(f):
2867 def generator(f):
2868 for chunk in f:
2868 for chunk in f:
2869 yield chunk
2869 yield chunk
2870 else:
2870 else:
2871 raise util.Abort(_("%s: unknown bundle compression type")
2871 raise util.Abort(_("%s: unknown bundle compression type")
2872 % fname)
2872 % fname)
2873 gen = generator(util.filechunkiter(f, 4096))
2873 gen = generator(util.filechunkiter(f, 4096))
2874 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2874 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2875 'bundle:' + fname)
2875 'bundle:' + fname)
2876 return postincoming(ui, repo, modheads, opts['update'])
2876 return postincoming(ui, repo, modheads, opts['update'])
2877
2877
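# Editor's note: a short sketch (not part of the original source) of why
# unbundle() above primes the decompressor with the literal "BZ": in an
# HG10BZ bundle the last two bytes of the six-byte header double as the first
# two bytes of the bzip2 stream, so after f.read(6) they must be fed back to
# the decompressor before the remaining chunks.
import bz2

def decompress_bundle_chunks(chunks):
    zd = bz2.BZ2Decompressor()
    zd.decompress("BZ")            # restore the magic consumed by the header read
    for chunk in chunks:
        yield zd.decompress(chunk)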
2878 def undo(ui, repo):
2878 def undo(ui, repo):
2879 """undo the last commit or pull (DEPRECATED)
2879 """undo the last commit or pull (DEPRECATED)
2880
2880
2881 (DEPRECATED)
2881 (DEPRECATED)
2882 This command is now deprecated and will be removed in a future
2882 This command is now deprecated and will be removed in a future
2883 release. Please use the rollback command instead. For usage
2883 release. Please use the rollback command instead. For usage
2884 instructions, see the rollback command.
2884 instructions, see the rollback command.
2885 """
2885 """
2886 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2886 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2887 repo.rollback()
2887 repo.rollback()
2888
2888
2889 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2889 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2890 branch=None, **opts):
2890 branch=None, **opts):
2891 """update or merge working directory
2891 """update or merge working directory
2892
2892
2893 Update the working directory to the specified revision.
2893 Update the working directory to the specified revision.
2894
2894
2895 If there are no outstanding changes in the working directory and
2895 If there are no outstanding changes in the working directory and
2896 there is a linear relationship between the current version and the
2896 there is a linear relationship between the current version and the
2897 requested version, the result is the requested version.
2897 requested version, the result is the requested version.
2898
2898
2899 To merge the working directory with another revision, use the
2899 To merge the working directory with another revision, use the
2900 merge command.
2900 merge command.
2901
2901
2902 By default, update will refuse to run if doing so would require
2902 By default, update will refuse to run if doing so would require
2903 merging or discarding local changes.
2903 merging or discarding local changes.
2904 """
2904 """
2905 if merge:
2905 if merge:
2906 ui.warn(_('(the -m/--merge option is deprecated; '
2906 ui.warn(_('(the -m/--merge option is deprecated; '
2907 'use the merge command instead)\n'))
2907 'use the merge command instead)\n'))
2908 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2908 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2909
2909
2910 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2910 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2911 branch=None, **opts):
2911 branch=None, **opts):
2912 if branch:
2912 if branch:
2913 br = repo.branchlookup(branch=branch)
2913 br = repo.branchlookup(branch=branch)
2914 found = []
2914 found = []
2915 for x in br:
2915 for x in br:
2916 if branch in br[x]:
2916 if branch in br[x]:
2917 found.append(x)
2917 found.append(x)
2918 if len(found) > 1:
2918 if len(found) > 1:
2919 ui.warn(_("Found multiple heads for %s\n") % branch)
2919 ui.warn(_("Found multiple heads for %s\n") % branch)
2920 for x in found:
2920 for x in found:
2921 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2921 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2922 return 1
2922 return 1
2923 if len(found) == 1:
2923 if len(found) == 1:
2924 node = found[0]
2924 node = found[0]
2925 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2925 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2926 else:
2926 else:
2927 ui.warn(_("branch %s not found\n") % (branch))
2927 ui.warn(_("branch %s not found\n") % (branch))
2928 return 1
2928 return 1
2929 else:
2929 else:
2930 node = node and repo.lookup(node) or repo.changelog.tip()
2930 node = node and repo.lookup(node) or repo.changelog.tip()
2931 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2931 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2932
2932
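# Editor's note: a standalone sketch (not part of the original source) of the
# branch-head selection in doupdate() above.  branch_heads stands in for the
# {node: [branch names]} mapping returned by repo.branchlookup(); an update
# proceeds only when exactly one head carries the requested branch name.
def pick_branch_head(branch, branch_heads):
    found = [node for node, names in branch_heads.items() if branch in names]
    if len(found) == 1:
        return found[0]
    return None        # no head: unknown branch; several heads: ambiguous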
2933 def verify(ui, repo):
2933 def verify(ui, repo):
2934 """verify the integrity of the repository
2934 """verify the integrity of the repository
2935
2935
2936 Verify the integrity of the current repository.
2936 Verify the integrity of the current repository.
2937
2937
2938 This will perform an extensive check of the repository's
2938 This will perform an extensive check of the repository's
2939 integrity, validating the hashes and checksums of each entry in
2939 integrity, validating the hashes and checksums of each entry in
2940 the changelog, manifest, and tracked files, as well as the
2940 the changelog, manifest, and tracked files, as well as the
2941 integrity of their crosslinks and indices.
2941 integrity of their crosslinks and indices.
2942 """
2942 """
2943 return repo.verify()
2943 return repo.verify()
2944
2944
2945 # Command options and aliases are listed here, alphabetically
2945 # Command options and aliases are listed here, alphabetically
2946
2946
2947 table = {
2947 table = {
2948 "^add":
2948 "^add":
2949 (add,
2949 (add,
2950 [('I', 'include', [], _('include names matching the given patterns')),
2950 [('I', 'include', [], _('include names matching the given patterns')),
2951 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2951 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2952 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2952 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2953 _('hg add [OPTION]... [FILE]...')),
2953 _('hg add [OPTION]... [FILE]...')),
2954 "debugaddremove|addremove":
2954 "debugaddremove|addremove":
2955 (addremove,
2955 (addremove,
2956 [('I', 'include', [], _('include names matching the given patterns')),
2956 [('I', 'include', [], _('include names matching the given patterns')),
2957 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2957 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2958 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2958 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2959 _('hg addremove [OPTION]... [FILE]...')),
2959 _('hg addremove [OPTION]... [FILE]...')),
2960 "^annotate":
2960 "^annotate":
2961 (annotate,
2961 (annotate,
2962 [('r', 'rev', '', _('annotate the specified revision')),
2962 [('r', 'rev', '', _('annotate the specified revision')),
2963 ('a', 'text', None, _('treat all files as text')),
2963 ('a', 'text', None, _('treat all files as text')),
2964 ('u', 'user', None, _('list the author')),
2964 ('u', 'user', None, _('list the author')),
2965 ('d', 'date', None, _('list the date')),
2965 ('d', 'date', None, _('list the date')),
2966 ('n', 'number', None, _('list the revision number (default)')),
2966 ('n', 'number', None, _('list the revision number (default)')),
2967 ('c', 'changeset', None, _('list the changeset')),
2967 ('c', 'changeset', None, _('list the changeset')),
2968 ('I', 'include', [], _('include names matching the given patterns')),
2968 ('I', 'include', [], _('include names matching the given patterns')),
2969 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2969 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2970 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2970 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2971 "archive":
2971 "archive":
2972 (archive,
2972 (archive,
2973 [('', 'no-decode', None, _('do not pass files through decoders')),
2973 [('', 'no-decode', None, _('do not pass files through decoders')),
2974 ('p', 'prefix', '', _('directory prefix for files in archive')),
2974 ('p', 'prefix', '', _('directory prefix for files in archive')),
2975 ('r', 'rev', '', _('revision to distribute')),
2975 ('r', 'rev', '', _('revision to distribute')),
2976 ('t', 'type', '', _('type of distribution to create')),
2976 ('t', 'type', '', _('type of distribution to create')),
2977 ('I', 'include', [], _('include names matching the given patterns')),
2977 ('I', 'include', [], _('include names matching the given patterns')),
2978 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2978 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2979 _('hg archive [OPTION]... DEST')),
2979 _('hg archive [OPTION]... DEST')),
2980 "backout":
2980 "backout":
2981 (backout,
2981 (backout,
2982 [('', 'merge', None,
2982 [('', 'merge', None,
2983 _('merge with old dirstate parent after backout')),
2983 _('merge with old dirstate parent after backout')),
2984 ('m', 'message', '', _('use <text> as commit message')),
2984 ('m', 'message', '', _('use <text> as commit message')),
2985 ('l', 'logfile', '', _('read commit message from <file>')),
2985 ('l', 'logfile', '', _('read commit message from <file>')),
2986 ('d', 'date', '', _('record datecode as commit date')),
2986 ('d', 'date', '', _('record datecode as commit date')),
2987 ('', 'parent', '', _('parent to choose when backing out merge')),
2987 ('', 'parent', '', _('parent to choose when backing out merge')),
2988 ('u', 'user', '', _('record user as committer')),
2988 ('u', 'user', '', _('record user as committer')),
2989 ('I', 'include', [], _('include names matching the given patterns')),
2989 ('I', 'include', [], _('include names matching the given patterns')),
2990 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2990 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2991 _('hg backout [OPTION]... REV')),
2991 _('hg backout [OPTION]... REV')),
2992 "bundle":
2992 "bundle":
2993 (bundle,
2993 (bundle,
2994 [('f', 'force', None,
2994 [('f', 'force', None,
2995 _('run even when remote repository is unrelated'))],
2995 _('run even when remote repository is unrelated'))],
2996 _('hg bundle FILE DEST')),
2996 _('hg bundle FILE DEST')),
2997 "cat":
2997 "cat":
2998 (cat,
2998 (cat,
2999 [('o', 'output', '', _('print output to file with formatted name')),
2999 [('o', 'output', '', _('print output to file with formatted name')),
3000 ('r', 'rev', '', _('print the given revision')),
3000 ('r', 'rev', '', _('print the given revision')),
3001 ('I', 'include', [], _('include names matching the given patterns')),
3001 ('I', 'include', [], _('include names matching the given patterns')),
3002 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3002 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3003 _('hg cat [OPTION]... FILE...')),
3003 _('hg cat [OPTION]... FILE...')),
3004 "^clone":
3004 "^clone":
3005 (clone,
3005 (clone,
3006 [('U', 'noupdate', None, _('do not update the new working directory')),
3006 [('U', 'noupdate', None, _('do not update the new working directory')),
3007 ('r', 'rev', [],
3007 ('r', 'rev', [],
3008 _('a changeset you would like to have after cloning')),
3008 _('a changeset you would like to have after cloning')),
3009 ('', 'pull', None, _('use pull protocol to copy metadata')),
3009 ('', 'pull', None, _('use pull protocol to copy metadata')),
3010 ('', 'uncompressed', None,
3010 ('', 'uncompressed', None,
3011 _('use uncompressed transfer (fast over LAN)')),
3011 _('use uncompressed transfer (fast over LAN)')),
3012 ('e', 'ssh', '', _('specify ssh command to use')),
3012 ('e', 'ssh', '', _('specify ssh command to use')),
3013 ('', 'remotecmd', '',
3013 ('', 'remotecmd', '',
3014 _('specify hg command to run on the remote side'))],
3014 _('specify hg command to run on the remote side'))],
3015 _('hg clone [OPTION]... SOURCE [DEST]')),
3015 _('hg clone [OPTION]... SOURCE [DEST]')),
3016 "^commit|ci":
3016 "^commit|ci":
3017 (commit,
3017 (commit,
3018 [('A', 'addremove', None,
3018 [('A', 'addremove', None,
3019 _('mark new/missing files as added/removed before committing')),
3019 _('mark new/missing files as added/removed before committing')),
3020 ('m', 'message', '', _('use <text> as commit message')),
3020 ('m', 'message', '', _('use <text> as commit message')),
3021 ('l', 'logfile', '', _('read the commit message from <file>')),
3021 ('l', 'logfile', '', _('read the commit message from <file>')),
3022 ('d', 'date', '', _('record datecode as commit date')),
3022 ('d', 'date', '', _('record datecode as commit date')),
3023 ('u', 'user', '', _('record user as committer')),
3023 ('u', 'user', '', _('record user as committer')),
3024 ('I', 'include', [], _('include names matching the given patterns')),
3024 ('I', 'include', [], _('include names matching the given patterns')),
3025 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3025 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3026 _('hg commit [OPTION]... [FILE]...')),
3026 _('hg commit [OPTION]... [FILE]...')),
3027 "copy|cp":
3027 "copy|cp":
3028 (copy,
3028 (copy,
3029 [('A', 'after', None, _('record a copy that has already occurred')),
3029 [('A', 'after', None, _('record a copy that has already occurred')),
3030 ('f', 'force', None,
3030 ('f', 'force', None,
3031 _('forcibly copy over an existing managed file')),
3031 _('forcibly copy over an existing managed file')),
3032 ('I', 'include', [], _('include names matching the given patterns')),
3032 ('I', 'include', [], _('include names matching the given patterns')),
3033 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3033 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3034 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3034 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3035 _('hg copy [OPTION]... [SOURCE]... DEST')),
3035 _('hg copy [OPTION]... [SOURCE]... DEST')),
3036 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
3036 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
3037 "debugcomplete":
3037 "debugcomplete":
3038 (debugcomplete,
3038 (debugcomplete,
3039 [('o', 'options', None, _('show the command options'))],
3039 [('o', 'options', None, _('show the command options'))],
3040 _('debugcomplete [-o] CMD')),
3040 _('debugcomplete [-o] CMD')),
3041 "debugrebuildstate":
3041 "debugrebuildstate":
3042 (debugrebuildstate,
3042 (debugrebuildstate,
3043 [('r', 'rev', '', _('revision to rebuild to'))],
3043 [('r', 'rev', '', _('revision to rebuild to'))],
3044 _('debugrebuildstate [-r REV] [REV]')),
3044 _('debugrebuildstate [-r REV] [REV]')),
3045 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3045 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3046 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
3046 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
3047 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3047 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3048 "debugstate": (debugstate, [], _('debugstate')),
3048 "debugstate": (debugstate, [], _('debugstate')),
3049 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3049 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3050 "debugindex": (debugindex, [], _('debugindex FILE')),
3050 "debugindex": (debugindex, [], _('debugindex FILE')),
3051 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3051 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3052 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3052 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3053 "debugwalk":
3053 "debugwalk":
3054 (debugwalk,
3054 (debugwalk,
3055 [('I', 'include', [], _('include names matching the given patterns')),
3055 [('I', 'include', [], _('include names matching the given patterns')),
3056 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3056 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3057 _('debugwalk [OPTION]... [FILE]...')),
3057 _('debugwalk [OPTION]... [FILE]...')),
3058 "^diff":
3058 "^diff":
3059 (diff,
3059 (diff,
3060 [('r', 'rev', [], _('revision')),
3060 [('r', 'rev', [], _('revision')),
3061 ('a', 'text', None, _('treat all files as text')),
3061 ('a', 'text', None, _('treat all files as text')),
3062 ('p', 'show-function', None,
3062 ('p', 'show-function', None,
3063 _('show which function each change is in')),
3063 _('show which function each change is in')),
3064 ('w', 'ignore-all-space', None,
3064 ('w', 'ignore-all-space', None,
3065 _('ignore white space when comparing lines')),
3065 _('ignore white space when comparing lines')),
3066 ('b', 'ignore-space-change', None,
3066 ('b', 'ignore-space-change', None,
3067 _('ignore changes in the amount of white space')),
3067 _('ignore changes in the amount of white space')),
3068 ('B', 'ignore-blank-lines', None,
3068 ('B', 'ignore-blank-lines', None,
3069 _('ignore changes whose lines are all blank')),
3069 _('ignore changes whose lines are all blank')),
3070 ('I', 'include', [], _('include names matching the given patterns')),
3070 ('I', 'include', [], _('include names matching the given patterns')),
3071 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3071 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3072 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3072 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3073 "^export":
3073 "^export":
3074 (export,
3074 (export,
3075 [('o', 'output', '', _('print output to file with formatted name')),
3075 [('o', 'output', '', _('print output to file with formatted name')),
3076 ('a', 'text', None, _('treat all files as text')),
3076 ('a', 'text', None, _('treat all files as text')),
3077 ('', 'switch-parent', None, _('diff against the second parent'))],
3077 ('', 'switch-parent', None, _('diff against the second parent'))],
3078 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3078 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3079 "debugforget|forget":
3079 "debugforget|forget":
3080 (forget,
3080 (forget,
3081 [('I', 'include', [], _('include names matching the given patterns')),
3081 [('I', 'include', [], _('include names matching the given patterns')),
3082 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3082 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3083 _('hg forget [OPTION]... FILE...')),
3083 _('hg forget [OPTION]... FILE...')),
3084 "grep":
3084 "grep":
3085 (grep,
3085 (grep,
3086 [('0', 'print0', None, _('end fields with NUL')),
3086 [('0', 'print0', None, _('end fields with NUL')),
3087 ('', 'all', None, _('print all revisions that match')),
3087 ('', 'all', None, _('print all revisions that match')),
3088 ('i', 'ignore-case', None, _('ignore case when matching')),
3088 ('i', 'ignore-case', None, _('ignore case when matching')),
3089 ('l', 'files-with-matches', None,
3089 ('l', 'files-with-matches', None,
3090 _('print only filenames and revs that match')),
3090 _('print only filenames and revs that match')),
3091 ('n', 'line-number', None, _('print matching line numbers')),
3091 ('n', 'line-number', None, _('print matching line numbers')),
3092 ('r', 'rev', [], _('search in given revision range')),
3092 ('r', 'rev', [], _('search in given revision range')),
3093 ('u', 'user', None, _('print user who committed change')),
3093 ('u', 'user', None, _('print user who committed change')),
3094 ('I', 'include', [], _('include names matching the given patterns')),
3094 ('I', 'include', [], _('include names matching the given patterns')),
3095 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3095 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3096 _('hg grep [OPTION]... PATTERN [FILE]...')),
3096 _('hg grep [OPTION]... PATTERN [FILE]...')),
3097 "heads":
3097 "heads":
3098 (heads,
3098 (heads,
3099 [('b', 'branches', None, _('show branches')),
3099 [('b', 'branches', None, _('show branches')),
3100 ('', 'style', '', _('display using template map file')),
3100 ('', 'style', '', _('display using template map file')),
3101 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3101 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3102 ('', 'template', '', _('display with template'))],
3102 ('', 'template', '', _('display with template'))],
3103 _('hg heads [-b] [-r <rev>]')),
3103 _('hg heads [-b] [-r <rev>]')),
3104 "help": (help_, [], _('hg help [COMMAND]')),
3104 "help": (help_, [], _('hg help [COMMAND]')),
3105 "identify|id": (identify, [], _('hg identify')),
3105 "identify|id": (identify, [], _('hg identify')),
3106 "import|patch":
3106 "import|patch":
3107 (import_,
3107 (import_,
3108 [('p', 'strip', 1,
3108 [('p', 'strip', 1,
3109 _('directory strip option for patch. This has the same\n'
3109 _('directory strip option for patch. This has the same\n'
3110 'meaning as the corresponding patch option')),
3110 'meaning as the corresponding patch option')),
3111 ('m', 'message', '', _('use <text> as commit message')),
3111 ('m', 'message', '', _('use <text> as commit message')),
3112 ('b', 'base', '', _('base path')),
3112 ('b', 'base', '', _('base path')),
3113 ('f', 'force', None,
3113 ('f', 'force', None,
3114 _('skip check for outstanding uncommitted changes'))],
3114 _('skip check for outstanding uncommitted changes'))],
3115 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3115 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3116 "incoming|in": (incoming,
3116 "incoming|in": (incoming,
3117 [('M', 'no-merges', None, _('do not show merges')),
3117 [('M', 'no-merges', None, _('do not show merges')),
3118 ('f', 'force', None,
3118 ('f', 'force', None,
3119 _('run even when remote repository is unrelated')),
3119 _('run even when remote repository is unrelated')),
3120 ('', 'style', '', _('display using template map file')),
3120 ('', 'style', '', _('display using template map file')),
3121 ('n', 'newest-first', None, _('show newest record first')),
3121 ('n', 'newest-first', None, _('show newest record first')),
3122 ('', 'bundle', '', _('file to store the bundles into')),
3122 ('', 'bundle', '', _('file to store the bundles into')),
3123 ('p', 'patch', None, _('show patch')),
3123 ('p', 'patch', None, _('show patch')),
3124 ('r', 'rev', [], _('a specific revision you would like to pull')),
3124 ('r', 'rev', [], _('a specific revision you would like to pull')),
3125 ('', 'template', '', _('display with template')),
3125 ('', 'template', '', _('display with template')),
3126 ('e', 'ssh', '', _('specify ssh command to use')),
3126 ('e', 'ssh', '', _('specify ssh command to use')),
3127 ('', 'remotecmd', '',
3127 ('', 'remotecmd', '',
3128 _('specify hg command to run on the remote side'))],
3128 _('specify hg command to run on the remote side'))],
3129 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3129 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3130 ' [--bundle FILENAME] [SOURCE]')),
3130 ' [--bundle FILENAME] [SOURCE]')),
3131 "^init":
3131 "^init":
3132 (init,
3132 (init,
3133 [('e', 'ssh', '', _('specify ssh command to use')),
3133 [('e', 'ssh', '', _('specify ssh command to use')),
3134 ('', 'remotecmd', '',
3134 ('', 'remotecmd', '',
3135 _('specify hg command to run on the remote side'))],
3135 _('specify hg command to run on the remote side'))],
3136 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3136 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3137 "locate":
3137 "locate":
3138 (locate,
3138 (locate,
3139 [('r', 'rev', '', _('search the repository as it stood at rev')),
3139 [('r', 'rev', '', _('search the repository as it stood at rev')),
3140 ('0', 'print0', None,
3140 ('0', 'print0', None,
3141 _('end filenames with NUL, for use with xargs')),
3141 _('end filenames with NUL, for use with xargs')),
3142 ('f', 'fullpath', None,
3142 ('f', 'fullpath', None,
3143 _('print complete paths from the filesystem root')),
3143 _('print complete paths from the filesystem root')),
3144 ('I', 'include', [], _('include names matching the given patterns')),
3144 ('I', 'include', [], _('include names matching the given patterns')),
3145 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3145 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3146 _('hg locate [OPTION]... [PATTERN]...')),
3146 _('hg locate [OPTION]... [PATTERN]...')),
3147 "^log|history":
3147 "^log|history":
3148 (log,
3148 (log,
3149 [('b', 'branches', None, _('show branches')),
3149 [('b', 'branches', None, _('show branches')),
3150 ('f', 'follow', None,
3150 ('f', 'follow', None,
3151 _('follow changeset history, or file history across copies and renames')),
3151 _('follow changeset history, or file history across copies and renames')),
3152 ('', 'follow-first', None,
3152 ('', 'follow-first', None,
3153 _('only follow the first parent of merge changesets')),
3153 _('only follow the first parent of merge changesets')),
3154 ('k', 'keyword', [], _('search for a keyword')),
3154 ('k', 'keyword', [], _('search for a keyword')),
3155 ('l', 'limit', '', _('limit number of changes displayed')),
3155 ('l', 'limit', '', _('limit number of changes displayed')),
3156 ('r', 'rev', [], _('show the specified revision or range')),
3156 ('r', 'rev', [], _('show the specified revision or range')),
3157 ('M', 'no-merges', None, _('do not show merges')),
3157 ('M', 'no-merges', None, _('do not show merges')),
3158 ('', 'style', '', _('display using template map file')),
3158 ('', 'style', '', _('display using template map file')),
3159 ('m', 'only-merges', None, _('show only merges')),
3159 ('m', 'only-merges', None, _('show only merges')),
3160 ('p', 'patch', None, _('show patch')),
3160 ('p', 'patch', None, _('show patch')),
3161 ('', 'template', '', _('display with template')),
3161 ('', 'template', '', _('display with template')),
3162 ('I', 'include', [], _('include names matching the given patterns')),
3162 ('I', 'include', [], _('include names matching the given patterns')),
3163 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3163 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3164 _('hg log [OPTION]... [FILE]')),
3164 _('hg log [OPTION]... [FILE]')),
3165 "manifest": (manifest, [], _('hg manifest [REV]')),
3165 "manifest": (manifest, [], _('hg manifest [REV]')),
3166 "merge":
3166 "merge":
3167 (merge,
3167 (merge,
3168 [('b', 'branch', '', _('merge with head of a specific branch')),
3168 [('b', 'branch', '', _('merge with head of a specific branch')),
3169 ('f', 'force', None, _('force a merge with outstanding changes'))],
3169 ('f', 'force', None, _('force a merge with outstanding changes'))],
3170 _('hg merge [-b TAG] [-f] [REV]')),
3170 _('hg merge [-b TAG] [-f] [REV]')),
3171 "outgoing|out": (outgoing,
3171 "outgoing|out": (outgoing,
3172 [('M', 'no-merges', None, _('do not show merges')),
3172 [('M', 'no-merges', None, _('do not show merges')),
3173 ('f', 'force', None,
3173 ('f', 'force', None,
3174 _('run even when remote repository is unrelated')),
3174 _('run even when remote repository is unrelated')),
3175 ('p', 'patch', None, _('show patch')),
3175 ('p', 'patch', None, _('show patch')),
3176 ('', 'style', '', _('display using template map file')),
3176 ('', 'style', '', _('display using template map file')),
3177 ('r', 'rev', [], _('a specific revision you would like to push')),
3177 ('r', 'rev', [], _('a specific revision you would like to push')),
3178 ('n', 'newest-first', None, _('show newest record first')),
3178 ('n', 'newest-first', None, _('show newest record first')),
3179 ('', 'template', '', _('display with template')),
3179 ('', 'template', '', _('display with template')),
3180 ('e', 'ssh', '', _('specify ssh command to use')),
3180 ('e', 'ssh', '', _('specify ssh command to use')),
3181 ('', 'remotecmd', '',
3181 ('', 'remotecmd', '',
3182 _('specify hg command to run on the remote side'))],
3182 _('specify hg command to run on the remote side'))],
3183 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3183 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3184 "^parents":
3184 "^parents":
3185 (parents,
3185 (parents,
3186 [('b', 'branches', None, _('show branches')),
3186 [('b', 'branches', None, _('show branches')),
3187 ('r', 'rev', '', _('show parents from the specified rev')),
3187 ('r', 'rev', '', _('show parents from the specified rev')),
3188 ('', 'style', '', _('display using template map file')),
3188 ('', 'style', '', _('display using template map file')),
3189 ('', 'template', '', _('display with template'))],
3189 ('', 'template', '', _('display with template'))],
3190 _('hg parents [-b] [-r REV] [FILE]')),
3190 _('hg parents [-b] [-r REV] [FILE]')),
3191 "paths": (paths, [], _('hg paths [NAME]')),
3191 "paths": (paths, [], _('hg paths [NAME]')),
3192 "^pull":
3192 "^pull":
3193 (pull,
3193 (pull,
3194 [('u', 'update', None,
3194 [('u', 'update', None,
3195 _('update the working directory to tip after pull')),
3195 _('update the working directory to tip after pull')),
3196 ('e', 'ssh', '', _('specify ssh command to use')),
3196 ('e', 'ssh', '', _('specify ssh command to use')),
3197 ('f', 'force', None,
3197 ('f', 'force', None,
3198 _('run even when remote repository is unrelated')),
3198 _('run even when remote repository is unrelated')),
3199 ('r', 'rev', [], _('a specific revision you would like to pull')),
3199 ('r', 'rev', [], _('a specific revision you would like to pull')),
3200 ('', 'remotecmd', '',
3200 ('', 'remotecmd', '',
3201 _('specify hg command to run on the remote side'))],
3201 _('specify hg command to run on the remote side'))],
3202 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3202 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3203 "^push":
3203 "^push":
3204 (push,
3204 (push,
3205 [('f', 'force', None, _('force push')),
3205 [('f', 'force', None, _('force push')),
3206 ('e', 'ssh', '', _('specify ssh command to use')),
3206 ('e', 'ssh', '', _('specify ssh command to use')),
3207 ('r', 'rev', [], _('a specific revision you would like to push')),
3207 ('r', 'rev', [], _('a specific revision you would like to push')),
3208 ('', 'remotecmd', '',
3208 ('', 'remotecmd', '',
3209 _('specify hg command to run on the remote side'))],
3209 _('specify hg command to run on the remote side'))],
3210 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3210 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3211 "debugrawcommit|rawcommit":
3211 "debugrawcommit|rawcommit":
3212 (rawcommit,
3212 (rawcommit,
3213 [('p', 'parent', [], _('parent')),
3213 [('p', 'parent', [], _('parent')),
3214 ('d', 'date', '', _('date code')),
3214 ('d', 'date', '', _('date code')),
3215 ('u', 'user', '', _('user')),
3215 ('u', 'user', '', _('user')),
3216 ('F', 'files', '', _('file list')),
3216 ('F', 'files', '', _('file list')),
3217 ('m', 'message', '', _('commit message')),
3217 ('m', 'message', '', _('commit message')),
3218 ('l', 'logfile', '', _('commit message file'))],
3218 ('l', 'logfile', '', _('commit message file'))],
3219 _('hg debugrawcommit [OPTION]... [FILE]...')),
3219 _('hg debugrawcommit [OPTION]... [FILE]...')),
3220 "recover": (recover, [], _('hg recover')),
3220 "recover": (recover, [], _('hg recover')),
3221 "^remove|rm":
3221 "^remove|rm":
3222 (remove,
3222 (remove,
3223 [('A', 'after', None, _('record remove that has already occurred')),
3223 [('A', 'after', None, _('record remove that has already occurred')),
3224 ('f', 'force', None, _('remove file even if modified')),
3224 ('f', 'force', None, _('remove file even if modified')),
3225 ('I', 'include', [], _('include names matching the given patterns')),
3225 ('I', 'include', [], _('include names matching the given patterns')),
3226 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3226 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3227 _('hg remove [OPTION]... FILE...')),
3227 _('hg remove [OPTION]... FILE...')),
3228 "rename|mv":
3228 "rename|mv":
3229 (rename,
3229 (rename,
3230 [('A', 'after', None, _('record a rename that has already occurred')),
3230 [('A', 'after', None, _('record a rename that has already occurred')),
3231 ('f', 'force', None,
3231 ('f', 'force', None,
3232 _('forcibly copy over an existing managed file')),
3232 _('forcibly copy over an existing managed file')),
3233 ('I', 'include', [], _('include names matching the given patterns')),
3233 ('I', 'include', [], _('include names matching the given patterns')),
3234 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3234 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3235 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3235 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3236 _('hg rename [OPTION]... SOURCE... DEST')),
3236 _('hg rename [OPTION]... SOURCE... DEST')),
3237 "^revert":
3237 "^revert":
3238 (revert,
3238 (revert,
3239 [('r', 'rev', '', _('revision to revert to')),
3239 [('r', 'rev', '', _('revision to revert to')),
3240 ('', 'no-backup', None, _('do not save backup copies of files')),
3240 ('', 'no-backup', None, _('do not save backup copies of files')),
3241 ('I', 'include', [], _('include names matching given patterns')),
3241 ('I', 'include', [], _('include names matching given patterns')),
3242 ('X', 'exclude', [], _('exclude names matching given patterns')),
3242 ('X', 'exclude', [], _('exclude names matching given patterns')),
3243 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3243 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3244 _('hg revert [-r REV] [NAME]...')),
3244 _('hg revert [-r REV] [NAME]...')),
3245 "rollback": (rollback, [], _('hg rollback')),
3245 "rollback": (rollback, [], _('hg rollback')),
3246 "root": (root, [], _('hg root')),
3246 "root": (root, [], _('hg root')),
3247 "^serve":
3247 "^serve":
3248 (serve,
3248 (serve,
3249 [('A', 'accesslog', '', _('name of access log file to write to')),
3249 [('A', 'accesslog', '', _('name of access log file to write to')),
3250 ('d', 'daemon', None, _('run server in background')),
3250 ('d', 'daemon', None, _('run server in background')),
3251 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3251 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3252 ('E', 'errorlog', '', _('name of error log file to write to')),
3252 ('E', 'errorlog', '', _('name of error log file to write to')),
3253 ('p', 'port', 0, _('port to use (default: 8000)')),
3253 ('p', 'port', 0, _('port to use (default: 8000)')),
3254 ('a', 'address', '', _('address to use')),
3254 ('a', 'address', '', _('address to use')),
3255 ('n', 'name', '',
3255 ('n', 'name', '',
3256 _('name to show in web pages (default: working dir)')),
3256 _('name to show in web pages (default: working dir)')),
3257 ('', 'webdir-conf', '', _('name of the webdir config file'
3257 ('', 'webdir-conf', '', _('name of the webdir config file'
3258 ' (serve more than one repo)')),
3258 ' (serve more than one repo)')),
3259 ('', 'pid-file', '', _('name of file to write process ID to')),
3259 ('', 'pid-file', '', _('name of file to write process ID to')),
3260 ('', 'stdio', None, _('for remote clients')),
3260 ('', 'stdio', None, _('for remote clients')),
3261 ('t', 'templates', '', _('web templates to use')),
3261 ('t', 'templates', '', _('web templates to use')),
3262 ('', 'style', '', _('template style to use')),
3262 ('', 'style', '', _('template style to use')),
3263 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3263 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3264 _('hg serve [OPTION]...')),
3264 _('hg serve [OPTION]...')),
3265 "^status|st":
3265 "^status|st":
3266 (status,
3266 (status,
3267 [('A', 'all', None, _('show status of all files')),
3267 [('A', 'all', None, _('show status of all files')),
3268 ('m', 'modified', None, _('show only modified files')),
3268 ('m', 'modified', None, _('show only modified files')),
3269 ('a', 'added', None, _('show only added files')),
3269 ('a', 'added', None, _('show only added files')),
3270 ('r', 'removed', None, _('show only removed files')),
3270 ('r', 'removed', None, _('show only removed files')),
3271 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3271 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3272 ('c', 'clean', None, _('show only files without changes')),
3272 ('c', 'clean', None, _('show only files without changes')),
3273 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3273 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3274 ('i', 'ignored', None, _('show ignored files')),
3274 ('i', 'ignored', None, _('show ignored files')),
3275 ('n', 'no-status', None, _('hide status prefix')),
3275 ('n', 'no-status', None, _('hide status prefix')),
3276 ('C', 'copies', None, _('show source of copied files')),
3276 ('C', 'copies', None, _('show source of copied files')),
3277 ('0', 'print0', None,
3277 ('0', 'print0', None,
3278 _('end filenames with NUL, for use with xargs')),
3278 _('end filenames with NUL, for use with xargs')),
3279 ('I', 'include', [], _('include names matching the given patterns')),
3279 ('I', 'include', [], _('include names matching the given patterns')),
3280 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3280 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3281 _('hg status [OPTION]... [FILE]...')),
3281 _('hg status [OPTION]... [FILE]...')),
3282 "tag":
3282 "tag":
3283 (tag,
3283 (tag,
3284 [('l', 'local', None, _('make the tag local')),
3284 [('l', 'local', None, _('make the tag local')),
3285 ('m', 'message', '', _('message for tag commit log entry')),
3285 ('m', 'message', '', _('message for tag commit log entry')),
3286 ('d', 'date', '', _('record datecode as commit date')),
3286 ('d', 'date', '', _('record datecode as commit date')),
3287 ('u', 'user', '', _('record user as committer')),
3287 ('u', 'user', '', _('record user as committer')),
3288 ('r', 'rev', '', _('revision to tag'))],
3288 ('r', 'rev', '', _('revision to tag'))],
3289 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3289 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3290 "tags": (tags, [], _('hg tags')),
3290 "tags": (tags, [], _('hg tags')),
3291 "tip":
3291 "tip":
3292 (tip,
3292 (tip,
3293 [('b', 'branches', None, _('show branches')),
3293 [('b', 'branches', None, _('show branches')),
3294 ('', 'style', '', _('display using template map file')),
3294 ('', 'style', '', _('display using template map file')),
3295 ('p', 'patch', None, _('show patch')),
3295 ('p', 'patch', None, _('show patch')),
3296 ('', 'template', '', _('display with template'))],
3296 ('', 'template', '', _('display with template'))],
3297 _('hg tip [-b] [-p]')),
3297 _('hg tip [-b] [-p]')),
3298 "unbundle":
3298 "unbundle":
3299 (unbundle,
3299 (unbundle,
3300 [('u', 'update', None,
3300 [('u', 'update', None,
3301 _('update the working directory to tip after unbundle'))],
3301 _('update the working directory to tip after unbundle'))],
3302 _('hg unbundle [-u] FILE')),
3302 _('hg unbundle [-u] FILE')),
3303 "debugundo|undo": (undo, [], _('hg undo')),
3303 "debugundo|undo": (undo, [], _('hg undo')),
3304 "^update|up|checkout|co":
3304 "^update|up|checkout|co":
3305 (update,
3305 (update,
3306 [('b', 'branch', '', _('checkout the head of a specific branch')),
3306 [('b', 'branch', '', _('checkout the head of a specific branch')),
3307 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3307 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3308 ('C', 'clean', None, _('overwrite locally modified files')),
3308 ('C', 'clean', None, _('overwrite locally modified files')),
3309 ('f', 'force', None, _('force a merge with outstanding changes'))],
3309 ('f', 'force', None, _('force a merge with outstanding changes'))],
3310 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3310 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3311 "verify": (verify, [], _('hg verify')),
3311 "verify": (verify, [], _('hg verify')),
3312 "version": (show_version, [], _('hg version')),
3312 "version": (show_version, [], _('hg version')),
3313 }
3313 }
3314
3314
3315 globalopts = [
3315 globalopts = [
3316 ('R', 'repository', '',
3316 ('R', 'repository', '',
3317 _('repository root directory or symbolic path name')),
3317 _('repository root directory or symbolic path name')),
3318 ('', 'cwd', '', _('change working directory')),
3318 ('', 'cwd', '', _('change working directory')),
3319 ('y', 'noninteractive', None,
3319 ('y', 'noninteractive', None,
3320 _('do not prompt, assume \'yes\' for any required answers')),
3320 _('do not prompt, assume \'yes\' for any required answers')),
3321 ('q', 'quiet', None, _('suppress output')),
3321 ('q', 'quiet', None, _('suppress output')),
3322 ('v', 'verbose', None, _('enable additional output')),
3322 ('v', 'verbose', None, _('enable additional output')),
3323 ('', 'config', [], _('set/override config option')),
3323 ('', 'config', [], _('set/override config option')),
3324 ('', 'debug', None, _('enable debugging output')),
3324 ('', 'debug', None, _('enable debugging output')),
3325 ('', 'debugger', None, _('start debugger')),
3325 ('', 'debugger', None, _('start debugger')),
3326 ('', 'lsprof', None, _('print improved command execution profile')),
3326 ('', 'lsprof', None, _('print improved command execution profile')),
3327 ('', 'traceback', None, _('print traceback on exception')),
3327 ('', 'traceback', None, _('print traceback on exception')),
3328 ('', 'time', None, _('time how long the command takes')),
3328 ('', 'time', None, _('time how long the command takes')),
3329 ('', 'profile', None, _('print command execution profile')),
3329 ('', 'profile', None, _('print command execution profile')),
3330 ('', 'version', None, _('output version information and exit')),
3330 ('', 'version', None, _('output version information and exit')),
3331 ('h', 'help', None, _('display help and exit')),
3331 ('h', 'help', None, _('display help and exit')),
3332 ]
3332 ]
3333
3333
3334 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3334 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3335 " debugindex debugindexdot")
3335 " debugindex debugindexdot")
3336 optionalrepo = ("paths serve debugconfig")
3336 optionalrepo = ("paths serve debugconfig")
3337
3337
3338 def findpossible(cmd):
3338 def findpossible(cmd):
3339 """
3339 """
3340 Return cmd -> (aliases, command table entry)
3340 Return cmd -> (aliases, command table entry)
3341 for each matching command.
3341 for each matching command.
3342 Return debug commands (or their aliases) only if no normal command matches.
3342 Return debug commands (or their aliases) only if no normal command matches.
3343 """
3343 """
3344 choice = {}
3344 choice = {}
3345 debugchoice = {}
3345 debugchoice = {}
3346 for e in table.keys():
3346 for e in table.keys():
3347 aliases = e.lstrip("^").split("|")
3347 aliases = e.lstrip("^").split("|")
3348 found = None
3348 found = None
3349 if cmd in aliases:
3349 if cmd in aliases:
3350 found = cmd
3350 found = cmd
3351 else:
3351 else:
3352 for a in aliases:
3352 for a in aliases:
3353 if a.startswith(cmd):
3353 if a.startswith(cmd):
3354 found = a
3354 found = a
3355 break
3355 break
3356 if found is not None:
3356 if found is not None:
3357 if aliases[0].startswith("debug"):
3357 if aliases[0].startswith("debug"):
3358 debugchoice[found] = (aliases, table[e])
3358 debugchoice[found] = (aliases, table[e])
3359 else:
3359 else:
3360 choice[found] = (aliases, table[e])
3360 choice[found] = (aliases, table[e])
3361
3361
3362 if not choice and debugchoice:
3362 if not choice and debugchoice:
3363 choice = debugchoice
3363 choice = debugchoice
3364
3364
3365 return choice
3365 return choice
3366
3366
3367 def findcmd(cmd):
3367 def findcmd(cmd):
3368 """Return (aliases, command table entry) for command string."""
3368 """Return (aliases, command table entry) for command string."""
3369 choice = findpossible(cmd)
3369 choice = findpossible(cmd)
3370
3370
3371 if choice.has_key(cmd):
3371 if choice.has_key(cmd):
3372 return choice[cmd]
3372 return choice[cmd]
3373
3373
3374 if len(choice) > 1:
3374 if len(choice) > 1:
3375 clist = choice.keys()
3375 clist = choice.keys()
3376 clist.sort()
3376 clist.sort()
3377 raise AmbiguousCommand(cmd, clist)
3377 raise AmbiguousCommand(cmd, clist)
3378
3378
3379 if choice:
3379 if choice:
3380 return choice.values()[0]
3380 return choice.values()[0]
3381
3381
3382 raise UnknownCommand(cmd)
3382 raise UnknownCommand(cmd)
3383
3383
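For reference, the matching rule implemented by findpossible/findcmd boils down to: an exact alias wins, otherwise every alias sharing the typed prefix is collected, and more than one surviving command is reported as ambiguous (the deferral of debug commands is omitted here). A minimal standalone sketch of that rule against a toy table, not the real command table:

    def resolve(cmd, table):
        matches = {}
        for key in table:
            aliases = key.lstrip("^").split("|")
            if cmd in aliases:
                return aliases[0]        # exact alias match wins
            for a in aliases:
                if a.startswith(cmd):
                    matches[aliases[0]] = True
                    break
        if len(matches) > 1:
            raise ValueError("ambiguous: " + " ".join(sorted(matches)))
        if matches:
            return list(matches)[0]
        raise KeyError(cmd)

    toy = {"^status|st": None, "^serve": None, "tag": None, "tags": None}
    resolve("st", toy)     # -> 'status'  (exact alias)
    resolve("se", toy)     # -> 'serve'   (unique prefix)
    resolve("ta", toy)     # raises ValueError: ambiguous: tag tags
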
3384 def catchterm(*args):
3384 def catchterm(*args):
3385 raise util.SignalInterrupt
3385 raise util.SignalInterrupt
3386
3386
3387 def run():
3387 def run():
3388 sys.exit(dispatch(sys.argv[1:]))
3388 sys.exit(dispatch(sys.argv[1:]))
3389
3389
3390 class ParseError(Exception):
3390 class ParseError(Exception):
3391 """Exception raised on errors in parsing the command line."""
3391 """Exception raised on errors in parsing the command line."""
3392
3392
3393 def parse(ui, args):
3393 def parse(ui, args):
3394 options = {}
3394 options = {}
3395 cmdoptions = {}
3395 cmdoptions = {}
3396
3396
3397 try:
3397 try:
3398 args = fancyopts.fancyopts(args, globalopts, options)
3398 args = fancyopts.fancyopts(args, globalopts, options)
3399 except fancyopts.getopt.GetoptError, inst:
3399 except fancyopts.getopt.GetoptError, inst:
3400 raise ParseError(None, inst)
3400 raise ParseError(None, inst)
3401
3401
3402 if args:
3402 if args:
3403 cmd, args = args[0], args[1:]
3403 cmd, args = args[0], args[1:]
3404 aliases, i = findcmd(cmd)
3404 aliases, i = findcmd(cmd)
3405 cmd = aliases[0]
3405 cmd = aliases[0]
3406 defaults = ui.config("defaults", cmd)
3406 defaults = ui.config("defaults", cmd)
3407 if defaults:
3407 if defaults:
3408 args = defaults.split() + args
3408 args = defaults.split() + args
3409 c = list(i[1])
3409 c = list(i[1])
3410 else:
3410 else:
3411 cmd = None
3411 cmd = None
3412 c = []
3412 c = []
3413
3413
3414 # combine global options into local
3414 # combine global options into local
3415 for o in globalopts:
3415 for o in globalopts:
3416 c.append((o[0], o[1], options[o[1]], o[3]))
3416 c.append((o[0], o[1], options[o[1]], o[3]))
3417
3417
3418 try:
3418 try:
3419 args = fancyopts.fancyopts(args, c, cmdoptions)
3419 args = fancyopts.fancyopts(args, c, cmdoptions)
3420 except fancyopts.getopt.GetoptError, inst:
3420 except fancyopts.getopt.GetoptError, inst:
3421 raise ParseError(cmd, inst)
3421 raise ParseError(cmd, inst)
3422
3422
3423 # separate global options back out
3423 # separate global options back out
3424 for o in globalopts:
3424 for o in globalopts:
3425 n = o[1]
3425 n = o[1]
3426 options[n] = cmdoptions[n]
3426 options[n] = cmdoptions[n]
3427 del cmdoptions[n]
3427 del cmdoptions[n]
3428
3428
3429 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3429 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3430
3430
3431 external = {}
3431 external = {}
3432
3432
3433 def findext(name):
3433 def findext(name):
3434 '''return module with given extension name'''
3434 '''return module with given extension name'''
3435 try:
3435 try:
3436 return sys.modules[external[name]]
3436 return sys.modules[external[name]]
3437 except KeyError:
3437 except KeyError:
3438 for k, v in external.iteritems():
3438 for k, v in external.iteritems():
3439 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3439 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3440 return sys.modules[v]
3440 return sys.modules[v]
3441 raise KeyError(name)
3441 raise KeyError(name)
3442
3442
3443 def dispatch(args):
3443 def dispatch(args):
3444 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3444 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3445 num = getattr(signal, name, None)
3445 num = getattr(signal, name, None)
3446 if num: signal.signal(num, catchterm)
3446 if num: signal.signal(num, catchterm)
3447
3447
3448 try:
3448 try:
3449 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3449 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3450 except util.Abort, inst:
3450 except util.Abort, inst:
3451 sys.stderr.write(_("abort: %s\n") % inst)
3451 sys.stderr.write(_("abort: %s\n") % inst)
3452 return -1
3452 return -1
3453
3453
3454 for ext_name, load_from_name in u.extensions():
3454 for ext_name, load_from_name in u.extensions():
3455 try:
3455 try:
3456 if load_from_name:
3456 if load_from_name:
3457 # the module will be loaded in sys.modules
3457 # the module will be loaded in sys.modules
3458 # choose a unique name so that it doesn't
3458 # choose a unique name so that it doesn't
3459 # conflict with other modules
3459 # conflict with other modules
3460 module_name = "hgext_%s" % ext_name.replace('.', '_')
3460 module_name = "hgext_%s" % ext_name.replace('.', '_')
3461 mod = imp.load_source(module_name, load_from_name)
3461 mod = imp.load_source(module_name, load_from_name)
3462 else:
3462 else:
3463 def importh(name):
3463 def importh(name):
3464 mod = __import__(name)
3464 mod = __import__(name)
3465 components = name.split('.')
3465 components = name.split('.')
3466 for comp in components[1:]:
3466 for comp in components[1:]:
3467 mod = getattr(mod, comp)
3467 mod = getattr(mod, comp)
3468 return mod
3468 return mod
3469 try:
3469 try:
3470 mod = importh("hgext.%s" % ext_name)
3470 mod = importh("hgext.%s" % ext_name)
3471 except ImportError:
3471 except ImportError:
3472 mod = importh(ext_name)
3472 mod = importh(ext_name)
3473 external[ext_name] = mod.__name__
3473 external[ext_name] = mod.__name__
3474 except (util.SignalInterrupt, KeyboardInterrupt):
3474 except (util.SignalInterrupt, KeyboardInterrupt):
3475 raise
3475 raise
3476 except Exception, inst:
3476 except Exception, inst:
3477 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3477 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3478 if u.print_exc():
3478 if u.print_exc():
3479 return 1
3479 return 1
3480
3480
3481 for name in external.itervalues():
3481 for name in external.itervalues():
3482 mod = sys.modules[name]
3482 mod = sys.modules[name]
3483 uisetup = getattr(mod, 'uisetup', None)
3483 uisetup = getattr(mod, 'uisetup', None)
3484 if uisetup:
3484 if uisetup:
3485 uisetup(u)
3485 uisetup(u)
3486 cmdtable = getattr(mod, 'cmdtable', {})
3486 cmdtable = getattr(mod, 'cmdtable', {})
3487 for t in cmdtable:
3487 for t in cmdtable:
3488 if t in table:
3488 if t in table:
3489 u.warn(_("module %s overrides %s\n") % (name, t))
3489 u.warn(_("module %s overrides %s\n") % (name, t))
3490 table.update(cmdtable)
3490 table.update(cmdtable)
3491
3491
3492 try:
3492 try:
3493 cmd, func, args, options, cmdoptions = parse(u, args)
3493 cmd, func, args, options, cmdoptions = parse(u, args)
3494 if options["time"]:
3494 if options["time"]:
3495 def get_times():
3495 def get_times():
3496 t = os.times()
3496 t = os.times()
3497 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3497 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3498 t = (t[0], t[1], t[2], t[3], time.clock())
3498 t = (t[0], t[1], t[2], t[3], time.clock())
3499 return t
3499 return t
3500 s = get_times()
3500 s = get_times()
3501 def print_time():
3501 def print_time():
3502 t = get_times()
3502 t = get_times()
3503 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3503 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3504 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3504 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3505 atexit.register(print_time)
3505 atexit.register(print_time)
3506
3506
3507 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3507 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3508 not options["noninteractive"], options["traceback"],
3508 not options["noninteractive"], options["traceback"],
3509 options["config"])
3509 options["config"])
3510
3510
3511 # enter the debugger before command execution
3511 # enter the debugger before command execution
3512 if options['debugger']:
3512 if options['debugger']:
3513 pdb.set_trace()
3513 pdb.set_trace()
3514
3514
3515 try:
3515 try:
3516 if options['cwd']:
3516 if options['cwd']:
3517 try:
3517 try:
3518 os.chdir(options['cwd'])
3518 os.chdir(options['cwd'])
3519 except OSError, inst:
3519 except OSError, inst:
3520 raise util.Abort('%s: %s' %
3520 raise util.Abort('%s: %s' %
3521 (options['cwd'], inst.strerror))
3521 (options['cwd'], inst.strerror))
3522
3522
3523 path = u.expandpath(options["repository"]) or ""
3523 path = u.expandpath(options["repository"]) or ""
3524 repo = path and hg.repository(u, path=path) or None
3524 repo = path and hg.repository(u, path=path) or None
3525
3525
3526 if options['help']:
3526 if options['help']:
3527 return help_(u, cmd, options['version'])
3527 return help_(u, cmd, options['version'])
3528 elif options['version']:
3528 elif options['version']:
3529 return show_version(u)
3529 return show_version(u)
3530 elif not cmd:
3530 elif not cmd:
3531 return help_(u, 'shortlist')
3531 return help_(u, 'shortlist')
3532
3532
3533 if cmd not in norepo.split():
3533 if cmd not in norepo.split():
3534 try:
3534 try:
3535 if not repo:
3535 if not repo:
3536 repo = hg.repository(u, path=path)
3536 repo = hg.repository(u, path=path)
3537 u = repo.ui
3537 u = repo.ui
3538 for name in external.itervalues():
3538 for name in external.itervalues():
3539 mod = sys.modules[name]
3539 mod = sys.modules[name]
3540 if hasattr(mod, 'reposetup'):
3540 if hasattr(mod, 'reposetup'):
3541 mod.reposetup(u, repo)
3541 mod.reposetup(u, repo)
3542 except hg.RepoError:
3542 except hg.RepoError:
3543 if cmd not in optionalrepo.split():
3543 if cmd not in optionalrepo.split():
3544 raise
3544 raise
3545 d = lambda: func(u, repo, *args, **cmdoptions)
3545 d = lambda: func(u, repo, *args, **cmdoptions)
3546 else:
3546 else:
3547 d = lambda: func(u, *args, **cmdoptions)
3547 d = lambda: func(u, *args, **cmdoptions)
3548
3548
3549 # reupdate the options, repo/.hg/hgrc may have changed them
3549 # reupdate the options, repo/.hg/hgrc may have changed them
3550 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3550 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3551 not options["noninteractive"], options["traceback"],
3551 not options["noninteractive"], options["traceback"],
3552 options["config"])
3552 options["config"])
3553
3553
3554 try:
3554 try:
3555 if options['profile']:
3555 if options['profile']:
3556 import hotshot, hotshot.stats
3556 import hotshot, hotshot.stats
3557 prof = hotshot.Profile("hg.prof")
3557 prof = hotshot.Profile("hg.prof")
3558 try:
3558 try:
3559 try:
3559 try:
3560 return prof.runcall(d)
3560 return prof.runcall(d)
3561 except:
3561 except:
3562 try:
3562 try:
3563 u.warn(_('exception raised - generating '
3563 u.warn(_('exception raised - generating '
3564 'profile anyway\n'))
3564 'profile anyway\n'))
3565 except:
3565 except:
3566 pass
3566 pass
3567 raise
3567 raise
3568 finally:
3568 finally:
3569 prof.close()
3569 prof.close()
3570 stats = hotshot.stats.load("hg.prof")
3570 stats = hotshot.stats.load("hg.prof")
3571 stats.strip_dirs()
3571 stats.strip_dirs()
3572 stats.sort_stats('time', 'calls')
3572 stats.sort_stats('time', 'calls')
3573 stats.print_stats(40)
3573 stats.print_stats(40)
3574 elif options['lsprof']:
3574 elif options['lsprof']:
3575 try:
3575 try:
3576 from mercurial import lsprof
3576 from mercurial import lsprof
3577 except ImportError:
3577 except ImportError:
3578 raise util.Abort(_(
3578 raise util.Abort(_(
3579 'lsprof not available - install from '
3579 'lsprof not available - install from '
3580 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3580 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3581 p = lsprof.Profiler()
3581 p = lsprof.Profiler()
3582 p.enable(subcalls=True)
3582 p.enable(subcalls=True)
3583 try:
3583 try:
3584 return d()
3584 return d()
3585 finally:
3585 finally:
3586 p.disable()
3586 p.disable()
3587 stats = lsprof.Stats(p.getstats())
3587 stats = lsprof.Stats(p.getstats())
3588 stats.sort()
3588 stats.sort()
3589 stats.pprint(top=10, file=sys.stderr, climit=5)
3589 stats.pprint(top=10, file=sys.stderr, climit=5)
3590 else:
3590 else:
3591 return d()
3591 return d()
3592 finally:
3592 finally:
3593 u.flush()
3593 u.flush()
3594 except:
3594 except:
3595 # enter the debugger when we hit an exception
3595 # enter the debugger when we hit an exception
3596 if options['debugger']:
3596 if options['debugger']:
3597 pdb.post_mortem(sys.exc_info()[2])
3597 pdb.post_mortem(sys.exc_info()[2])
3598 u.print_exc()
3598 u.print_exc()
3599 raise
3599 raise
3600 except ParseError, inst:
3600 except ParseError, inst:
3601 if inst.args[0]:
3601 if inst.args[0]:
3602 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3602 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3603 help_(u, inst.args[0])
3603 help_(u, inst.args[0])
3604 else:
3604 else:
3605 u.warn(_("hg: %s\n") % inst.args[1])
3605 u.warn(_("hg: %s\n") % inst.args[1])
3606 help_(u, 'shortlist')
3606 help_(u, 'shortlist')
3607 except AmbiguousCommand, inst:
3607 except AmbiguousCommand, inst:
3608 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3608 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3609 (inst.args[0], " ".join(inst.args[1])))
3609 (inst.args[0], " ".join(inst.args[1])))
3610 except UnknownCommand, inst:
3610 except UnknownCommand, inst:
3611 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3611 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3612 help_(u, 'shortlist')
3612 help_(u, 'shortlist')
3613 except hg.RepoError, inst:
3613 except hg.RepoError, inst:
3614 u.warn(_("abort: %s!\n") % inst)
3614 u.warn(_("abort: %s!\n") % inst)
3615 except lock.LockHeld, inst:
3615 except lock.LockHeld, inst:
3616 if inst.errno == errno.ETIMEDOUT:
3616 if inst.errno == errno.ETIMEDOUT:
3617 reason = _('timed out waiting for lock held by %s') % inst.locker
3617 reason = _('timed out waiting for lock held by %s') % inst.locker
3618 else:
3618 else:
3619 reason = _('lock held by %s') % inst.locker
3619 reason = _('lock held by %s') % inst.locker
3620 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3620 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3621 except lock.LockUnavailable, inst:
3621 except lock.LockUnavailable, inst:
3622 u.warn(_("abort: could not lock %s: %s\n") %
3622 u.warn(_("abort: could not lock %s: %s\n") %
3623 (inst.desc or inst.filename, inst.strerror))
3623 (inst.desc or inst.filename, inst.strerror))
3624 except revlog.RevlogError, inst:
3624 except revlog.RevlogError, inst:
3625 u.warn(_("abort: "), inst, "!\n")
3625 u.warn(_("abort: "), inst, "!\n")
3626 except util.SignalInterrupt:
3626 except util.SignalInterrupt:
3627 u.warn(_("killed!\n"))
3627 u.warn(_("killed!\n"))
3628 except KeyboardInterrupt:
3628 except KeyboardInterrupt:
3629 try:
3629 try:
3630 u.warn(_("interrupted!\n"))
3630 u.warn(_("interrupted!\n"))
3631 except IOError, inst:
3631 except IOError, inst:
3632 if inst.errno == errno.EPIPE:
3632 if inst.errno == errno.EPIPE:
3633 if u.debugflag:
3633 if u.debugflag:
3634 u.warn(_("\nbroken pipe\n"))
3634 u.warn(_("\nbroken pipe\n"))
3635 else:
3635 else:
3636 raise
3636 raise
3637 except IOError, inst:
3637 except IOError, inst:
3638 if hasattr(inst, "code"):
3638 if hasattr(inst, "code"):
3639 u.warn(_("abort: %s\n") % inst)
3639 u.warn(_("abort: %s\n") % inst)
3640 elif hasattr(inst, "reason"):
3640 elif hasattr(inst, "reason"):
3641 u.warn(_("abort: error: %s\n") % inst.reason[1])
3641 u.warn(_("abort: error: %s\n") % inst.reason[1])
3642 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3642 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3643 if u.debugflag:
3643 if u.debugflag:
3644 u.warn(_("broken pipe\n"))
3644 u.warn(_("broken pipe\n"))
3645 elif getattr(inst, "strerror", None):
3645 elif getattr(inst, "strerror", None):
3646 if getattr(inst, "filename", None):
3646 if getattr(inst, "filename", None):
3647 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3647 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3648 else:
3648 else:
3649 u.warn(_("abort: %s\n") % inst.strerror)
3649 u.warn(_("abort: %s\n") % inst.strerror)
3650 else:
3650 else:
3651 raise
3651 raise
3652 except OSError, inst:
3652 except OSError, inst:
3653 if hasattr(inst, "filename"):
3653 if hasattr(inst, "filename"):
3654 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3654 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3655 else:
3655 else:
3656 u.warn(_("abort: %s\n") % inst.strerror)
3656 u.warn(_("abort: %s\n") % inst.strerror)
3657 except util.Abort, inst:
3657 except util.Abort, inst:
3658 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3658 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3659 except TypeError, inst:
3659 except TypeError, inst:
3660 # was this an argument error?
3660 # was this an argument error?
3661 tb = traceback.extract_tb(sys.exc_info()[2])
3661 tb = traceback.extract_tb(sys.exc_info()[2])
3662 if len(tb) > 2: # no
3662 if len(tb) > 2: # no
3663 raise
3663 raise
3664 u.debug(inst, "\n")
3664 u.debug(inst, "\n")
3665 u.warn(_("%s: invalid arguments\n") % cmd)
3665 u.warn(_("%s: invalid arguments\n") % cmd)
3666 help_(u, cmd)
3666 help_(u, cmd)
3667 except SystemExit, inst:
3667 except SystemExit, inst:
3668 # Commands shouldn't sys.exit directly, but give a return code.
3668 # Commands shouldn't sys.exit directly, but give a return code.
3669 # Just in case, catch this and pass the exit code to the caller.
3669 # Just in case, catch this and pass the exit code to the caller.
3670 return inst.code
3670 return inst.code
3671 except:
3671 except:
3672 u.warn(_("** unknown exception encountered, details follow\n"))
3672 u.warn(_("** unknown exception encountered, details follow\n"))
3673 u.warn(_("** report bug details to "
3673 u.warn(_("** report bug details to "
3674 "http://www.selenic.com/mercurial/bts\n"))
3674 "http://www.selenic.com/mercurial/bts\n"))
3675 u.warn(_("** or mercurial@selenic.com\n"))
3675 u.warn(_("** or mercurial@selenic.com\n"))
3676 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3676 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3677 % version.get_version())
3677 % version.get_version())
3678 raise
3678 raise
3679
3679
3680 return -1
3680 return -1
@@ -1,2278 +1,2282
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("no repo found"))
30 raise repo.RepoError(_("no repo found"))
31 path = p
31 path = p
32 self.path = os.path.join(path, ".hg")
32 self.path = os.path.join(path, ".hg")
33
33
34 if not create and not os.path.isdir(self.path):
34 if not create and not os.path.isdir(self.path):
35 raise repo.RepoError(_("repository %s not found") % path)
35 raise repo.RepoError(_("repository %s not found") % path)
36
36
37 self.root = os.path.abspath(path)
37 self.root = os.path.abspath(path)
38 self.origroot = path
38 self.origroot = path
39 self.ui = ui.ui(parentui=parentui)
39 self.ui = ui.ui(parentui=parentui)
40 self.opener = util.opener(self.path)
40 self.opener = util.opener(self.path)
41 self.wopener = util.opener(self.root)
41 self.wopener = util.opener(self.root)
42
42
43 try:
43 try:
44 self.ui.readconfig(self.join("hgrc"), self.root)
44 self.ui.readconfig(self.join("hgrc"), self.root)
45 except IOError:
45 except IOError:
46 pass
46 pass
47
47
48 v = self.ui.revlogopts
48 v = self.ui.revlogopts
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 fl = v.get('flags', None)
51 fl = v.get('flags', None)
52 flags = 0
52 flags = 0
53 if fl != None:
53 if fl != None:
54 for x in fl.split():
54 for x in fl.split():
55 flags |= revlog.flagstr(x)
55 flags |= revlog.flagstr(x)
56 elif self.revlogv1:
56 elif self.revlogv1:
57 flags = revlog.REVLOG_DEFAULT_FLAGS
57 flags = revlog.REVLOG_DEFAULT_FLAGS
58
58
59 v = self.revlogversion | flags
59 v = self.revlogversion | flags
60 self.manifest = manifest.manifest(self.opener, v)
60 self.manifest = manifest.manifest(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
62
62
63 # the changelog might not have the inline index flag
63 # the changelog might not have the inline index flag
64 # on. If the format of the changelog is the same as found in
64 # on. If the format of the changelog is the same as found in
65 # .hgrc, apply any flags found in the .hgrc as well.
65 # .hgrc, apply any flags found in the .hgrc as well.
66 # Otherwise, just use the version from the changelog
66 # Otherwise, just use the version from the changelog
67 v = self.changelog.version
67 v = self.changelog.version
68 if v == self.revlogversion:
68 if v == self.revlogversion:
69 v |= flags
69 v |= flags
70 self.revlogversion = v
70 self.revlogversion = v
71
71
72 self.tagscache = None
72 self.tagscache = None
73 self.nodetagscache = None
73 self.nodetagscache = None
74 self.encodepats = None
74 self.encodepats = None
75 self.decodepats = None
75 self.decodepats = None
76 self.transhandle = None
76 self.transhandle = None
77
77
78 if create:
78 if create:
79 if not os.path.exists(path):
79 if not os.path.exists(path):
80 os.mkdir(path)
80 os.mkdir(path)
81 os.mkdir(self.path)
81 os.mkdir(self.path)
82 os.mkdir(self.join("data"))
82 os.mkdir(self.join("data"))
83
83
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85
85
86 def url(self):
86 def url(self):
87 return 'file:' + self.root
87 return 'file:' + self.root
88
88
89 def hook(self, name, throw=False, **args):
89 def hook(self, name, throw=False, **args):
90 def callhook(hname, funcname):
90 def callhook(hname, funcname):
91 '''call python hook. hook is callable object, looked up as
91 '''call python hook. hook is callable object, looked up as
92 name in python module. if callable returns "true", hook
92 name in python module. if callable returns "true", hook
93 fails, else passes. if hook raises exception, treated as
93 fails, else passes. if hook raises exception, treated as
94 hook failure. exception propagates if throw is "true".
94 hook failure. exception propagates if throw is "true".
95
95
96 reason for "true" meaning "hook failed" is so that
96 reason for "true" meaning "hook failed" is so that
97 unmodified commands (e.g. mercurial.commands.update) can
97 unmodified commands (e.g. mercurial.commands.update) can
98 be run as hooks without wrappers to convert return values.'''
98 be run as hooks without wrappers to convert return values.'''
99
99
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
101 d = funcname.rfind('.')
101 d = funcname.rfind('.')
102 if d == -1:
102 if d == -1:
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
104 % (hname, funcname))
104 % (hname, funcname))
105 modname = funcname[:d]
105 modname = funcname[:d]
106 try:
106 try:
107 obj = __import__(modname)
107 obj = __import__(modname)
108 except ImportError:
108 except ImportError:
109 try:
109 try:
110 # extensions are loaded with hgext_ prefix
110 # extensions are loaded with hgext_ prefix
111 obj = __import__("hgext_%s" % modname)
111 obj = __import__("hgext_%s" % modname)
112 except ImportError:
112 except ImportError:
113 raise util.Abort(_('%s hook is invalid '
113 raise util.Abort(_('%s hook is invalid '
114 '(import of "%s" failed)') %
114 '(import of "%s" failed)') %
115 (hname, modname))
115 (hname, modname))
116 try:
116 try:
117 for p in funcname.split('.')[1:]:
117 for p in funcname.split('.')[1:]:
118 obj = getattr(obj, p)
118 obj = getattr(obj, p)
119 except AttributeError, err:
119 except AttributeError, err:
120 raise util.Abort(_('%s hook is invalid '
120 raise util.Abort(_('%s hook is invalid '
121 '("%s" is not defined)') %
121 '("%s" is not defined)') %
122 (hname, funcname))
122 (hname, funcname))
123 if not callable(obj):
123 if not callable(obj):
124 raise util.Abort(_('%s hook is invalid '
124 raise util.Abort(_('%s hook is invalid '
125 '("%s" is not callable)') %
125 '("%s" is not callable)') %
126 (hname, funcname))
126 (hname, funcname))
127 try:
127 try:
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
129 except (KeyboardInterrupt, util.SignalInterrupt):
129 except (KeyboardInterrupt, util.SignalInterrupt):
130 raise
130 raise
131 except Exception, exc:
131 except Exception, exc:
132 if isinstance(exc, util.Abort):
132 if isinstance(exc, util.Abort):
133 self.ui.warn(_('error: %s hook failed: %s\n') %
133 self.ui.warn(_('error: %s hook failed: %s\n') %
134 (hname, exc.args[0] % exc.args[1:]))
134 (hname, exc.args[0] % exc.args[1:]))
135 else:
135 else:
136 self.ui.warn(_('error: %s hook raised an exception: '
136 self.ui.warn(_('error: %s hook raised an exception: '
137 '%s\n') % (hname, exc))
137 '%s\n') % (hname, exc))
138 if throw:
138 if throw:
139 raise
139 raise
140 self.ui.print_exc()
140 self.ui.print_exc()
141 return True
141 return True
142 if r:
142 if r:
143 if throw:
143 if throw:
144 raise util.Abort(_('%s hook failed') % hname)
144 raise util.Abort(_('%s hook failed') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
146 return r
146 return r
147
147
148 def runhook(name, cmd):
148 def runhook(name, cmd):
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
151 r = util.system(cmd, environ=env, cwd=self.root)
151 r = util.system(cmd, environ=env, cwd=self.root)
152 if r:
152 if r:
153 desc, r = util.explain_exit(r)
153 desc, r = util.explain_exit(r)
154 if throw:
154 if throw:
155 raise util.Abort(_('%s hook %s') % (name, desc))
155 raise util.Abort(_('%s hook %s') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
157 return r
157 return r
158
158
159 r = False
159 r = False
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
161 if hname.split(".", 1)[0] == name and cmd]
161 if hname.split(".", 1)[0] == name and cmd]
162 hooks.sort()
162 hooks.sort()
163 for hname, cmd in hooks:
163 for hname, cmd in hooks:
164 if cmd.startswith('python:'):
164 if cmd.startswith('python:'):
165 r = callhook(hname, cmd[7:].strip()) or r
165 r = callhook(hname, cmd[7:].strip()) or r
166 else:
166 else:
167 r = runhook(hname, cmd) or r
167 r = runhook(hname, cmd) or r
168 return r
168 return r
169
169
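As the callhook/runhook pair above shows, a 'python:' hook is imported and called with ui, repo, hooktype and the hook's keyword arguments, and a true return value counts as failure; a shell hook instead receives the same arguments as HG_* environment variables. A minimal sketch of a Python hook compatible with that calling convention (the module and hgrc names below are hypothetical):

    # contents of a hypothetical myhooks.py somewhere on the Python path
    def forbid_tip_tag(ui, repo, hooktype, **kwargs):
        # called e.g. for pretag with node=..., tag=..., local=...
        if kwargs.get('tag') == 'tip':
            ui.warn('refusing to shadow the built-in tip tag\n')
            return True          # truthy -> hook failed -> pretag aborts
        return False             # falsy  -> hook passed

    # wired up in hgrc as:
    #   [hooks]
    #   pretag.notip = python:myhooks.forbid_tip_tag
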
170 tag_disallowed = ':\r\n'
170 tag_disallowed = ':\r\n'
171
171
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
173 '''tag a revision with a symbolic name.
173 '''tag a revision with a symbolic name.
174
174
175 if local is True, the tag is stored in a per-repository file.
175 if local is True, the tag is stored in a per-repository file.
176 otherwise, it is stored in the .hgtags file, and a new
176 otherwise, it is stored in the .hgtags file, and a new
177 changeset is committed with the change.
177 changeset is committed with the change.
178
178
179 keyword arguments:
179 keyword arguments:
180
180
181 local: whether to store tag in non-version-controlled file
181 local: whether to store tag in non-version-controlled file
182 (default False)
182 (default False)
183
183
184 message: commit message to use if committing
184 message: commit message to use if committing
185
185
186 user: name of user to use if committing
186 user: name of user to use if committing
187
187
188 date: date tuple to use if committing'''
188 date: date tuple to use if committing'''
189
189
190 for c in self.tag_disallowed:
190 for c in self.tag_disallowed:
191 if c in name:
191 if c in name:
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
193
193
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
195
195
196 if local:
196 if local:
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
198 self.hook('tag', node=node, tag=name, local=local)
198 self.hook('tag', node=node, tag=name, local=local)
199 return
199 return
200
200
201 for x in self.changes():
201 for x in self.changes():
202 if '.hgtags' in x:
202 if '.hgtags' in x:
203 raise util.Abort(_('working copy of .hgtags is changed '
203 raise util.Abort(_('working copy of .hgtags is changed '
204 '(please commit .hgtags manually)'))
204 '(please commit .hgtags manually)'))
205
205
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
207 if self.dirstate.state('.hgtags') == '?':
207 if self.dirstate.state('.hgtags') == '?':
208 self.add(['.hgtags'])
208 self.add(['.hgtags'])
209
209
210 if not message:
210 if not message:
211 message = _('Added tag %s for changeset %s') % (name, node)
211 message = _('Added tag %s for changeset %s') % (name, node)
212
212
213 self.commit(['.hgtags'], message, user, date)
213 self.commit(['.hgtags'], message, user, date)
214 self.hook('tag', node=node, tag=name, local=local)
214 self.hook('tag', node=node, tag=name, local=local)
215
215
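A brief usage sketch of the tag() method above, assuming the current directory sits inside a repository; note that the node is passed as a hex string, and that a local tag only touches .hg/localtags while a global one writes .hgtags and commits:

    # illustrative only
    from mercurial import ui, hg
    from mercurial.node import hex
    repo = hg.repository(ui.ui())
    tip = hex(repo.changelog.tip())
    repo.tag('nightly', tip, local=True)             # recorded in .hg/localtags
    repo.tag('v0.1', tip, message='Added tag v0.1')  # writes .hgtags and commits
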
216 def tags(self):
216 def tags(self):
217 '''return a mapping of tag to node'''
217 '''return a mapping of tag to node'''
218 if not self.tagscache:
218 if not self.tagscache:
219 self.tagscache = {}
219 self.tagscache = {}
220
220
221 def parsetag(line, context):
221 def parsetag(line, context):
222 if not line:
222 if not line:
223 return
223 return
224 s = line.split(" ", 1)
224 s = line.split(" ", 1)
225 if len(s) != 2:
225 if len(s) != 2:
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 return
227 return
228 node, key = s
228 node, key = s
229 key = key.strip()
229 key = key.strip()
230 try:
230 try:
231 bin_n = bin(node)
231 bin_n = bin(node)
232 except TypeError:
232 except TypeError:
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 (context, node))
234 (context, node))
235 return
235 return
236 if bin_n not in self.changelog.nodemap:
236 if bin_n not in self.changelog.nodemap:
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 (context, key))
238 (context, key))
239 return
239 return
240 self.tagscache[key] = bin_n
240 self.tagscache[key] = bin_n
241
241
242 # read the tags file from each head, ending with the tip,
242 # read the tags file from each head, ending with the tip,
243 # and add each tag found to the map, with "newer" ones
243 # and add each tag found to the map, with "newer" ones
244 # taking precedence
244 # taking precedence
245 heads = self.heads()
245 heads = self.heads()
246 heads.reverse()
246 heads.reverse()
247 fl = self.file(".hgtags")
247 fl = self.file(".hgtags")
248 for node in heads:
248 for node in heads:
249 change = self.changelog.read(node)
249 change = self.changelog.read(node)
250 rev = self.changelog.rev(node)
250 rev = self.changelog.rev(node)
251 fn, ff = self.manifest.find(change[0], '.hgtags')
251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 if fn is None: continue
252 if fn is None: continue
253 count = 0
253 count = 0
254 for l in fl.read(fn).splitlines():
254 for l in fl.read(fn).splitlines():
255 count += 1
255 count += 1
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 (rev, short(node), count))
257 (rev, short(node), count))
258 try:
258 try:
259 f = self.opener("localtags")
259 f = self.opener("localtags")
260 count = 0
260 count = 0
261 for l in f:
261 for l in f:
262 count += 1
262 count += 1
263 parsetag(l, _("localtags, line %d") % count)
263 parsetag(l, _("localtags, line %d") % count)
264 except IOError:
264 except IOError:
265 pass
265 pass
266
266
267 self.tagscache['tip'] = self.changelog.tip()
267 self.tagscache['tip'] = self.changelog.tip()
268
268
269 return self.tagscache
269 return self.tagscache
270
270
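The parser above expects one tag per line: a 40-character hex changeset id, a single space, then the tag name (which may itself contain spaces). Entries from later heads, then localtags, override earlier ones, and 'tip' is added last. A small sketch of the line format only, with a made-up node value:

    line = "0123456789abcdef0123456789abcdef01234567 release 0.9\n"
    node, key = line.split(" ", 1)
    key = key.strip()        # -> 'release 0.9'; node is fed to bin() and
                             #    checked against the changelog nodemap
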
271 def tagslist(self):
271 def tagslist(self):
272 '''return a list of tags ordered by revision'''
272 '''return a list of tags ordered by revision'''
273 l = []
273 l = []
274 for t, n in self.tags().items():
274 for t, n in self.tags().items():
275 try:
275 try:
276 r = self.changelog.rev(n)
276 r = self.changelog.rev(n)
277 except:
277 except:
278 r = -2 # sort to the beginning of the list if unknown
278 r = -2 # sort to the beginning of the list if unknown
279 l.append((r, t, n))
279 l.append((r, t, n))
280 l.sort()
280 l.sort()
281 return [(t, n) for r, t, n in l]
281 return [(t, n) for r, t, n in l]
282
282
283 def nodetags(self, node):
283 def nodetags(self, node):
284 '''return the tags associated with a node'''
284 '''return the tags associated with a node'''
285 if not self.nodetagscache:
285 if not self.nodetagscache:
286 self.nodetagscache = {}
286 self.nodetagscache = {}
287 for t, n in self.tags().items():
287 for t, n in self.tags().items():
288 self.nodetagscache.setdefault(n, []).append(t)
288 self.nodetagscache.setdefault(n, []).append(t)
289 return self.nodetagscache.get(node, [])
289 return self.nodetagscache.get(node, [])
290
290
291 def lookup(self, key):
291 def lookup(self, key):
292 try:
292 try:
293 return self.tags()[key]
293 return self.tags()[key]
294 except KeyError:
294 except KeyError:
295 if key == '.':
296 key = self.dirstate.parents()[0]
297 if key == nullid:
298 raise repo.RepoError(_("no revision checked out"))
295 try:
299 try:
296 return self.changelog.lookup(key)
300 return self.changelog.lookup(key)
297 except:
301 except:
298 raise repo.RepoError(_("unknown revision '%s'") % key)
302 raise repo.RepoError(_("unknown revision '%s'") % key)
299
303
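This lookup() change is the point of the revision: '.' now resolves to the first parent of the working directory before the key is handed to the changelog, and a repository with nothing checked out (parent == nullid) is reported as an error. A minimal usage sketch, assuming the current directory sits inside a working copy:

    from mercurial import ui, hg
    repo = hg.repository(ui.ui())
    repo.lookup('tip')   # resolved through the tags cache, as before
    repo.lookup('.')     # new: binary id of the working directory's first parent
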
300 def dev(self):
304 def dev(self):
301 return os.lstat(self.path).st_dev
305 return os.lstat(self.path).st_dev
302
306
303 def local(self):
307 def local(self):
304 return True
308 return True
305
309
306 def join(self, f):
310 def join(self, f):
307 return os.path.join(self.path, f)
311 return os.path.join(self.path, f)
308
312
309 def wjoin(self, f):
313 def wjoin(self, f):
310 return os.path.join(self.root, f)
314 return os.path.join(self.root, f)
311
315
312 def file(self, f):
316 def file(self, f):
313 if f[0] == '/':
317 if f[0] == '/':
314 f = f[1:]
318 f = f[1:]
315 return filelog.filelog(self.opener, f, self.revlogversion)
319 return filelog.filelog(self.opener, f, self.revlogversion)
316
320
317 def changectx(self, changeid):
321 def changectx(self, changeid):
318 return context.changectx(self, changeid)
322 return context.changectx(self, changeid)
319
323
320 def filectx(self, path, changeid=None, fileid=None):
324 def filectx(self, path, changeid=None, fileid=None):
321 """changeid can be a changeset revision, node, or tag.
325 """changeid can be a changeset revision, node, or tag.
322 fileid can be a file revision or node."""
326 fileid can be a file revision or node."""
323 return context.filectx(self, path, changeid, fileid)
327 return context.filectx(self, path, changeid, fileid)
324
328
325 def getcwd(self):
329 def getcwd(self):
326 return self.dirstate.getcwd()
330 return self.dirstate.getcwd()
327
331
328 def wfile(self, f, mode='r'):
332 def wfile(self, f, mode='r'):
329 return self.wopener(f, mode)
333 return self.wopener(f, mode)
330
334
331 def wread(self, filename):
335 def wread(self, filename):
332 if self.encodepats == None:
336 if self.encodepats == None:
333 l = []
337 l = []
334 for pat, cmd in self.ui.configitems("encode"):
338 for pat, cmd in self.ui.configitems("encode"):
335 mf = util.matcher(self.root, "", [pat], [], [])[1]
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
336 l.append((mf, cmd))
340 l.append((mf, cmd))
337 self.encodepats = l
341 self.encodepats = l
338
342
339 data = self.wopener(filename, 'r').read()
343 data = self.wopener(filename, 'r').read()
340
344
341 for mf, cmd in self.encodepats:
345 for mf, cmd in self.encodepats:
342 if mf(filename):
346 if mf(filename):
343 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
344 data = util.filter(data, cmd)
348 data = util.filter(data, cmd)
345 break
349 break
346
350
347 return data
351 return data
348
352
349 def wwrite(self, filename, data, fd=None):
353 def wwrite(self, filename, data, fd=None):
350 if self.decodepats == None:
354 if self.decodepats == None:
351 l = []
355 l = []
352 for pat, cmd in self.ui.configitems("decode"):
356 for pat, cmd in self.ui.configitems("decode"):
353 mf = util.matcher(self.root, "", [pat], [], [])[1]
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
354 l.append((mf, cmd))
358 l.append((mf, cmd))
355 self.decodepats = l
359 self.decodepats = l
356
360
357 for mf, cmd in self.decodepats:
361 for mf, cmd in self.decodepats:
358 if mf(filename):
362 if mf(filename):
359 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
360 data = util.filter(data, cmd)
364 data = util.filter(data, cmd)
361 break
365 break
362
366
363 if fd:
367 if fd:
364 return fd.write(data)
368 return fd.write(data)
365 return self.wopener(filename, 'w').write(data)
369 return self.wopener(filename, 'w').write(data)
366
370
367 def transaction(self):
371 def transaction(self):
368 tr = self.transhandle
372 tr = self.transhandle
369 if tr != None and tr.running():
373 if tr != None and tr.running():
370 return tr.nest()
374 return tr.nest()
371
375
372 # save dirstate for rollback
376 # save dirstate for rollback
373 try:
377 try:
374 ds = self.opener("dirstate").read()
378 ds = self.opener("dirstate").read()
375 except IOError:
379 except IOError:
376 ds = ""
380 ds = ""
377 self.opener("journal.dirstate", "w").write(ds)
381 self.opener("journal.dirstate", "w").write(ds)
378
382
379 tr = transaction.transaction(self.ui.warn, self.opener,
383 tr = transaction.transaction(self.ui.warn, self.opener,
380 self.join("journal"),
384 self.join("journal"),
381 aftertrans(self.path))
385 aftertrans(self.path))
382 self.transhandle = tr
386 self.transhandle = tr
383 return tr
387 return tr
384
388
385 def recover(self):
389 def recover(self):
386 l = self.lock()
390 l = self.lock()
387 if os.path.exists(self.join("journal")):
391 if os.path.exists(self.join("journal")):
388 self.ui.status(_("rolling back interrupted transaction\n"))
392 self.ui.status(_("rolling back interrupted transaction\n"))
389 transaction.rollback(self.opener, self.join("journal"))
393 transaction.rollback(self.opener, self.join("journal"))
390 self.reload()
394 self.reload()
391 return True
395 return True
392 else:
396 else:
393 self.ui.warn(_("no interrupted transaction available\n"))
397 self.ui.warn(_("no interrupted transaction available\n"))
394 return False
398 return False
395
399
396 def rollback(self, wlock=None):
400 def rollback(self, wlock=None):
397 if not wlock:
401 if not wlock:
398 wlock = self.wlock()
402 wlock = self.wlock()
399 l = self.lock()
403 l = self.lock()
400 if os.path.exists(self.join("undo")):
404 if os.path.exists(self.join("undo")):
401 self.ui.status(_("rolling back last transaction\n"))
405 self.ui.status(_("rolling back last transaction\n"))
402 transaction.rollback(self.opener, self.join("undo"))
406 transaction.rollback(self.opener, self.join("undo"))
403 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
404 self.reload()
408 self.reload()
405 self.wreload()
409 self.wreload()
406 else:
410 else:
407 self.ui.warn(_("no rollback information available\n"))
411 self.ui.warn(_("no rollback information available\n"))
408
412
409 def wreload(self):
413 def wreload(self):
410 self.dirstate.read()
414 self.dirstate.read()
411
415
412 def reload(self):
416 def reload(self):
413 self.changelog.load()
417 self.changelog.load()
414 self.manifest.load()
418 self.manifest.load()
415 self.tagscache = None
419 self.tagscache = None
416 self.nodetagscache = None
420 self.nodetagscache = None
417
421
418 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
419 desc=None):
423 desc=None):
420 try:
424 try:
421 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
422 except lock.LockHeld, inst:
426 except lock.LockHeld, inst:
423 if not wait:
427 if not wait:
424 raise
428 raise
425 self.ui.warn(_("waiting for lock on %s held by %s\n") %
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
426 (desc, inst.args[0]))
430 (desc, inst.args[0]))
427 # default to 600 seconds timeout
431 # default to 600 seconds timeout
428 l = lock.lock(self.join(lockname),
432 l = lock.lock(self.join(lockname),
429 int(self.ui.config("ui", "timeout") or 600),
433 int(self.ui.config("ui", "timeout") or 600),
430 releasefn, desc=desc)
434 releasefn, desc=desc)
431 if acquirefn:
435 if acquirefn:
432 acquirefn()
436 acquirefn()
433 return l
437 return l
434
438
435 def lock(self, wait=1):
439 def lock(self, wait=1):
436 return self.do_lock("lock", wait, acquirefn=self.reload,
440 return self.do_lock("lock", wait, acquirefn=self.reload,
437 desc=_('repository %s') % self.origroot)
441 desc=_('repository %s') % self.origroot)
438
442
439 def wlock(self, wait=1):
443 def wlock(self, wait=1):
440 return self.do_lock("wlock", wait, self.dirstate.write,
444 return self.do_lock("wlock", wait, self.dirstate.write,
441 self.wreload,
445 self.wreload,
442 desc=_('working directory of %s') % self.origroot)
446 desc=_('working directory of %s') % self.origroot)
443
447
444 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
445 "determine whether a new filenode is needed"
449 "determine whether a new filenode is needed"
446 fp1 = manifest1.get(filename, nullid)
450 fp1 = manifest1.get(filename, nullid)
447 fp2 = manifest2.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
448
452
449 if fp2 != nullid:
453 if fp2 != nullid:
450 # is one parent an ancestor of the other?
454 # is one parent an ancestor of the other?
451 fpa = filelog.ancestor(fp1, fp2)
455 fpa = filelog.ancestor(fp1, fp2)
452 if fpa == fp1:
456 if fpa == fp1:
453 fp1, fp2 = fp2, nullid
457 fp1, fp2 = fp2, nullid
454 elif fpa == fp2:
458 elif fpa == fp2:
455 fp2 = nullid
459 fp2 = nullid
456
460
457 # is the file unmodified from the parent? report existing entry
461 # is the file unmodified from the parent? report existing entry
458 if fp2 == nullid and text == filelog.read(fp1):
462 if fp2 == nullid and text == filelog.read(fp1):
459 return (fp1, None, None)
463 return (fp1, None, None)
460
464
461 return (None, fp1, fp2)
465 return (None, fp1, fp2)
462
466
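The ancestor test in checkfilemerge exists so that a file whose two candidate parents lie on a single line of history is not recorded as a file-level merge: when one parent is an ancestor of the other, only the more recent node is kept. A standalone sketch of just that collapsing step (the nodes and ancestor function are stand-ins, not real filelog objects):

    NULL = None   # stands in for nullid
    def collapse(fp1, fp2, ancestor):
        if fp2 is not NULL:
            fpa = ancestor(fp1, fp2)
            if fpa == fp1:           # fp1 is in fp2's history: keep fp2 only
                fp1, fp2 = fp2, NULL
            elif fpa == fp2:         # fp2 is in fp1's history: drop it
                fp2 = NULL
        return fp1, fp2

    # linear history a -> b: the common ancestor of (a, b) is a,
    # so only b survives as the single parent
    collapse('a', 'b', lambda x, y: 'a')   # -> ('b', None)
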
463 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
464 orig_parent = self.dirstate.parents()[0] or nullid
468 orig_parent = self.dirstate.parents()[0] or nullid
465 p1 = p1 or self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
466 p2 = p2 or self.dirstate.parents()[1] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
467 c1 = self.changelog.read(p1)
471 c1 = self.changelog.read(p1)
468 c2 = self.changelog.read(p2)
472 c2 = self.changelog.read(p2)
469 m1 = self.manifest.read(c1[0])
473 m1 = self.manifest.read(c1[0])
470 mf1 = self.manifest.readflags(c1[0])
474 mf1 = self.manifest.readflags(c1[0])
471 m2 = self.manifest.read(c2[0])
475 m2 = self.manifest.read(c2[0])
472 changed = []
476 changed = []
473
477
474 if orig_parent == p1:
478 if orig_parent == p1:
475 update_dirstate = 1
479 update_dirstate = 1
476 else:
480 else:
477 update_dirstate = 0
481 update_dirstate = 0
478
482
479 if not wlock:
483 if not wlock:
480 wlock = self.wlock()
484 wlock = self.wlock()
481 l = self.lock()
485 l = self.lock()
482 tr = self.transaction()
486 tr = self.transaction()
483 mm = m1.copy()
487 mm = m1.copy()
484 mfm = mf1.copy()
488 mfm = mf1.copy()
485 linkrev = self.changelog.count()
489 linkrev = self.changelog.count()
486 for f in files:
490 for f in files:
487 try:
491 try:
488 t = self.wread(f)
492 t = self.wread(f)
489 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
493 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
490 r = self.file(f)
494 r = self.file(f)
491 mfm[f] = tm
495 mfm[f] = tm
492
496
493 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
497 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
494 if entry:
498 if entry:
495 mm[f] = entry
499 mm[f] = entry
496 continue
500 continue
497
501
498 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
502 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
499 changed.append(f)
503 changed.append(f)
500 if update_dirstate:
504 if update_dirstate:
501 self.dirstate.update([f], "n")
505 self.dirstate.update([f], "n")
502 except IOError:
506 except IOError:
503 try:
507 try:
504 del mm[f]
508 del mm[f]
505 del mfm[f]
509 del mfm[f]
506 if update_dirstate:
510 if update_dirstate:
507 self.dirstate.forget([f])
511 self.dirstate.forget([f])
508 except:
512 except:
509 # deleted from p2?
513 # deleted from p2?
510 pass
514 pass
511
515
512 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
516 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
513 user = user or self.ui.username()
517 user = user or self.ui.username()
514 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
518 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
515 tr.close()
519 tr.close()
516 if update_dirstate:
520 if update_dirstate:
517 self.dirstate.setparents(n, nullid)
521 self.dirstate.setparents(n, nullid)
518
522
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False):
        commit = []
        remove = []
        changed = []

        if files:
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            modified, added, removed, deleted, unknown = self.changes(match=match)
            commit = modified + added
            remove = removed

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0])
        mf1 = self.manifest.readflags(c1[0])
        m2 = self.manifest.read(c2[0])

        if not commit and not remove and not force and p2 == nullid:
            self.ui.status(_("nothing changed\n"))
            return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
                t = self.wread(f)
            except IOError:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise

            r = self.file(f)

            meta = {}
            cp = self.dirstate.copied(f)
            if cp:
                meta["copy"] = cp
                meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
                self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
                fp1, fp2 = nullid, nullid
            else:
                entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
                if entry:
                    new[f] = entry
                    continue

            new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
            # remember what we've added so that we can later calculate
            # the files to pull from a set of changesets
            changed.append(f)

        # update manifest
        m1 = m1.copy()
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                               (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text or force_editor:
            edittext = []
            if text:
                edittext.append(text)
            edittext.append("")
            if p2 != nullid:
                edittext.append("HG: branch merge")
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in remove])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            text = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)

        lines = [line.rstrip() for line in text.rstrip().splitlines()]
        while lines and not lines[0]:
            del lines[0]
        if not lines:
            return None
        text = '\n'.join(lines)
        n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n

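    # walk yields (src, fn) pairs for the files selected by match.  With a
    # node, files present in that revision's manifest are yielded with src
    # 'm'; files that were asked for but are missing from it yield src 'b'
    # (when both badmatch and match accept them) or a warning otherwise.
    # Without a node, the pairs come straight from dirstate.walk.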
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        if node:
            fdict = dict.fromkeys(files)
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                fdict.pop(fn, None)
                if match(fn):
                    yield 'm', fn
            for fn in fdict:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.getcwd(), fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn

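    # status returns seven lists, in this order:
    # (modified, added, removed, deleted, unknown, ignored, clean);
    # 'ignored' and 'clean' are only populated when list_ignored and
    # list_clean are set.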
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.
        """

        def fcmp(fn, mf):
            t1 = self.wread(fn)
            t2 = self.file(fn).read(mf.get(fn, nullid))
            return cmp(t1, t2)

        def mfmatches(node):
            change = self.changelog.read(node)
            mf = dict(self.manifest.read(change[0]))
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                try:
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    wlock = None
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        elif wlock is not None:
                            self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup + modified + added:
                    mf2[f] = ""
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            for fn in mf2keys:
                if mf1.has_key(fn):
                    if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)

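    # changes() is the older interface: it drops the trailing 'clean' list
    # from the status() result, and also drops 'ignored' unless list_ignored
    # is set.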
    def changes(self, node1=None, node2=None, files=[], match=util.always,
                wlock=None, list_ignored=False, list_clean=False):
        '''DEPRECATED - use status instead'''
        marduit = self.status(node1, node2, files, match, wlock,
                              list_ignored, list_clean)
        if list_ignored:
            return marduit[:-1]
        else:
            return marduit[:-2]

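    # the following helpers (add, forget, remove, undelete, copy) only
    # update the dirstate and, where needed, the working directory;
    # recording a new changeset is left to commit().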
    def add(self, list, wlock=None):
        if not wlock:
            wlock = self.wlock()
        for f in list:
            p = self.wjoin(f)
            if not os.path.exists(p):
                self.ui.warn(_("%s does not exist!\n") % f)
            elif not os.path.isfile(p):
                self.ui.warn(_("%s not added: only files supported currently\n")
                             % f)
            elif self.dirstate.state(f) in 'an':
                self.ui.warn(_("%s already tracked!\n") % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list, wlock=None):
        if not wlock:
            wlock = self.wlock()
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn(_("%s not added!\n") % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list, unlink=False, wlock=None):
        if unlink:
            for f in list:
                try:
                    util.unlink(self.wjoin(f))
                except OSError, inst:
                    if inst.errno != errno.ENOENT:
                        raise
        if not wlock:
            wlock = self.wlock()
        for f in list:
            p = self.wjoin(f)
            if os.path.exists(p):
                self.ui.warn(_("%s still exists!\n") % f)
            elif self.dirstate.state(f) == 'a':
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn(_("%s not tracked!\n") % f)
            else:
                self.dirstate.update([f], "r")

    def undelete(self, list, wlock=None):
        p = self.dirstate.parents()[0]
        mn = self.changelog.read(p)[0]
        mf = self.manifest.readflags(mn)
        m = self.manifest.read(mn)
        if not wlock:
            wlock = self.wlock()
        for f in list:
            if self.dirstate.state(f) not in "r":
                self.ui.warn("%s not removed!\n" % f)
            else:
                t = self.file(f).read(m[f])
                self.wwrite(f, t)
                util.set_exec(self.wjoin(f), mf[f])
                self.dirstate.update([f], "n")

    def copy(self, source, dest, wlock=None):
        p = self.wjoin(dest)
        if not os.path.exists(p):
            self.ui.warn(_("%s does not exist!\n") % dest)
        elif not os.path.isfile(p):
            self.ui.warn(_("copy failed: %s is not a file\n") % dest)
        else:
            if not wlock:
                wlock = self.wlock()
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

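    # heads returns the changelog heads, optionally restricted to
    # descendants of 'start', sorted from newest to oldest revision.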
    def heads(self, start=None):
        heads = self.changelog.heads(start)
        # sort the output in rev descending order
        heads = [(-self.changelog.rev(h), h) for h in heads]
        heads.sort()
        return [n for (r, n) in heads]

    # branchlookup returns a dict giving a list of branches for
    # each head.  A branch is defined as the tag of a node or
    # the branch of the node's parents.  If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph:  a->b->c->d->e
    #                       \         /
    #                        aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13.  Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents.  It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head.  The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited.  This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out

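    # branches(nodes) describes, for each starting node, the linear segment
    # of history it sits on: it follows first parents until it reaches a
    # merge or the root and reports (start, segment root, root's first
    # parent, root's second parent) -- the same four-part 'branch' that
    # findincoming works with below.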
    def branches(self, nodes):
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while 1:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

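    # between returns, for each (top, bottom) pair, the nodes sampled along
    # the first-parent chain from top towards bottom at exponentially
    # growing distances (1, 2, 4, ...); findincoming uses this to binary
    # search for the first missing changeset in a branch.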
    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exist
        in self and remote but whose children do not exist in both.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.note(_("found new changesets starting at ") +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()

    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset

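    # pull copies into this repository the changesets that findincoming
    # reports as missing locally; with 'heads', only ancestors of those
    # heads are requested (via changegroupsubset).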
    def pull(self, remote, heads=None, force=False):
        l = self.lock()

        fetch = self.findincoming(remote, force=force)
        if fetch == [nullid]:
            self.ui.status(_("requesting all changes\n"))

        if not fetch:
            self.ui.status(_("no changes found\n"))
            return 0

        if heads is None:
            cg = remote.changegroup(fetch, 'pull')
        else:
            cg = remote.changegroupsubset(fetch, heads, 'pull')
        return self.addchangegroup(cg, 'pull', remote.url())

    def push(self, remote, force=False, revs=None):
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        if remote.capable('unbundle'):
            return self.push_unbundle(remote, force, revs)
        return self.push_addchangegroup(remote, force, revs)

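    # prepush implements the checks shared by both push paths: unless
    # forced, it refuses to push over unsynced remote changes or to create
    # new remote heads, returning (None, 1); otherwise it returns the
    # changegroup to send along with the current remote heads.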
    def prepush(self, remote, force, revs):
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)
        if not force and inc:
            self.ui.warn(_("abort: unsynced remote changes!\n"))
            self.ui.status(_("(did you forget to sync?"
                             " use push -f to force)\n"))
            return None, 1

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # FIXME we don't properly detect creation of new heads
            # in the push -r case, assume the user knows what he's doing
            if not revs and len(remote_heads) < len(heads) \
               and remote_heads != [nullid]:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1

        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads

    def push_addchangegroup(self, remote, force, revs):
        lock = remote.lock()

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            return remote.addchangegroup(cg, 'push', self.url())
        return ret[1]

    def push_unbundle(self, remote, force, revs):
        # local repo finds heads on server, finds out what revs it
        # must push.  once revs transferred, if server finds it has
        # different heads (someone else won commit/push race), server
        # aborts.

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            if force: remote_heads = ['force']
            return remote.unbundle(cg, remote_heads, 'push')
        return ret[1]

1263 def changegroupsubset(self, bases, heads, source):
1267 def changegroupsubset(self, bases, heads, source):
1264 """This function generates a changegroup consisting of all the nodes
1268 """This function generates a changegroup consisting of all the nodes
1265 that are descendents of any of the bases, and ancestors of any of
1269 that are descendents of any of the bases, and ancestors of any of
1266 the heads.
1270 the heads.
1267
1271
1268 It is fairly complex as determining which filenodes and which
1272 It is fairly complex as determining which filenodes and which
1269 manifest nodes need to be included for the changeset to be complete
1273 manifest nodes need to be included for the changeset to be complete
1270 is non-trivial.
1274 is non-trivial.
1271
1275
1272 Another wrinkle is doing the reverse, figuring out which changeset in
1276 Another wrinkle is doing the reverse, figuring out which changeset in
1273 the changegroup a particular filenode or manifestnode belongs to."""
1277 the changegroup a particular filenode or manifestnode belongs to."""
1274
1278
1275 self.hook('preoutgoing', throw=True, source=source)
1279 self.hook('preoutgoing', throw=True, source=source)
1276
1280
1277 # Set up some initial variables
1281 # Set up some initial variables
1278 # Make it easy to refer to self.changelog
1282 # Make it easy to refer to self.changelog
1279 cl = self.changelog
1283 cl = self.changelog
1280 # msng is short for missing - compute the list of changesets in this
1284 # msng is short for missing - compute the list of changesets in this
1281 # changegroup.
1285 # changegroup.
1282 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1286 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1283 # Some bases may turn out to be superfluous, and some heads may be
1287 # Some bases may turn out to be superfluous, and some heads may be
1284 # too. nodesbetween will return the minimal set of bases and heads
1288 # too. nodesbetween will return the minimal set of bases and heads
1285 # necessary to re-create the changegroup.
1289 # necessary to re-create the changegroup.
1286
1290
1287 # Known heads are the list of heads that it is assumed the recipient
1291 # Known heads are the list of heads that it is assumed the recipient
1288 # of this changegroup will know about.
1292 # of this changegroup will know about.
1289 knownheads = {}
1293 knownheads = {}
1290 # We assume that all parents of bases are known heads.
1294 # We assume that all parents of bases are known heads.
1291 for n in bases:
1295 for n in bases:
1292 for p in cl.parents(n):
1296 for p in cl.parents(n):
1293 if p != nullid:
1297 if p != nullid:
1294 knownheads[p] = 1
1298 knownheads[p] = 1
1295 knownheads = knownheads.keys()
1299 knownheads = knownheads.keys()
1296 if knownheads:
1300 if knownheads:
1297 # Now that we know what heads are known, we can compute which
1301 # Now that we know what heads are known, we can compute which
1298 # changesets are known. The recipient must know about all
1302 # changesets are known. The recipient must know about all
1299 # changesets required to reach the known heads from the null
1303 # changesets required to reach the known heads from the null
1300 # changeset.
1304 # changeset.
1301 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1305 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1302 junk = None
1306 junk = None
1303 # Transform the list into an ersatz set.
1307 # Transform the list into an ersatz set.
1304 has_cl_set = dict.fromkeys(has_cl_set)
1308 has_cl_set = dict.fromkeys(has_cl_set)
1305 else:
1309 else:
1306 # If there were no known heads, the recipient cannot be assumed to
1310 # If there were no known heads, the recipient cannot be assumed to
1307 # know about any changesets.
1311 # know about any changesets.
1308 has_cl_set = {}
1312 has_cl_set = {}
1309
1313
1310 # Make it easy to refer to self.manifest
1314 # Make it easy to refer to self.manifest
1311 mnfst = self.manifest
1315 mnfst = self.manifest
1312 # We don't know which manifests are missing yet
1316 # We don't know which manifests are missing yet
1313 msng_mnfst_set = {}
1317 msng_mnfst_set = {}
1314 # Nor do we know which filenodes are missing.
1318 # Nor do we know which filenodes are missing.
1315 msng_filenode_set = {}
1319 msng_filenode_set = {}
1316
1320
1317 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1321 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1318 junk = None
1322 junk = None
1319
1323
1320 # A changeset always belongs to itself, so the changenode lookup
1324 # A changeset always belongs to itself, so the changenode lookup
1321 # function for a changenode is identity.
1325 # function for a changenode is identity.
1322 def identity(x):
1326 def identity(x):
1323 return x
1327 return x
1324
1328
1325 # A function generating function. Sets up an environment for the
1329 # A function generating function. Sets up an environment for the
1326 # inner function.
1330 # inner function.
1327 def cmp_by_rev_func(revlog):
1331 def cmp_by_rev_func(revlog):
1328 # Compare two nodes by their revision number in the environment's
1332 # Compare two nodes by their revision number in the environment's
1329 # revision history. Since the revision number both represents the
1333 # revision history. Since the revision number both represents the
1330 # most efficient order to read the nodes in, and represents a
1334 # most efficient order to read the nodes in, and represents a
1331 # topological sorting of the nodes, this function is often useful.
1335 # topological sorting of the nodes, this function is often useful.
1332 def cmp_by_rev(a, b):
1336 def cmp_by_rev(a, b):
1333 return cmp(revlog.rev(a), revlog.rev(b))
1337 return cmp(revlog.rev(a), revlog.rev(b))
1334 return cmp_by_rev
1338 return cmp_by_rev
1335
1339
1336 # If we determine that a particular file or manifest node must be a
1340 # If we determine that a particular file or manifest node must be a
1337 # node that the recipient of the changegroup will already have, we can
1341 # node that the recipient of the changegroup will already have, we can
1338 # also assume the recipient will have all the parents. This function
1342 # also assume the recipient will have all the parents. This function
1339 # prunes them from the set of missing nodes.
1343 # prunes them from the set of missing nodes.
1340 def prune_parents(revlog, hasset, msngset):
1344 def prune_parents(revlog, hasset, msngset):
1341 haslst = hasset.keys()
1345 haslst = hasset.keys()
1342 haslst.sort(cmp_by_rev_func(revlog))
1346 haslst.sort(cmp_by_rev_func(revlog))
1343 for node in haslst:
1347 for node in haslst:
1344 parentlst = [p for p in revlog.parents(node) if p != nullid]
1348 parentlst = [p for p in revlog.parents(node) if p != nullid]
1345 while parentlst:
1349 while parentlst:
1346 n = parentlst.pop()
1350 n = parentlst.pop()
1347 if n not in hasset:
1351 if n not in hasset:
1348 hasset[n] = 1
1352 hasset[n] = 1
1349 p = [p for p in revlog.parents(n) if p != nullid]
1353 p = [p for p in revlog.parents(n) if p != nullid]
1350 parentlst.extend(p)
1354 parentlst.extend(p)
1351 for n in hasset:
1355 for n in hasset:
1352 msngset.pop(n, None)
1356 msngset.pop(n, None)
1353
1357
1354 # This is a function generating function used to set up an environment
1358 # This is a function generating function used to set up an environment
1355 # for the inner function to execute in.
1359 # for the inner function to execute in.
1356 def manifest_and_file_collector(changedfileset):
1360 def manifest_and_file_collector(changedfileset):
1357 # This is an information gathering function that gathers
1361 # This is an information gathering function that gathers
1358 # information from each changeset node that goes out as part of
1362 # information from each changeset node that goes out as part of
1359 # the changegroup. The information gathered is a list of which
1363 # the changegroup. The information gathered is a list of which
1360 # manifest nodes are potentially required (the recipient may
1364 # manifest nodes are potentially required (the recipient may
1361 # already have them) and total list of all files which were
1365 # already have them) and total list of all files which were
1362 # changed in any changeset in the changegroup.
1366 # changed in any changeset in the changegroup.
1363 #
1367 #
1364 # We also remember the first changenode we saw any manifest
1368 # We also remember the first changenode we saw any manifest
1365 # referenced by so we can later determine which changenode 'owns'
1369 # referenced by so we can later determine which changenode 'owns'
1366 # the manifest.
1370 # the manifest.
1367 def collect_manifests_and_files(clnode):
1371 def collect_manifests_and_files(clnode):
1368 c = cl.read(clnode)
1372 c = cl.read(clnode)
1369 for f in c[3]:
1373 for f in c[3]:
1370 # This is to make sure we only have one instance of each
1374 # This is to make sure we only have one instance of each
1371 # filename string for each filename.
1375 # filename string for each filename.
1372 changedfileset.setdefault(f, f)
1376 changedfileset.setdefault(f, f)
1373 msng_mnfst_set.setdefault(c[0], clnode)
1377 msng_mnfst_set.setdefault(c[0], clnode)
1374 return collect_manifests_and_files
1378 return collect_manifests_and_files
1375
1379
1376 # Figure out which manifest nodes (of the ones we think might be part
1380 # Figure out which manifest nodes (of the ones we think might be part
1377 # of the changegroup) the recipient must know about and remove them
1381 # of the changegroup) the recipient must know about and remove them
1378 # from the changegroup.
1382 # from the changegroup.
1379 def prune_manifests():
1383 def prune_manifests():
1380 has_mnfst_set = {}
1384 has_mnfst_set = {}
1381 for n in msng_mnfst_set:
1385 for n in msng_mnfst_set:
1382 # If a 'missing' manifest thinks it belongs to a changenode
1386 # If a 'missing' manifest thinks it belongs to a changenode
1383 # the recipient is assumed to have, obviously the recipient
1387 # the recipient is assumed to have, obviously the recipient
1384 # must have that manifest.
1388 # must have that manifest.
1385 linknode = cl.node(mnfst.linkrev(n))
1389 linknode = cl.node(mnfst.linkrev(n))
1386 if linknode in has_cl_set:
1390 if linknode in has_cl_set:
1387 has_mnfst_set[n] = 1
1391 has_mnfst_set[n] = 1
1388 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1392 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1389
1393
1390 # Use the information collected in collect_manifests_and_files to say
1394 # Use the information collected in collect_manifests_and_files to say
1391 # which changenode any manifestnode belongs to.
1395 # which changenode any manifestnode belongs to.
1392 def lookup_manifest_link(mnfstnode):
1396 def lookup_manifest_link(mnfstnode):
1393 return msng_mnfst_set[mnfstnode]
1397 return msng_mnfst_set[mnfstnode]
1394
1398
1395 # A function generating function that sets up the initial environment
1399 # A function generating function that sets up the initial environment
1396 # the inner function.
1400 # the inner function.
1397 def filenode_collector(changedfiles):
1401 def filenode_collector(changedfiles):
1398 next_rev = [0]
1402 next_rev = [0]
1399 # This gathers information from each manifestnode included in the
1403 # This gathers information from each manifestnode included in the
1400 # changegroup about which filenodes the manifest node references
1404 # changegroup about which filenodes the manifest node references
1401 # so we can include those in the changegroup too.
1405 # so we can include those in the changegroup too.
1402 #
1406 #
1403 # It also remembers which changenode each filenode belongs to. It
1407 # It also remembers which changenode each filenode belongs to. It
1404 # does this by assuming the a filenode belongs to the changenode
1408 # does this by assuming the a filenode belongs to the changenode
1405 # the first manifest that references it belongs to.
1409 # the first manifest that references it belongs to.
1406 def collect_msng_filenodes(mnfstnode):
1410 def collect_msng_filenodes(mnfstnode):
1407 r = mnfst.rev(mnfstnode)
1411 r = mnfst.rev(mnfstnode)
1408 if r == next_rev[0]:
1412 if r == next_rev[0]:
1409 # If the last rev we looked at was the one just previous,
1413 # If the last rev we looked at was the one just previous,
1410 # we only need to see a diff.
1414 # we only need to see a diff.
1411 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1415 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1412 # For each line in the delta
1416 # For each line in the delta
1413 for dline in delta.splitlines():
1417 for dline in delta.splitlines():
1414 # get the filename and filenode for that line
1418 # get the filename and filenode for that line
1415 f, fnode = dline.split('\0')
1419 f, fnode = dline.split('\0')
1416 fnode = bin(fnode[:40])
1420 fnode = bin(fnode[:40])
1417 f = changedfiles.get(f, None)
1421 f = changedfiles.get(f, None)
1418 # And if the file is in the list of files we care
1422 # And if the file is in the list of files we care
1419 # about.
1423 # about.
1420 if f is not None:
1424 if f is not None:
1421 # Get the changenode this manifest belongs to
1425 # Get the changenode this manifest belongs to
1422 clnode = msng_mnfst_set[mnfstnode]
1426 clnode = msng_mnfst_set[mnfstnode]
1423 # Create the set of filenodes for the file if
1427 # Create the set of filenodes for the file if
1424 # there isn't one already.
1428 # there isn't one already.
1425 ndset = msng_filenode_set.setdefault(f, {})
1429 ndset = msng_filenode_set.setdefault(f, {})
1426 # And set the filenode's changelog node to the
1430 # And set the filenode's changelog node to the
1427 # manifest's if it hasn't been set already.
1431 # manifest's if it hasn't been set already.
1428 ndset.setdefault(fnode, clnode)
1432 ndset.setdefault(fnode, clnode)
1429 else:
1433 else:
1430 # Otherwise we need a full manifest.
1434 # Otherwise we need a full manifest.
1431 m = mnfst.read(mnfstnode)
1435 m = mnfst.read(mnfstnode)
1432 # For every file in we care about.
1436 # For every file in we care about.
1433 for f in changedfiles:
1437 for f in changedfiles:
1434 fnode = m.get(f, None)
1438 fnode = m.get(f, None)
1435 # If it's in the manifest
1439 # If it's in the manifest
1436 if fnode is not None:
1440 if fnode is not None:
1437 # See comments above.
1441 # See comments above.
1438 clnode = msng_mnfst_set[mnfstnode]
1442 clnode = msng_mnfst_set[mnfstnode]
1439 ndset = msng_filenode_set.setdefault(f, {})
1443 ndset = msng_filenode_set.setdefault(f, {})
1440 ndset.setdefault(fnode, clnode)
1444 ndset.setdefault(fnode, clnode)
1441 # Remember the revision we hope to see next.
1445 # Remember the revision we hope to see next.
1442 next_rev[0] = r + 1
1446 next_rev[0] = r + 1
1443 return collect_msng_filenodes
1447 return collect_msng_filenodes
1444
1448
1445 # We have a list of filenodes we think we need for a file, lets remove
1449 # We have a list of filenodes we think we need for a file, lets remove
1446 # all those we now the recipient must have.
1450 # all those we now the recipient must have.
1447 def prune_filenodes(f, filerevlog):
1451 def prune_filenodes(f, filerevlog):
1448 msngset = msng_filenode_set[f]
1452 msngset = msng_filenode_set[f]
1449 hasset = {}
1453 hasset = {}
1450 # If a 'missing' filenode thinks it belongs to a changenode we
1454 # If a 'missing' filenode thinks it belongs to a changenode we
1451 # assume the recipient must have, then the recipient must have
1455 # assume the recipient must have, then the recipient must have
1452 # that filenode.
1456 # that filenode.
1453 for n in msngset:
1457 for n in msngset:
1454 clnode = cl.node(filerevlog.linkrev(n))
1458 clnode = cl.node(filerevlog.linkrev(n))
1455 if clnode in has_cl_set:
1459 if clnode in has_cl_set:
1456 hasset[n] = 1
1460 hasset[n] = 1
1457 prune_parents(filerevlog, hasset, msngset)
1461 prune_parents(filerevlog, hasset, msngset)
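# Rationale for the pruning: filenodes form a DAG, and a revlog always
# contains the ancestors of any node it has, so once a filenode is known
# to be present on the recipient its ancestors are too.  prune_parents
# (defined earlier in this file) is therefore expected to walk parent
# links from the nodes in hasset and drop everything it reaches from
# msngset.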
1458
1462
1459 # A function-generating function that sets up the context for the
1463 # A function-generating function that sets up the context for the
1460 # inner function.
1464 # inner function.
1461 def lookup_filenode_link_func(fname):
1465 def lookup_filenode_link_func(fname):
1462 msngset = msng_filenode_set[fname]
1466 msngset = msng_filenode_set[fname]
1463 # Lookup the changenode the filenode belongs to.
1467 # Lookup the changenode the filenode belongs to.
1464 def lookup_filenode_link(fnode):
1468 def lookup_filenode_link(fnode):
1465 return msngset[fnode]
1469 return msngset[fnode]
1466 return lookup_filenode_link
1470 return lookup_filenode_link
1467
1471
1468 # Now that we have all these utility functions to help out and
1472 # Now that we have all these utility functions to help out and
1469 # logically divide up the task, generate the group.
1473 # logically divide up the task, generate the group.
1470 def gengroup():
1474 def gengroup():
1471 # The set of changed files starts empty.
1475 # The set of changed files starts empty.
1472 changedfiles = {}
1476 changedfiles = {}
1473 # Create a changenode group generator that will call our functions
1477 # Create a changenode group generator that will call our functions
1474 # back to lookup the owning changenode and collect information.
1478 # back to lookup the owning changenode and collect information.
1475 group = cl.group(msng_cl_lst, identity,
1479 group = cl.group(msng_cl_lst, identity,
1476 manifest_and_file_collector(changedfiles))
1480 manifest_and_file_collector(changedfiles))
1477 for chnk in group:
1481 for chnk in group:
1478 yield chnk
1482 yield chnk
1479
1483
1480 # The list of manifests has been collected by the generator
1484 # The list of manifests has been collected by the generator
1481 # calling our functions back.
1485 # calling our functions back.
1482 prune_manifests()
1486 prune_manifests()
1483 msng_mnfst_lst = msng_mnfst_set.keys()
1487 msng_mnfst_lst = msng_mnfst_set.keys()
1484 # Sort the manifestnodes by revision number.
1488 # Sort the manifestnodes by revision number.
1485 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1489 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1486 # Create a generator for the manifestnodes that calls our lookup
1490 # Create a generator for the manifestnodes that calls our lookup
1487 # and data collection functions back.
1491 # and data collection functions back.
1488 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1492 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1489 filenode_collector(changedfiles))
1493 filenode_collector(changedfiles))
1490 for chnk in group:
1494 for chnk in group:
1491 yield chnk
1495 yield chnk
1492
1496
1493 # These are no longer needed, dereference and toss the memory for
1497 # These are no longer needed, dereference and toss the memory for
1494 # them.
1498 # them.
1495 msng_mnfst_lst = None
1499 msng_mnfst_lst = None
1496 msng_mnfst_set.clear()
1500 msng_mnfst_set.clear()
1497
1501
1498 changedfiles = changedfiles.keys()
1502 changedfiles = changedfiles.keys()
1499 changedfiles.sort()
1503 changedfiles.sort()
1500 # Go through all our files in order sorted by name.
1504 # Go through all our files in order sorted by name.
1501 for fname in changedfiles:
1505 for fname in changedfiles:
1502 filerevlog = self.file(fname)
1506 filerevlog = self.file(fname)
1503 # Toss out the filenodes that the recipient isn't really
1507 # Toss out the filenodes that the recipient isn't really
1504 # missing.
1508 # missing.
1505 if msng_filenode_set.has_key(fname):
1509 if msng_filenode_set.has_key(fname):
1506 prune_filenodes(fname, filerevlog)
1510 prune_filenodes(fname, filerevlog)
1507 msng_filenode_lst = msng_filenode_set[fname].keys()
1511 msng_filenode_lst = msng_filenode_set[fname].keys()
1508 else:
1512 else:
1509 msng_filenode_lst = []
1513 msng_filenode_lst = []
1510 # If any filenodes are left, generate the group for them,
1514 # If any filenodes are left, generate the group for them,
1511 # otherwise don't bother.
1515 # otherwise don't bother.
1512 if len(msng_filenode_lst) > 0:
1516 if len(msng_filenode_lst) > 0:
1513 yield changegroup.genchunk(fname)
1517 yield changegroup.genchunk(fname)
1514 # Sort the filenodes by their revision number.
1518 # Sort the filenodes by their revision number.
1515 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1519 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1516 # Create a group generator and only pass in a changenode
1520 # Create a group generator and only pass in a changenode
1517 # lookup function as we need to collect no information
1521 # lookup function as we need to collect no information
1518 # from filenodes.
1522 # from filenodes.
1519 group = filerevlog.group(msng_filenode_lst,
1523 group = filerevlog.group(msng_filenode_lst,
1520 lookup_filenode_link_func(fname))
1524 lookup_filenode_link_func(fname))
1521 for chnk in group:
1525 for chnk in group:
1522 yield chnk
1526 yield chnk
1523 if msng_filenode_set.has_key(fname):
1527 if msng_filenode_set.has_key(fname):
1524 # Don't need this anymore, toss it to free memory.
1528 # Don't need this anymore, toss it to free memory.
1525 del msng_filenode_set[fname]
1529 del msng_filenode_set[fname]
1526 # Signal that no more groups are left.
1530 # Signal that no more groups are left.
1527 yield changegroup.closechunk()
1531 yield changegroup.closechunk()
1528
1532
1529 if msng_cl_lst:
1533 if msng_cl_lst:
1530 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1534 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1531
1535
1532 return util.chunkbuffer(gengroup())
1536 return util.chunkbuffer(gengroup())
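# The stream assembled above is a sequence of length-prefixed chunks.  A
# minimal sketch of the framing that changegroup.genchunk/closechunk are
# assumed to provide (illustrative only, not the canonical code):
#
#   import struct
#
#   def genchunk(data):
#       # 4-byte big-endian length that includes the header itself
#       return struct.pack(">l", len(data) + 4) + data
#
#   def closechunk():
#       return struct.pack(">l", 0)    # zero length ends a group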
1533
1537
1534 def changegroup(self, basenodes, source):
1538 def changegroup(self, basenodes, source):
1535 """Generate a changegroup of all nodes that we have that a recipient
1539 """Generate a changegroup of all nodes that we have that a recipient
1536 doesn't.
1540 doesn't.
1537
1541
1538 This is much easier than the previous function as we can assume that
1542 This is much easier than the previous function as we can assume that
1539 the recipient has any changenode we aren't sending them."""
1543 the recipient has any changenode we aren't sending them."""
1540
1544
1541 self.hook('preoutgoing', throw=True, source=source)
1545 self.hook('preoutgoing', throw=True, source=source)
1542
1546
1543 cl = self.changelog
1547 cl = self.changelog
1544 nodes = cl.nodesbetween(basenodes, None)[0]
1548 nodes = cl.nodesbetween(basenodes, None)[0]
1545 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1549 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1546
1550
1547 def identity(x):
1551 def identity(x):
1548 return x
1552 return x
1549
1553
1550 def gennodelst(revlog):
1554 def gennodelst(revlog):
1551 for r in xrange(0, revlog.count()):
1555 for r in xrange(0, revlog.count()):
1552 n = revlog.node(r)
1556 n = revlog.node(r)
1553 if revlog.linkrev(n) in revset:
1557 if revlog.linkrev(n) in revset:
1554 yield n
1558 yield n
1555
1559
1556 def changed_file_collector(changedfileset):
1560 def changed_file_collector(changedfileset):
1557 def collect_changed_files(clnode):
1561 def collect_changed_files(clnode):
1558 c = cl.read(clnode)
1562 c = cl.read(clnode)
1559 for fname in c[3]:
1563 for fname in c[3]:
1560 changedfileset[fname] = 1
1564 changedfileset[fname] = 1
1561 return collect_changed_files
1565 return collect_changed_files
1562
1566
1563 def lookuprevlink_func(revlog):
1567 def lookuprevlink_func(revlog):
1564 def lookuprevlink(n):
1568 def lookuprevlink(n):
1565 return cl.node(revlog.linkrev(n))
1569 return cl.node(revlog.linkrev(n))
1566 return lookuprevlink
1570 return lookuprevlink
1567
1571
1568 def gengroup():
1572 def gengroup():
1569 # construct a list of all changed files
1573 # construct a list of all changed files
1570 changedfiles = {}
1574 changedfiles = {}
1571
1575
1572 for chnk in cl.group(nodes, identity,
1576 for chnk in cl.group(nodes, identity,
1573 changed_file_collector(changedfiles)):
1577 changed_file_collector(changedfiles)):
1574 yield chnk
1578 yield chnk
1575 changedfiles = changedfiles.keys()
1579 changedfiles = changedfiles.keys()
1576 changedfiles.sort()
1580 changedfiles.sort()
1577
1581
1578 mnfst = self.manifest
1582 mnfst = self.manifest
1579 nodeiter = gennodelst(mnfst)
1583 nodeiter = gennodelst(mnfst)
1580 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1584 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1581 yield chnk
1585 yield chnk
1582
1586
1583 for fname in changedfiles:
1587 for fname in changedfiles:
1584 filerevlog = self.file(fname)
1588 filerevlog = self.file(fname)
1585 nodeiter = gennodelst(filerevlog)
1589 nodeiter = gennodelst(filerevlog)
1586 nodeiter = list(nodeiter)
1590 nodeiter = list(nodeiter)
1587 if nodeiter:
1591 if nodeiter:
1588 yield changegroup.genchunk(fname)
1592 yield changegroup.genchunk(fname)
1589 lookup = lookuprevlink_func(filerevlog)
1593 lookup = lookuprevlink_func(filerevlog)
1590 for chnk in filerevlog.group(nodeiter, lookup):
1594 for chnk in filerevlog.group(nodeiter, lookup):
1591 yield chnk
1595 yield chnk
1592
1596
1593 yield changegroup.closechunk()
1597 yield changegroup.closechunk()
1594
1598
1595 if nodes:
1599 if nodes:
1596 self.hook('outgoing', node=hex(nodes[0]), source=source)
1600 self.hook('outgoing', node=hex(nodes[0]), source=source)
1597
1601
1598 return util.chunkbuffer(gengroup())
1602 return util.chunkbuffer(gengroup())
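# util.chunkbuffer wraps the generator so the result can be read like a
# file.  A hedged usage sketch on the caller's side (the buffer size is
# arbitrary):
#
#   cg = repo.changegroup(basenodes, 'push')
#   while 1:
#       chunk = cg.read(4096)
#       if not chunk:
#           break
#       # write chunk to the wire or into a bundle file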
1599
1603
1600 def addchangegroup(self, source, srctype, url):
1604 def addchangegroup(self, source, srctype, url):
1601 """add changegroup to repo.
1605 """add changegroup to repo.
1602 returns number of heads modified or added + 1."""
1606 returns number of heads modified or added + 1."""
1603
1607
1604 def csmap(x):
1608 def csmap(x):
1605 self.ui.debug(_("add changeset %s\n") % short(x))
1609 self.ui.debug(_("add changeset %s\n") % short(x))
1606 return cl.count()
1610 return cl.count()
1607
1611
1608 def revmap(x):
1612 def revmap(x):
1609 return cl.rev(x)
1613 return cl.rev(x)
1610
1614
1611 if not source:
1615 if not source:
1612 return 0
1616 return 0
1613
1617
1614 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1618 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1615
1619
1616 changesets = files = revisions = 0
1620 changesets = files = revisions = 0
1617
1621
1618 tr = self.transaction()
1622 tr = self.transaction()
1619
1623
1620 # write changelog data to temp files so concurrent readers will not see
1624 # write changelog data to temp files so concurrent readers will not see
1621 # an inconsistent view
1625 # an inconsistent view
1622 cl = None
1626 cl = None
1623 try:
1627 try:
1624 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1628 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1625
1629
1626 oldheads = len(cl.heads())
1630 oldheads = len(cl.heads())
1627
1631
1628 # pull off the changeset group
1632 # pull off the changeset group
1629 self.ui.status(_("adding changesets\n"))
1633 self.ui.status(_("adding changesets\n"))
1630 cor = cl.count() - 1
1634 cor = cl.count() - 1
1631 chunkiter = changegroup.chunkiter(source)
1635 chunkiter = changegroup.chunkiter(source)
1632 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1636 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1633 raise util.Abort(_("received changelog group is empty"))
1637 raise util.Abort(_("received changelog group is empty"))
1634 cnr = cl.count() - 1
1638 cnr = cl.count() - 1
1635 changesets = cnr - cor
1639 changesets = cnr - cor
1636
1640
1637 # pull off the manifest group
1641 # pull off the manifest group
1638 self.ui.status(_("adding manifests\n"))
1642 self.ui.status(_("adding manifests\n"))
1639 chunkiter = changegroup.chunkiter(source)
1643 chunkiter = changegroup.chunkiter(source)
1640 # no need to check for empty manifest group here:
1644 # no need to check for empty manifest group here:
1641 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1645 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1642 # no new manifest will be created and the manifest group will
1646 # no new manifest will be created and the manifest group will
1643 # be empty during the pull
1647 # be empty during the pull
1644 self.manifest.addgroup(chunkiter, revmap, tr)
1648 self.manifest.addgroup(chunkiter, revmap, tr)
1645
1649
1646 # process the files
1650 # process the files
1647 self.ui.status(_("adding file changes\n"))
1651 self.ui.status(_("adding file changes\n"))
1648 while 1:
1652 while 1:
1649 f = changegroup.getchunk(source)
1653 f = changegroup.getchunk(source)
1650 if not f:
1654 if not f:
1651 break
1655 break
1652 self.ui.debug(_("adding %s revisions\n") % f)
1656 self.ui.debug(_("adding %s revisions\n") % f)
1653 fl = self.file(f)
1657 fl = self.file(f)
1654 o = fl.count()
1658 o = fl.count()
1655 chunkiter = changegroup.chunkiter(source)
1659 chunkiter = changegroup.chunkiter(source)
1656 if fl.addgroup(chunkiter, revmap, tr) is None:
1660 if fl.addgroup(chunkiter, revmap, tr) is None:
1657 raise util.Abort(_("received file revlog group is empty"))
1661 raise util.Abort(_("received file revlog group is empty"))
1658 revisions += fl.count() - o
1662 revisions += fl.count() - o
1659 files += 1
1663 files += 1
1660
1664
1661 cl.writedata()
1665 cl.writedata()
1662 finally:
1666 finally:
1663 if cl:
1667 if cl:
1664 cl.cleanup()
1668 cl.cleanup()
1665
1669
1666 # make changelog see real files again
1670 # make changelog see real files again
1667 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1671 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1668 self.changelog.checkinlinesize(tr)
1672 self.changelog.checkinlinesize(tr)
1669
1673
1670 newheads = len(self.changelog.heads())
1674 newheads = len(self.changelog.heads())
1671 heads = ""
1675 heads = ""
1672 if oldheads and newheads != oldheads:
1676 if oldheads and newheads != oldheads:
1673 heads = _(" (%+d heads)") % (newheads - oldheads)
1677 heads = _(" (%+d heads)") % (newheads - oldheads)
1674
1678
1675 self.ui.status(_("added %d changesets"
1679 self.ui.status(_("added %d changesets"
1676 " with %d changes to %d files%s\n")
1680 " with %d changes to %d files%s\n")
1677 % (changesets, revisions, files, heads))
1681 % (changesets, revisions, files, heads))
1678
1682
1679 if changesets > 0:
1683 if changesets > 0:
1680 self.hook('pretxnchangegroup', throw=True,
1684 self.hook('pretxnchangegroup', throw=True,
1681 node=hex(self.changelog.node(cor+1)), source=srctype,
1685 node=hex(self.changelog.node(cor+1)), source=srctype,
1682 url=url)
1686 url=url)
1683
1687
1684 tr.close()
1688 tr.close()
1685
1689
1686 if changesets > 0:
1690 if changesets > 0:
1687 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1691 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1688 source=srctype, url=url)
1692 source=srctype, url=url)
1689
1693
1690 for i in range(cor + 1, cnr + 1):
1694 for i in range(cor + 1, cnr + 1):
1691 self.hook("incoming", node=hex(self.changelog.node(i)),
1695 self.hook("incoming", node=hex(self.changelog.node(i)),
1692 source=srctype, url=url)
1696 source=srctype, url=url)
1693
1697
1694 return newheads - oldheads + 1
1698 return newheads - oldheads + 1
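# Following the docstring above, the return value can be read roughly as:
#
#   modheads = repo.addchangegroup(source, 'pull', url)
#   # modheads == 0: the source was empty, nothing was added
#   # modheads == 1: changesets were added, the head count is unchanged
#   # modheads  > 1: modheads - 1 heads were added or modified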
1695
1699
1696 def update(self, node, allow=False, force=False, choose=None,
1700 def update(self, node, allow=False, force=False, choose=None,
1697 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1701 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1698 pl = self.dirstate.parents()
1702 pl = self.dirstate.parents()
1699 if not force and pl[1] != nullid:
1703 if not force and pl[1] != nullid:
1700 raise util.Abort(_("outstanding uncommitted merges"))
1704 raise util.Abort(_("outstanding uncommitted merges"))
1701
1705
1702 err = False
1706 err = False
1703
1707
1704 p1, p2 = pl[0], node
1708 p1, p2 = pl[0], node
1705 pa = self.changelog.ancestor(p1, p2)
1709 pa = self.changelog.ancestor(p1, p2)
1706 m1n = self.changelog.read(p1)[0]
1710 m1n = self.changelog.read(p1)[0]
1707 m2n = self.changelog.read(p2)[0]
1711 m2n = self.changelog.read(p2)[0]
1708 man = self.manifest.ancestor(m1n, m2n)
1712 man = self.manifest.ancestor(m1n, m2n)
1709 m1 = self.manifest.read(m1n)
1713 m1 = self.manifest.read(m1n)
1710 mf1 = self.manifest.readflags(m1n)
1714 mf1 = self.manifest.readflags(m1n)
1711 m2 = self.manifest.read(m2n).copy()
1715 m2 = self.manifest.read(m2n).copy()
1712 mf2 = self.manifest.readflags(m2n)
1716 mf2 = self.manifest.readflags(m2n)
1713 ma = self.manifest.read(man)
1717 ma = self.manifest.read(man)
1714 mfa = self.manifest.readflags(man)
1718 mfa = self.manifest.readflags(man)
1715
1719
1716 modified, added, removed, deleted, unknown = self.changes()
1720 modified, added, removed, deleted, unknown = self.changes()
1717
1721
1718 # is this a jump, or a merge? i.e. is there a linear path
1722 # is this a jump, or a merge? i.e. is there a linear path
1719 # from p1 to p2?
1723 # from p1 to p2?
1720 linear_path = (pa == p1 or pa == p2)
1724 linear_path = (pa == p1 or pa == p2)
1721
1725
1722 if allow and linear_path:
1726 if allow and linear_path:
1723 raise util.Abort(_("there is nothing to merge, just use "
1727 raise util.Abort(_("there is nothing to merge, just use "
1724 "'hg update' or look at 'hg heads'"))
1728 "'hg update' or look at 'hg heads'"))
1725 if allow and not forcemerge:
1729 if allow and not forcemerge:
1726 if modified or added or removed:
1730 if modified or added or removed:
1727 raise util.Abort(_("outstanding uncommitted changes"))
1731 raise util.Abort(_("outstanding uncommitted changes"))
1728
1732
1729 if not forcemerge and not force:
1733 if not forcemerge and not force:
1730 for f in unknown:
1734 for f in unknown:
1731 if f in m2:
1735 if f in m2:
1732 t1 = self.wread(f)
1736 t1 = self.wread(f)
1733 t2 = self.file(f).read(m2[f])
1737 t2 = self.file(f).read(m2[f])
1734 if cmp(t1, t2) != 0:
1738 if cmp(t1, t2) != 0:
1735 raise util.Abort(_("'%s' already exists in the working"
1739 raise util.Abort(_("'%s' already exists in the working"
1736 " dir and differs from remote") % f)
1740 " dir and differs from remote") % f)
1737
1741
1738 # resolve the manifest to determine which files
1742 # resolve the manifest to determine which files
1739 # we care about merging
1743 # we care about merging
1740 self.ui.note(_("resolving manifests\n"))
1744 self.ui.note(_("resolving manifests\n"))
1741 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1745 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1742 (force, allow, moddirstate, linear_path))
1746 (force, allow, moddirstate, linear_path))
1743 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1747 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1744 (short(man), short(m1n), short(m2n)))
1748 (short(man), short(m1n), short(m2n)))
1745
1749
1746 merge = {}
1750 merge = {}
1747 get = {}
1751 get = {}
1748 remove = []
1752 remove = []
1749
1753
1750 # construct a working dir manifest
1754 # construct a working dir manifest
1751 mw = m1.copy()
1755 mw = m1.copy()
1752 mfw = mf1.copy()
1756 mfw = mf1.copy()
1753 umap = dict.fromkeys(unknown)
1757 umap = dict.fromkeys(unknown)
1754
1758
1755 for f in added + modified + unknown:
1759 for f in added + modified + unknown:
1756 mw[f] = ""
1760 mw[f] = ""
1757 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1761 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1758
1762
1759 if moddirstate and not wlock:
1763 if moddirstate and not wlock:
1760 wlock = self.wlock()
1764 wlock = self.wlock()
1761
1765
1762 for f in deleted + removed:
1766 for f in deleted + removed:
1763 if f in mw:
1767 if f in mw:
1764 del mw[f]
1768 del mw[f]
1765
1769
1766 # If we're jumping between revisions (as opposed to merging),
1770 # If we're jumping between revisions (as opposed to merging),
1767 # and if neither the working directory nor the target rev has
1771 # and if neither the working directory nor the target rev has
1768 # the file, then we need to remove it from the dirstate, to
1772 # the file, then we need to remove it from the dirstate, to
1769 # prevent the dirstate from listing the file when it is no
1773 # prevent the dirstate from listing the file when it is no
1770 # longer in the manifest.
1774 # longer in the manifest.
1771 if moddirstate and linear_path and f not in m2:
1775 if moddirstate and linear_path and f not in m2:
1772 self.dirstate.forget((f,))
1776 self.dirstate.forget((f,))
1773
1777
1774 # Compare manifests
1778 # Compare manifests
1775 for f, n in mw.iteritems():
1779 for f, n in mw.iteritems():
1776 if choose and not choose(f):
1780 if choose and not choose(f):
1777 continue
1781 continue
1778 if f in m2:
1782 if f in m2:
1779 s = 0
1783 s = 0
1780
1784
1781 # is the wfile new since m1, and does it match m2?
1785 # is the wfile new since m1, and does it match m2?
1782 if f not in m1:
1786 if f not in m1:
1783 t1 = self.wread(f)
1787 t1 = self.wread(f)
1784 t2 = self.file(f).read(m2[f])
1788 t2 = self.file(f).read(m2[f])
1785 if cmp(t1, t2) == 0:
1789 if cmp(t1, t2) == 0:
1786 n = m2[f]
1790 n = m2[f]
1787 del t1, t2
1791 del t1, t2
1788
1792
1789 # are files different?
1793 # are files different?
1790 if n != m2[f]:
1794 if n != m2[f]:
1791 a = ma.get(f, nullid)
1795 a = ma.get(f, nullid)
1792 # are both different from the ancestor?
1796 # are both different from the ancestor?
1793 if n != a and m2[f] != a:
1797 if n != a and m2[f] != a:
1794 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1798 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1795 # merge executable bits
1799 # merge executable bits
1796 # "if we changed or they changed, change in merge"
1800 # "if we changed or they changed, change in merge"
1797 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1801 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1798 mode = ((a^b) | (a^c)) ^ a
1802 mode = ((a^b) | (a^c)) ^ a
1799 merge[f] = (m1.get(f, nullid), m2[f], mode)
1803 merge[f] = (m1.get(f, nullid), m2[f], mode)
1800 s = 1
1804 s = 1
1801 # are we clobbering?
1805 # are we clobbering?
1802 # is remote's version newer?
1806 # is remote's version newer?
1803 # or are we going back in time?
1807 # or are we going back in time?
1804 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1808 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1805 self.ui.debug(_(" remote %s is newer, get\n") % f)
1809 self.ui.debug(_(" remote %s is newer, get\n") % f)
1806 get[f] = m2[f]
1810 get[f] = m2[f]
1807 s = 1
1811 s = 1
1808 elif f in umap or f in added:
1812 elif f in umap or f in added:
1809 # this unknown file is the same as the checkout
1813 # this unknown file is the same as the checkout
1810 # we need to reset the dirstate if the file was added
1814 # we need to reset the dirstate if the file was added
1811 get[f] = m2[f]
1815 get[f] = m2[f]
1812
1816
1813 if not s and mfw[f] != mf2[f]:
1817 if not s and mfw[f] != mf2[f]:
1814 if force:
1818 if force:
1815 self.ui.debug(_(" updating permissions for %s\n") % f)
1819 self.ui.debug(_(" updating permissions for %s\n") % f)
1816 util.set_exec(self.wjoin(f), mf2[f])
1820 util.set_exec(self.wjoin(f), mf2[f])
1817 else:
1821 else:
1818 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1822 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1819 mode = ((a^b) | (a^c)) ^ a
1823 mode = ((a^b) | (a^c)) ^ a
1820 if mode != b:
1824 if mode != b:
1821 self.ui.debug(_(" updating permissions for %s\n")
1825 self.ui.debug(_(" updating permissions for %s\n")
1822 % f)
1826 % f)
1823 util.set_exec(self.wjoin(f), mode)
1827 util.set_exec(self.wjoin(f), mode)
1824 del m2[f]
1828 del m2[f]
1825 elif f in ma:
1829 elif f in ma:
1826 if n != ma[f]:
1830 if n != ma[f]:
1827 r = _("d")
1831 r = _("d")
1828 if not force and (linear_path or allow):
1832 if not force and (linear_path or allow):
1829 r = self.ui.prompt(
1833 r = self.ui.prompt(
1830 (_(" local changed %s which remote deleted\n") % f) +
1834 (_(" local changed %s which remote deleted\n") % f) +
1831 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1835 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1832 if r == _("d"):
1836 if r == _("d"):
1833 remove.append(f)
1837 remove.append(f)
1834 else:
1838 else:
1835 self.ui.debug(_("other deleted %s\n") % f)
1839 self.ui.debug(_("other deleted %s\n") % f)
1836 remove.append(f) # other deleted it
1840 remove.append(f) # other deleted it
1837 else:
1841 else:
1838 # file is created on branch or in working directory
1842 # file is created on branch or in working directory
1839 if force and f not in umap:
1843 if force and f not in umap:
1840 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1844 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1841 remove.append(f)
1845 remove.append(f)
1842 elif n == m1.get(f, nullid): # same as parent
1846 elif n == m1.get(f, nullid): # same as parent
1843 if p2 == pa: # going backwards?
1847 if p2 == pa: # going backwards?
1844 self.ui.debug(_("remote deleted %s\n") % f)
1848 self.ui.debug(_("remote deleted %s\n") % f)
1845 remove.append(f)
1849 remove.append(f)
1846 else:
1850 else:
1847 self.ui.debug(_("local modified %s, keeping\n") % f)
1851 self.ui.debug(_("local modified %s, keeping\n") % f)
1848 else:
1852 else:
1849 self.ui.debug(_("working dir created %s, keeping\n") % f)
1853 self.ui.debug(_("working dir created %s, keeping\n") % f)
1850
1854
1851 for f, n in m2.iteritems():
1855 for f, n in m2.iteritems():
1852 if choose and not choose(f):
1856 if choose and not choose(f):
1853 continue
1857 continue
1854 if f[0] == "/":
1858 if f[0] == "/":
1855 continue
1859 continue
1856 if f in ma and n != ma[f]:
1860 if f in ma and n != ma[f]:
1857 r = _("k")
1861 r = _("k")
1858 if not force and (linear_path or allow):
1862 if not force and (linear_path or allow):
1859 r = self.ui.prompt(
1863 r = self.ui.prompt(
1860 (_("remote changed %s which local deleted\n") % f) +
1864 (_("remote changed %s which local deleted\n") % f) +
1861 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1865 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1862 if r == _("k"):
1866 if r == _("k"):
1863 get[f] = n
1867 get[f] = n
1864 elif f not in ma:
1868 elif f not in ma:
1865 self.ui.debug(_("remote created %s\n") % f)
1869 self.ui.debug(_("remote created %s\n") % f)
1866 get[f] = n
1870 get[f] = n
1867 else:
1871 else:
1868 if force or p2 == pa: # going backwards?
1872 if force or p2 == pa: # going backwards?
1869 self.ui.debug(_("local deleted %s, recreating\n") % f)
1873 self.ui.debug(_("local deleted %s, recreating\n") % f)
1870 get[f] = n
1874 get[f] = n
1871 else:
1875 else:
1872 self.ui.debug(_("local deleted %s\n") % f)
1876 self.ui.debug(_("local deleted %s\n") % f)
1873
1877
1874 del mw, m1, m2, ma
1878 del mw, m1, m2, ma
1875
1879
1876 if force:
1880 if force:
1877 for f in merge:
1881 for f in merge:
1878 get[f] = merge[f][1]
1882 get[f] = merge[f][1]
1879 merge = {}
1883 merge = {}
1880
1884
1881 if linear_path or force:
1885 if linear_path or force:
1882 # we don't need to do any magic, just jump to the new rev
1886 # we don't need to do any magic, just jump to the new rev
1883 branch_merge = False
1887 branch_merge = False
1884 p1, p2 = p2, nullid
1888 p1, p2 = p2, nullid
1885 else:
1889 else:
1886 if not allow:
1890 if not allow:
1887 self.ui.status(_("this update spans a branch"
1891 self.ui.status(_("this update spans a branch"
1888 " affecting the following files:\n"))
1892 " affecting the following files:\n"))
1889 fl = merge.keys() + get.keys()
1893 fl = merge.keys() + get.keys()
1890 fl.sort()
1894 fl.sort()
1891 for f in fl:
1895 for f in fl:
1892 cf = ""
1896 cf = ""
1893 if f in merge:
1897 if f in merge:
1894 cf = _(" (resolve)")
1898 cf = _(" (resolve)")
1895 self.ui.status(" %s%s\n" % (f, cf))
1899 self.ui.status(" %s%s\n" % (f, cf))
1896 self.ui.warn(_("aborting update spanning branches!\n"))
1900 self.ui.warn(_("aborting update spanning branches!\n"))
1897 self.ui.status(_("(use 'hg merge' to merge across branches"
1901 self.ui.status(_("(use 'hg merge' to merge across branches"
1898 " or 'hg update -C' to lose changes)\n"))
1902 " or 'hg update -C' to lose changes)\n"))
1899 return 1
1903 return 1
1900 branch_merge = True
1904 branch_merge = True
1901
1905
1902 xp1 = hex(p1)
1906 xp1 = hex(p1)
1903 xp2 = hex(p2)
1907 xp2 = hex(p2)
1904 if p2 == nullid: xxp2 = ''
1908 if p2 == nullid: xxp2 = ''
1905 else: xxp2 = xp2
1909 else: xxp2 = xp2
1906
1910
1907 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1911 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1908
1912
1913 # get the files we don't need to merge, just fetch them
1917 # get the files we don't need to merge, just fetch them
1910 files = get.keys()
1914 files = get.keys()
1911 files.sort()
1915 files.sort()
1912 for f in files:
1916 for f in files:
1913 if f[0] == "/":
1917 if f[0] == "/":
1914 continue
1918 continue
1915 self.ui.note(_("getting %s\n") % f)
1919 self.ui.note(_("getting %s\n") % f)
1916 t = self.file(f).read(get[f])
1920 t = self.file(f).read(get[f])
1917 self.wwrite(f, t)
1921 self.wwrite(f, t)
1918 util.set_exec(self.wjoin(f), mf2[f])
1922 util.set_exec(self.wjoin(f), mf2[f])
1919 if moddirstate:
1923 if moddirstate:
1920 if branch_merge:
1924 if branch_merge:
1921 self.dirstate.update([f], 'n', st_mtime=-1)
1925 self.dirstate.update([f], 'n', st_mtime=-1)
1922 else:
1926 else:
1923 self.dirstate.update([f], 'n')
1927 self.dirstate.update([f], 'n')
1924
1928
1925 # merge the tricky bits
1929 # merge the tricky bits
1926 failedmerge = []
1930 failedmerge = []
1927 files = merge.keys()
1931 files = merge.keys()
1928 files.sort()
1932 files.sort()
1929 for f in files:
1933 for f in files:
1930 self.ui.status(_("merging %s\n") % f)
1934 self.ui.status(_("merging %s\n") % f)
1931 my, other, flag = merge[f]
1935 my, other, flag = merge[f]
1932 ret = self.merge3(f, my, other, xp1, xp2)
1936 ret = self.merge3(f, my, other, xp1, xp2)
1933 if ret:
1937 if ret:
1934 err = True
1938 err = True
1935 failedmerge.append(f)
1939 failedmerge.append(f)
1936 util.set_exec(self.wjoin(f), flag)
1940 util.set_exec(self.wjoin(f), flag)
1937 if moddirstate:
1941 if moddirstate:
1938 if branch_merge:
1942 if branch_merge:
1939 # We've done a branch merge, mark this file as merged
1943 # We've done a branch merge, mark this file as merged
1940 # so that we properly record the merge later
1944 # so that we properly record the merge later
1941 self.dirstate.update([f], 'm')
1945 self.dirstate.update([f], 'm')
1942 else:
1946 else:
1943 # We've update-merged a locally modified file, so
1947 # We've update-merged a locally modified file, so
1944 # we set the dirstate to emulate a normal checkout
1948 # we set the dirstate to emulate a normal checkout
1945 # of that file some time in the past. Thus our
1949 # of that file some time in the past. Thus our
1946 # merge will appear as a normal local file
1950 # merge will appear as a normal local file
1947 # modification.
1951 # modification.
1948 f_len = len(self.file(f).read(other))
1952 f_len = len(self.file(f).read(other))
1949 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1953 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1950
1954
1951 remove.sort()
1955 remove.sort()
1952 for f in remove:
1956 for f in remove:
1953 self.ui.note(_("removing %s\n") % f)
1957 self.ui.note(_("removing %s\n") % f)
1954 util.audit_path(f)
1958 util.audit_path(f)
1955 try:
1959 try:
1956 util.unlink(self.wjoin(f))
1960 util.unlink(self.wjoin(f))
1957 except OSError, inst:
1961 except OSError, inst:
1958 if inst.errno != errno.ENOENT:
1962 if inst.errno != errno.ENOENT:
1959 self.ui.warn(_("update failed to remove %s: %s!\n") %
1963 self.ui.warn(_("update failed to remove %s: %s!\n") %
1960 (f, inst.strerror))
1964 (f, inst.strerror))
1961 if moddirstate:
1965 if moddirstate:
1962 if branch_merge:
1966 if branch_merge:
1963 self.dirstate.update(remove, 'r')
1967 self.dirstate.update(remove, 'r')
1964 else:
1968 else:
1965 self.dirstate.forget(remove)
1969 self.dirstate.forget(remove)
1966
1970
1967 if moddirstate:
1971 if moddirstate:
1968 self.dirstate.setparents(p1, p2)
1972 self.dirstate.setparents(p1, p2)
1969
1973
1970 if show_stats:
1974 if show_stats:
1971 stats = ((len(get), _("updated")),
1975 stats = ((len(get), _("updated")),
1972 (len(merge) - len(failedmerge), _("merged")),
1976 (len(merge) - len(failedmerge), _("merged")),
1973 (len(remove), _("removed")),
1977 (len(remove), _("removed")),
1974 (len(failedmerge), _("unresolved")))
1978 (len(failedmerge), _("unresolved")))
1975 note = ", ".join([_("%d files %s") % s for s in stats])
1979 note = ", ".join([_("%d files %s") % s for s in stats])
1976 self.ui.status("%s\n" % note)
1980 self.ui.status("%s\n" % note)
1977 if moddirstate:
1981 if moddirstate:
1978 if branch_merge:
1982 if branch_merge:
1979 if failedmerge:
1983 if failedmerge:
1980 self.ui.status(_("There are unresolved merges,"
1984 self.ui.status(_("There are unresolved merges,"
1981 " you can redo the full merge using:\n"
1985 " you can redo the full merge using:\n"
1982 " hg update -C %s\n"
1986 " hg update -C %s\n"
1983 " hg merge %s\n"
1987 " hg merge %s\n"
1984 % (self.changelog.rev(p1),
1988 % (self.changelog.rev(p1),
1985 self.changelog.rev(p2))))
1989 self.changelog.rev(p2))))
1986 else:
1990 else:
1987 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1991 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1988 elif failedmerge:
1992 elif failedmerge:
1989 self.ui.status(_("There are unresolved merges with"
1993 self.ui.status(_("There are unresolved merges with"
1990 " locally modified files.\n"))
1994 " locally modified files.\n"))
1991
1995
1992 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1996 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1993 return err
1997 return err
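# Hypothetical call patterns, inferred from the messages above (the real
# command layer passes additional options):
#
#   repo.update(node)                  # linear update, jump to a revision
#   repo.update(node, allow=True)      # branch merge, i.e. 'hg merge'
#   repo.update(node, force=True)      # discard local changes, 'hg update -C'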
1994
1998
1995 def merge3(self, fn, my, other, p1, p2):
1999 def merge3(self, fn, my, other, p1, p2):
1996 """perform a 3-way merge in the working directory"""
2000 """perform a 3-way merge in the working directory"""
1997
2001
1998 def temp(prefix, node):
2002 def temp(prefix, node):
1999 pre = "%s~%s." % (os.path.basename(fn), prefix)
2003 pre = "%s~%s." % (os.path.basename(fn), prefix)
2000 (fd, name) = tempfile.mkstemp(prefix=pre)
2004 (fd, name) = tempfile.mkstemp(prefix=pre)
2001 f = os.fdopen(fd, "wb")
2005 f = os.fdopen(fd, "wb")
2002 self.wwrite(fn, fl.read(node), f)
2006 self.wwrite(fn, fl.read(node), f)
2003 f.close()
2007 f.close()
2004 return name
2008 return name
2005
2009
2006 fl = self.file(fn)
2010 fl = self.file(fn)
2007 base = fl.ancestor(my, other)
2011 base = fl.ancestor(my, other)
2008 a = self.wjoin(fn)
2012 a = self.wjoin(fn)
2009 b = temp("base", base)
2013 b = temp("base", base)
2010 c = temp("other", other)
2014 c = temp("other", other)
2011
2015
2012 self.ui.note(_("resolving %s\n") % fn)
2016 self.ui.note(_("resolving %s\n") % fn)
2013 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
2017 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
2014 (fn, short(my), short(other), short(base)))
2018 (fn, short(my), short(other), short(base)))
2015
2019
2016 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
2020 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
2017 or "hgmerge")
2021 or "hgmerge")
2018 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
2022 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
2019 environ={'HG_FILE': fn,
2023 environ={'HG_FILE': fn,
2020 'HG_MY_NODE': p1,
2024 'HG_MY_NODE': p1,
2021 'HG_OTHER_NODE': p2,
2025 'HG_OTHER_NODE': p2,
2022 'HG_FILE_MY_NODE': hex(my),
2026 'HG_FILE_MY_NODE': hex(my),
2023 'HG_FILE_OTHER_NODE': hex(other),
2027 'HG_FILE_OTHER_NODE': hex(other),
2024 'HG_FILE_BASE_NODE': hex(base)})
2028 'HG_FILE_BASE_NODE': hex(base)})
2025 if r:
2029 if r:
2026 self.ui.warn(_("merging %s failed!\n") % fn)
2030 self.ui.warn(_("merging %s failed!\n") % fn)
2027
2031
2028 os.unlink(b)
2032 os.unlink(b)
2029 os.unlink(c)
2033 os.unlink(c)
2030 return r
2034 return r
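# The external tool receives three paths -- the working copy of the file,
# the ancestor and the other version -- and a non-zero exit status marks
# the merge as failed.  A minimal, hypothetical HGMERGE replacement that
# honours this convention (delegating to RCS merge(1)):
#
#   #!/usr/bin/env python
#   import subprocess, sys
#   local, base, other = sys.argv[1:4]
#   sys.exit(subprocess.call(['merge', local, base, other]))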
2031
2035
2032 def verify(self):
2036 def verify(self):
2033 filelinkrevs = {}
2037 filelinkrevs = {}
2034 filenodes = {}
2038 filenodes = {}
2035 changesets = revisions = files = 0
2039 changesets = revisions = files = 0
2036 errors = [0]
2040 errors = [0]
2037 warnings = [0]
2041 warnings = [0]
2038 neededmanifests = {}
2042 neededmanifests = {}
2039
2043
2040 def err(msg):
2044 def err(msg):
2041 self.ui.warn(msg + "\n")
2045 self.ui.warn(msg + "\n")
2042 errors[0] += 1
2046 errors[0] += 1
2043
2047
2044 def warn(msg):
2048 def warn(msg):
2045 self.ui.warn(msg + "\n")
2049 self.ui.warn(msg + "\n")
2046 warnings[0] += 1
2050 warnings[0] += 1
2047
2051
2048 def checksize(obj, name):
2052 def checksize(obj, name):
2049 d = obj.checksize()
2053 d = obj.checksize()
2050 if d[0]:
2054 if d[0]:
2051 err(_("%s data length off by %d bytes") % (name, d[0]))
2055 err(_("%s data length off by %d bytes") % (name, d[0]))
2052 if d[1]:
2056 if d[1]:
2053 err(_("%s index contains %d extra bytes") % (name, d[1]))
2057 err(_("%s index contains %d extra bytes") % (name, d[1]))
2054
2058
2055 def checkversion(obj, name):
2059 def checkversion(obj, name):
2056 if obj.version != revlog.REVLOGV0:
2060 if obj.version != revlog.REVLOGV0:
2057 if not revlogv1:
2061 if not revlogv1:
2058 warn(_("warning: `%s' uses revlog format 1") % name)
2062 warn(_("warning: `%s' uses revlog format 1") % name)
2059 elif revlogv1:
2063 elif revlogv1:
2060 warn(_("warning: `%s' uses revlog format 0") % name)
2064 warn(_("warning: `%s' uses revlog format 0") % name)
2061
2065
2062 revlogv1 = self.revlogversion != revlog.REVLOGV0
2066 revlogv1 = self.revlogversion != revlog.REVLOGV0
2063 if self.ui.verbose or revlogv1 != self.revlogv1:
2067 if self.ui.verbose or revlogv1 != self.revlogv1:
2064 self.ui.status(_("repository uses revlog format %d\n") %
2068 self.ui.status(_("repository uses revlog format %d\n") %
2065 (revlogv1 and 1 or 0))
2069 (revlogv1 and 1 or 0))
2066
2070
2067 seen = {}
2071 seen = {}
2068 self.ui.status(_("checking changesets\n"))
2072 self.ui.status(_("checking changesets\n"))
2069 checksize(self.changelog, "changelog")
2073 checksize(self.changelog, "changelog")
2070
2074
2071 for i in range(self.changelog.count()):
2075 for i in range(self.changelog.count()):
2072 changesets += 1
2076 changesets += 1
2073 n = self.changelog.node(i)
2077 n = self.changelog.node(i)
2074 l = self.changelog.linkrev(n)
2078 l = self.changelog.linkrev(n)
2075 if l != i:
2079 if l != i:
2076 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2080 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2077 if n in seen:
2081 if n in seen:
2078 err(_("duplicate changeset at revision %d") % i)
2082 err(_("duplicate changeset at revision %d") % i)
2079 seen[n] = 1
2083 seen[n] = 1
2080
2084
2081 for p in self.changelog.parents(n):
2085 for p in self.changelog.parents(n):
2082 if p not in self.changelog.nodemap:
2086 if p not in self.changelog.nodemap:
2083 err(_("changeset %s has unknown parent %s") %
2087 err(_("changeset %s has unknown parent %s") %
2084 (short(n), short(p)))
2088 (short(n), short(p)))
2085 try:
2089 try:
2086 changes = self.changelog.read(n)
2090 changes = self.changelog.read(n)
2087 except KeyboardInterrupt:
2091 except KeyboardInterrupt:
2088 self.ui.warn(_("interrupted"))
2092 self.ui.warn(_("interrupted"))
2089 raise
2093 raise
2090 except Exception, inst:
2094 except Exception, inst:
2091 err(_("unpacking changeset %s: %s") % (short(n), inst))
2095 err(_("unpacking changeset %s: %s") % (short(n), inst))
2092 continue
2096 continue
2093
2097
2094 neededmanifests[changes[0]] = n
2098 neededmanifests[changes[0]] = n
2095
2099
2096 for f in changes[3]:
2100 for f in changes[3]:
2097 filelinkrevs.setdefault(f, []).append(i)
2101 filelinkrevs.setdefault(f, []).append(i)
2098
2102
2099 seen = {}
2103 seen = {}
2100 self.ui.status(_("checking manifests\n"))
2104 self.ui.status(_("checking manifests\n"))
2101 checkversion(self.manifest, "manifest")
2105 checkversion(self.manifest, "manifest")
2102 checksize(self.manifest, "manifest")
2106 checksize(self.manifest, "manifest")
2103
2107
2104 for i in range(self.manifest.count()):
2108 for i in range(self.manifest.count()):
2105 n = self.manifest.node(i)
2109 n = self.manifest.node(i)
2106 l = self.manifest.linkrev(n)
2110 l = self.manifest.linkrev(n)
2107
2111
2108 if l < 0 or l >= self.changelog.count():
2112 if l < 0 or l >= self.changelog.count():
2109 err(_("bad manifest link (%d) at revision %d") % (l, i))
2113 err(_("bad manifest link (%d) at revision %d") % (l, i))
2110
2114
2111 if n in neededmanifests:
2115 if n in neededmanifests:
2112 del neededmanifests[n]
2116 del neededmanifests[n]
2113
2117
2114 if n in seen:
2118 if n in seen:
2115 err(_("duplicate manifest at revision %d") % i)
2119 err(_("duplicate manifest at revision %d") % i)
2116
2120
2117 seen[n] = 1
2121 seen[n] = 1
2118
2122
2119 for p in self.manifest.parents(n):
2123 for p in self.manifest.parents(n):
2120 if p not in self.manifest.nodemap:
2124 if p not in self.manifest.nodemap:
2121 err(_("manifest %s has unknown parent %s") %
2125 err(_("manifest %s has unknown parent %s") %
2122 (short(n), short(p)))
2126 (short(n), short(p)))
2123
2127
2124 try:
2128 try:
2125 delta = mdiff.patchtext(self.manifest.delta(n))
2129 delta = mdiff.patchtext(self.manifest.delta(n))
2126 except KeyboardInterrupt:
2130 except KeyboardInterrupt:
2127 self.ui.warn(_("interrupted"))
2131 self.ui.warn(_("interrupted"))
2128 raise
2132 raise
2129 except Exception, inst:
2133 except Exception, inst:
2130 err(_("unpacking manifest %s: %s") % (short(n), inst))
2134 err(_("unpacking manifest %s: %s") % (short(n), inst))
2131 continue
2135 continue
2132
2136
2133 try:
2137 try:
2134 ff = [ l.split('\0') for l in delta.splitlines() ]
2138 ff = [ l.split('\0') for l in delta.splitlines() ]
2135 for f, fn in ff:
2139 for f, fn in ff:
2136 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2140 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2137 except (ValueError, TypeError), inst:
2141 except (ValueError, TypeError), inst:
2138 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2142 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2139
2143
2140 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2144 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2141
2145
2142 for m, c in neededmanifests.items():
2146 for m, c in neededmanifests.items():
2143 err(_("Changeset %s refers to unknown manifest %s") %
2147 err(_("Changeset %s refers to unknown manifest %s") %
2144 (short(c), short(m)))
2148 (short(c), short(m)))
2145 del neededmanifests
2149 del neededmanifests
2146
2150
2147 for f in filenodes:
2151 for f in filenodes:
2148 if f not in filelinkrevs:
2152 if f not in filelinkrevs:
2149 err(_("file %s in manifest but not in changesets") % f)
2153 err(_("file %s in manifest but not in changesets") % f)
2150
2154
2151 for f in filelinkrevs:
2155 for f in filelinkrevs:
2152 if f not in filenodes:
2156 if f not in filenodes:
2153 err(_("file %s in changeset but not in manifest") % f)
2157 err(_("file %s in changeset but not in manifest") % f)
2154
2158
2155 self.ui.status(_("checking files\n"))
2159 self.ui.status(_("checking files\n"))
2156 ff = filenodes.keys()
2160 ff = filenodes.keys()
2157 ff.sort()
2161 ff.sort()
2158 for f in ff:
2162 for f in ff:
2159 if f == "/dev/null":
2163 if f == "/dev/null":
2160 continue
2164 continue
2161 files += 1
2165 files += 1
2162 if not f:
2166 if not f:
2163 err(_("file without name in manifest %s") % short(n))
2167 err(_("file without name in manifest %s") % short(n))
2164 continue
2168 continue
2165 fl = self.file(f)
2169 fl = self.file(f)
2166 checkversion(fl, f)
2170 checkversion(fl, f)
2167 checksize(fl, f)
2171 checksize(fl, f)
2168
2172
2169 nodes = {nullid: 1}
2173 nodes = {nullid: 1}
2170 seen = {}
2174 seen = {}
2171 for i in range(fl.count()):
2175 for i in range(fl.count()):
2172 revisions += 1
2176 revisions += 1
2173 n = fl.node(i)
2177 n = fl.node(i)
2174
2178
2175 if n in seen:
2179 if n in seen:
2176 err(_("%s: duplicate revision %d") % (f, i))
2180 err(_("%s: duplicate revision %d") % (f, i))
2177 if n not in filenodes[f]:
2181 if n not in filenodes[f]:
2178 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2182 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2179 else:
2183 else:
2180 del filenodes[f][n]
2184 del filenodes[f][n]
2181
2185
2182 flr = fl.linkrev(n)
2186 flr = fl.linkrev(n)
2183 if flr not in filelinkrevs.get(f, []):
2187 if flr not in filelinkrevs.get(f, []):
2184 err(_("%s:%s points to unexpected changeset %d")
2188 err(_("%s:%s points to unexpected changeset %d")
2185 % (f, short(n), flr))
2189 % (f, short(n), flr))
2186 else:
2190 else:
2187 filelinkrevs[f].remove(flr)
2191 filelinkrevs[f].remove(flr)
2188
2192
2189 # verify contents
2193 # verify contents
2190 try:
2194 try:
2191 t = fl.read(n)
2195 t = fl.read(n)
2192 except KeyboardInterrupt:
2196 except KeyboardInterrupt:
2193 self.ui.warn(_("interrupted"))
2197 self.ui.warn(_("interrupted"))
2194 raise
2198 raise
2195 except Exception, inst:
2199 except Exception, inst:
2196 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2200 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2197
2201
2198 # verify parents
2202 # verify parents
2199 (p1, p2) = fl.parents(n)
2203 (p1, p2) = fl.parents(n)
2200 if p1 not in nodes:
2204 if p1 not in nodes:
2201 err(_("file %s:%s unknown parent 1 %s") %
2205 err(_("file %s:%s unknown parent 1 %s") %
2202 (f, short(n), short(p1)))
2206 (f, short(n), short(p1)))
2203 if p2 not in nodes:
2207 if p2 not in nodes:
2204 err(_("file %s:%s unknown parent 2 %s") %
2208 err(_("file %s:%s unknown parent 2 %s") %
2205 (f, short(n), short(p2)))
2209 (f, short(n), short(p2)))
2206 nodes[n] = 1
2210 nodes[n] = 1
2207
2211
2208 # cross-check
2212 # cross-check
2209 for node in filenodes[f]:
2213 for node in filenodes[f]:
2210 err(_("node %s in manifests not in %s") % (hex(node), f))
2214 err(_("node %s in manifests not in %s") % (hex(node), f))
2211
2215
2212 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2216 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2213 (files, changesets, revisions))
2217 (files, changesets, revisions))
2214
2218
2215 if warnings[0]:
2219 if warnings[0]:
2216 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2220 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2217 if errors[0]:
2221 if errors[0]:
2218 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2222 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2219 return 1
2223 return 1
2220
2224
2221 def stream_in(self, remote):
2225 def stream_in(self, remote):
2222 fp = remote.stream_out()
2226 fp = remote.stream_out()
2223 resp = int(fp.readline())
2227 resp = int(fp.readline())
2224 if resp != 0:
2228 if resp != 0:
2225 raise util.Abort(_('operation forbidden by server'))
2229 raise util.Abort(_('operation forbidden by server'))
2226 self.ui.status(_('streaming all changes\n'))
2230 self.ui.status(_('streaming all changes\n'))
2227 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
2231 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
2228 self.ui.status(_('%d files to transfer, %s of data\n') %
2232 self.ui.status(_('%d files to transfer, %s of data\n') %
2229 (total_files, util.bytecount(total_bytes)))
2233 (total_files, util.bytecount(total_bytes)))
2230 start = time.time()
2234 start = time.time()
2231 for i in xrange(total_files):
2235 for i in xrange(total_files):
2232 name, size = fp.readline().split('\0', 1)
2236 name, size = fp.readline().split('\0', 1)
2233 size = int(size)
2237 size = int(size)
2234 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2238 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2235 ofp = self.opener(name, 'w')
2239 ofp = self.opener(name, 'w')
2236 for chunk in util.filechunkiter(fp, limit=size):
2240 for chunk in util.filechunkiter(fp, limit=size):
2237 ofp.write(chunk)
2241 ofp.write(chunk)
2238 ofp.close()
2242 ofp.close()
2239 elapsed = time.time() - start
2243 elapsed = time.time() - start
2240 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2244 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2241 (util.bytecount(total_bytes), elapsed,
2245 (util.bytecount(total_bytes), elapsed,
2242 util.bytecount(total_bytes / elapsed)))
2246 util.bytecount(total_bytes / elapsed)))
2243 self.reload()
2247 self.reload()
2244 return len(self.heads()) + 1
2248 return len(self.heads()) + 1
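# The parsing above implies the following wire layout: one status line,
# one "<file count> <byte count>" line, then for each file a
# "<name>\0<size>" line followed by exactly <size> bytes of data.  A
# rough, illustrative producer for that framing:
#
#   def streamframes(entries):    # entries: list of (name, data) pairs
#       yield '0\n'
#       total = sum([len(d) for n, d in entries])
#       yield '%d %d\n' % (len(entries), total)
#       for name, data in entries:
#           yield '%s\0%d\n' % (name, len(data))
#           yield data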
2245
2249
2246 def clone(self, remote, heads=[], stream=False):
2250 def clone(self, remote, heads=[], stream=False):
2247 '''clone remote repository.
2251 '''clone remote repository.
2248
2252
2249 keyword arguments:
2253 keyword arguments:
2250 heads: list of revs to clone (forces use of pull)
2254 heads: list of revs to clone (forces use of pull)
2251 stream: use streaming clone if possible'''
2255 stream: use streaming clone if possible'''
2252
2256
2253 # now, all clients that can request uncompressed clones can
2257 # now, all clients that can request uncompressed clones can
2254 # read repo formats supported by all servers that can serve
2258 # read repo formats supported by all servers that can serve
2255 # them.
2259 # them.
2256
2260
2257 # if revlog format changes, client will have to check version
2261 # if revlog format changes, client will have to check version
2258 # and format flags on "stream" capability, and use
2262 # and format flags on "stream" capability, and use
2259 # uncompressed only if compatible.
2263 # uncompressed only if compatible.
2260
2264
2261 if stream and not heads and remote.capable('stream'):
2265 if stream and not heads and remote.capable('stream'):
2262 return self.stream_in(remote)
2266 return self.stream_in(remote)
2263 return self.pull(remote, heads)
2267 return self.pull(remote, heads)
2264
2268
2265 # used to avoid circular references so destructors work
2269 # used to avoid circular references so destructors work
2266 def aftertrans(base):
2270 def aftertrans(base):
2267 p = base
2271 p = base
2268 def a():
2272 def a():
2269 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2273 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2270 util.rename(os.path.join(p, "journal.dirstate"),
2274 util.rename(os.path.join(p, "journal.dirstate"),
2271 os.path.join(p, "undo.dirstate"))
2275 os.path.join(p, "undo.dirstate"))
2272 return a
2276 return a
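# The rename leaves undo/undo.dirstate behind once the transaction
# commits; those files are what a later rollback-style operation can
# restore the previous state from.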
2273
2277
2274 def instance(ui, path, create):
2278 def instance(ui, path, create):
2275 return localrepository(ui, util.drop_scheme('file', path), create)
2279 return localrepository(ui, util.drop_scheme('file', path), create)
2276
2280
2277 def islocal(path):
2281 def islocal(path):
2278 return True
2282 return True