# Scrape residue from a repository-browser diff view, kept as a comment:
# Commit r1550:ccb9b62d (default branch) by Benoit Boissinot:
#   "add a -r/--rev option to heads to show only heads descendant from rev"
# commands.py - command processing for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb")
demandload(globals(), "fancyopts ui hg util lock revlog")
demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
demandload(globals(), "errno socket version struct atexit sets bz2")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
20
20
def filterfiles(filters, files):
    """Return the entries of files selected by filters.

    A file is selected when it appears in filters verbatim, or when it
    lies beneath one of the filter entries interpreted as a directory
    prefix (a trailing "/" is appended to the prefix when missing).
    Selected files may appear more than once in the result when several
    filters match them.
    """
    selected = [name for name in files if name in filters]

    for prefix in filters:
        # Treat each filter as a directory prefix as well.
        if prefix and not prefix.endswith("/"):
            prefix += "/"
        selected += [name for name in files if name.startswith(prefix)]
    return selected
29
29
def relpath(repo, args):
    """Resolve args against the repository's working directory.

    When the repository reports a current working directory, each
    argument is joined to it and normalized via util.normpath;
    otherwise args is returned unchanged.
    """
    cwd = repo.getcwd()
    if not cwd:
        return args
    return [util.normpath(os.path.join(cwd, arg)) for arg in args]
35
35
def matchpats(repo, cwd, pats=[], opts={}, head=''):
    # Build a command-line file matcher for the given patterns; an empty
    # pattern list matches the current directory ('.').  Returns the
    # result of util.cmdmatcher, unpacked by callers as a
    # (files, matchfn, anypats) triple.
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)
39
39
def makewalk(repo, pats, opts, head=''):
    """Prepare a repository walk over files matching pats.

    Returns (files, matchfn, iterator); the iterator yields
    (src, abspath, path-relative-to-cwd, exact) tuples, where exact is
    true when the file was named explicitly rather than matched by a
    pattern.
    """
    cwd = repo.getcwd()
    files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
    # Membership set of explicitly requested files.
    exact = dict.fromkeys(files)

    def traverse():
        for src, fn in repo.walk(files=files, match=matchfn):
            yield src, fn, util.pathto(cwd, fn), fn in exact

    return files, matchfn, traverse()
48
48
def walk(repo, pats, opts, head=''):
    """Yield (src, abspath, relpath, exact) tuples for matching files.

    Thin generator wrapper around makewalk that discards the
    (files, matchfn) bookkeeping and yields only the walk results.
    """
    unused_files, unused_match, results = makewalk(repo, pats, opts, head)
    for result in results:
        yield result
53
53
def walkchangerevs(ui, repo, cwd, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange) pair. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    # Empty repository: nothing to iterate over.
    if repo.changelog.count() == 0:
        return [], False

    cwd = repo.getcwd()
    if not pats and cwd:
        # No patterns given: anchor the include/exclude lists to cwd.
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
                                        pats, opts)
    # Revisions to visit, in the order requested (default tip down to 0).
    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}          # revs that actually touch matched files
    slowpath = anypats   # patterns force scanning every changeset
    window = 300         # revisions examined per window
    fncache = {}         # rev -> list of matched file names changed there

    chcache = {}         # memoized changelog entries, keyed by rev
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog):
            # Yield the changelog revs that touched this file, newest
            # first, one window at a time.
            for i in xrange(filelog.count() - 1, -1, -window):
                revs = []
                for j in xrange(max(0, i - window), i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file in files:
            filelog = repo.file(file)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        # Past the range of interest; stop for this file.
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i in xrange(repo.changelog.count() - 1, -1, -window):
                for j in xrange(max(0, i - window), i + 1):
                    # getchange(j)[3] is the changeset's file list.
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        for i in xrange(0, len(revs), window):
            yield 'window', revs[0] < revs[-1], revs[-1]
            # Revisions in this window that touch matched files.
            nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            # First pass: ascending order, for data gathering.
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            # Second pass: requested order, for display.
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange
158
158
# Separator character used in revision range specifications ("start:end").
revrangesep = ':'
160
160
def revrange(ui, repo, revs, revlog=None):
    """Yield revision as strings from a list of revision specifications."""
    # Each spec is either a single revision or a "start:end" range
    # (inclusive, iterated in the direction start -> end).  Duplicates
    # across specs are suppressed.  revlog defaults to the changelog.
    if revlog is None:
        revlog = repo.changelog
    revcount = revlog.count()
    def fix(val, defval):
        # Resolve one endpoint: empty -> defval; a plain in-range integer
        # (negative counts back from the end) -> that number; anything
        # else is looked up symbolically, first in the changelog, then in
        # the given revlog.
        if not val:
            return defval
        try:
            num = int(val)
            if str(num) != val:
                # Reject things like "01" or "+1" that int() accepts.
                raise ValueError
            if num < 0: num += revcount
            if num < 0: num = 0
            elif num >= revcount:
                raise ValueError
        except ValueError:
            try:
                num = repo.changelog.rev(repo.lookup(val))
            except KeyError:
                try:
                    num = revlog.rev(revlog.lookup(val))
                except KeyError:
                    raise util.Abort(_('invalid revision identifier %s'), val)
        return num
    seen = {}  # revisions already yielded
    for spec in revs:
        if spec.find(revrangesep) >= 0:
            start, end = spec.split(revrangesep, 1)
            start = fix(start, 0)
            end = fix(end, revcount - 1)
            # Walk downwards when start > end.
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen: continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = fix(spec, None)
            if rev in seen: continue
            seen[rev] = 1
            yield str(rev)
202
202
def make_filename(repo, r, pat, node=None,
                  total=None, seqno=None, revwidth=None, pathname=None):
    # Expand a filename pattern into a concrete file name.
    #
    # Supported format specifiers (each enabled only when the relevant
    # argument is provided):
    #   %%  literal "%"              %b  basename of the repository root
    #   %H  full hex of node         %h  short hex of node
    #   %R  revision number of node  %r  revision number padded to revwidth
    #   %N  total                    %n  seqno (zero-padded when total given)
    #   %s  basename of pathname     %d  dirname of pathname (or ".")
    #   %p  pathname
    #
    # An unknown specifier aborts with an error message.
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(r.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # Pad the sequence number to the width of the total.
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # NOTE(review): a trailing '%' at the end of pat makes
                # pat[i] below raise IndexError, which is not caught by
                # the KeyError handler - confirm whether that is intended.
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])
246
246
def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Open the output (or input) stream described by pat.

    An empty pat or '-' maps to stdout/stdin depending on mode.  An
    object that already behaves like an open file for the requested
    mode (has write for 'w' modes, read for 'r' modes) is returned
    unchanged.  Anything else is expanded through make_filename and
    opened with the given mode.
    """
    if not pat or pat == '-':
        if 'w' in mode:
            return sys.stdout
        return sys.stdin
    if 'w' in mode and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    filename = make_filename(repo, r, pat, node, total, seqno, revwidth,
                             pathname)
    return open(filename, mode)
258
258
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False):
    # Write a unified diff between node1 and node2 (or the working
    # directory when node2 is None) to fp.  changes, when given, is a
    # precomputed (changed, added, deleted, unknown) tuple; files
    # restricts the diff to the listed paths.  text forces treating
    # content as text (passed through to mdiff.unidiff).
    if not changes:
        (c, a, d, u) = repo.changes(node1, node2, files, match=match)
    else:
        (c, a, d, u) = changes
    if files:
        c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))

    # Nothing changed, added, or deleted: emit no output at all.
    if not c and not a and not d:
        return

    if node2:
        # Diff against a committed revision: read from its manifest.
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        # Diff against the working directory.
        date2 = util.datestr()
        if not node1:
            node1 = repo.dirstate.parents()[0]
        def read(f):
            return repo.wfile(f).read()

    if ui.quiet:
        r = None
    else:
        # Revision labels for the diff header: full hex when verbose.
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    for f in c:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
    for f in a:
        # Added files have no old content.
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
    for f in d:
        # Deleted files have no new content.
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
308
308
def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    # revcache memoizes the shortened name per revision.
    cached = revcache.get(rev)
    if cached is not None:
        return cached
    shortened = ui.shortuser(name)
    revcache[rev] = shortened
    return shortened
315
315
def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
    """show a single changeset or file revision"""
    # Either rev or changenode identifies the changeset; whichever is
    # missing is derived from the other.  brinfo, when given, maps
    # changeset nodes to branch name lists for the "branch:" line.
    log = repo.changelog
    if changenode is None:
        changenode = log.node(rev)
    elif not rev:
        rev = log.rev(changenode)

    if ui.quiet:
        # Quiet mode: just "rev:shortnode".
        ui.write("%d:%s\n" % (rev, short(changenode)))
        return

    changes = log.read(changenode)
    date = util.datestr(changes[2])

    # Parents to display: skip the null parent unless debugging.
    parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
               for p in log.parents(changenode)
               if ui.debugflag or p != nullid]
    # A single parent that is simply the previous rev is implied.
    if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
        parents = []

    if ui.verbose:
        ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
    else:
        ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

    for tag in repo.nodetags(changenode):
        ui.status(_("tag: %s\n") % tag)
    for parent in parents:
        ui.write(_("parent: %d:%s\n") % parent)

    if brinfo and changenode in brinfo:
        br = brinfo[changenode]
        ui.write(_("branch: %s\n") % " ".join(br))

    ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
                                       hex(changes[0])))
    ui.status(_("user: %s\n") % changes[1])
    ui.status(_("date: %s\n") % date)

    if ui.debugflag:
        # Debug mode: break the file list into changed/added/removed.
        files = repo.changes(log.parents(changenode)[0], changenode)
        for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
            if value:
                ui.note("%-12s %s\n" % (key, " ".join(value)))
    else:
        ui.note(_("files: %s\n") % " ".join(changes[3]))

    description = changes[4].strip()
    if description:
        if ui.verbose:
            ui.status(_("description:\n"))
            ui.status(description)
            ui.status("\n\n")
        else:
            # Terse mode: only the first line of the description.
            ui.status(_("summary: %s\n") % description.splitlines()[0])
    ui.status("\n")
373
373
def show_version(ui):
    """output version and copyright information"""
    # The version banner always appears; the copyright notice is
    # suppressed in quiet mode (ui.status).
    banner = _("Mercurial Distributed SCM (version %s)\n")
    ui.write(banner % version.get_version())
    notice = _(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    ui.status(notice)
384
384
def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands"""
    # Two modes: detailed help for one command (cmd given and not the
    # special 'shortlist'), or an index of commands.  option_lists
    # accumulates (title, options) sections rendered at the end.
    option_lists = []
    if cmd and cmd != 'shortlist':
        if with_version:
            show_version(ui)
            ui.write('\n')
        # find() resolves aliases; i is the command table entry
        # (function, options, synopsis).
        aliases, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if ui.quiet:
            # Quiet mode: first line of the docstring only.
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        # h maps command name -> summary line; cmds maps command name
        # -> its full "name|alias|..." table key.
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|")[0]
            # The shortlist shows only commands marked with a leading "^".
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            # debug* commands are hidden unless --debug is in effect.
            if not ui.debugflag and f.startswith("debug"):
                continue
            d = ""
            if e[0].__doc__:
                d = e[0].__doc__.splitlines(0)[0].rstrip()
            h[f] = d
            cmds[f]=c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands,h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        # A (text, None) pair renders as a bare section header.
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        # Align descriptions to the widest option column.
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
476
476
# Commands start here, listed alphabetically
478
478
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the current directory and
    its subdirectories.
    """

    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # Skip files that were neither named explicitly nor unknown
        # to the dirstate.
        if not exact and repo.dirstate.state(abs) != '?':
            continue
        # Exactly-named files are announced only in verbose mode;
        # pattern-matched ones always are.
        if not exact or ui.verbose:
            ui.status(_('adding %s\n') % rel)
        names.append(abs)
    repo.add(names)
499
499
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.
    """
    to_add, to_remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        state = repo.dirstate.state(abs)
        # Pattern-matched files are always announced; exactly-named
        # ones only in verbose mode.
        announce = ui.verbose or not exact
        if src == 'f' and state == '?':
            # A real file the dirstate does not know about yet.
            to_add.append(abs)
            if announce:
                ui.status(_('adding %s\n') % rel)
        if state != 'r' and not os.path.exists(rel):
            # Tracked but gone from the working directory.
            to_remove.append(abs)
            if announce:
                ui.status(_('removing %s\n') % rel)
    repo.add(to_add)
    repo.remove(to_remove)
520
520
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        # short changeset hash for a revision number
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        # committer name, trimmed and cached via ucache
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    dcache = {}
    def getdate(rev):
        # commit date string, cached per revision
        datestr = dcache.get(rev)
        if datestr is None:
            cl = repo.changelog.read(repo.changelog.node(rev))
            datestr = dcache[rev] = util.datestr(cl[2])
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, formatter) in output column order
    opmap = (('user', getname), ('number', str), ('changeset', getnode),
             ('date', getdate))
    if not (opts['user'] or opts['changeset'] or opts['date']):
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts):
        if abs not in mmap:
            ui.warn(_("warning: %s is not in the repository!\n") % rel)
            continue

        flog = repo.file(abs)
        if not opts['text'] and util.binary(flog.read(mmap[abs])):
            ui.write(_("%s: binary file\n") % rel)
            continue

        lines = flog.annotate(mmap[abs])
        pieces = []

        for name, fmt in opmap:
            if not opts[name]:
                continue
            col = [fmt(rev) for rev, dummy in lines]
            if col:
                # right-align each column on its widest entry
                width = max(map(len, col))
                pieces.append(["%*s" % (width, entry) for entry in col])

        if pieces:
            for annot, line in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(annot), line[1]))
587
587
def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    dest = ui.expandpath(dest, repo.root)
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other)
    cg = repo.changegroup(o)

    # only create the output file once the remote repo and the
    # changegroup are set up: previously a failure above left a stray
    # empty fname behind that the cleanup below never removed
    f = open(fname, "wb")
    try:
        f.write("HG10")
        z = bz2.BZ2Compressor(9)
        while 1:
            chunk = cg.read(4096)
            if not chunk:
                break
            f.write(z.compress(chunk))
        f.write(z.flush())
        f.close()
    except:
        # remove the partial bundle before propagating the error
        f.close()
        os.unlink(fname)
        raise
621
621
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    filenodes = {}
    rev = opts['rev']
    if rev:
        change = repo.changelog.read(repo.lookup(rev))
        filenodes = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
        flog = repo.file(abs)
        if not rev:
            fnode = flog.tip()
        else:
            try:
                # prefer the manifest entry for the requested changeset
                fnode = filenodes[abs]
            except (hg.RepoError, KeyError):
                try:
                    # fall back to a direct filelog lookup
                    fnode = flog.lookup(rev)
                except KeyError:
                    raise util.Abort(_('cannot find file %s in rev %s'),
                                     rel, rev)
        fp = make_file(repo, flog, opts['output'], node=fnode, pathname=abs)
        fp.write(flog.read(fnode))
655
655
656 def clone(ui, source, dest=None, **opts):
656 def clone(ui, source, dest=None, **opts):
657 """make a copy of an existing repository
657 """make a copy of an existing repository
658
658
659 Create a copy of an existing repository in a new directory.
659 Create a copy of an existing repository in a new directory.
660
660
661 If no destination directory name is specified, it defaults to the
661 If no destination directory name is specified, it defaults to the
662 basename of the source.
662 basename of the source.
663
663
664 The location of the source is added to the new repository's
664 The location of the source is added to the new repository's
665 .hg/hgrc file, as the default to be used for future pulls.
665 .hg/hgrc file, as the default to be used for future pulls.
666
666
667 For efficiency, hardlinks are used for cloning whenever the source
667 For efficiency, hardlinks are used for cloning whenever the source
668 and destination are on the same filesystem. Some filesystems,
668 and destination are on the same filesystem. Some filesystems,
669 such as AFS, implement hardlinking incorrectly, but do not report
669 such as AFS, implement hardlinking incorrectly, but do not report
670 errors. In these cases, use the --pull option to avoid
670 errors. In these cases, use the --pull option to avoid
671 hardlinking.
671 hardlinking.
672 """
672 """
673 if dest is None:
673 if dest is None:
674 dest = os.path.basename(os.path.normpath(source))
674 dest = os.path.basename(os.path.normpath(source))
675
675
676 if os.path.exists(dest):
676 if os.path.exists(dest):
677 raise util.Abort(_("destination '%s' already exists"), dest)
677 raise util.Abort(_("destination '%s' already exists"), dest)
678
678
679 dest = os.path.realpath(dest)
679 dest = os.path.realpath(dest)
680
680
681 class Dircleanup:
681 class Dircleanup:
682 def __init__(self, dir_):
682 def __init__(self, dir_):
683 self.rmtree = shutil.rmtree
683 self.rmtree = shutil.rmtree
684 self.dir_ = dir_
684 self.dir_ = dir_
685 os.mkdir(dir_)
685 os.mkdir(dir_)
686 def close(self):
686 def close(self):
687 self.dir_ = None
687 self.dir_ = None
688 def __del__(self):
688 def __del__(self):
689 if self.dir_:
689 if self.dir_:
690 self.rmtree(self.dir_, True)
690 self.rmtree(self.dir_, True)
691
691
692 if opts['ssh']:
692 if opts['ssh']:
693 ui.setconfig("ui", "ssh", opts['ssh'])
693 ui.setconfig("ui", "ssh", opts['ssh'])
694 if opts['remotecmd']:
694 if opts['remotecmd']:
695 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
695 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
696
696
697 if not os.path.exists(source):
697 if not os.path.exists(source):
698 source = ui.expandpath(source)
698 source = ui.expandpath(source)
699
699
700 d = Dircleanup(dest)
700 d = Dircleanup(dest)
701 abspath = source
701 abspath = source
702 other = hg.repository(ui, source)
702 other = hg.repository(ui, source)
703
703
704 copy = False
704 copy = False
705 if other.dev() != -1:
705 if other.dev() != -1:
706 abspath = os.path.abspath(source)
706 abspath = os.path.abspath(source)
707 if not opts['pull'] and not opts['rev']:
707 if not opts['pull'] and not opts['rev']:
708 copy = True
708 copy = True
709
709
710 if copy:
710 if copy:
711 try:
711 try:
712 # we use a lock here because if we race with commit, we
712 # we use a lock here because if we race with commit, we
713 # can end up with extra data in the cloned revlogs that's
713 # can end up with extra data in the cloned revlogs that's
714 # not pointed to by changesets, thus causing verify to
714 # not pointed to by changesets, thus causing verify to
715 # fail
715 # fail
716 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
716 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
717 except OSError:
717 except OSError:
718 copy = False
718 copy = False
719
719
720 if copy:
720 if copy:
721 # we lock here to avoid premature writing to the target
721 # we lock here to avoid premature writing to the target
722 os.mkdir(os.path.join(dest, ".hg"))
722 os.mkdir(os.path.join(dest, ".hg"))
723 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
723 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
724
724
725 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
725 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
726 for f in files.split():
726 for f in files.split():
727 src = os.path.join(source, ".hg", f)
727 src = os.path.join(source, ".hg", f)
728 dst = os.path.join(dest, ".hg", f)
728 dst = os.path.join(dest, ".hg", f)
729 try:
729 try:
730 util.copyfiles(src, dst)
730 util.copyfiles(src, dst)
731 except OSError, inst:
731 except OSError, inst:
732 if inst.errno != errno.ENOENT: raise
732 if inst.errno != errno.ENOENT: raise
733
733
734 repo = hg.repository(ui, dest)
734 repo = hg.repository(ui, dest)
735
735
736 else:
736 else:
737 revs = None
737 revs = None
738 if opts['rev']:
738 if opts['rev']:
739 if not other.local():
739 if not other.local():
740 raise util.Abort("clone -r not supported yet for remote repositories.")
740 raise util.Abort("clone -r not supported yet for remote repositories.")
741 else:
741 else:
742 revs = [other.lookup(rev) for rev in opts['rev']]
742 revs = [other.lookup(rev) for rev in opts['rev']]
743 repo = hg.repository(ui, dest, create=1)
743 repo = hg.repository(ui, dest, create=1)
744 repo.pull(other, heads = revs)
744 repo.pull(other, heads = revs)
745
745
746 f = repo.opener("hgrc", "w", text=True)
746 f = repo.opener("hgrc", "w", text=True)
747 f.write("[paths]\n")
747 f.write("[paths]\n")
748 f.write("default = %s\n" % abspath)
748 f.write("default = %s\n" % abspath)
749 f.close()
749 f.close()
750
750
751 if not opts['noupdate']:
751 if not opts['noupdate']:
752 update(ui, repo)
752 update(ui, repo)
753
753
754 d.close()
754 d.close()
755
755
756 def commit(ui, repo, *pats, **opts):
756 def commit(ui, repo, *pats, **opts):
757 """commit the specified files or all outstanding changes
757 """commit the specified files or all outstanding changes
758
758
759 Commit changes to the given files into the repository.
759 Commit changes to the given files into the repository.
760
760
761 If a list of files is omitted, all changes reported by "hg status"
761 If a list of files is omitted, all changes reported by "hg status"
762 from the root of the repository will be commited.
762 from the root of the repository will be commited.
763
763
764 The HGEDITOR or EDITOR environment variables are used to start an
764 The HGEDITOR or EDITOR environment variables are used to start an
765 editor to add a commit comment.
765 editor to add a commit comment.
766 """
766 """
767 message = opts['message']
767 message = opts['message']
768 logfile = opts['logfile']
768 logfile = opts['logfile']
769
769
770 if message and logfile:
770 if message and logfile:
771 raise util.Abort(_('options --message and --logfile are mutually '
771 raise util.Abort(_('options --message and --logfile are mutually '
772 'exclusive'))
772 'exclusive'))
773 if not message and logfile:
773 if not message and logfile:
774 try:
774 try:
775 if logfile == '-':
775 if logfile == '-':
776 message = sys.stdin.read()
776 message = sys.stdin.read()
777 else:
777 else:
778 message = open(logfile).read()
778 message = open(logfile).read()
779 except IOError, inst:
779 except IOError, inst:
780 raise util.Abort(_("can't read commit message '%s': %s") %
780 raise util.Abort(_("can't read commit message '%s': %s") %
781 (logfile, inst.strerror))
781 (logfile, inst.strerror))
782
782
783 if opts['addremove']:
783 if opts['addremove']:
784 addremove(ui, repo, *pats, **opts)
784 addremove(ui, repo, *pats, **opts)
785 cwd = repo.getcwd()
785 cwd = repo.getcwd()
786 if not pats and cwd:
786 if not pats and cwd:
787 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
787 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
788 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
788 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
789 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
789 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
790 pats, opts)
790 pats, opts)
791 if pats:
791 if pats:
792 c, a, d, u = repo.changes(files=fns, match=match)
792 c, a, d, u = repo.changes(files=fns, match=match)
793 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
793 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
794 else:
794 else:
795 files = []
795 files = []
796 try:
796 try:
797 repo.commit(files, message, opts['user'], opts['date'], match)
797 repo.commit(files, message, opts['user'], opts['date'], match)
798 except ValueError, inst:
798 except ValueError, inst:
799 raise util.Abort(str(inst))
799 raise util.Abort(str(inst))
800
800
801 def docopy(ui, repo, pats, opts):
801 def docopy(ui, repo, pats, opts):
802 cwd = repo.getcwd()
802 cwd = repo.getcwd()
803 errors = 0
803 errors = 0
804 copied = []
804 copied = []
805 targets = {}
805 targets = {}
806
806
807 def okaytocopy(abs, rel, exact):
807 def okaytocopy(abs, rel, exact):
808 reasons = {'?': _('is not managed'),
808 reasons = {'?': _('is not managed'),
809 'a': _('has been marked for add')}
809 'a': _('has been marked for add')}
810 reason = reasons.get(repo.dirstate.state(abs))
810 reason = reasons.get(repo.dirstate.state(abs))
811 if reason:
811 if reason:
812 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
812 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
813 else:
813 else:
814 return True
814 return True
815
815
816 def copy(abssrc, relsrc, target, exact):
816 def copy(abssrc, relsrc, target, exact):
817 abstarget = util.canonpath(repo.root, cwd, target)
817 abstarget = util.canonpath(repo.root, cwd, target)
818 reltarget = util.pathto(cwd, abstarget)
818 reltarget = util.pathto(cwd, abstarget)
819 prevsrc = targets.get(abstarget)
819 prevsrc = targets.get(abstarget)
820 if prevsrc is not None:
820 if prevsrc is not None:
821 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
821 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
822 (reltarget, abssrc, prevsrc))
822 (reltarget, abssrc, prevsrc))
823 return
823 return
824 elif os.path.exists(reltarget):
824 elif os.path.exists(reltarget):
825 if opts['force']:
825 if opts['force']:
826 os.unlink(reltarget)
826 os.unlink(reltarget)
827 else:
827 else:
828 ui.warn(_('%s: not overwriting - file exists\n') %
828 ui.warn(_('%s: not overwriting - file exists\n') %
829 reltarget)
829 reltarget)
830 return
830 return
831 if ui.verbose or not exact:
831 if ui.verbose or not exact:
832 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
832 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
833 if not opts['after']:
833 if not opts['after']:
834 targetdir = os.path.dirname(reltarget) or '.'
834 targetdir = os.path.dirname(reltarget) or '.'
835 if not os.path.isdir(targetdir):
835 if not os.path.isdir(targetdir):
836 os.makedirs(targetdir)
836 os.makedirs(targetdir)
837 try:
837 try:
838 shutil.copyfile(relsrc, reltarget)
838 shutil.copyfile(relsrc, reltarget)
839 shutil.copymode(relsrc, reltarget)
839 shutil.copymode(relsrc, reltarget)
840 except shutil.Error, inst:
840 except shutil.Error, inst:
841 raise util.Abort(str(inst))
841 raise util.Abort(str(inst))
842 except IOError, inst:
842 except IOError, inst:
843 if inst.errno == errno.ENOENT:
843 if inst.errno == errno.ENOENT:
844 ui.warn(_('%s: deleted in working copy\n') % relsrc)
844 ui.warn(_('%s: deleted in working copy\n') % relsrc)
845 else:
845 else:
846 ui.warn(_('%s: cannot copy - %s\n') %
846 ui.warn(_('%s: cannot copy - %s\n') %
847 (relsrc, inst.strerror))
847 (relsrc, inst.strerror))
848 errors += 1
848 errors += 1
849 return
849 return
850 targets[abstarget] = abssrc
850 targets[abstarget] = abssrc
851 repo.copy(abssrc, abstarget)
851 repo.copy(abssrc, abstarget)
852 copied.append((abssrc, relsrc, exact))
852 copied.append((abssrc, relsrc, exact))
853
853
854 pats = list(pats)
854 pats = list(pats)
855 if not pats:
855 if not pats:
856 raise util.Abort(_('no source or destination specified'))
856 raise util.Abort(_('no source or destination specified'))
857 if len(pats) == 1:
857 if len(pats) == 1:
858 raise util.Abort(_('no destination specified'))
858 raise util.Abort(_('no destination specified'))
859 dest = pats.pop()
859 dest = pats.pop()
860 destdirexists = os.path.isdir(dest)
860 destdirexists = os.path.isdir(dest)
861 if (len(pats) > 1 or not os.path.exists(pats[0])) and not destdirexists:
861 if (len(pats) > 1 or not os.path.exists(pats[0])) and not destdirexists:
862 raise util.Abort(_('with multiple sources, destination must be an '
862 raise util.Abort(_('with multiple sources, destination must be an '
863 'existing directory'))
863 'existing directory'))
864
864
865 for pat in pats:
865 for pat in pats:
866 if os.path.isdir(pat):
866 if os.path.isdir(pat):
867 if destdirexists:
867 if destdirexists:
868 striplen = len(os.path.split(pat)[0])
868 striplen = len(os.path.split(pat)[0])
869 else:
869 else:
870 striplen = len(pat)
870 striplen = len(pat)
871 if striplen:
871 if striplen:
872 striplen += len(os.sep)
872 striplen += len(os.sep)
873 targetpath = lambda p: os.path.join(dest, p[striplen:])
873 targetpath = lambda p: os.path.join(dest, p[striplen:])
874 elif destdirexists:
874 elif destdirexists:
875 targetpath = lambda p: os.path.join(dest, os.path.basename(p))
875 targetpath = lambda p: os.path.join(dest, os.path.basename(p))
876 else:
876 else:
877 targetpath = lambda p: dest
877 targetpath = lambda p: dest
878 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
878 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
879 if okaytocopy(abssrc, relsrc, exact):
879 if okaytocopy(abssrc, relsrc, exact):
880 copy(abssrc, relsrc, targetpath(abssrc), exact)
880 copy(abssrc, relsrc, targetpath(abssrc), exact)
881
881
882 if errors:
882 if errors:
883 ui.warn(_('(consider using --after)\n'))
883 ui.warn(_('(consider using --after)\n'))
884 if len(copied) == 0:
884 if len(copied) == 0:
885 raise util.Abort(_('no files to copy'))
885 raise util.Abort(_('no files to copy'))
886 return errors, copied
886 return errors, copied
887
887
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # the list of copies is only needed by callers of docopy itself
    errors, copied = docopy(ui, repo, pats, opts)
    return errors
907
907
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    log = revlog.revlog(util.opener(os.getcwd()), index, "")
    anc = log.ancestor(log.lookup(rev1), log.lookup(rev2))
    ui.write("%d:%s\n" % (log.rev(anc), hex(anc)))
913
913
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the
    working directory's parents and aborts if any inconsistency is
    found.
    """
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    # walk the sorted key list: the original built and sorted 'keys' but
    # then iterated the dict directly, leaving it dead and making the
    # warning order nondeterministic
    for f in keys:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
945
945
def debugconfig(ui):
    """show combined config settings from all hgrc files"""
    try:
        # opening the repo pulls its .hg/hgrc settings into ui
        repo = hg.repository(ui)
    except hg.RepoError:
        # not inside a repository: global settings only
        pass
    for section, name, value in ui.walkconfig():
        ui.write('%s.%s=%s\n' % (section, name, value))
954
954
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # a missing second parent means the null revision
    rev2 = rev2 or hex(nullid)
    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
966
966
967 def debugstate(ui, repo):
967 def debugstate(ui, repo):
968 """show the contents of the current dirstate"""
968 """show the contents of the current dirstate"""
969 repo.dirstate.read()
969 repo.dirstate.read()
970 dc = repo.dirstate.map
970 dc = repo.dirstate.map
971 keys = dc.keys()
971 keys = dc.keys()
972 keys.sort()
972 keys.sort()
973 for file_ in keys:
973 for file_ in keys:
974 ui.write("%c %3o %10d %s %s\n"
974 ui.write("%c %3o %10d %s %s\n"
975 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
975 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
976 time.strftime("%x %X",
976 time.strftime("%x %X",
977 time.localtime(dc[file_][3])), file_))
977 time.localtime(dc[file_][3])), file_))
978 for f in repo.dirstate.copies:
978 for f in repo.dirstate.copies:
979 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
979 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
980
980
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # derive the index name from the .d data file name
    log = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
    try:
        ui.write(log.revision(log.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s'), rev)
988
988
def debugindex(ui, file_):
    """dump the contents of an index file"""
    log = revlog.revlog(util.opener(os.getcwd()), file_, "")
    # header columns line up with the row format below
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    for i in range(log.count()):
        entry = log.index[i]
        # entry: (offset, length, base, linkrev, p1, p2, nodeid, ...)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, entry[0], entry[1], entry[2], entry[3],
                short(entry[6]), short(entry[4]), short(entry[5])))
999
999
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    log = revlog.revlog(util.opener(os.getcwd()), file_, "")
    ui.write("digraph G {\n")
    for i in range(log.count()):
        entry = log.index[i]
        # one edge per parent; omit the null second parent
        ui.write("\t%d -> %d\n" % (log.rev(entry[4]), i))
        if entry[5] != nullid:
            ui.write("\t%d -> %d\n" % (log.rev(entry[5]), i))
    ui.write("}\n")
1010
1010
def debugrename(ui, repo, file, rev=None):
    """dump rename information"""
    flog = repo.file(relpath(repo, [file])[0])
    if not rev:
        fnode = flog.tip()
    else:
        try:
            # assume all revision numbers are for changesets
            cnode = repo.lookup(rev)
            change = repo.changelog.read(cnode)
            mf = repo.manifest.read(change[0])
            fnode = mf[relpath(repo, [file])[0]]
        except (hg.RepoError, KeyError):
            # fall back to treating rev as a filelog revision
            fnode = flog.lookup(rev)
    meta = flog.renamed(fnode)
    if meta:
        ui.write(_("renamed from %s:%s\n") % (meta[0], hex(meta[1])))
    else:
        ui.write(_("not renamed\n"))
1030
1030
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns

    Prints, for every walked file: match source, absolute path,
    relative path, and whether the match was exact. Columns are sized
    to the longest path so output lines up.
    """
    items = list(walk(repo, pats, opts))
    if not items:
        return
    # width of the abs and rel columns is the longest entry in each
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
1042
1042
def diff(ui, repo, *pats, **opts):
    """diff working directory (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    node1, node2 = None, None
    revs = [repo.lookup(x) for x in opts['rev']]

    if len(revs) > 0:
        node1 = revs[0]
    if len(revs) > 1:
        node2 = revs[1]
    if len(revs) > 2:
        raise util.Abort(_("too many revisions to diff"))

    fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)

    dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
           text=opts['text'])
1074
1074
def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
    """write one changeset as an 'hg export' style patch

    Helper for export(): emits the changeset header (user, node,
    parent(s), description) followed by the diff against the first
    parent, either to stdout or to a file named via opts['output'].
    """
    node = repo.lookup(changeset)
    prev, other = repo.changelog.parents(node)
    change = repo.changelog.read(node)

    fp = make_file(repo, repo.changelog, opts['output'],
                   node=node, total=total, seqno=seqno,
                   revwidth=revwidth)
    if fp != sys.stdout:
        ui.note("%s\n" % fp.name)

    fp.write("# HG changeset patch\n")
    fp.write("# User %s\n" % change[1])
    fp.write("# Node ID %s\n" % hex(node))
    fp.write("# Parent %s\n" % hex(prev))
    # a second parent line is only written for merge changesets
    if other != nullid:
        fp.write("# Parent %s\n" % hex(other))
    fp.write(change[4].rstrip())
    fp.write("\n\n")

    dodiff(fp, ui, repo, prev, node, text=opts['text'])
    if fp != sys.stdout:
        fp.close()
1098
1098
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent and commit comment.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    seqno = 0
    revs = list(revrange(ui, repo, changesets))
    total = len(revs)
    # widest revision string, used to zero-pad %r in output filenames
    revwidth = max(map(len, revs))
    ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
    for cset in revs:
        seqno += 1
        doexport(ui, repo, cset, seqno, total, revwidth, opts)
1133
1133
def forget(ui, repo, *pats, **opts):
    """don't add the specified files on the next commit

    Undo an 'hg add' scheduled for the next commit.
    """
    forget = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # only files in the 'a' (added) dirstate can be forgotten
        if repo.dirstate.state(abs) == 'a':
            forget.append(abs)
            if ui.verbose or not exact:
                ui.status(_('forgetting %s\n') % rel)
    repo.forget(forget)
1146
1146
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache filelogs so each file is opened at most once
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every match in body
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate:
        # one matched line; equality/hash only consider the text so that
        # set differences between revisions track content changes
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend
        def __eq__(self, other):
            return self.line == other.line
        def __hash__(self):
            return hash(self.line)

    # matches[rev][fn] maps linestate -> linestate for that file/revision
    matches = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, {})
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m[s] = s

    prev = {}
    ucache = {}
    def display(fn, rev, states, prevstates):
        # print lines whose match status changed between prevstates and states
        diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
        diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
        counts = {'-': 0, '+': 0}
        filerevmatches = {}
        for l in diff:
            if incrementing or not opts['all']:
                change = ((l in prevstates) and '-') or '+'
                r = rev
            else:
                change = ((l in states) and '-') or '+'
                r = prev[fn]
            cols = [fn, str(rev)]
            if opts['line_number']: cols.append(str(l.linenum))
            if opts['all']: cols.append(change)
            if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
                                                  ucache))
            if opts['files_with_matches']:
                # only one output line per (file, revision) pair
                c = (fn, rev)
                if c in filerevmatches: continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            counts[change] += 1
        return counts['+'], counts['-']

    fstate = {}
    skip = {}
    changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
    count = 0
    incrementing = False
    for st, rev, fns in changeiter:
        if st == 'window':
            # 'window' events carry the iteration direction in rev
            incrementing = rev
            matches.clear()
        elif st == 'add':
            change = repo.changelog.read(repo.lookup(str(rev)))
            mf = repo.manifest.read(change[0])
            matches[rev] = {}
            for fn in fns:
                if fn in skip: continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                except KeyError:
                    # file not present in this manifest
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                if fn in skip: continue
                if incrementing or not opts['all'] or fstate[fn]:
                    pos, neg = display(fn, rev, m, fstate[fn])
                    count += pos + neg
                    if pos and not opts['all']:
                        # without --all, stop after the first match per file
                        skip[fn] = True
                fstate[fn] = m
                prev[fn] = rev

    if not incrementing:
        # flush remaining per-file state when walking backwards
        fstate = fstate.items()
        fstate.sort()
        for fn, state in fstate:
            if fn in skip: continue
            display(fn, rev, {}, state)
    return (count == 0 and 1) or 0
1279
1279
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If a revision is given with -r/--rev, only heads that are
    descendants of that revision are shown.
    """
    if opts['rev']:
        # bug fix: the original referenced an undefined name 'rev';
        # the revision string lives in opts['rev']
        heads = repo.heads(repo.lookup(opts['rev']))
    else:
        heads = repo.heads()
    br = None
    if opts['branches']:
        br = repo.branchlookup(list(heads))
    for n in heads:
        show_changeset(ui, repo, changenode=n, brinfo=br)
1295
1298
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    hexfunc = ui.verbose and hex or short
    (c, a, d, u) = repo.changes()
    # "+" suffix marks uncommitted changes (modified, added or deleted)
    output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
                        (c or a or d) and "+" or "")]

    if not ui.quiet:
        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1324
1327
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    If a patch looks like a mail message (its first line starts with
    "From " or looks like an RFC822 header), it will not be applied
    unless the -f option is used. The importer neither parses nor
    discards mail headers, so use -f only to override the "mailness"
    safety check, not to import a real mail message.
    """
    patches = (patch1,) + patches

    if not opts['force']:
        (c, a, d, u) = repo.changes()
        if c or a or d:
            raise util.Abort(_("outstanding uncommitted changes"))

    d = opts["base"]
    strip = opts["strip"]

    mailre = re.compile(r'(?:From |[\w-]+:)')

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])')

    for patch in patches:
        ui.status(_("applying %s\n") % patch)
        pf = os.path.join(d, patch)

        message = []
        user = None
        hgpatch = False
        for line in file(pf):
            line = line.rstrip()
            if (not message and not hgpatch and
                   mailre.match(line) and not opts['force']):
                if len(line) > 35: line = line[:32] + '...'
                raise util.Abort(_('first line looks like a '
                                   'mail header: ') + line)
            if diffre.match(line):
                # diff body starts here; header/description is finished
                break
            elif hgpatch:
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                    ui.debug(_('User: %s\n') % user)
                elif not line.startswith("# ") and line:
                    message.append(line)
                    hgpatch = False
            elif line == '# HG changeset patch':
                hgpatch = True
                message = []       # We may have collected garbage
            else:
                message.append(line)

        # make sure message isn't empty
        if not message:
            message = _("imported patch %s\n") % patch
        else:
            message = "%s\n" % '\n'.join(message)
        ui.debug(_('message:\n%s\n') % message)

        files = util.patch(strip, pf, ui)

        if len(files) > 0:
            addremove(ui, repo, *files)
        repo.commit(files, message, user)
1399
1402
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified repo or the default
    pull repo. These are the changesets that would be pulled if a pull
    was requested.

    Currently only local repositories are supported.
    """
    source = ui.expandpath(source, repo.root)
    other = hg.repository(ui, source)
    if not other.local():
        raise util.Abort(_("incoming doesn't work for remote repositories yet"))
    o = repo.findincoming(other)
    if not o:
        return
    o = other.changelog.nodesbetween(o)[0]
    if opts['newest_first']:
        o.reverse()
    for n in o:
        parents = [p for p in other.changelog.parents(n) if p != nullid]
        # a changeset with two non-null parents is a merge
        if opts['no_merges'] and len(parents) == 2:
            continue
        show_changeset(ui, other, changenode=n)
        if opts['patch']:
            prev = (parents and parents[0]) or nullid
            dodiff(ui, ui, other, prev, n)
            ui.write("\n")
1428
1431
def init(ui, dest="."):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.
    """
    if not os.path.exists(dest):
        os.mkdir(dest)
    hg.repository(ui, dest, create=1)
1440
1443
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts['print0'] and '\0' or '\n'

    for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
        # skip files not tracked by the dirstate
        if repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(rel, end)
1468
1471
1469 def log(ui, repo, *pats, **opts):
1472 def log(ui, repo, *pats, **opts):
1470 """show revision history of entire repository or files
1473 """show revision history of entire repository or files
1471
1474
1472 Print the revision history of the specified files or the entire project.
1475 Print the revision history of the specified files or the entire project.
1473
1476
1474 By default this command outputs: changeset id and hash, tags,
1477 By default this command outputs: changeset id and hash, tags,
1475 parents, user, date and time, and a summary for each commit. The
1478 parents, user, date and time, and a summary for each commit. The
1476 -v switch adds some more detail, such as changed files, manifest
1479 -v switch adds some more detail, such as changed files, manifest
1477 hashes or message signatures.
1480 hashes or message signatures.
1478 """
1481 """
1479 class dui:
1482 class dui:
1480 # Implement and delegate some ui protocol. Save hunks of
1483 # Implement and delegate some ui protocol. Save hunks of
1481 # output for later display in the desired order.
1484 # output for later display in the desired order.
1482 def __init__(self, ui):
1485 def __init__(self, ui):
1483 self.ui = ui
1486 self.ui = ui
1484 self.hunk = {}
1487 self.hunk = {}
1485 def bump(self, rev):
1488 def bump(self, rev):
1486 self.rev = rev
1489 self.rev = rev
1487 self.hunk[rev] = []
1490 self.hunk[rev] = []
1488 def note(self, *args):
1491 def note(self, *args):
1489 if self.verbose:
1492 if self.verbose:
1490 self.write(*args)
1493 self.write(*args)
1491 def status(self, *args):
1494 def status(self, *args):
1492 if not self.quiet:
1495 if not self.quiet:
1493 self.write(*args)
1496 self.write(*args)
1494 def write(self, *args):
1497 def write(self, *args):
1495 self.hunk[self.rev].append(args)
1498 self.hunk[self.rev].append(args)
1496 def debug(self, *args):
1499 def debug(self, *args):
1497 if self.debugflag:
1500 if self.debugflag:
1498 self.write(*args)
1501 self.write(*args)
1499 def __getattr__(self, key):
1502 def __getattr__(self, key):
1500 return getattr(self.ui, key)
1503 return getattr(self.ui, key)
1501 cwd = repo.getcwd()
1504 cwd = repo.getcwd()
1502 if not pats and cwd:
1505 if not pats and cwd:
1503 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1506 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1504 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1507 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1505 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1508 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1506 pats, opts)
1509 pats, opts)
1507 for st, rev, fns in changeiter:
1510 for st, rev, fns in changeiter:
1508 if st == 'window':
1511 if st == 'window':
1509 du = dui(ui)
1512 du = dui(ui)
1510 elif st == 'add':
1513 elif st == 'add':
1511 du.bump(rev)
1514 du.bump(rev)
1512 changenode = repo.changelog.node(rev)
1515 changenode = repo.changelog.node(rev)
1513 parents = [p for p in repo.changelog.parents(changenode)
1516 parents = [p for p in repo.changelog.parents(changenode)
1514 if p != nullid]
1517 if p != nullid]
1515 if opts['no_merges'] and len(parents) == 2:
1518 if opts['no_merges'] and len(parents) == 2:
1516 continue
1519 continue
1517 if opts['only_merges'] and len(parents) != 2:
1520 if opts['only_merges'] and len(parents) != 2:
1518 continue
1521 continue
1519
1522
1520 br = None
1523 br = None
1521 if opts['keyword']:
1524 if opts['keyword']:
1522 changes = repo.changelog.read(repo.changelog.node(rev))
1525 changes = repo.changelog.read(repo.changelog.node(rev))
1523 miss = 0
1526 miss = 0
1524 for k in [kw.lower() for kw in opts['keyword']]:
1527 for k in [kw.lower() for kw in opts['keyword']]:
1525 if not (k in changes[1].lower() or
1528 if not (k in changes[1].lower() or
1526 k in changes[4].lower() or
1529 k in changes[4].lower() or
1527 k in " ".join(changes[3][:20]).lower()):
1530 k in " ".join(changes[3][:20]).lower()):
1528 miss = 1
1531 miss = 1
1529 break
1532 break
1530 if miss:
1533 if miss:
1531 continue
1534 continue
1532
1535
1533 if opts['branch']:
1536 if opts['branch']:
1534 br = repo.branchlookup([repo.changelog.node(rev)])
1537 br = repo.branchlookup([repo.changelog.node(rev)])
1535
1538
1536 show_changeset(du, repo, rev, brinfo=br)
1539 show_changeset(du, repo, rev, brinfo=br)
1537 if opts['patch']:
1540 if opts['patch']:
1538 prev = (parents and parents[0]) or nullid
1541 prev = (parents and parents[0]) or nullid
1539 dodiff(du, du, repo, prev, changenode, fns)
1542 dodiff(du, du, repo, prev, changenode, fns)
1540 du.write("\n\n")
1543 du.write("\n\n")
1541 elif st == 'iter':
1544 elif st == 'iter':
1542 for args in du.hunk[rev]:
1545 for args in du.hunk[rev]:
1543 ui.write(*args)
1546 ui.write(*args)
1544
1547
1545 def manifest(ui, repo, rev=None):
1548 def manifest(ui, repo, rev=None):
1546 """output the latest or given revision of the project manifest
1549 """output the latest or given revision of the project manifest
1547
1550
1548 Print a list of version controlled files for the given revision.
1551 Print a list of version controlled files for the given revision.
1549
1552
1550 The manifest is the list of files being version controlled. If no revision
1553 The manifest is the list of files being version controlled. If no revision
1551 is given then the tip is used.
1554 is given then the tip is used.
1552 """
1555 """
1553 if rev:
1556 if rev:
1554 try:
1557 try:
1555 # assume all revision numbers are for changesets
1558 # assume all revision numbers are for changesets
1556 n = repo.lookup(rev)
1559 n = repo.lookup(rev)
1557 change = repo.changelog.read(n)
1560 change = repo.changelog.read(n)
1558 n = change[0]
1561 n = change[0]
1559 except hg.RepoError:
1562 except hg.RepoError:
1560 n = repo.manifest.lookup(rev)
1563 n = repo.manifest.lookup(rev)
1561 else:
1564 else:
1562 n = repo.manifest.tip()
1565 n = repo.manifest.tip()
1563 m = repo.manifest.read(n)
1566 m = repo.manifest.read(n)
1564 mf = repo.manifest.readflags(n)
1567 mf = repo.manifest.readflags(n)
1565 files = m.keys()
1568 files = m.keys()
1566 files.sort()
1569 files.sort()
1567
1570
1568 for f in files:
1571 for f in files:
1569 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1572 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1570
1573
1571 def outgoing(ui, repo, dest="default-push", **opts):
1574 def outgoing(ui, repo, dest="default-push", **opts):
1572 """show changesets not found in destination
1575 """show changesets not found in destination
1573
1576
1574 Show changesets not found in the specified destination repo or the
1577 Show changesets not found in the specified destination repo or the
1575 default push repo. These are the changesets that would be pushed
1578 default push repo. These are the changesets that would be pushed
1576 if a push was requested.
1579 if a push was requested.
1577 """
1580 """
1578 dest = ui.expandpath(dest, repo.root)
1581 dest = ui.expandpath(dest, repo.root)
1579 other = hg.repository(ui, dest)
1582 other = hg.repository(ui, dest)
1580 o = repo.findoutgoing(other)
1583 o = repo.findoutgoing(other)
1581 o = repo.changelog.nodesbetween(o)[0]
1584 o = repo.changelog.nodesbetween(o)[0]
1582 if opts['newest_first']:
1585 if opts['newest_first']:
1583 o.reverse()
1586 o.reverse()
1584 for n in o:
1587 for n in o:
1585 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1588 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1586 if opts['no_merges'] and len(parents) == 2:
1589 if opts['no_merges'] and len(parents) == 2:
1587 continue
1590 continue
1588 show_changeset(ui, repo, changenode=n)
1591 show_changeset(ui, repo, changenode=n)
1589 if opts['patch']:
1592 if opts['patch']:
1590 prev = (parents and parents[0]) or nullid
1593 prev = (parents and parents[0]) or nullid
1591 dodiff(ui, ui, repo, prev, n)
1594 dodiff(ui, ui, repo, prev, n)
1592 ui.write("\n")
1595 ui.write("\n")
1593
1596
1594 def parents(ui, repo, rev=None):
1597 def parents(ui, repo, rev=None):
1595 """show the parents of the working dir or revision
1598 """show the parents of the working dir or revision
1596
1599
1597 Print the working directory's parent revisions.
1600 Print the working directory's parent revisions.
1598 """
1601 """
1599 if rev:
1602 if rev:
1600 p = repo.changelog.parents(repo.lookup(rev))
1603 p = repo.changelog.parents(repo.lookup(rev))
1601 else:
1604 else:
1602 p = repo.dirstate.parents()
1605 p = repo.dirstate.parents()
1603
1606
1604 for n in p:
1607 for n in p:
1605 if n != nullid:
1608 if n != nullid:
1606 show_changeset(ui, repo, changenode=n)
1609 show_changeset(ui, repo, changenode=n)
1607
1610
1608 def paths(ui, search=None):
1611 def paths(ui, search=None):
1609 """show definition of symbolic path names
1612 """show definition of symbolic path names
1610
1613
1611 Show definition of symbolic path name NAME. If no name is given, show
1614 Show definition of symbolic path name NAME. If no name is given, show
1612 definition of available names.
1615 definition of available names.
1613
1616
1614 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1617 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1615 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1618 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1616 """
1619 """
1617 try:
1620 try:
1618 repo = hg.repository(ui=ui)
1621 repo = hg.repository(ui=ui)
1619 except hg.RepoError:
1622 except hg.RepoError:
1620 pass
1623 pass
1621
1624
1622 if search:
1625 if search:
1623 for name, path in ui.configitems("paths"):
1626 for name, path in ui.configitems("paths"):
1624 if name == search:
1627 if name == search:
1625 ui.write("%s\n" % path)
1628 ui.write("%s\n" % path)
1626 return
1629 return
1627 ui.warn(_("not found!\n"))
1630 ui.warn(_("not found!\n"))
1628 return 1
1631 return 1
1629 else:
1632 else:
1630 for name, path in ui.configitems("paths"):
1633 for name, path in ui.configitems("paths"):
1631 ui.write("%s = %s\n" % (name, path))
1634 ui.write("%s = %s\n" % (name, path))
1632
1635
1633 def pull(ui, repo, source="default", **opts):
1636 def pull(ui, repo, source="default", **opts):
1634 """pull changes from the specified source
1637 """pull changes from the specified source
1635
1638
1636 Pull changes from a remote repository to a local one.
1639 Pull changes from a remote repository to a local one.
1637
1640
1638 This finds all changes from the repository at the specified path
1641 This finds all changes from the repository at the specified path
1639 or URL and adds them to the local repository. By default, this
1642 or URL and adds them to the local repository. By default, this
1640 does not update the copy of the project in the working directory.
1643 does not update the copy of the project in the working directory.
1641
1644
1642 Valid URLs are of the form:
1645 Valid URLs are of the form:
1643
1646
1644 local/filesystem/path
1647 local/filesystem/path
1645 http://[user@]host[:port][/path]
1648 http://[user@]host[:port][/path]
1646 https://[user@]host[:port][/path]
1649 https://[user@]host[:port][/path]
1647 ssh://[user@]host[:port][/path]
1650 ssh://[user@]host[:port][/path]
1648
1651
1649 SSH requires an accessible shell account on the destination machine
1652 SSH requires an accessible shell account on the destination machine
1650 and a copy of hg in the remote path. With SSH, paths are relative
1653 and a copy of hg in the remote path. With SSH, paths are relative
1651 to the remote user's home directory by default; use two slashes at
1654 to the remote user's home directory by default; use two slashes at
1652 the start of a path to specify it as relative to the filesystem root.
1655 the start of a path to specify it as relative to the filesystem root.
1653 """
1656 """
1654 source = ui.expandpath(source, repo.root)
1657 source = ui.expandpath(source, repo.root)
1655 ui.status(_('pulling from %s\n') % (source))
1658 ui.status(_('pulling from %s\n') % (source))
1656
1659
1657 if opts['ssh']:
1660 if opts['ssh']:
1658 ui.setconfig("ui", "ssh", opts['ssh'])
1661 ui.setconfig("ui", "ssh", opts['ssh'])
1659 if opts['remotecmd']:
1662 if opts['remotecmd']:
1660 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1663 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1661
1664
1662 other = hg.repository(ui, source)
1665 other = hg.repository(ui, source)
1663 revs = None
1666 revs = None
1664 if opts['rev'] and not other.local():
1667 if opts['rev'] and not other.local():
1665 raise util.Abort("pull -r doesn't work for remote repositories yet")
1668 raise util.Abort("pull -r doesn't work for remote repositories yet")
1666 elif opts['rev']:
1669 elif opts['rev']:
1667 revs = [other.lookup(rev) for rev in opts['rev']]
1670 revs = [other.lookup(rev) for rev in opts['rev']]
1668 r = repo.pull(other, heads=revs)
1671 r = repo.pull(other, heads=revs)
1669 if not r:
1672 if not r:
1670 if opts['update']:
1673 if opts['update']:
1671 return update(ui, repo)
1674 return update(ui, repo)
1672 else:
1675 else:
1673 ui.status(_("(run 'hg update' to get a working copy)\n"))
1676 ui.status(_("(run 'hg update' to get a working copy)\n"))
1674
1677
1675 return r
1678 return r
1676
1679
1677 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1680 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1678 """push changes to the specified destination
1681 """push changes to the specified destination
1679
1682
1680 Push changes from the local repository to the given destination.
1683 Push changes from the local repository to the given destination.
1681
1684
1682 This is the symmetrical operation for pull. It helps to move
1685 This is the symmetrical operation for pull. It helps to move
1683 changes from the current repository to a different one. If the
1686 changes from the current repository to a different one. If the
1684 destination is local this is identical to a pull in that directory
1687 destination is local this is identical to a pull in that directory
1685 from the current one.
1688 from the current one.
1686
1689
1687 By default, push will refuse to run if it detects the result would
1690 By default, push will refuse to run if it detects the result would
1688 increase the number of remote heads. This generally indicates the
1691 increase the number of remote heads. This generally indicates the
1689 the client has forgotten to sync and merge before pushing.
1692 the client has forgotten to sync and merge before pushing.
1690
1693
1691 Valid URLs are of the form:
1694 Valid URLs are of the form:
1692
1695
1693 local/filesystem/path
1696 local/filesystem/path
1694 ssh://[user@]host[:port][/path]
1697 ssh://[user@]host[:port][/path]
1695
1698
1696 SSH requires an accessible shell account on the destination
1699 SSH requires an accessible shell account on the destination
1697 machine and a copy of hg in the remote path.
1700 machine and a copy of hg in the remote path.
1698 """
1701 """
1699 dest = ui.expandpath(dest, repo.root)
1702 dest = ui.expandpath(dest, repo.root)
1700 ui.status('pushing to %s\n' % (dest))
1703 ui.status('pushing to %s\n' % (dest))
1701
1704
1702 if ssh:
1705 if ssh:
1703 ui.setconfig("ui", "ssh", ssh)
1706 ui.setconfig("ui", "ssh", ssh)
1704 if remotecmd:
1707 if remotecmd:
1705 ui.setconfig("ui", "remotecmd", remotecmd)
1708 ui.setconfig("ui", "remotecmd", remotecmd)
1706
1709
1707 other = hg.repository(ui, dest)
1710 other = hg.repository(ui, dest)
1708 r = repo.push(other, force)
1711 r = repo.push(other, force)
1709 return r
1712 return r
1710
1713
1711 def rawcommit(ui, repo, *flist, **rc):
1714 def rawcommit(ui, repo, *flist, **rc):
1712 """raw commit interface
1715 """raw commit interface
1713
1716
1714 Lowlevel commit, for use in helper scripts.
1717 Lowlevel commit, for use in helper scripts.
1715
1718
1716 This command is not intended to be used by normal users, as it is
1719 This command is not intended to be used by normal users, as it is
1717 primarily useful for importing from other SCMs.
1720 primarily useful for importing from other SCMs.
1718 """
1721 """
1719 message = rc['message']
1722 message = rc['message']
1720 if not message and rc['logfile']:
1723 if not message and rc['logfile']:
1721 try:
1724 try:
1722 message = open(rc['logfile']).read()
1725 message = open(rc['logfile']).read()
1723 except IOError:
1726 except IOError:
1724 pass
1727 pass
1725 if not message and not rc['logfile']:
1728 if not message and not rc['logfile']:
1726 raise util.Abort(_("missing commit message"))
1729 raise util.Abort(_("missing commit message"))
1727
1730
1728 files = relpath(repo, list(flist))
1731 files = relpath(repo, list(flist))
1729 if rc['files']:
1732 if rc['files']:
1730 files += open(rc['files']).read().splitlines()
1733 files += open(rc['files']).read().splitlines()
1731
1734
1732 rc['parent'] = map(repo.lookup, rc['parent'])
1735 rc['parent'] = map(repo.lookup, rc['parent'])
1733
1736
1734 try:
1737 try:
1735 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1738 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1736 except ValueError, inst:
1739 except ValueError, inst:
1737 raise util.Abort(str(inst))
1740 raise util.Abort(str(inst))
1738
1741
1739 def recover(ui, repo):
1742 def recover(ui, repo):
1740 """roll back an interrupted transaction
1743 """roll back an interrupted transaction
1741
1744
1742 Recover from an interrupted commit or pull.
1745 Recover from an interrupted commit or pull.
1743
1746
1744 This command tries to fix the repository status after an interrupted
1747 This command tries to fix the repository status after an interrupted
1745 operation. It should only be necessary when Mercurial suggests it.
1748 operation. It should only be necessary when Mercurial suggests it.
1746 """
1749 """
1747 if repo.recover():
1750 if repo.recover():
1748 return repo.verify()
1751 return repo.verify()
1749 return False
1752 return False
1750
1753
1751 def remove(ui, repo, pat, *pats, **opts):
1754 def remove(ui, repo, pat, *pats, **opts):
1752 """remove the specified files on the next commit
1755 """remove the specified files on the next commit
1753
1756
1754 Schedule the indicated files for removal from the repository.
1757 Schedule the indicated files for removal from the repository.
1755
1758
1756 This command schedules the files to be removed at the next commit.
1759 This command schedules the files to be removed at the next commit.
1757 This only removes files from the current branch, not from the
1760 This only removes files from the current branch, not from the
1758 entire project history. If the files still exist in the working
1761 entire project history. If the files still exist in the working
1759 directory, they will be deleted from it.
1762 directory, they will be deleted from it.
1760 """
1763 """
1761 names = []
1764 names = []
1762 def okaytoremove(abs, rel, exact):
1765 def okaytoremove(abs, rel, exact):
1763 c, a, d, u = repo.changes(files = [abs])
1766 c, a, d, u = repo.changes(files = [abs])
1764 reason = None
1767 reason = None
1765 if c: reason = _('is modified')
1768 if c: reason = _('is modified')
1766 elif a: reason = _('has been marked for add')
1769 elif a: reason = _('has been marked for add')
1767 elif u: reason = _('is not managed')
1770 elif u: reason = _('is not managed')
1768 if reason:
1771 if reason:
1769 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1772 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1770 else:
1773 else:
1771 return True
1774 return True
1772 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1775 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1773 if okaytoremove(abs, rel, exact):
1776 if okaytoremove(abs, rel, exact):
1774 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1777 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1775 names.append(abs)
1778 names.append(abs)
1776 repo.remove(names, unlink=True)
1779 repo.remove(names, unlink=True)
1777
1780
1778 def rename(ui, repo, *pats, **opts):
1781 def rename(ui, repo, *pats, **opts):
1779 """rename files; equivalent of copy + remove
1782 """rename files; equivalent of copy + remove
1780
1783
1781 Mark dest as copies of sources; mark sources for deletion. If
1784 Mark dest as copies of sources; mark sources for deletion. If
1782 dest is a directory, copies are put in that directory. If dest is
1785 dest is a directory, copies are put in that directory. If dest is
1783 a file, there can only be one source.
1786 a file, there can only be one source.
1784
1787
1785 By default, this command copies the contents of files as they
1788 By default, this command copies the contents of files as they
1786 stand in the working directory. If invoked with --after, the
1789 stand in the working directory. If invoked with --after, the
1787 operation is recorded, but no copying is performed.
1790 operation is recorded, but no copying is performed.
1788
1791
1789 This command takes effect in the next commit.
1792 This command takes effect in the next commit.
1790
1793
1791 NOTE: This command should be treated as experimental. While it
1794 NOTE: This command should be treated as experimental. While it
1792 should properly record rename files, this information is not yet
1795 should properly record rename files, this information is not yet
1793 fully used by merge, nor fully reported by log.
1796 fully used by merge, nor fully reported by log.
1794 """
1797 """
1795 errs, copied = docopy(ui, repo, pats, opts)
1798 errs, copied = docopy(ui, repo, pats, opts)
1796 names = []
1799 names = []
1797 for abs, rel, exact in copied:
1800 for abs, rel, exact in copied:
1798 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1801 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1799 names.append(abs)
1802 names.append(abs)
1800 repo.remove(names, unlink=True)
1803 repo.remove(names, unlink=True)
1801 return errs
1804 return errs
1802
1805
1803 def revert(ui, repo, *pats, **opts):
1806 def revert(ui, repo, *pats, **opts):
1804 """revert modified files or dirs back to their unmodified states
1807 """revert modified files or dirs back to their unmodified states
1805
1808
1806 Revert any uncommitted modifications made to the named files or
1809 Revert any uncommitted modifications made to the named files or
1807 directories. This restores the contents of the affected files to
1810 directories. This restores the contents of the affected files to
1808 an unmodified state.
1811 an unmodified state.
1809
1812
1810 If a file has been deleted, it is recreated. If the executable
1813 If a file has been deleted, it is recreated. If the executable
1811 mode of a file was changed, it is reset.
1814 mode of a file was changed, it is reset.
1812
1815
1813 If names are given, all files matching the names are reverted.
1816 If names are given, all files matching the names are reverted.
1814
1817
1815 If no names are given, all files in the current directory and
1818 If no names are given, all files in the current directory and
1816 its subdirectories are reverted.
1819 its subdirectories are reverted.
1817 """
1820 """
1818 node = opts['rev'] and repo.lookup(opts['rev']) or \
1821 node = opts['rev'] and repo.lookup(opts['rev']) or \
1819 repo.dirstate.parents()[0]
1822 repo.dirstate.parents()[0]
1820
1823
1821 files, choose, anypats = matchpats(repo, repo.getcwd(), pats, opts)
1824 files, choose, anypats = matchpats(repo, repo.getcwd(), pats, opts)
1822 (c, a, d, u) = repo.changes(match=choose)
1825 (c, a, d, u) = repo.changes(match=choose)
1823 repo.forget(a)
1826 repo.forget(a)
1824 repo.undelete(d)
1827 repo.undelete(d)
1825
1828
1826 return repo.update(node, False, True, choose, False)
1829 return repo.update(node, False, True, choose, False)
1827
1830
1828 def root(ui, repo):
1831 def root(ui, repo):
1829 """print the root (top) of the current working dir
1832 """print the root (top) of the current working dir
1830
1833
1831 Print the root directory of the current repository.
1834 Print the root directory of the current repository.
1832 """
1835 """
1833 ui.write(repo.root + "\n")
1836 ui.write(repo.root + "\n")
1834
1837
1835 def serve(ui, repo, **opts):
1838 def serve(ui, repo, **opts):
1836 """export the repository via HTTP
1839 """export the repository via HTTP
1837
1840
1838 Start a local HTTP repository browser and pull server.
1841 Start a local HTTP repository browser and pull server.
1839
1842
1840 By default, the server logs accesses to stdout and errors to
1843 By default, the server logs accesses to stdout and errors to
1841 stderr. Use the "-A" and "-E" options to log to files.
1844 stderr. Use the "-A" and "-E" options to log to files.
1842 """
1845 """
1843
1846
1844 if opts["stdio"]:
1847 if opts["stdio"]:
1845 fin, fout = sys.stdin, sys.stdout
1848 fin, fout = sys.stdin, sys.stdout
1846 sys.stdout = sys.stderr
1849 sys.stdout = sys.stderr
1847
1850
1848 # Prevent insertion/deletion of CRs
1851 # Prevent insertion/deletion of CRs
1849 util.set_binary(fin)
1852 util.set_binary(fin)
1850 util.set_binary(fout)
1853 util.set_binary(fout)
1851
1854
1852 def getarg():
1855 def getarg():
1853 argline = fin.readline()[:-1]
1856 argline = fin.readline()[:-1]
1854 arg, l = argline.split()
1857 arg, l = argline.split()
1855 val = fin.read(int(l))
1858 val = fin.read(int(l))
1856 return arg, val
1859 return arg, val
1857 def respond(v):
1860 def respond(v):
1858 fout.write("%d\n" % len(v))
1861 fout.write("%d\n" % len(v))
1859 fout.write(v)
1862 fout.write(v)
1860 fout.flush()
1863 fout.flush()
1861
1864
1862 lock = None
1865 lock = None
1863
1866
1864 while 1:
1867 while 1:
1865 cmd = fin.readline()[:-1]
1868 cmd = fin.readline()[:-1]
1866 if cmd == '':
1869 if cmd == '':
1867 return
1870 return
1868 if cmd == "heads":
1871 if cmd == "heads":
1869 h = repo.heads()
1872 h = repo.heads()
1870 respond(" ".join(map(hex, h)) + "\n")
1873 respond(" ".join(map(hex, h)) + "\n")
1871 if cmd == "lock":
1874 if cmd == "lock":
1872 lock = repo.lock()
1875 lock = repo.lock()
1873 respond("")
1876 respond("")
1874 if cmd == "unlock":
1877 if cmd == "unlock":
1875 if lock:
1878 if lock:
1876 lock.release()
1879 lock.release()
1877 lock = None
1880 lock = None
1878 respond("")
1881 respond("")
1879 elif cmd == "branches":
1882 elif cmd == "branches":
1880 arg, nodes = getarg()
1883 arg, nodes = getarg()
1881 nodes = map(bin, nodes.split(" "))
1884 nodes = map(bin, nodes.split(" "))
1882 r = []
1885 r = []
1883 for b in repo.branches(nodes):
1886 for b in repo.branches(nodes):
1884 r.append(" ".join(map(hex, b)) + "\n")
1887 r.append(" ".join(map(hex, b)) + "\n")
1885 respond("".join(r))
1888 respond("".join(r))
1886 elif cmd == "between":
1889 elif cmd == "between":
1887 arg, pairs = getarg()
1890 arg, pairs = getarg()
1888 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1891 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1889 r = []
1892 r = []
1890 for b in repo.between(pairs):
1893 for b in repo.between(pairs):
1891 r.append(" ".join(map(hex, b)) + "\n")
1894 r.append(" ".join(map(hex, b)) + "\n")
1892 respond("".join(r))
1895 respond("".join(r))
1893 elif cmd == "changegroup":
1896 elif cmd == "changegroup":
1894 nodes = []
1897 nodes = []
1895 arg, roots = getarg()
1898 arg, roots = getarg()
1896 nodes = map(bin, roots.split(" "))
1899 nodes = map(bin, roots.split(" "))
1897
1900
1898 cg = repo.changegroup(nodes)
1901 cg = repo.changegroup(nodes)
1899 while 1:
1902 while 1:
1900 d = cg.read(4096)
1903 d = cg.read(4096)
1901 if not d:
1904 if not d:
1902 break
1905 break
1903 fout.write(d)
1906 fout.write(d)
1904
1907
1905 fout.flush()
1908 fout.flush()
1906
1909
1907 elif cmd == "addchangegroup":
1910 elif cmd == "addchangegroup":
1908 if not lock:
1911 if not lock:
1909 respond("not locked")
1912 respond("not locked")
1910 continue
1913 continue
1911 respond("")
1914 respond("")
1912
1915
1913 r = repo.addchangegroup(fin)
1916 r = repo.addchangegroup(fin)
1914 respond("")
1917 respond("")
1915
1918
1916 optlist = "name templates style address port ipv6 accesslog errorlog"
1919 optlist = "name templates style address port ipv6 accesslog errorlog"
1917 for o in optlist.split():
1920 for o in optlist.split():
1918 if opts[o]:
1921 if opts[o]:
1919 ui.setconfig("web", o, opts[o])
1922 ui.setconfig("web", o, opts[o])
1920
1923
1921 try:
1924 try:
1922 httpd = hgweb.create_server(repo)
1925 httpd = hgweb.create_server(repo)
1923 except socket.error, inst:
1926 except socket.error, inst:
1924 raise util.Abort('cannot start server: ' + inst.args[1])
1927 raise util.Abort('cannot start server: ' + inst.args[1])
1925
1928
1926 if ui.verbose:
1929 if ui.verbose:
1927 addr, port = httpd.socket.getsockname()
1930 addr, port = httpd.socket.getsockname()
1928 if addr == '0.0.0.0':
1931 if addr == '0.0.0.0':
1929 addr = socket.gethostname()
1932 addr = socket.gethostname()
1930 else:
1933 else:
1931 try:
1934 try:
1932 addr = socket.gethostbyaddr(addr)[0]
1935 addr = socket.gethostbyaddr(addr)[0]
1933 except socket.error:
1936 except socket.error:
1934 pass
1937 pass
1935 if port != 80:
1938 if port != 80:
1936 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1939 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1937 else:
1940 else:
1938 ui.status(_('listening at http://%s/\n') % addr)
1941 ui.status(_('listening at http://%s/\n') % addr)
1939 httpd.serve_forever()
1942 httpd.serve_forever()
1940
1943
1941 def status(ui, repo, *pats, **opts):
1944 def status(ui, repo, *pats, **opts):
1942 """show changed files in the working directory
1945 """show changed files in the working directory
1943
1946
1944 Show changed files in the working directory. If no names are
1947 Show changed files in the working directory. If no names are
1945 given, all files are shown. Otherwise, only files matching the
1948 given, all files are shown. Otherwise, only files matching the
1946 given names are shown.
1949 given names are shown.
1947
1950
1948 The codes used to show the status of files are:
1951 The codes used to show the status of files are:
1949 M = modified
1952 M = modified
1950 A = added
1953 A = added
1951 R = removed
1954 R = removed
1952 ? = not tracked
1955 ? = not tracked
1953 """
1956 """
1954
1957
1955 cwd = repo.getcwd()
1958 cwd = repo.getcwd()
1956 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1959 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1957 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1960 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1958 for n in repo.changes(files=files, match=matchfn)]
1961 for n in repo.changes(files=files, match=matchfn)]
1959
1962
1960 changetypes = [(_('modified'), 'M', c),
1963 changetypes = [(_('modified'), 'M', c),
1961 (_('added'), 'A', a),
1964 (_('added'), 'A', a),
1962 (_('removed'), 'R', d),
1965 (_('removed'), 'R', d),
1963 (_('unknown'), '?', u)]
1966 (_('unknown'), '?', u)]
1964
1967
1965 end = opts['print0'] and '\0' or '\n'
1968 end = opts['print0'] and '\0' or '\n'
1966
1969
1967 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1970 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1968 or changetypes):
1971 or changetypes):
1969 if opts['no_status']:
1972 if opts['no_status']:
1970 format = "%%s%s" % end
1973 format = "%%s%s" % end
1971 else:
1974 else:
1972 format = "%s %%s%s" % (char, end);
1975 format = "%s %%s%s" % (char, end);
1973
1976
1974 for f in changes:
1977 for f in changes:
1975 ui.write(format % f)
1978 ui.write(format % f)
1976
1979
1977 def tag(ui, repo, name, rev=None, **opts):
1980 def tag(ui, repo, name, rev=None, **opts):
1978 """add a tag for the current tip or a given revision
1981 """add a tag for the current tip or a given revision
1979
1982
1980 Name a particular revision using <name>.
1983 Name a particular revision using <name>.
1981
1984
1982 Tags are used to name particular revisions of the repository and are
1985 Tags are used to name particular revisions of the repository and are
1983 very useful to compare different revision, to go back to significant
1986 very useful to compare different revision, to go back to significant
1984 earlier versions or to mark branch points as releases, etc.
1987 earlier versions or to mark branch points as releases, etc.
1985
1988
1986 If no revision is given, the tip is used.
1989 If no revision is given, the tip is used.
1987
1990
1988 To facilitate version control, distribution, and merging of tags,
1991 To facilitate version control, distribution, and merging of tags,
1989 they are stored as a file named ".hgtags" which is managed
1992 they are stored as a file named ".hgtags" which is managed
1990 similarly to other project files and can be hand-edited if
1993 similarly to other project files and can be hand-edited if
1991 necessary.
1994 necessary.
1992 """
1995 """
1993 if name == "tip":
1996 if name == "tip":
1994 raise util.Abort(_("the name 'tip' is reserved"))
1997 raise util.Abort(_("the name 'tip' is reserved"))
1995 if 'rev' in opts:
1998 if 'rev' in opts:
1996 rev = opts['rev']
1999 rev = opts['rev']
1997 if rev:
2000 if rev:
1998 r = hex(repo.lookup(rev))
2001 r = hex(repo.lookup(rev))
1999 else:
2002 else:
2000 r = hex(repo.changelog.tip())
2003 r = hex(repo.changelog.tip())
2001
2004
2002 if name.find(revrangesep) >= 0:
2005 if name.find(revrangesep) >= 0:
2003 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
2006 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
2004
2007
2005 if opts['local']:
2008 if opts['local']:
2006 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2009 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2007 return
2010 return
2008
2011
2009 (c, a, d, u) = repo.changes()
2012 (c, a, d, u) = repo.changes()
2010 for x in (c, a, d, u):
2013 for x in (c, a, d, u):
2011 if ".hgtags" in x:
2014 if ".hgtags" in x:
2012 raise util.Abort(_("working copy of .hgtags is changed "
2015 raise util.Abort(_("working copy of .hgtags is changed "
2013 "(please commit .hgtags manually)"))
2016 "(please commit .hgtags manually)"))
2014
2017
2015 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2018 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2016 if repo.dirstate.state(".hgtags") == '?':
2019 if repo.dirstate.state(".hgtags") == '?':
2017 repo.add([".hgtags"])
2020 repo.add([".hgtags"])
2018
2021
2019 message = (opts['message'] or
2022 message = (opts['message'] or
2020 _("Added tag %s for changeset %s") % (name, r))
2023 _("Added tag %s for changeset %s") % (name, r))
2021 try:
2024 try:
2022 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2025 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2023 except ValueError, inst:
2026 except ValueError, inst:
2024 raise util.Abort(str(inst))
2027 raise util.Abort(str(inst))
2025
2028
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """
    taglist = repo.tagslist()
    taglist.reverse()
    # newest tags first; show "rev:node" or placeholder for unknown nodes
    for tagname, tagnode in taglist:
        try:
            info = "%5d:%s" % (repo.changelog.rev(tagnode), hex(tagnode))
        except KeyError:
            # node is not in the changelog (e.g. a stale local tag)
            info = " ?:?"
        ui.write("%-30s %s\n" % (tagname, info))
2042
2045
def tip(ui, repo):
    """show the tip revision

    Show the tip revision.
    """
    # display the changeset at the head of the changelog
    show_changeset(ui, repo, changenode=repo.changelog.tip())
2050
2053
def unbundle(ui, repo, fname):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    # bundles start with a fixed magic header
    if f.read(4) != "HG10":
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)

    def decompressed(chunks):
        # stream-decompress the bzip2 payload one chunk at a time
        decomp = bz2.BZ2Decompressor()
        for piece in chunks:
            yield decomp.decompress(piece)

    stream = decompressed(util.filechunkiter(f, 4096))
    repo.addchangegroup(util.chunkbuffer(stream))
2069
2072
def undo(ui, repo):
    """undo the last commit or pull

    Roll back the last pull or commit transaction on the
    repository, restoring the project to its earlier state.

    This command should be used with care. There is only one level of
    undo and there is no redo.

    This command is not intended for use on public repositories. Once
    a change is visible for pull by other users, undoing it locally is
    ineffective.
    """
    # all of the work is delegated to the repository layer
    repo.undo()
2084
2087
def update(ui, repo, node=None, merge=False, clean=False, branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    Otherwise the result is a merge between the contents of the
    current working directory and the requested version. Files that
    changed between either parent are marked as changed for the next
    commit and a commit must be performed before any further updates
    are allowed.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if branch:
        # map heads to their branch names, keep those carrying `branch`
        branchmap = repo.branchlookup(branch=branch)
        candidates = [n for n in branchmap if branch in branchmap[n]]
        if len(candidates) > 1:
            # ambiguous: show every matching head and bail out
            ui.warn(_("Found multiple heads for %s\n") % branch)
            for n in candidates:
                show_changeset(ui, repo, changenode=n, brinfo=branchmap)
            return 1
        if not candidates:
            ui.warn(_("branch %s not found\n") % (branch))
            return 1
        node = candidates[0]
        ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
    else:
        # default to tip when no revision was requested
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean)
2123
2126
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # delegate to the repository; its return value is our exit status
    return repo.verify()
2135
2138
2136 # Command options and aliases are listed here, alphabetically
2139 # Command options and aliases are listed here, alphabetically
2137
2140
# Command options and aliases are listed here, alphabetically.
# Each entry maps "name|alias..." (a leading ^ marks commands shown in
# short help) to (function, option list, usage string).  Option tuples
# are (short, long, default, description); fancyopts infers the option
# type from the default, so options taking a value must default to a
# string/list/int, and flag options to None.
table = {
    "^add":
    (add,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     "hg add [OPTION]... [FILE]..."),
    "addremove":
    (addremove,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     "hg addremove [OPTION]... [FILE]..."),
    "^annotate":
    (annotate,
     [('r', 'rev', '', _('annotate the specified revision')),
      ('a', 'text', None, _('treat all files as text')),
      ('u', 'user', None, _('list the author')),
      ('d', 'date', None, _('list the date')),
      ('n', 'number', None, _('list the revision number (default)')),
      ('c', 'changeset', None, _('list the changeset')),
      ('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     _('hg annotate [OPTION]... FILE...')),
    "bundle":
    (bundle,
     [],
     _('hg bundle FILE DEST')),
    "cat":
    (cat,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns')),
      ('o', 'output', "", _('print output to file with formatted name')),
      ('r', 'rev', '', _('print the given revision'))],
     _('hg cat [OPTION]... FILE...')),
    "^clone":
    (clone,
     [('U', 'noupdate', None, _('do not update the new working directory')),
      ('e', 'ssh', "", _('specify ssh command to use')),
      ('', 'pull', None, _('use pull protocol to copy metadata')),
      ('r', 'rev', [], _('a changeset you would like to have after cloning')),
      ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
     _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
    (commit,
     [('A', 'addremove', None, _('run addremove during commit')),
      ('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns')),
      ('m', 'message', "", _('use <text> as commit message')),
      ('l', 'logfile', "", _('read the commit message from <file>')),
      ('d', 'date', "", _('record datecode as commit date')),
      ('u', 'user', "", _('record user as commiter'))],
     _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
    (copy,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns')),
      ('A', 'after', None, _('record a copy that has already occurred')),
      ('f', 'force', None, _('forcibly copy over an existing managed file'))],
     _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugconfig": (debugconfig, [], _('debugconfig')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk":
    (debugwalk,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
    (diff,
     [('r', 'rev', [], _('revision')),
      ('a', 'text', None, _('treat all files as text')),
      ('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
    (export,
     [('o', 'output', "", _('print output to file with formatted name')),
      ('a', 'text', None, _('treat all files as text'))],
     "hg export [-a] [-o OUTFILE] REV..."),
    "forget":
    (forget,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     "hg forget [OPTION]... FILE..."),
    "grep":
    (grep,
     [('0', 'print0', None, _('end fields with NUL')),
      ('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns')),
      ('', 'all', None, _('print all revisions that match')),
      ('i', 'ignore-case', None, _('ignore case when matching')),
      ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
      ('n', 'line-number', None, _('print matching line numbers')),
      ('r', 'rev', [], _('search in given revision range')),
      ('u', 'user', None, _('print user who committed change'))],
     "hg grep [OPTION]... PATTERN [FILE]..."),
    "heads":
    (heads,
     [('b', 'branches', None, _('find branch info')),
      # -r takes a revision argument, so its default must be a string:
      # with a None default fancyopts would treat -r as a boolean flag
      # and never consume the revision that follows it.
      ('r', 'rev', "", _('show only heads which are descendants of rev'))],
     _('hg heads [-b] [-r <rev>]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
    (import_,
     [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
                        _('meaning as the corresponding patch option')),
      ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
      ('b', 'base', "", _('base path'))],
     "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
    "incoming|in":
    (incoming,
     [('M', 'no-merges', None, _("do not show merges")),
      ('p', 'patch', None, _('show patch')),
      ('n', 'newest-first', None, _('show newest record first'))],
     _('hg incoming [-p] [-n] [-M] [SOURCE]')),
    "^init": (init, [], _('hg init [DEST]')),
    "locate":
    (locate,
     [('r', 'rev', '', _('search the repository as it stood at rev')),
      ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
      ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
      ('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
    (log,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns')),
      ('b', 'branch', None, _('show branches')),
      ('k', 'keyword', [], _('search for a keyword')),
      ('r', 'rev', [], _('show the specified revision or range')),
      ('M', 'no-merges', None, _("do not show merges")),
      ('m', 'only-merges', None, _("show only merges")),
      ('p', 'patch', None, _('show patch'))],
     _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "outgoing|out":
    (outgoing,
     [('M', 'no-merges', None, _("do not show merges")),
      ('p', 'patch', None, _('show patch')),
      ('n', 'newest-first', None, _('show newest record first'))],
     _('hg outgoing [-p] [-n] [-M] [DEST]')),
    "^parents": (parents, [], _('hg parents [REV]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
    (pull,
     [('u', 'update', None, _('update the working directory to tip after pull')),
      ('e', 'ssh', "", _('specify ssh command to use')),
      ('r', 'rev', [], _('a specific revision you would like to pull')),
      ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
     _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
    "^push":
    (push,
     [('f', 'force', None, _('force push')),
      ('e', 'ssh', "", _('specify ssh command to use')),
      ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
     _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
    "rawcommit":
    (rawcommit,
     [('p', 'parent', [], _('parent')),
      ('d', 'date', "", _('date code')),
      ('u', 'user', "", _('user')),
      ('F', 'files', "", _('file list')),
      ('m', 'message', "", _('commit message')),
      ('l', 'logfile', "", _('commit message file'))],
     _('hg rawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _("hg recover")),
    "^remove|rm":
    (remove,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     _("hg remove [OPTION]... FILE...")),
    "rename|mv":
    (rename,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns')),
      ('A', 'after', None, _('record a rename that has already occurred')),
      ('f', 'force', None, _('forcibly copy over an existing managed file'))],
     _('hg rename [OPTION]... [SOURCE]... DEST')),
    "^revert":
    (revert,
     [('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns')),
      ("r", "rev", "", _("revision to revert to"))],
     _("hg revert [-n] [-r REV] [NAME]...")),
    "root": (root, [], _("hg root")),
    "^serve":
    (serve,
     [('A', 'accesslog', '', _('name of access log file to write to')),
      ('E', 'errorlog', '', _('name of error log file to write to')),
      ('p', 'port', 0, _('port to use (default: 8000)')),
      ('a', 'address', '', _('address to use')),
      ('n', 'name', "", _('name to show in web pages (default: working dir)')),
      ('', 'stdio', None, _('for remote clients')),
      ('t', 'templates', "", _('web templates to use')),
      ('', 'style', "", _('template style to use')),
      ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
     _("hg serve [OPTION]...")),
    "^status|st":
    (status,
     [('m', 'modified', None, _('show only modified files')),
      ('a', 'added', None, _('show only added files')),
      ('r', 'removed', None, _('show only removed files')),
      ('u', 'unknown', None, _('show only unknown (not tracked) files')),
      ('n', 'no-status', None, _('hide status prefix')),
      ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
      ('I', 'include', [], _('include names matching the given patterns')),
      ('X', 'exclude', [], _('exclude names matching the given patterns'))],
     _("hg status [OPTION]... [FILE]...")),
    "tag":
    (tag,
     [('l', 'local', None, _('make the tag local')),
      ('m', 'message', "", _('message for tag commit log entry')),
      ('d', 'date', "", _('record datecode as commit date')),
      ('u', 'user', "", _('record user as commiter')),
      ('r', 'rev', "", _('revision to tag'))],
     _('hg tag [OPTION]... NAME [REV]')),
    "tags": (tags, [], _('hg tags')),
    "tip": (tip, [], _('hg tip')),
    "unbundle":
    (unbundle,
     [],
     _('hg unbundle FILE')),
    "undo": (undo, [], _('hg undo')),
    "^update|up|checkout|co":
    (update,
     [('b', 'branch', "", _('checkout the head of a specific branch')),
      ('m', 'merge', None, _('allow merging of branches')),
      ('C', 'clean', None, _('overwrite locally modified files'))],
     _('hg update [-b TAG] [-m] [-C] [REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (show_version, [], _('hg version')),
}
2371
2375
# Options accepted by every command, in help-listing order.
globalopts = [
    # repository selection
    ('R', 'repository', "", _("repository root directory")),
    ('', 'cwd', '', _("change working directory")),
    # interaction and verbosity
    ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
    ('q', 'quiet', None, _("suppress output")),
    ('v', 'verbose', None, _("enable additional output")),
    # debugging and diagnostics
    ('', 'debug', None, _("enable debugging output")),
    ('', 'debugger', None, _("start debugger")),
    ('', 'traceback', None, _("print traceback on exception")),
    ('', 'time', None, _("time how long the command takes")),
    ('', 'profile', None, _("print command execution profile")),
    # informational
    ('', 'version', None, _("output version information and exit")),
    ('h', 'help', None, _("display help and exit")),
]
2386
2390
2387 norepo = ("clone init version help debugancestor debugconfig debugdata"
2391 norepo = ("clone init version help debugancestor debugconfig debugdata"
2388 " debugindex debugindexdot paths")
2392 " debugindex debugindexdot paths")
2389
2393
2390 def find(cmd):
2394 def find(cmd):
2391 """Return (aliases, command table entry) for command string."""
2395 """Return (aliases, command table entry) for command string."""
2392 choice = None
2396 choice = None
2393 for e in table.keys():
2397 for e in table.keys():
2394 aliases = e.lstrip("^").split("|")
2398 aliases = e.lstrip("^").split("|")
2395 if cmd in aliases:
2399 if cmd in aliases:
2396 return aliases, table[e]
2400 return aliases, table[e]
2397 for a in aliases:
2401 for a in aliases:
2398 if a.startswith(cmd):
2402 if a.startswith(cmd):
2399 if choice:
2403 if choice:
2400 raise AmbiguousCommand(cmd)
2404 raise AmbiguousCommand(cmd)
2401 else:
2405 else:
2402 choice = aliases, table[e]
2406 choice = aliases, table[e]
2403 break
2407 break
2404 if choice:
2408 if choice:
2405 return choice
2409 return choice
2406
2410
2407 raise UnknownCommand(cmd)
2411 raise UnknownCommand(cmd)
2408
2412
2409 class SignalInterrupt(Exception):
2413 class SignalInterrupt(Exception):
2410 """Exception raised on SIGTERM and SIGHUP."""
2414 """Exception raised on SIGTERM and SIGHUP."""
2411
2415
2412 def catchterm(*args):
2416 def catchterm(*args):
2413 raise SignalInterrupt
2417 raise SignalInterrupt
2414
2418
2415 def run():
2419 def run():
2416 sys.exit(dispatch(sys.argv[1:]))
2420 sys.exit(dispatch(sys.argv[1:]))
2417
2421
2418 class ParseError(Exception):
2422 class ParseError(Exception):
2419 """Exception raised on errors in parsing the command line."""
2423 """Exception raised on errors in parsing the command line."""
2420
2424
2421 def parse(ui, args):
2425 def parse(ui, args):
2422 options = {}
2426 options = {}
2423 cmdoptions = {}
2427 cmdoptions = {}
2424
2428
2425 try:
2429 try:
2426 args = fancyopts.fancyopts(args, globalopts, options)
2430 args = fancyopts.fancyopts(args, globalopts, options)
2427 except fancyopts.getopt.GetoptError, inst:
2431 except fancyopts.getopt.GetoptError, inst:
2428 raise ParseError(None, inst)
2432 raise ParseError(None, inst)
2429
2433
2430 if args:
2434 if args:
2431 cmd, args = args[0], args[1:]
2435 cmd, args = args[0], args[1:]
2432 defaults = ui.config("defaults", cmd)
2436 defaults = ui.config("defaults", cmd)
2433 if defaults:
2437 if defaults:
2434 args = defaults.split() + args
2438 args = defaults.split() + args
2435
2439
2436 aliases, i = find(cmd)
2440 aliases, i = find(cmd)
2437 cmd = aliases[0]
2441 cmd = aliases[0]
2438 c = list(i[1])
2442 c = list(i[1])
2439 else:
2443 else:
2440 cmd = None
2444 cmd = None
2441 c = []
2445 c = []
2442
2446
2443 # combine global options into local
2447 # combine global options into local
2444 for o in globalopts:
2448 for o in globalopts:
2445 c.append((o[0], o[1], options[o[1]], o[3]))
2449 c.append((o[0], o[1], options[o[1]], o[3]))
2446
2450
2447 try:
2451 try:
2448 args = fancyopts.fancyopts(args, c, cmdoptions)
2452 args = fancyopts.fancyopts(args, c, cmdoptions)
2449 except fancyopts.getopt.GetoptError, inst:
2453 except fancyopts.getopt.GetoptError, inst:
2450 raise ParseError(cmd, inst)
2454 raise ParseError(cmd, inst)
2451
2455
2452 # separate global options back out
2456 # separate global options back out
2453 for o in globalopts:
2457 for o in globalopts:
2454 n = o[1]
2458 n = o[1]
2455 options[n] = cmdoptions[n]
2459 options[n] = cmdoptions[n]
2456 del cmdoptions[n]
2460 del cmdoptions[n]
2457
2461
2458 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2462 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2459
2463
2460 def dispatch(args):
2464 def dispatch(args):
2461 signal.signal(signal.SIGTERM, catchterm)
2465 signal.signal(signal.SIGTERM, catchterm)
2462 try:
2466 try:
2463 signal.signal(signal.SIGHUP, catchterm)
2467 signal.signal(signal.SIGHUP, catchterm)
2464 except AttributeError:
2468 except AttributeError:
2465 pass
2469 pass
2466
2470
2467 try:
2471 try:
2468 u = ui.ui()
2472 u = ui.ui()
2469 except util.Abort, inst:
2473 except util.Abort, inst:
2470 sys.stderr.write(_("abort: %s\n") % inst)
2474 sys.stderr.write(_("abort: %s\n") % inst)
2471 sys.exit(1)
2475 sys.exit(1)
2472
2476
2473 external = []
2477 external = []
2474 for x in u.extensions():
2478 for x in u.extensions():
2475 def on_exception(exc, inst):
2479 def on_exception(exc, inst):
2476 u.warn(_("*** failed to import extension %s\n") % x[1])
2480 u.warn(_("*** failed to import extension %s\n") % x[1])
2477 u.warn("%s\n" % inst)
2481 u.warn("%s\n" % inst)
2478 if "--traceback" in sys.argv[1:]:
2482 if "--traceback" in sys.argv[1:]:
2479 traceback.print_exc()
2483 traceback.print_exc()
2480 if x[1]:
2484 if x[1]:
2481 try:
2485 try:
2482 mod = imp.load_source(x[0], x[1])
2486 mod = imp.load_source(x[0], x[1])
2483 except Exception, inst:
2487 except Exception, inst:
2484 on_exception(Exception, inst)
2488 on_exception(Exception, inst)
2485 continue
2489 continue
2486 else:
2490 else:
2487 def importh(name):
2491 def importh(name):
2488 mod = __import__(name)
2492 mod = __import__(name)
2489 components = name.split('.')
2493 components = name.split('.')
2490 for comp in components[1:]:
2494 for comp in components[1:]:
2491 mod = getattr(mod, comp)
2495 mod = getattr(mod, comp)
2492 return mod
2496 return mod
2493 try:
2497 try:
2494 mod = importh(x[0])
2498 mod = importh(x[0])
2495 except Exception, inst:
2499 except Exception, inst:
2496 on_exception(Exception, inst)
2500 on_exception(Exception, inst)
2497 continue
2501 continue
2498
2502
2499 external.append(mod)
2503 external.append(mod)
2500 for x in external:
2504 for x in external:
2501 cmdtable = getattr(x, 'cmdtable', {})
2505 cmdtable = getattr(x, 'cmdtable', {})
2502 for t in cmdtable:
2506 for t in cmdtable:
2503 if t in table:
2507 if t in table:
2504 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2508 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2505 table.update(cmdtable)
2509 table.update(cmdtable)
2506
2510
2507 try:
2511 try:
2508 cmd, func, args, options, cmdoptions = parse(u, args)
2512 cmd, func, args, options, cmdoptions = parse(u, args)
2509 except ParseError, inst:
2513 except ParseError, inst:
2510 if inst.args[0]:
2514 if inst.args[0]:
2511 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2515 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2512 help_(u, inst.args[0])
2516 help_(u, inst.args[0])
2513 else:
2517 else:
2514 u.warn(_("hg: %s\n") % inst.args[1])
2518 u.warn(_("hg: %s\n") % inst.args[1])
2515 help_(u, 'shortlist')
2519 help_(u, 'shortlist')
2516 sys.exit(-1)
2520 sys.exit(-1)
2517 except AmbiguousCommand, inst:
2521 except AmbiguousCommand, inst:
2518 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2522 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2519 sys.exit(1)
2523 sys.exit(1)
2520 except UnknownCommand, inst:
2524 except UnknownCommand, inst:
2521 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2525 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2522 help_(u, 'shortlist')
2526 help_(u, 'shortlist')
2523 sys.exit(1)
2527 sys.exit(1)
2524
2528
2525 if options["time"]:
2529 if options["time"]:
2526 def get_times():
2530 def get_times():
2527 t = os.times()
2531 t = os.times()
2528 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2532 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2529 t = (t[0], t[1], t[2], t[3], time.clock())
2533 t = (t[0], t[1], t[2], t[3], time.clock())
2530 return t
2534 return t
2531 s = get_times()
2535 s = get_times()
2532 def print_time():
2536 def print_time():
2533 t = get_times()
2537 t = get_times()
2534 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2538 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2535 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2539 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2536 atexit.register(print_time)
2540 atexit.register(print_time)
2537
2541
2538 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2542 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2539 not options["noninteractive"])
2543 not options["noninteractive"])
2540
2544
2541 # enter the debugger before command execution
2545 # enter the debugger before command execution
2542 if options['debugger']:
2546 if options['debugger']:
2543 pdb.set_trace()
2547 pdb.set_trace()
2544
2548
2545 try:
2549 try:
2546 try:
2550 try:
2547 if options['help']:
2551 if options['help']:
2548 help_(u, cmd, options['version'])
2552 help_(u, cmd, options['version'])
2549 sys.exit(0)
2553 sys.exit(0)
2550 elif options['version']:
2554 elif options['version']:
2551 show_version(u)
2555 show_version(u)
2552 sys.exit(0)
2556 sys.exit(0)
2553 elif not cmd:
2557 elif not cmd:
2554 help_(u, 'shortlist')
2558 help_(u, 'shortlist')
2555 sys.exit(0)
2559 sys.exit(0)
2556
2560
2557 if options['cwd']:
2561 if options['cwd']:
2558 try:
2562 try:
2559 os.chdir(options['cwd'])
2563 os.chdir(options['cwd'])
2560 except OSError, inst:
2564 except OSError, inst:
2561 raise util.Abort('%s: %s' %
2565 raise util.Abort('%s: %s' %
2562 (options['cwd'], inst.strerror))
2566 (options['cwd'], inst.strerror))
2563
2567
2564 if cmd not in norepo.split():
2568 if cmd not in norepo.split():
2565 path = options["repository"] or ""
2569 path = options["repository"] or ""
2566 repo = hg.repository(ui=u, path=path)
2570 repo = hg.repository(ui=u, path=path)
2567 for x in external:
2571 for x in external:
2568 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2572 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2569 d = lambda: func(u, repo, *args, **cmdoptions)
2573 d = lambda: func(u, repo, *args, **cmdoptions)
2570 else:
2574 else:
2571 d = lambda: func(u, *args, **cmdoptions)
2575 d = lambda: func(u, *args, **cmdoptions)
2572
2576
2573 if options['profile']:
2577 if options['profile']:
2574 import hotshot, hotshot.stats
2578 import hotshot, hotshot.stats
2575 prof = hotshot.Profile("hg.prof")
2579 prof = hotshot.Profile("hg.prof")
2576 r = prof.runcall(d)
2580 r = prof.runcall(d)
2577 prof.close()
2581 prof.close()
2578 stats = hotshot.stats.load("hg.prof")
2582 stats = hotshot.stats.load("hg.prof")
2579 stats.strip_dirs()
2583 stats.strip_dirs()
2580 stats.sort_stats('time', 'calls')
2584 stats.sort_stats('time', 'calls')
2581 stats.print_stats(40)
2585 stats.print_stats(40)
2582 return r
2586 return r
2583 else:
2587 else:
2584 return d()
2588 return d()
2585 except:
2589 except:
2586 # enter the debugger when we hit an exception
2590 # enter the debugger when we hit an exception
2587 if options['debugger']:
2591 if options['debugger']:
2588 pdb.post_mortem(sys.exc_info()[2])
2592 pdb.post_mortem(sys.exc_info()[2])
2589 if options['traceback']:
2593 if options['traceback']:
2590 traceback.print_exc()
2594 traceback.print_exc()
2591 raise
2595 raise
2592 except hg.RepoError, inst:
2596 except hg.RepoError, inst:
2593 u.warn(_("abort: "), inst, "!\n")
2597 u.warn(_("abort: "), inst, "!\n")
2594 except revlog.RevlogError, inst:
2598 except revlog.RevlogError, inst:
2595 u.warn(_("abort: "), inst, "!\n")
2599 u.warn(_("abort: "), inst, "!\n")
2596 except SignalInterrupt:
2600 except SignalInterrupt:
2597 u.warn(_("killed!\n"))
2601 u.warn(_("killed!\n"))
2598 except KeyboardInterrupt:
2602 except KeyboardInterrupt:
2599 try:
2603 try:
2600 u.warn(_("interrupted!\n"))
2604 u.warn(_("interrupted!\n"))
2601 except IOError, inst:
2605 except IOError, inst:
2602 if inst.errno == errno.EPIPE:
2606 if inst.errno == errno.EPIPE:
2603 if u.debugflag:
2607 if u.debugflag:
2604 u.warn(_("\nbroken pipe\n"))
2608 u.warn(_("\nbroken pipe\n"))
2605 else:
2609 else:
2606 raise
2610 raise
2607 except IOError, inst:
2611 except IOError, inst:
2608 if hasattr(inst, "code"):
2612 if hasattr(inst, "code"):
2609 u.warn(_("abort: %s\n") % inst)
2613 u.warn(_("abort: %s\n") % inst)
2610 elif hasattr(inst, "reason"):
2614 elif hasattr(inst, "reason"):
2611 u.warn(_("abort: error: %s\n") % inst.reason[1])
2615 u.warn(_("abort: error: %s\n") % inst.reason[1])
2612 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2616 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2613 if u.debugflag:
2617 if u.debugflag:
2614 u.warn(_("broken pipe\n"))
2618 u.warn(_("broken pipe\n"))
2615 elif getattr(inst, "strerror", None):
2619 elif getattr(inst, "strerror", None):
2616 if getattr(inst, "filename", None):
2620 if getattr(inst, "filename", None):
2617 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2621 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2618 else:
2622 else:
2619 u.warn(_("abort: %s\n") % inst.strerror)
2623 u.warn(_("abort: %s\n") % inst.strerror)
2620 else:
2624 else:
2621 raise
2625 raise
2622 except OSError, inst:
2626 except OSError, inst:
2623 if hasattr(inst, "filename"):
2627 if hasattr(inst, "filename"):
2624 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2628 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2625 else:
2629 else:
2626 u.warn(_("abort: %s\n") % inst.strerror)
2630 u.warn(_("abort: %s\n") % inst.strerror)
2627 except util.Abort, inst:
2631 except util.Abort, inst:
2628 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2632 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2629 sys.exit(1)
2633 sys.exit(1)
2630 except TypeError, inst:
2634 except TypeError, inst:
2631 # was this an argument error?
2635 # was this an argument error?
2632 tb = traceback.extract_tb(sys.exc_info()[2])
2636 tb = traceback.extract_tb(sys.exc_info()[2])
2633 if len(tb) > 2: # no
2637 if len(tb) > 2: # no
2634 raise
2638 raise
2635 u.debug(inst, "\n")
2639 u.debug(inst, "\n")
2636 u.warn(_("%s: invalid arguments\n") % cmd)
2640 u.warn(_("%s: invalid arguments\n") % cmd)
2637 help_(u, cmd)
2641 help_(u, cmd)
2638 except AmbiguousCommand, inst:
2642 except AmbiguousCommand, inst:
2639 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2643 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2640 help_(u, 'shortlist')
2644 help_(u, 'shortlist')
2641 except UnknownCommand, inst:
2645 except UnknownCommand, inst:
2642 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2646 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2643 help_(u, 'shortlist')
2647 help_(u, 'shortlist')
2644 except SystemExit:
2648 except SystemExit:
2645 # don't catch this in the catch-all below
2649 # don't catch this in the catch-all below
2646 raise
2650 raise
2647 except:
2651 except:
2648 u.warn(_("** unknown exception encountered, details follow\n"))
2652 u.warn(_("** unknown exception encountered, details follow\n"))
2649 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2653 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2650 raise
2654 raise
2651
2655
2652 sys.exit(-1)
2656 sys.exit(-1)
@@ -1,1776 +1,1780 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository:
15 class localrepository:
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp: raise repo.RepoError(_("no repo found"))
22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 path = p
23 path = p
24 self.path = os.path.join(path, ".hg")
24 self.path = os.path.join(path, ".hg")
25
25
26 if not create and not os.path.isdir(self.path):
26 if not create and not os.path.isdir(self.path):
27 raise repo.RepoError(_("repository %s not found") % self.path)
27 raise repo.RepoError(_("repository %s not found") % self.path)
28
28
29 self.root = os.path.abspath(path)
29 self.root = os.path.abspath(path)
30 self.ui = ui
30 self.ui = ui
31 self.opener = util.opener(self.path)
31 self.opener = util.opener(self.path)
32 self.wopener = util.opener(self.root)
32 self.wopener = util.opener(self.root)
33 self.manifest = manifest.manifest(self.opener)
33 self.manifest = manifest.manifest(self.opener)
34 self.changelog = changelog.changelog(self.opener)
34 self.changelog = changelog.changelog(self.opener)
35 self.tagscache = None
35 self.tagscache = None
36 self.nodetagscache = None
36 self.nodetagscache = None
37 self.encodepats = None
37 self.encodepats = None
38 self.decodepats = None
38 self.decodepats = None
39
39
40 if create:
40 if create:
41 os.mkdir(self.path)
41 os.mkdir(self.path)
42 os.mkdir(self.join("data"))
42 os.mkdir(self.join("data"))
43
43
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 try:
45 try:
46 self.ui.readconfig(self.join("hgrc"))
46 self.ui.readconfig(self.join("hgrc"))
47 except IOError: pass
47 except IOError: pass
48
48
49 def hook(self, name, **args):
49 def hook(self, name, **args):
50 def runhook(name, cmd):
50 def runhook(name, cmd):
51 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
51 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
52 old = {}
52 old = {}
53 for k, v in args.items():
53 for k, v in args.items():
54 k = k.upper()
54 k = k.upper()
55 old[k] = os.environ.get(k, None)
55 old[k] = os.environ.get(k, None)
56 os.environ[k] = v
56 os.environ[k] = v
57
57
58 # Hooks run in the repository root
58 # Hooks run in the repository root
59 olddir = os.getcwd()
59 olddir = os.getcwd()
60 os.chdir(self.root)
60 os.chdir(self.root)
61 r = os.system(cmd)
61 r = os.system(cmd)
62 os.chdir(olddir)
62 os.chdir(olddir)
63
63
64 for k, v in old.items():
64 for k, v in old.items():
65 if v != None:
65 if v != None:
66 os.environ[k] = v
66 os.environ[k] = v
67 else:
67 else:
68 del os.environ[k]
68 del os.environ[k]
69
69
70 if r:
70 if r:
71 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
71 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
72 (name, r))
72 (name, r))
73 return False
73 return False
74 return True
74 return True
75
75
76 r = True
76 r = True
77 for hname, cmd in self.ui.configitems("hooks"):
77 for hname, cmd in self.ui.configitems("hooks"):
78 s = hname.split(".")
78 s = hname.split(".")
79 if s[0] == name and cmd:
79 if s[0] == name and cmd:
80 r = runhook(hname, cmd) and r
80 r = runhook(hname, cmd) and r
81 return r
81 return r
82
82
83 def tags(self):
83 def tags(self):
84 '''return a mapping of tag to node'''
84 '''return a mapping of tag to node'''
85 if not self.tagscache:
85 if not self.tagscache:
86 self.tagscache = {}
86 self.tagscache = {}
87 def addtag(self, k, n):
87 def addtag(self, k, n):
88 try:
88 try:
89 bin_n = bin(n)
89 bin_n = bin(n)
90 except TypeError:
90 except TypeError:
91 bin_n = ''
91 bin_n = ''
92 self.tagscache[k.strip()] = bin_n
92 self.tagscache[k.strip()] = bin_n
93
93
94 try:
94 try:
95 # read each head of the tags file, ending with the tip
95 # read each head of the tags file, ending with the tip
96 # and add each tag found to the map, with "newer" ones
96 # and add each tag found to the map, with "newer" ones
97 # taking precedence
97 # taking precedence
98 fl = self.file(".hgtags")
98 fl = self.file(".hgtags")
99 h = fl.heads()
99 h = fl.heads()
100 h.reverse()
100 h.reverse()
101 for r in h:
101 for r in h:
102 for l in fl.read(r).splitlines():
102 for l in fl.read(r).splitlines():
103 if l:
103 if l:
104 n, k = l.split(" ", 1)
104 n, k = l.split(" ", 1)
105 addtag(self, k, n)
105 addtag(self, k, n)
106 except KeyError:
106 except KeyError:
107 pass
107 pass
108
108
109 try:
109 try:
110 f = self.opener("localtags")
110 f = self.opener("localtags")
111 for l in f:
111 for l in f:
112 n, k = l.split(" ", 1)
112 n, k = l.split(" ", 1)
113 addtag(self, k, n)
113 addtag(self, k, n)
114 except IOError:
114 except IOError:
115 pass
115 pass
116
116
117 self.tagscache['tip'] = self.changelog.tip()
117 self.tagscache['tip'] = self.changelog.tip()
118
118
119 return self.tagscache
119 return self.tagscache
120
120
121 def tagslist(self):
121 def tagslist(self):
122 '''return a list of tags ordered by revision'''
122 '''return a list of tags ordered by revision'''
123 l = []
123 l = []
124 for t, n in self.tags().items():
124 for t, n in self.tags().items():
125 try:
125 try:
126 r = self.changelog.rev(n)
126 r = self.changelog.rev(n)
127 except:
127 except:
128 r = -2 # sort to the beginning of the list if unknown
128 r = -2 # sort to the beginning of the list if unknown
129 l.append((r,t,n))
129 l.append((r,t,n))
130 l.sort()
130 l.sort()
131 return [(t,n) for r,t,n in l]
131 return [(t,n) for r,t,n in l]
132
132
133 def nodetags(self, node):
133 def nodetags(self, node):
134 '''return the tags associated with a node'''
134 '''return the tags associated with a node'''
135 if not self.nodetagscache:
135 if not self.nodetagscache:
136 self.nodetagscache = {}
136 self.nodetagscache = {}
137 for t,n in self.tags().items():
137 for t,n in self.tags().items():
138 self.nodetagscache.setdefault(n,[]).append(t)
138 self.nodetagscache.setdefault(n,[]).append(t)
139 return self.nodetagscache.get(node, [])
139 return self.nodetagscache.get(node, [])
140
140
141 def lookup(self, key):
141 def lookup(self, key):
142 try:
142 try:
143 return self.tags()[key]
143 return self.tags()[key]
144 except KeyError:
144 except KeyError:
145 try:
145 try:
146 return self.changelog.lookup(key)
146 return self.changelog.lookup(key)
147 except:
147 except:
148 raise repo.RepoError(_("unknown revision '%s'") % key)
148 raise repo.RepoError(_("unknown revision '%s'") % key)
149
149
150 def dev(self):
150 def dev(self):
151 return os.stat(self.path).st_dev
151 return os.stat(self.path).st_dev
152
152
153 def local(self):
153 def local(self):
154 return True
154 return True
155
155
156 def join(self, f):
156 def join(self, f):
157 return os.path.join(self.path, f)
157 return os.path.join(self.path, f)
158
158
159 def wjoin(self, f):
159 def wjoin(self, f):
160 return os.path.join(self.root, f)
160 return os.path.join(self.root, f)
161
161
162 def file(self, f):
162 def file(self, f):
163 if f[0] == '/': f = f[1:]
163 if f[0] == '/': f = f[1:]
164 return filelog.filelog(self.opener, f)
164 return filelog.filelog(self.opener, f)
165
165
166 def getcwd(self):
166 def getcwd(self):
167 return self.dirstate.getcwd()
167 return self.dirstate.getcwd()
168
168
169 def wfile(self, f, mode='r'):
169 def wfile(self, f, mode='r'):
170 return self.wopener(f, mode)
170 return self.wopener(f, mode)
171
171
172 def wread(self, filename):
172 def wread(self, filename):
173 if self.encodepats == None:
173 if self.encodepats == None:
174 l = []
174 l = []
175 for pat, cmd in self.ui.configitems("encode"):
175 for pat, cmd in self.ui.configitems("encode"):
176 mf = util.matcher("", "/", [pat], [], [])[1]
176 mf = util.matcher("", "/", [pat], [], [])[1]
177 l.append((mf, cmd))
177 l.append((mf, cmd))
178 self.encodepats = l
178 self.encodepats = l
179
179
180 data = self.wopener(filename, 'r').read()
180 data = self.wopener(filename, 'r').read()
181
181
182 for mf, cmd in self.encodepats:
182 for mf, cmd in self.encodepats:
183 if mf(filename):
183 if mf(filename):
184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
185 data = util.filter(data, cmd)
185 data = util.filter(data, cmd)
186 break
186 break
187
187
188 return data
188 return data
189
189
190 def wwrite(self, filename, data, fd=None):
190 def wwrite(self, filename, data, fd=None):
191 if self.decodepats == None:
191 if self.decodepats == None:
192 l = []
192 l = []
193 for pat, cmd in self.ui.configitems("decode"):
193 for pat, cmd in self.ui.configitems("decode"):
194 mf = util.matcher("", "/", [pat], [], [])[1]
194 mf = util.matcher("", "/", [pat], [], [])[1]
195 l.append((mf, cmd))
195 l.append((mf, cmd))
196 self.decodepats = l
196 self.decodepats = l
197
197
198 for mf, cmd in self.decodepats:
198 for mf, cmd in self.decodepats:
199 if mf(filename):
199 if mf(filename):
200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
201 data = util.filter(data, cmd)
201 data = util.filter(data, cmd)
202 break
202 break
203
203
204 if fd:
204 if fd:
205 return fd.write(data)
205 return fd.write(data)
206 return self.wopener(filename, 'w').write(data)
206 return self.wopener(filename, 'w').write(data)
207
207
208 def transaction(self):
208 def transaction(self):
209 # save dirstate for undo
209 # save dirstate for undo
210 try:
210 try:
211 ds = self.opener("dirstate").read()
211 ds = self.opener("dirstate").read()
212 except IOError:
212 except IOError:
213 ds = ""
213 ds = ""
214 self.opener("journal.dirstate", "w").write(ds)
214 self.opener("journal.dirstate", "w").write(ds)
215
215
216 def after():
216 def after():
217 util.rename(self.join("journal"), self.join("undo"))
217 util.rename(self.join("journal"), self.join("undo"))
218 util.rename(self.join("journal.dirstate"),
218 util.rename(self.join("journal.dirstate"),
219 self.join("undo.dirstate"))
219 self.join("undo.dirstate"))
220
220
221 return transaction.transaction(self.ui.warn, self.opener,
221 return transaction.transaction(self.ui.warn, self.opener,
222 self.join("journal"), after)
222 self.join("journal"), after)
223
223
224 def recover(self):
224 def recover(self):
225 lock = self.lock()
225 lock = self.lock()
226 if os.path.exists(self.join("journal")):
226 if os.path.exists(self.join("journal")):
227 self.ui.status(_("rolling back interrupted transaction\n"))
227 self.ui.status(_("rolling back interrupted transaction\n"))
228 transaction.rollback(self.opener, self.join("journal"))
228 transaction.rollback(self.opener, self.join("journal"))
229 return True
229 return True
230 else:
230 else:
231 self.ui.warn(_("no interrupted transaction available\n"))
231 self.ui.warn(_("no interrupted transaction available\n"))
232 return False
232 return False
233
233
234 def undo(self):
234 def undo(self):
235 wlock = self.wlock()
235 wlock = self.wlock()
236 lock = self.lock()
236 lock = self.lock()
237 if os.path.exists(self.join("undo")):
237 if os.path.exists(self.join("undo")):
238 self.ui.status(_("rolling back last transaction\n"))
238 self.ui.status(_("rolling back last transaction\n"))
239 transaction.rollback(self.opener, self.join("undo"))
239 transaction.rollback(self.opener, self.join("undo"))
240 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
240 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
241 self.dirstate.read()
241 self.dirstate.read()
242 else:
242 else:
243 self.ui.warn(_("no undo information available\n"))
243 self.ui.warn(_("no undo information available\n"))
244
244
245 def lock(self, wait=1):
245 def lock(self, wait=1):
246 try:
246 try:
247 return lock.lock(self.join("lock"), 0)
247 return lock.lock(self.join("lock"), 0)
248 except lock.LockHeld, inst:
248 except lock.LockHeld, inst:
249 if wait:
249 if wait:
250 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
250 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
251 return lock.lock(self.join("lock"), wait)
251 return lock.lock(self.join("lock"), wait)
252 raise inst
252 raise inst
253
253
254 def wlock(self, wait=1):
254 def wlock(self, wait=1):
255 try:
255 try:
256 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
256 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
257 except lock.LockHeld, inst:
257 except lock.LockHeld, inst:
258 if not wait:
258 if not wait:
259 raise inst
259 raise inst
260 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
260 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
261 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
261 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
262 self.dirstate.read()
262 self.dirstate.read()
263 return wlock
263 return wlock
264
264
265 def rawcommit(self, files, text, user, date, p1=None, p2=None):
265 def rawcommit(self, files, text, user, date, p1=None, p2=None):
266 orig_parent = self.dirstate.parents()[0] or nullid
266 orig_parent = self.dirstate.parents()[0] or nullid
267 p1 = p1 or self.dirstate.parents()[0] or nullid
267 p1 = p1 or self.dirstate.parents()[0] or nullid
268 p2 = p2 or self.dirstate.parents()[1] or nullid
268 p2 = p2 or self.dirstate.parents()[1] or nullid
269 c1 = self.changelog.read(p1)
269 c1 = self.changelog.read(p1)
270 c2 = self.changelog.read(p2)
270 c2 = self.changelog.read(p2)
271 m1 = self.manifest.read(c1[0])
271 m1 = self.manifest.read(c1[0])
272 mf1 = self.manifest.readflags(c1[0])
272 mf1 = self.manifest.readflags(c1[0])
273 m2 = self.manifest.read(c2[0])
273 m2 = self.manifest.read(c2[0])
274 changed = []
274 changed = []
275
275
276 if orig_parent == p1:
276 if orig_parent == p1:
277 update_dirstate = 1
277 update_dirstate = 1
278 else:
278 else:
279 update_dirstate = 0
279 update_dirstate = 0
280
280
281 wlock = self.wlock()
281 wlock = self.wlock()
282 lock = self.lock()
282 lock = self.lock()
283 tr = self.transaction()
283 tr = self.transaction()
284 mm = m1.copy()
284 mm = m1.copy()
285 mfm = mf1.copy()
285 mfm = mf1.copy()
286 linkrev = self.changelog.count()
286 linkrev = self.changelog.count()
287 for f in files:
287 for f in files:
288 try:
288 try:
289 t = self.wread(f)
289 t = self.wread(f)
290 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
290 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
291 r = self.file(f)
291 r = self.file(f)
292 mfm[f] = tm
292 mfm[f] = tm
293
293
294 fp1 = m1.get(f, nullid)
294 fp1 = m1.get(f, nullid)
295 fp2 = m2.get(f, nullid)
295 fp2 = m2.get(f, nullid)
296
296
297 # is the same revision on two branches of a merge?
297 # is the same revision on two branches of a merge?
298 if fp2 == fp1:
298 if fp2 == fp1:
299 fp2 = nullid
299 fp2 = nullid
300
300
301 if fp2 != nullid:
301 if fp2 != nullid:
302 # is one parent an ancestor of the other?
302 # is one parent an ancestor of the other?
303 fpa = r.ancestor(fp1, fp2)
303 fpa = r.ancestor(fp1, fp2)
304 if fpa == fp1:
304 if fpa == fp1:
305 fp1, fp2 = fp2, nullid
305 fp1, fp2 = fp2, nullid
306 elif fpa == fp2:
306 elif fpa == fp2:
307 fp2 = nullid
307 fp2 = nullid
308
308
309 # is the file unmodified from the parent?
309 # is the file unmodified from the parent?
310 if t == r.read(fp1):
310 if t == r.read(fp1):
311 # record the proper existing parent in manifest
311 # record the proper existing parent in manifest
312 # no need to add a revision
312 # no need to add a revision
313 mm[f] = fp1
313 mm[f] = fp1
314 continue
314 continue
315
315
316 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
316 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
317 changed.append(f)
317 changed.append(f)
318 if update_dirstate:
318 if update_dirstate:
319 self.dirstate.update([f], "n")
319 self.dirstate.update([f], "n")
320 except IOError:
320 except IOError:
321 try:
321 try:
322 del mm[f]
322 del mm[f]
323 del mfm[f]
323 del mfm[f]
324 if update_dirstate:
324 if update_dirstate:
325 self.dirstate.forget([f])
325 self.dirstate.forget([f])
326 except:
326 except:
327 # deleted from p2?
327 # deleted from p2?
328 pass
328 pass
329
329
330 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
330 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
331 user = user or self.ui.username()
331 user = user or self.ui.username()
332 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
332 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
333 tr.close()
333 tr.close()
334 if update_dirstate:
334 if update_dirstate:
335 self.dirstate.setparents(n, nullid)
335 self.dirstate.setparents(n, nullid)
336
336
337 def commit(self, files = None, text = "", user = None, date = None,
337 def commit(self, files = None, text = "", user = None, date = None,
338 match = util.always, force=False):
338 match = util.always, force=False):
339 commit = []
339 commit = []
340 remove = []
340 remove = []
341 changed = []
341 changed = []
342
342
343 if files:
343 if files:
344 for f in files:
344 for f in files:
345 s = self.dirstate.state(f)
345 s = self.dirstate.state(f)
346 if s in 'nmai':
346 if s in 'nmai':
347 commit.append(f)
347 commit.append(f)
348 elif s == 'r':
348 elif s == 'r':
349 remove.append(f)
349 remove.append(f)
350 else:
350 else:
351 self.ui.warn(_("%s not tracked!\n") % f)
351 self.ui.warn(_("%s not tracked!\n") % f)
352 else:
352 else:
353 (c, a, d, u) = self.changes(match=match)
353 (c, a, d, u) = self.changes(match=match)
354 commit = c + a
354 commit = c + a
355 remove = d
355 remove = d
356
356
357 p1, p2 = self.dirstate.parents()
357 p1, p2 = self.dirstate.parents()
358 c1 = self.changelog.read(p1)
358 c1 = self.changelog.read(p1)
359 c2 = self.changelog.read(p2)
359 c2 = self.changelog.read(p2)
360 m1 = self.manifest.read(c1[0])
360 m1 = self.manifest.read(c1[0])
361 mf1 = self.manifest.readflags(c1[0])
361 mf1 = self.manifest.readflags(c1[0])
362 m2 = self.manifest.read(c2[0])
362 m2 = self.manifest.read(c2[0])
363
363
364 if not commit and not remove and not force and p2 == nullid:
364 if not commit and not remove and not force and p2 == nullid:
365 self.ui.status(_("nothing changed\n"))
365 self.ui.status(_("nothing changed\n"))
366 return None
366 return None
367
367
368 if not self.hook("precommit"):
368 if not self.hook("precommit"):
369 return None
369 return None
370
370
371 wlock = self.wlock()
371 wlock = self.wlock()
372 lock = self.lock()
372 lock = self.lock()
373 tr = self.transaction()
373 tr = self.transaction()
374
374
375 # check in files
375 # check in files
376 new = {}
376 new = {}
377 linkrev = self.changelog.count()
377 linkrev = self.changelog.count()
378 commit.sort()
378 commit.sort()
379 for f in commit:
379 for f in commit:
380 self.ui.note(f + "\n")
380 self.ui.note(f + "\n")
381 try:
381 try:
382 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
382 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
383 t = self.wread(f)
383 t = self.wread(f)
384 except IOError:
384 except IOError:
385 self.ui.warn(_("trouble committing %s!\n") % f)
385 self.ui.warn(_("trouble committing %s!\n") % f)
386 raise
386 raise
387
387
388 r = self.file(f)
388 r = self.file(f)
389
389
390 meta = {}
390 meta = {}
391 cp = self.dirstate.copied(f)
391 cp = self.dirstate.copied(f)
392 if cp:
392 if cp:
393 meta["copy"] = cp
393 meta["copy"] = cp
394 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
394 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
395 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
395 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
396 fp1, fp2 = nullid, nullid
396 fp1, fp2 = nullid, nullid
397 else:
397 else:
398 fp1 = m1.get(f, nullid)
398 fp1 = m1.get(f, nullid)
399 fp2 = m2.get(f, nullid)
399 fp2 = m2.get(f, nullid)
400
400
401 # is the same revision on two branches of a merge?
401 # is the same revision on two branches of a merge?
402 if fp2 == fp1:
402 if fp2 == fp1:
403 fp2 = nullid
403 fp2 = nullid
404
404
405 if fp2 != nullid:
405 if fp2 != nullid:
406 # is one parent an ancestor of the other?
406 # is one parent an ancestor of the other?
407 fpa = r.ancestor(fp1, fp2)
407 fpa = r.ancestor(fp1, fp2)
408 if fpa == fp1:
408 if fpa == fp1:
409 fp1, fp2 = fp2, nullid
409 fp1, fp2 = fp2, nullid
410 elif fpa == fp2:
410 elif fpa == fp2:
411 fp2 = nullid
411 fp2 = nullid
412
412
413 # is the file unmodified from the parent?
413 # is the file unmodified from the parent?
414 if not meta and t == r.read(fp1):
414 if not meta and t == r.read(fp1):
415 # record the proper existing parent in manifest
415 # record the proper existing parent in manifest
416 # no need to add a revision
416 # no need to add a revision
417 new[f] = fp1
417 new[f] = fp1
418 continue
418 continue
419
419
420 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
420 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
421 # remember what we've added so that we can later calculate
421 # remember what we've added so that we can later calculate
422 # the files to pull from a set of changesets
422 # the files to pull from a set of changesets
423 changed.append(f)
423 changed.append(f)
424
424
425 # update manifest
425 # update manifest
426 m1.update(new)
426 m1.update(new)
427 for f in remove:
427 for f in remove:
428 if f in m1:
428 if f in m1:
429 del m1[f]
429 del m1[f]
430 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
430 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
431 (new, remove))
431 (new, remove))
432
432
433 # add changeset
433 # add changeset
434 new = new.keys()
434 new = new.keys()
435 new.sort()
435 new.sort()
436
436
437 if not text:
437 if not text:
438 edittext = ""
438 edittext = ""
439 if p2 != nullid:
439 if p2 != nullid:
440 edittext += "HG: branch merge\n"
440 edittext += "HG: branch merge\n"
441 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
441 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
442 edittext += "".join(["HG: changed %s\n" % f for f in changed])
442 edittext += "".join(["HG: changed %s\n" % f for f in changed])
443 edittext += "".join(["HG: removed %s\n" % f for f in remove])
443 edittext += "".join(["HG: removed %s\n" % f for f in remove])
444 if not changed and not remove:
444 if not changed and not remove:
445 edittext += "HG: no files changed\n"
445 edittext += "HG: no files changed\n"
446 edittext = self.ui.edit(edittext)
446 edittext = self.ui.edit(edittext)
447 if not edittext.rstrip():
447 if not edittext.rstrip():
448 return None
448 return None
449 text = edittext
449 text = edittext
450
450
451 user = user or self.ui.username()
451 user = user or self.ui.username()
452 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
452 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
453 tr.close()
453 tr.close()
454
454
455 self.dirstate.setparents(n)
455 self.dirstate.setparents(n)
456 self.dirstate.update(new, "n")
456 self.dirstate.update(new, "n")
457 self.dirstate.forget(remove)
457 self.dirstate.forget(remove)
458
458
459 if not self.hook("commit", node=hex(n)):
459 if not self.hook("commit", node=hex(n)):
460 return None
460 return None
461 return n
461 return n
462
462
463 def walk(self, node=None, files=[], match=util.always):
463 def walk(self, node=None, files=[], match=util.always):
464 if node:
464 if node:
465 for fn in self.manifest.read(self.changelog.read(node)[0]):
465 for fn in self.manifest.read(self.changelog.read(node)[0]):
466 if match(fn): yield 'm', fn
466 if match(fn): yield 'm', fn
467 else:
467 else:
468 for src, fn in self.dirstate.walk(files, match):
468 for src, fn in self.dirstate.walk(files, match):
469 yield src, fn
469 yield src, fn
470
470
471 def changes(self, node1 = None, node2 = None, files = [],
471 def changes(self, node1 = None, node2 = None, files = [],
472 match = util.always):
472 match = util.always):
473 mf2, u = None, []
473 mf2, u = None, []
474
474
475 def fcmp(fn, mf):
475 def fcmp(fn, mf):
476 t1 = self.wread(fn)
476 t1 = self.wread(fn)
477 t2 = self.file(fn).read(mf.get(fn, nullid))
477 t2 = self.file(fn).read(mf.get(fn, nullid))
478 return cmp(t1, t2)
478 return cmp(t1, t2)
479
479
480 def mfmatches(node):
480 def mfmatches(node):
481 mf = dict(self.manifest.read(node))
481 mf = dict(self.manifest.read(node))
482 for fn in mf.keys():
482 for fn in mf.keys():
483 if not match(fn):
483 if not match(fn):
484 del mf[fn]
484 del mf[fn]
485 return mf
485 return mf
486
486
487 # are we comparing the working directory?
487 # are we comparing the working directory?
488 if not node2:
488 if not node2:
489 try:
489 try:
490 wlock = self.wlock(wait=0)
490 wlock = self.wlock(wait=0)
491 except lock.LockHeld:
491 except lock.LockHeld:
492 wlock = None
492 wlock = None
493 l, c, a, d, u = self.dirstate.changes(files, match)
493 l, c, a, d, u = self.dirstate.changes(files, match)
494
494
495 # are we comparing working dir against its parent?
495 # are we comparing working dir against its parent?
496 if not node1:
496 if not node1:
497 if l:
497 if l:
498 # do a full compare of any files that might have changed
498 # do a full compare of any files that might have changed
499 change = self.changelog.read(self.dirstate.parents()[0])
499 change = self.changelog.read(self.dirstate.parents()[0])
500 mf2 = mfmatches(change[0])
500 mf2 = mfmatches(change[0])
501 for f in l:
501 for f in l:
502 if fcmp(f, mf2):
502 if fcmp(f, mf2):
503 c.append(f)
503 c.append(f)
504 elif wlock is not None:
504 elif wlock is not None:
505 self.dirstate.update([f], "n")
505 self.dirstate.update([f], "n")
506
506
507 for l in c, a, d, u:
507 for l in c, a, d, u:
508 l.sort()
508 l.sort()
509
509
510 return (c, a, d, u)
510 return (c, a, d, u)
511
511
512 # are we comparing working dir against non-tip?
512 # are we comparing working dir against non-tip?
513 # generate a pseudo-manifest for the working dir
513 # generate a pseudo-manifest for the working dir
514 if not node2:
514 if not node2:
515 if not mf2:
515 if not mf2:
516 change = self.changelog.read(self.dirstate.parents()[0])
516 change = self.changelog.read(self.dirstate.parents()[0])
517 mf2 = mfmatches(change[0])
517 mf2 = mfmatches(change[0])
518 for f in a + c + l:
518 for f in a + c + l:
519 mf2[f] = ""
519 mf2[f] = ""
520 for f in d:
520 for f in d:
521 if f in mf2: del mf2[f]
521 if f in mf2: del mf2[f]
522 else:
522 else:
523 change = self.changelog.read(node2)
523 change = self.changelog.read(node2)
524 mf2 = mfmatches(change[0])
524 mf2 = mfmatches(change[0])
525
525
526 # flush lists from dirstate before comparing manifests
526 # flush lists from dirstate before comparing manifests
527 c, a = [], []
527 c, a = [], []
528
528
529 change = self.changelog.read(node1)
529 change = self.changelog.read(node1)
530 mf1 = mfmatches(change[0])
530 mf1 = mfmatches(change[0])
531
531
532 for fn in mf2:
532 for fn in mf2:
533 if mf1.has_key(fn):
533 if mf1.has_key(fn):
534 if mf1[fn] != mf2[fn]:
534 if mf1[fn] != mf2[fn]:
535 if mf2[fn] != "" or fcmp(fn, mf1):
535 if mf2[fn] != "" or fcmp(fn, mf1):
536 c.append(fn)
536 c.append(fn)
537 del mf1[fn]
537 del mf1[fn]
538 else:
538 else:
539 a.append(fn)
539 a.append(fn)
540
540
541 d = mf1.keys()
541 d = mf1.keys()
542
542
543 for l in c, a, d, u:
543 for l in c, a, d, u:
544 l.sort()
544 l.sort()
545
545
546 return (c, a, d, u)
546 return (c, a, d, u)
547
547
548 def add(self, list):
548 def add(self, list):
549 wlock = self.wlock()
549 wlock = self.wlock()
550 for f in list:
550 for f in list:
551 p = self.wjoin(f)
551 p = self.wjoin(f)
552 if not os.path.exists(p):
552 if not os.path.exists(p):
553 self.ui.warn(_("%s does not exist!\n") % f)
553 self.ui.warn(_("%s does not exist!\n") % f)
554 elif not os.path.isfile(p):
554 elif not os.path.isfile(p):
555 self.ui.warn(_("%s not added: only files supported currently\n") % f)
555 self.ui.warn(_("%s not added: only files supported currently\n") % f)
556 elif self.dirstate.state(f) in 'an':
556 elif self.dirstate.state(f) in 'an':
557 self.ui.warn(_("%s already tracked!\n") % f)
557 self.ui.warn(_("%s already tracked!\n") % f)
558 else:
558 else:
559 self.dirstate.update([f], "a")
559 self.dirstate.update([f], "a")
560
560
561 def forget(self, list):
561 def forget(self, list):
562 wlock = self.wlock()
562 wlock = self.wlock()
563 for f in list:
563 for f in list:
564 if self.dirstate.state(f) not in 'ai':
564 if self.dirstate.state(f) not in 'ai':
565 self.ui.warn(_("%s not added!\n") % f)
565 self.ui.warn(_("%s not added!\n") % f)
566 else:
566 else:
567 self.dirstate.forget([f])
567 self.dirstate.forget([f])
568
568
569 def remove(self, list, unlink=False):
569 def remove(self, list, unlink=False):
570 if unlink:
570 if unlink:
571 for f in list:
571 for f in list:
572 try:
572 try:
573 util.unlink(self.wjoin(f))
573 util.unlink(self.wjoin(f))
574 except OSError, inst:
574 except OSError, inst:
575 if inst.errno != errno.ENOENT: raise
575 if inst.errno != errno.ENOENT: raise
576 wlock = self.wlock()
576 wlock = self.wlock()
577 for f in list:
577 for f in list:
578 p = self.wjoin(f)
578 p = self.wjoin(f)
579 if os.path.exists(p):
579 if os.path.exists(p):
580 self.ui.warn(_("%s still exists!\n") % f)
580 self.ui.warn(_("%s still exists!\n") % f)
581 elif self.dirstate.state(f) == 'a':
581 elif self.dirstate.state(f) == 'a':
582 self.ui.warn(_("%s never committed!\n") % f)
582 self.ui.warn(_("%s never committed!\n") % f)
583 self.dirstate.forget([f])
583 self.dirstate.forget([f])
584 elif f not in self.dirstate:
584 elif f not in self.dirstate:
585 self.ui.warn(_("%s not tracked!\n") % f)
585 self.ui.warn(_("%s not tracked!\n") % f)
586 else:
586 else:
587 self.dirstate.update([f], "r")
587 self.dirstate.update([f], "r")
588
588
589 def undelete(self, list):
589 def undelete(self, list):
590 p = self.dirstate.parents()[0]
590 p = self.dirstate.parents()[0]
591 mn = self.changelog.read(p)[0]
591 mn = self.changelog.read(p)[0]
592 mf = self.manifest.readflags(mn)
592 mf = self.manifest.readflags(mn)
593 m = self.manifest.read(mn)
593 m = self.manifest.read(mn)
594 wlock = self.wlock()
594 wlock = self.wlock()
595 for f in list:
595 for f in list:
596 if self.dirstate.state(f) not in "r":
596 if self.dirstate.state(f) not in "r":
597 self.ui.warn("%s not removed!\n" % f)
597 self.ui.warn("%s not removed!\n" % f)
598 else:
598 else:
599 t = self.file(f).read(m[f])
599 t = self.file(f).read(m[f])
600 self.wwrite(f, t)
600 self.wwrite(f, t)
601 util.set_exec(self.wjoin(f), mf[f])
601 util.set_exec(self.wjoin(f), mf[f])
602 self.dirstate.update([f], "n")
602 self.dirstate.update([f], "n")
603
603
604 def copy(self, source, dest):
604 def copy(self, source, dest):
605 p = self.wjoin(dest)
605 p = self.wjoin(dest)
606 if not os.path.exists(p):
606 if not os.path.exists(p):
607 self.ui.warn(_("%s does not exist!\n") % dest)
607 self.ui.warn(_("%s does not exist!\n") % dest)
608 elif not os.path.isfile(p):
608 elif not os.path.isfile(p):
609 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
609 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
610 else:
610 else:
611 wlock = self.wlock()
611 wlock = self.wlock()
612 if self.dirstate.state(dest) == '?':
612 if self.dirstate.state(dest) == '?':
613 self.dirstate.update([dest], "a")
613 self.dirstate.update([dest], "a")
614 self.dirstate.copy(source, dest)
614 self.dirstate.copy(source, dest)
615
615
616 def heads(self):
616 def heads(self, start=nullid):
617 return self.changelog.heads()
617 heads = self.changelog.heads(start)
618 # sort the output in rev descending order
619 heads = [(-self.changelog.rev(h), h) for h in heads]
620 heads.sort()
621 return [n for (r, n) in heads]
618
622
619 # branchlookup returns a dict giving a list of branches for
623 # branchlookup returns a dict giving a list of branches for
620 # each head. A branch is defined as the tag of a node or
624 # each head. A branch is defined as the tag of a node or
621 # the branch of the node's parents. If a node has multiple
625 # the branch of the node's parents. If a node has multiple
622 # branch tags, tags are eliminated if they are visible from other
626 # branch tags, tags are eliminated if they are visible from other
623 # branch tags.
627 # branch tags.
624 #
628 #
625 # So, for this graph: a->b->c->d->e
629 # So, for this graph: a->b->c->d->e
626 # \ /
630 # \ /
627 # aa -----/
631 # aa -----/
628 # a has tag 2.6.12
632 # a has tag 2.6.12
629 # d has tag 2.6.13
633 # d has tag 2.6.13
630 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
634 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
631 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
635 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
632 # from the list.
636 # from the list.
633 #
637 #
634 # It is possible that more than one head will have the same branch tag.
638 # It is possible that more than one head will have the same branch tag.
635 # callers need to check the result for multiple heads under the same
639 # callers need to check the result for multiple heads under the same
636 # branch tag if that is a problem for them (ie checkout of a specific
640 # branch tag if that is a problem for them (ie checkout of a specific
637 # branch).
641 # branch).
638 #
642 #
639 # passing in a specific branch will limit the depth of the search
643 # passing in a specific branch will limit the depth of the search
640 # through the parents. It won't limit the branches returned in the
644 # through the parents. It won't limit the branches returned in the
641 # result though.
645 # result though.
642 def branchlookup(self, heads=None, branch=None):
646 def branchlookup(self, heads=None, branch=None):
643 if not heads:
647 if not heads:
644 heads = self.heads()
648 heads = self.heads()
645 headt = [ h for h in heads ]
649 headt = [ h for h in heads ]
646 chlog = self.changelog
650 chlog = self.changelog
647 branches = {}
651 branches = {}
648 merges = []
652 merges = []
649 seenmerge = {}
653 seenmerge = {}
650
654
651 # traverse the tree once for each head, recording in the branches
655 # traverse the tree once for each head, recording in the branches
652 # dict which tags are visible from this head. The branches
656 # dict which tags are visible from this head. The branches
653 # dict also records which tags are visible from each tag
657 # dict also records which tags are visible from each tag
654 # while we traverse.
658 # while we traverse.
655 while headt or merges:
659 while headt or merges:
656 if merges:
660 if merges:
657 n, found = merges.pop()
661 n, found = merges.pop()
658 visit = [n]
662 visit = [n]
659 else:
663 else:
660 h = headt.pop()
664 h = headt.pop()
661 visit = [h]
665 visit = [h]
662 found = [h]
666 found = [h]
663 seen = {}
667 seen = {}
664 while visit:
668 while visit:
665 n = visit.pop()
669 n = visit.pop()
666 if n in seen:
670 if n in seen:
667 continue
671 continue
668 pp = chlog.parents(n)
672 pp = chlog.parents(n)
669 tags = self.nodetags(n)
673 tags = self.nodetags(n)
670 if tags:
674 if tags:
671 for x in tags:
675 for x in tags:
672 if x == 'tip':
676 if x == 'tip':
673 continue
677 continue
674 for f in found:
678 for f in found:
675 branches.setdefault(f, {})[n] = 1
679 branches.setdefault(f, {})[n] = 1
676 branches.setdefault(n, {})[n] = 1
680 branches.setdefault(n, {})[n] = 1
677 break
681 break
678 if n not in found:
682 if n not in found:
679 found.append(n)
683 found.append(n)
680 if branch in tags:
684 if branch in tags:
681 continue
685 continue
682 seen[n] = 1
686 seen[n] = 1
683 if pp[1] != nullid and n not in seenmerge:
687 if pp[1] != nullid and n not in seenmerge:
684 merges.append((pp[1], [x for x in found]))
688 merges.append((pp[1], [x for x in found]))
685 seenmerge[n] = 1
689 seenmerge[n] = 1
686 if pp[0] != nullid:
690 if pp[0] != nullid:
687 visit.append(pp[0])
691 visit.append(pp[0])
688 # traverse the branches dict, eliminating branch tags from each
692 # traverse the branches dict, eliminating branch tags from each
689 # head that are visible from another branch tag for that head.
693 # head that are visible from another branch tag for that head.
690 out = {}
694 out = {}
691 viscache = {}
695 viscache = {}
692 for h in heads:
696 for h in heads:
693 def visible(node):
697 def visible(node):
694 if node in viscache:
698 if node in viscache:
695 return viscache[node]
699 return viscache[node]
696 ret = {}
700 ret = {}
697 visit = [node]
701 visit = [node]
698 while visit:
702 while visit:
699 x = visit.pop()
703 x = visit.pop()
700 if x in viscache:
704 if x in viscache:
701 ret.update(viscache[x])
705 ret.update(viscache[x])
702 elif x not in ret:
706 elif x not in ret:
703 ret[x] = 1
707 ret[x] = 1
704 if x in branches:
708 if x in branches:
705 visit[len(visit):] = branches[x].keys()
709 visit[len(visit):] = branches[x].keys()
706 viscache[node] = ret
710 viscache[node] = ret
707 return ret
711 return ret
708 if h not in branches:
712 if h not in branches:
709 continue
713 continue
710 # O(n^2), but somewhat limited. This only searches the
714 # O(n^2), but somewhat limited. This only searches the
711 # tags visible from a specific head, not all the tags in the
715 # tags visible from a specific head, not all the tags in the
712 # whole repo.
716 # whole repo.
713 for b in branches[h]:
717 for b in branches[h]:
714 vis = False
718 vis = False
715 for bb in branches[h].keys():
719 for bb in branches[h].keys():
716 if b != bb:
720 if b != bb:
717 if b in visible(bb):
721 if b in visible(bb):
718 vis = True
722 vis = True
719 break
723 break
720 if not vis:
724 if not vis:
721 l = out.setdefault(h, [])
725 l = out.setdefault(h, [])
722 l[len(l):] = self.nodetags(b)
726 l[len(l):] = self.nodetags(b)
723 return out
727 return out
724
728
725 def branches(self, nodes):
729 def branches(self, nodes):
726 if not nodes: nodes = [self.changelog.tip()]
730 if not nodes: nodes = [self.changelog.tip()]
727 b = []
731 b = []
728 for n in nodes:
732 for n in nodes:
729 t = n
733 t = n
730 while n:
734 while n:
731 p = self.changelog.parents(n)
735 p = self.changelog.parents(n)
732 if p[1] != nullid or p[0] == nullid:
736 if p[1] != nullid or p[0] == nullid:
733 b.append((t, n, p[0], p[1]))
737 b.append((t, n, p[0], p[1]))
734 break
738 break
735 n = p[0]
739 n = p[0]
736 return b
740 return b
737
741
738 def between(self, pairs):
742 def between(self, pairs):
739 r = []
743 r = []
740
744
741 for top, bottom in pairs:
745 for top, bottom in pairs:
742 n, l, i = top, [], 0
746 n, l, i = top, [], 0
743 f = 1
747 f = 1
744
748
745 while n != bottom:
749 while n != bottom:
746 p = self.changelog.parents(n)[0]
750 p = self.changelog.parents(n)[0]
747 if i == f:
751 if i == f:
748 l.append(n)
752 l.append(n)
749 f = f * 2
753 f = f * 2
750 n = p
754 n = p
751 i += 1
755 i += 1
752
756
753 r.append(l)
757 r.append(l)
754
758
755 return r
759 return r
756
760
757 def findincoming(self, remote, base=None, heads=None):
761 def findincoming(self, remote, base=None, heads=None):
758 m = self.changelog.nodemap
762 m = self.changelog.nodemap
759 search = []
763 search = []
760 fetch = {}
764 fetch = {}
761 seen = {}
765 seen = {}
762 seenbranch = {}
766 seenbranch = {}
763 if base == None:
767 if base == None:
764 base = {}
768 base = {}
765
769
766 # assume we're closer to the tip than the root
770 # assume we're closer to the tip than the root
767 # and start by examining the heads
771 # and start by examining the heads
768 self.ui.status(_("searching for changes\n"))
772 self.ui.status(_("searching for changes\n"))
769
773
770 if not heads:
774 if not heads:
771 heads = remote.heads()
775 heads = remote.heads()
772
776
773 unknown = []
777 unknown = []
774 for h in heads:
778 for h in heads:
775 if h not in m:
779 if h not in m:
776 unknown.append(h)
780 unknown.append(h)
777 else:
781 else:
778 base[h] = 1
782 base[h] = 1
779
783
780 if not unknown:
784 if not unknown:
781 return None
785 return None
782
786
783 rep = {}
787 rep = {}
784 reqcnt = 0
788 reqcnt = 0
785
789
786 # search through remote branches
790 # search through remote branches
787 # a 'branch' here is a linear segment of history, with four parts:
791 # a 'branch' here is a linear segment of history, with four parts:
788 # head, root, first parent, second parent
792 # head, root, first parent, second parent
789 # (a branch always has two parents (or none) by definition)
793 # (a branch always has two parents (or none) by definition)
790 unknown = remote.branches(unknown)
794 unknown = remote.branches(unknown)
791 while unknown:
795 while unknown:
792 r = []
796 r = []
793 while unknown:
797 while unknown:
794 n = unknown.pop(0)
798 n = unknown.pop(0)
795 if n[0] in seen:
799 if n[0] in seen:
796 continue
800 continue
797
801
798 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
802 self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
799 if n[0] == nullid:
803 if n[0] == nullid:
800 break
804 break
801 if n in seenbranch:
805 if n in seenbranch:
802 self.ui.debug(_("branch already found\n"))
806 self.ui.debug(_("branch already found\n"))
803 continue
807 continue
804 if n[1] and n[1] in m: # do we know the base?
808 if n[1] and n[1] in m: # do we know the base?
805 self.ui.debug(_("found incomplete branch %s:%s\n")
809 self.ui.debug(_("found incomplete branch %s:%s\n")
806 % (short(n[0]), short(n[1])))
810 % (short(n[0]), short(n[1])))
807 search.append(n) # schedule branch range for scanning
811 search.append(n) # schedule branch range for scanning
808 seenbranch[n] = 1
812 seenbranch[n] = 1
809 else:
813 else:
810 if n[1] not in seen and n[1] not in fetch:
814 if n[1] not in seen and n[1] not in fetch:
811 if n[2] in m and n[3] in m:
815 if n[2] in m and n[3] in m:
812 self.ui.debug(_("found new changeset %s\n") %
816 self.ui.debug(_("found new changeset %s\n") %
813 short(n[1]))
817 short(n[1]))
814 fetch[n[1]] = 1 # earliest unknown
818 fetch[n[1]] = 1 # earliest unknown
815 base[n[2]] = 1 # latest known
819 base[n[2]] = 1 # latest known
816 continue
820 continue
817
821
818 for a in n[2:4]:
822 for a in n[2:4]:
819 if a not in rep:
823 if a not in rep:
820 r.append(a)
824 r.append(a)
821 rep[a] = 1
825 rep[a] = 1
822
826
823 seen[n[0]] = 1
827 seen[n[0]] = 1
824
828
825 if r:
829 if r:
826 reqcnt += 1
830 reqcnt += 1
827 self.ui.debug(_("request %d: %s\n") %
831 self.ui.debug(_("request %d: %s\n") %
828 (reqcnt, " ".join(map(short, r))))
832 (reqcnt, " ".join(map(short, r))))
829 for p in range(0, len(r), 10):
833 for p in range(0, len(r), 10):
830 for b in remote.branches(r[p:p+10]):
834 for b in remote.branches(r[p:p+10]):
831 self.ui.debug(_("received %s:%s\n") %
835 self.ui.debug(_("received %s:%s\n") %
832 (short(b[0]), short(b[1])))
836 (short(b[0]), short(b[1])))
833 if b[0] in m:
837 if b[0] in m:
834 self.ui.debug(_("found base node %s\n") % short(b[0]))
838 self.ui.debug(_("found base node %s\n") % short(b[0]))
835 base[b[0]] = 1
839 base[b[0]] = 1
836 elif b[0] not in seen:
840 elif b[0] not in seen:
837 unknown.append(b)
841 unknown.append(b)
838
842
839 # do binary search on the branches we found
843 # do binary search on the branches we found
840 while search:
844 while search:
841 n = search.pop(0)
845 n = search.pop(0)
842 reqcnt += 1
846 reqcnt += 1
843 l = remote.between([(n[0], n[1])])[0]
847 l = remote.between([(n[0], n[1])])[0]
844 l.append(n[1])
848 l.append(n[1])
845 p = n[0]
849 p = n[0]
846 f = 1
850 f = 1
847 for i in l:
851 for i in l:
848 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
852 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
849 if i in m:
853 if i in m:
850 if f <= 2:
854 if f <= 2:
851 self.ui.debug(_("found new branch changeset %s\n") %
855 self.ui.debug(_("found new branch changeset %s\n") %
852 short(p))
856 short(p))
853 fetch[p] = 1
857 fetch[p] = 1
854 base[i] = 1
858 base[i] = 1
855 else:
859 else:
856 self.ui.debug(_("narrowed branch search to %s:%s\n")
860 self.ui.debug(_("narrowed branch search to %s:%s\n")
857 % (short(p), short(i)))
861 % (short(p), short(i)))
858 search.append((p, i))
862 search.append((p, i))
859 break
863 break
860 p, f = i, f * 2
864 p, f = i, f * 2
861
865
862 # sanity check our fetch list
866 # sanity check our fetch list
863 for f in fetch.keys():
867 for f in fetch.keys():
864 if f in m:
868 if f in m:
865 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
869 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
866
870
867 if base.keys() == [nullid]:
871 if base.keys() == [nullid]:
868 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
872 self.ui.warn(_("warning: pulling from an unrelated repository!\n"))
869
873
870 self.ui.note(_("found new changesets starting at ") +
874 self.ui.note(_("found new changesets starting at ") +
871 " ".join([short(f) for f in fetch]) + "\n")
875 " ".join([short(f) for f in fetch]) + "\n")
872
876
873 self.ui.debug(_("%d total queries\n") % reqcnt)
877 self.ui.debug(_("%d total queries\n") % reqcnt)
874
878
875 return fetch.keys()
879 return fetch.keys()
876
880
877 def findoutgoing(self, remote, base=None, heads=None):
881 def findoutgoing(self, remote, base=None, heads=None):
878 if base == None:
882 if base == None:
879 base = {}
883 base = {}
880 self.findincoming(remote, base, heads)
884 self.findincoming(remote, base, heads)
881
885
882 self.ui.debug(_("common changesets up to ")
886 self.ui.debug(_("common changesets up to ")
883 + " ".join(map(short, base.keys())) + "\n")
887 + " ".join(map(short, base.keys())) + "\n")
884
888
885 remain = dict.fromkeys(self.changelog.nodemap)
889 remain = dict.fromkeys(self.changelog.nodemap)
886
890
887 # prune everything remote has from the tree
891 # prune everything remote has from the tree
888 del remain[nullid]
892 del remain[nullid]
889 remove = base.keys()
893 remove = base.keys()
890 while remove:
894 while remove:
891 n = remove.pop(0)
895 n = remove.pop(0)
892 if n in remain:
896 if n in remain:
893 del remain[n]
897 del remain[n]
894 for p in self.changelog.parents(n):
898 for p in self.changelog.parents(n):
895 remove.append(p)
899 remove.append(p)
896
900
897 # find every node whose parents have been pruned
901 # find every node whose parents have been pruned
898 subset = []
902 subset = []
899 for n in remain:
903 for n in remain:
900 p1, p2 = self.changelog.parents(n)
904 p1, p2 = self.changelog.parents(n)
901 if p1 not in remain and p2 not in remain:
905 if p1 not in remain and p2 not in remain:
902 subset.append(n)
906 subset.append(n)
903
907
904 # this is the set of all roots we have to push
908 # this is the set of all roots we have to push
905 return subset
909 return subset
906
910
907 def pull(self, remote, heads = None):
911 def pull(self, remote, heads = None):
908 lock = self.lock()
912 lock = self.lock()
909
913
910 # if we have an empty repo, fetch everything
914 # if we have an empty repo, fetch everything
911 if self.changelog.tip() == nullid:
915 if self.changelog.tip() == nullid:
912 self.ui.status(_("requesting all changes\n"))
916 self.ui.status(_("requesting all changes\n"))
913 fetch = [nullid]
917 fetch = [nullid]
914 else:
918 else:
915 fetch = self.findincoming(remote)
919 fetch = self.findincoming(remote)
916
920
917 if not fetch:
921 if not fetch:
918 self.ui.status(_("no changes found\n"))
922 self.ui.status(_("no changes found\n"))
919 return 1
923 return 1
920
924
921 if heads is None:
925 if heads is None:
922 cg = remote.changegroup(fetch)
926 cg = remote.changegroup(fetch)
923 else:
927 else:
924 cg = remote.changegroupsubset(fetch, heads)
928 cg = remote.changegroupsubset(fetch, heads)
925 return self.addchangegroup(cg)
929 return self.addchangegroup(cg)
926
930
927 def push(self, remote, force=False):
931 def push(self, remote, force=False):
928 lock = remote.lock()
932 lock = remote.lock()
929
933
930 base = {}
934 base = {}
931 heads = remote.heads()
935 heads = remote.heads()
932 inc = self.findincoming(remote, base, heads)
936 inc = self.findincoming(remote, base, heads)
933 if not force and inc:
937 if not force and inc:
934 self.ui.warn(_("abort: unsynced remote changes!\n"))
938 self.ui.warn(_("abort: unsynced remote changes!\n"))
935 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
939 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
936 return 1
940 return 1
937
941
938 update = self.findoutgoing(remote, base)
942 update = self.findoutgoing(remote, base)
939 if not update:
943 if not update:
940 self.ui.status(_("no changes found\n"))
944 self.ui.status(_("no changes found\n"))
941 return 1
945 return 1
942 elif not force:
946 elif not force:
943 if len(heads) < len(self.changelog.heads()):
947 if len(heads) < len(self.changelog.heads()):
944 self.ui.warn(_("abort: push creates new remote branches!\n"))
948 self.ui.warn(_("abort: push creates new remote branches!\n"))
945 self.ui.status(_("(did you forget to merge?"
949 self.ui.status(_("(did you forget to merge?"
946 " use push -f to force)\n"))
950 " use push -f to force)\n"))
947 return 1
951 return 1
948
952
949 cg = self.changegroup(update)
953 cg = self.changegroup(update)
950 return remote.addchangegroup(cg)
954 return remote.addchangegroup(cg)
951
955
    def changegroupsubset(self, bases, heads):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        bases and heads are lists of changelog node ids; the return value
        is a util.chunkbuffer wrapping the serialized changegroup stream
        (changelog group, then manifest group, then one length-prefixed
        group per changed file, terminated by a zero-length chunk).
        """

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            # Walk known nodes in revision order, flooding "has" status to
            # all their ancestors, then drop every "has" node from msngset.
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # the inner function.
        def filenode_collector(changedfiles):
            # next_rev is a one-element list so the closure can mutate it
            # (Python 2 has no 'nonlocal').
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming the a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file in we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we now the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up the a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all theses utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                prune_filenodes(fname, filerevlog)
                msng_filenode_lst = msng_filenode_set[fname].keys()
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield struct.pack(">l", len(fname) + 4) + fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield struct.pack(">l", 0)

        return util.chunkbuffer(gengroup())
1213
1217
    def changegroup(self, basenodes):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        basenodes is a list of changelog node ids the recipient already
        has; everything between them and our heads is sent.  Returns a
        util.chunkbuffer wrapping the serialized stream: changelog group,
        manifest group, then a length-prefixed group per changed file,
        terminated by a zero-length chunk.
        """
        cl = self.changelog
        # All changesets from basenodes up to our heads are outgoing.
        nodes = cl.nodesbetween(basenodes, None)[0]
        # Ersatz set of the outgoing revision numbers, used to filter
        # manifest and file revisions by their changelog link.
        revset = dict.fromkeys([cl.rev(n) for n in nodes])

        # A changenode's owning changenode is itself.
        def identity(x):
            return x

        # Yield, in revision order, every node of revlog whose linked
        # changeset is part of the outgoing set.
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # Callback factory: records in changedfileset every file touched
        # by an outgoing changeset, as the changelog group is generated.
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # Lookup factory: maps a revlog node back to its owning
        # changelog node via the linkrev.
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            # Changelog group first; the collector callback fills in
            # changedfiles as a side effect.
            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            # Then the manifest group.
            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            # Then one group per changed file, each preceded by its
            # length-prefixed filename; empty files are skipped entirely.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield struct.pack(">l", len(fname) + 4) + fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            # Zero-length chunk terminates the stream.
            yield struct.pack(">l", 0)

        return util.chunkbuffer(gengroup())
1273
1277
    def addchangegroup(self, source):
        """Apply a changegroup read from *source* to this repository.

        source is a file-like object producing the stream emitted by
        changegroup()/changegroupsubset(): changelog group, manifest
        group, then length-prefixed per-file groups.  Adds everything
        inside a single transaction, reports progress on self.ui, and
        fires the "changegroup" and "commit" hooks.  Returns 1 if the
        changegroup hook fails, otherwise None.
        """

        # Read one length-prefixed chunk; "" signals end of a group.
        def getchunk():
            d = source.read(4)
            if not d: return ""
            l = struct.unpack(">l", d)[0]
            # The length includes its own 4 bytes, so <= 4 is the
            # empty terminator chunk.
            if l <= 4: return ""
            d = source.read(l - 4)
            if len(d) < l - 4:
                raise repo.RepoError(_("premature EOF reading chunk"
                                       " (got %d bytes, expected %d)")
                                     % (len(d), l - 4))
            return d

        # Yield chunks until the group terminator.
        def getgroup():
            while 1:
                c = getchunk()
                if not c: break
                yield c

        # linkrev callback for changelog entries: each new changeset
        # links to its own (next) revision number.
        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return self.changelog.count()

        # linkrev callback for manifest/file entries: link to the
        # changelog revision of the owning changeset.
        def revmap(x):
            return self.changelog.rev(x)

        if not source: return
        changesets = files = revisions = 0

        tr = self.transaction()

        oldheads = len(self.changelog.heads())

        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        cnr, cor = map(self.changelog.rev, (cn, co))
        if cn == nullid:
            cnr = cor
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status(_("adding file changes\n"))
        while 1:
            # Each file group is preceded by its filename chunk; an
            # empty chunk ends the stream.
            f = getchunk()
            if not f: break
            self.ui.debug(_("adding %s revisions\n") % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads > oldheads:
            heads = _(" (+%d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        # Commit the transaction before running hooks, so hooks see
        # the new data.
        tr.close()

        if changesets > 0:
            if not self.hook("changegroup",
                             node=hex(self.changelog.node(cor+1))):
                self.ui.warn(_("abort: changegroup hook returned failure!\n"))
                return 1

            # One "commit" hook per newly added changeset.
            for i in range(cor + 1, cnr + 1):
                self.hook("commit", node=hex(self.changelog.node(i)))

        return
1355
1359
1356 def update(self, node, allow=False, force=False, choose=None,
1360 def update(self, node, allow=False, force=False, choose=None,
1357 moddirstate=True):
1361 moddirstate=True):
1358 pl = self.dirstate.parents()
1362 pl = self.dirstate.parents()
1359 if not force and pl[1] != nullid:
1363 if not force and pl[1] != nullid:
1360 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1364 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1361 return 1
1365 return 1
1362
1366
1363 p1, p2 = pl[0], node
1367 p1, p2 = pl[0], node
1364 pa = self.changelog.ancestor(p1, p2)
1368 pa = self.changelog.ancestor(p1, p2)
1365 m1n = self.changelog.read(p1)[0]
1369 m1n = self.changelog.read(p1)[0]
1366 m2n = self.changelog.read(p2)[0]
1370 m2n = self.changelog.read(p2)[0]
1367 man = self.manifest.ancestor(m1n, m2n)
1371 man = self.manifest.ancestor(m1n, m2n)
1368 m1 = self.manifest.read(m1n)
1372 m1 = self.manifest.read(m1n)
1369 mf1 = self.manifest.readflags(m1n)
1373 mf1 = self.manifest.readflags(m1n)
1370 m2 = self.manifest.read(m2n)
1374 m2 = self.manifest.read(m2n)
1371 mf2 = self.manifest.readflags(m2n)
1375 mf2 = self.manifest.readflags(m2n)
1372 ma = self.manifest.read(man)
1376 ma = self.manifest.read(man)
1373 mfa = self.manifest.readflags(man)
1377 mfa = self.manifest.readflags(man)
1374
1378
1375 (c, a, d, u) = self.changes()
1379 (c, a, d, u) = self.changes()
1376
1380
1377 # is this a jump, or a merge? i.e. is there a linear path
1381 # is this a jump, or a merge? i.e. is there a linear path
1378 # from p1 to p2?
1382 # from p1 to p2?
1379 linear_path = (pa == p1 or pa == p2)
1383 linear_path = (pa == p1 or pa == p2)
1380
1384
1381 # resolve the manifest to determine which files
1385 # resolve the manifest to determine which files
1382 # we care about merging
1386 # we care about merging
1383 self.ui.note(_("resolving manifests\n"))
1387 self.ui.note(_("resolving manifests\n"))
1384 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1388 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1385 (force, allow, moddirstate, linear_path))
1389 (force, allow, moddirstate, linear_path))
1386 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1390 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1387 (short(man), short(m1n), short(m2n)))
1391 (short(man), short(m1n), short(m2n)))
1388
1392
1389 merge = {}
1393 merge = {}
1390 get = {}
1394 get = {}
1391 remove = []
1395 remove = []
1392
1396
1393 # construct a working dir manifest
1397 # construct a working dir manifest
1394 mw = m1.copy()
1398 mw = m1.copy()
1395 mfw = mf1.copy()
1399 mfw = mf1.copy()
1396 umap = dict.fromkeys(u)
1400 umap = dict.fromkeys(u)
1397
1401
1398 for f in a + c + u:
1402 for f in a + c + u:
1399 mw[f] = ""
1403 mw[f] = ""
1400 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1404 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1401
1405
1402 if moddirstate:
1406 if moddirstate:
1403 wlock = self.wlock()
1407 wlock = self.wlock()
1404
1408
1405 for f in d:
1409 for f in d:
1406 if f in mw: del mw[f]
1410 if f in mw: del mw[f]
1407
1411
1408 # If we're jumping between revisions (as opposed to merging),
1412 # If we're jumping between revisions (as opposed to merging),
1409 # and if neither the working directory nor the target rev has
1413 # and if neither the working directory nor the target rev has
1410 # the file, then we need to remove it from the dirstate, to
1414 # the file, then we need to remove it from the dirstate, to
1411 # prevent the dirstate from listing the file when it is no
1415 # prevent the dirstate from listing the file when it is no
1412 # longer in the manifest.
1416 # longer in the manifest.
1413 if moddirstate and linear_path and f not in m2:
1417 if moddirstate and linear_path and f not in m2:
1414 self.dirstate.forget((f,))
1418 self.dirstate.forget((f,))
1415
1419
1416 # Compare manifests
1420 # Compare manifests
1417 for f, n in mw.iteritems():
1421 for f, n in mw.iteritems():
1418 if choose and not choose(f): continue
1422 if choose and not choose(f): continue
1419 if f in m2:
1423 if f in m2:
1420 s = 0
1424 s = 0
1421
1425
1422 # is the wfile new since m1, and match m2?
1426 # is the wfile new since m1, and match m2?
1423 if f not in m1:
1427 if f not in m1:
1424 t1 = self.wread(f)
1428 t1 = self.wread(f)
1425 t2 = self.file(f).read(m2[f])
1429 t2 = self.file(f).read(m2[f])
1426 if cmp(t1, t2) == 0:
1430 if cmp(t1, t2) == 0:
1427 n = m2[f]
1431 n = m2[f]
1428 del t1, t2
1432 del t1, t2
1429
1433
1430 # are files different?
1434 # are files different?
1431 if n != m2[f]:
1435 if n != m2[f]:
1432 a = ma.get(f, nullid)
1436 a = ma.get(f, nullid)
1433 # are both different from the ancestor?
1437 # are both different from the ancestor?
1434 if n != a and m2[f] != a:
1438 if n != a and m2[f] != a:
1435 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1439 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1436 # merge executable bits
1440 # merge executable bits
1437 # "if we changed or they changed, change in merge"
1441 # "if we changed or they changed, change in merge"
1438 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1442 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1439 mode = ((a^b) | (a^c)) ^ a
1443 mode = ((a^b) | (a^c)) ^ a
1440 merge[f] = (m1.get(f, nullid), m2[f], mode)
1444 merge[f] = (m1.get(f, nullid), m2[f], mode)
1441 s = 1
1445 s = 1
1442 # are we clobbering?
1446 # are we clobbering?
1443 # is remote's version newer?
1447 # is remote's version newer?
1444 # or are we going back in time?
1448 # or are we going back in time?
1445 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1449 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1446 self.ui.debug(_(" remote %s is newer, get\n") % f)
1450 self.ui.debug(_(" remote %s is newer, get\n") % f)
1447 get[f] = m2[f]
1451 get[f] = m2[f]
1448 s = 1
1452 s = 1
1449 elif f in umap:
1453 elif f in umap:
1450 # this unknown file is the same as the checkout
1454 # this unknown file is the same as the checkout
1451 get[f] = m2[f]
1455 get[f] = m2[f]
1452
1456
1453 if not s and mfw[f] != mf2[f]:
1457 if not s and mfw[f] != mf2[f]:
1454 if force:
1458 if force:
1455 self.ui.debug(_(" updating permissions for %s\n") % f)
1459 self.ui.debug(_(" updating permissions for %s\n") % f)
1456 util.set_exec(self.wjoin(f), mf2[f])
1460 util.set_exec(self.wjoin(f), mf2[f])
1457 else:
1461 else:
1458 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1462 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1459 mode = ((a^b) | (a^c)) ^ a
1463 mode = ((a^b) | (a^c)) ^ a
1460 if mode != b:
1464 if mode != b:
1461 self.ui.debug(_(" updating permissions for %s\n") % f)
1465 self.ui.debug(_(" updating permissions for %s\n") % f)
1462 util.set_exec(self.wjoin(f), mode)
1466 util.set_exec(self.wjoin(f), mode)
1463 del m2[f]
1467 del m2[f]
1464 elif f in ma:
1468 elif f in ma:
1465 if n != ma[f]:
1469 if n != ma[f]:
1466 r = _("d")
1470 r = _("d")
1467 if not force and (linear_path or allow):
1471 if not force and (linear_path or allow):
1468 r = self.ui.prompt(
1472 r = self.ui.prompt(
1469 (_(" local changed %s which remote deleted\n") % f) +
1473 (_(" local changed %s which remote deleted\n") % f) +
1470 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1474 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1471 if r == _("d"):
1475 if r == _("d"):
1472 remove.append(f)
1476 remove.append(f)
1473 else:
1477 else:
1474 self.ui.debug(_("other deleted %s\n") % f)
1478 self.ui.debug(_("other deleted %s\n") % f)
1475 remove.append(f) # other deleted it
1479 remove.append(f) # other deleted it
1476 else:
1480 else:
1477 # file is created on branch or in working directory
1481 # file is created on branch or in working directory
1478 if force and f not in umap:
1482 if force and f not in umap:
1479 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1483 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1480 remove.append(f)
1484 remove.append(f)
1481 elif n == m1.get(f, nullid): # same as parent
1485 elif n == m1.get(f, nullid): # same as parent
1482 if p2 == pa: # going backwards?
1486 if p2 == pa: # going backwards?
1483 self.ui.debug(_("remote deleted %s\n") % f)
1487 self.ui.debug(_("remote deleted %s\n") % f)
1484 remove.append(f)
1488 remove.append(f)
1485 else:
1489 else:
1486 self.ui.debug(_("local modified %s, keeping\n") % f)
1490 self.ui.debug(_("local modified %s, keeping\n") % f)
1487 else:
1491 else:
1488 self.ui.debug(_("working dir created %s, keeping\n") % f)
1492 self.ui.debug(_("working dir created %s, keeping\n") % f)
1489
1493
1490 for f, n in m2.iteritems():
1494 for f, n in m2.iteritems():
1491 if choose and not choose(f): continue
1495 if choose and not choose(f): continue
1492 if f[0] == "/": continue
1496 if f[0] == "/": continue
1493 if f in ma and n != ma[f]:
1497 if f in ma and n != ma[f]:
1494 r = _("k")
1498 r = _("k")
1495 if not force and (linear_path or allow):
1499 if not force and (linear_path or allow):
1496 r = self.ui.prompt(
1500 r = self.ui.prompt(
1497 (_("remote changed %s which local deleted\n") % f) +
1501 (_("remote changed %s which local deleted\n") % f) +
1498 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1502 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1499 if r == _("k"): get[f] = n
1503 if r == _("k"): get[f] = n
1500 elif f not in ma:
1504 elif f not in ma:
1501 self.ui.debug(_("remote created %s\n") % f)
1505 self.ui.debug(_("remote created %s\n") % f)
1502 get[f] = n
1506 get[f] = n
1503 else:
1507 else:
1504 if force or p2 == pa: # going backwards?
1508 if force or p2 == pa: # going backwards?
1505 self.ui.debug(_("local deleted %s, recreating\n") % f)
1509 self.ui.debug(_("local deleted %s, recreating\n") % f)
1506 get[f] = n
1510 get[f] = n
1507 else:
1511 else:
1508 self.ui.debug(_("local deleted %s\n") % f)
1512 self.ui.debug(_("local deleted %s\n") % f)
1509
1513
1510 del mw, m1, m2, ma
1514 del mw, m1, m2, ma
1511
1515
1512 if force:
1516 if force:
1513 for f in merge:
1517 for f in merge:
1514 get[f] = merge[f][1]
1518 get[f] = merge[f][1]
1515 merge = {}
1519 merge = {}
1516
1520
1517 if linear_path or force:
1521 if linear_path or force:
1518 # we don't need to do any magic, just jump to the new rev
1522 # we don't need to do any magic, just jump to the new rev
1519 branch_merge = False
1523 branch_merge = False
1520 p1, p2 = p2, nullid
1524 p1, p2 = p2, nullid
1521 else:
1525 else:
1522 if not allow:
1526 if not allow:
1523 self.ui.status(_("this update spans a branch"
1527 self.ui.status(_("this update spans a branch"
1524 " affecting the following files:\n"))
1528 " affecting the following files:\n"))
1525 fl = merge.keys() + get.keys()
1529 fl = merge.keys() + get.keys()
1526 fl.sort()
1530 fl.sort()
1527 for f in fl:
1531 for f in fl:
1528 cf = ""
1532 cf = ""
1529 if f in merge: cf = _(" (resolve)")
1533 if f in merge: cf = _(" (resolve)")
1530 self.ui.status(" %s%s\n" % (f, cf))
1534 self.ui.status(" %s%s\n" % (f, cf))
1531 self.ui.warn(_("aborting update spanning branches!\n"))
1535 self.ui.warn(_("aborting update spanning branches!\n"))
1532 self.ui.status(_("(use update -m to merge across branches"
1536 self.ui.status(_("(use update -m to merge across branches"
1533 " or -C to lose changes)\n"))
1537 " or -C to lose changes)\n"))
1534 return 1
1538 return 1
1535 branch_merge = True
1539 branch_merge = True
1536
1540
1537 # get the files we don't need to change
1541 # get the files we don't need to change
1538 files = get.keys()
1542 files = get.keys()
1539 files.sort()
1543 files.sort()
1540 for f in files:
1544 for f in files:
1541 if f[0] == "/": continue
1545 if f[0] == "/": continue
1542 self.ui.note(_("getting %s\n") % f)
1546 self.ui.note(_("getting %s\n") % f)
1543 t = self.file(f).read(get[f])
1547 t = self.file(f).read(get[f])
1544 self.wwrite(f, t)
1548 self.wwrite(f, t)
1545 util.set_exec(self.wjoin(f), mf2[f])
1549 util.set_exec(self.wjoin(f), mf2[f])
1546 if moddirstate:
1550 if moddirstate:
1547 if branch_merge:
1551 if branch_merge:
1548 self.dirstate.update([f], 'n', st_mtime=-1)
1552 self.dirstate.update([f], 'n', st_mtime=-1)
1549 else:
1553 else:
1550 self.dirstate.update([f], 'n')
1554 self.dirstate.update([f], 'n')
1551
1555
1552 # merge the tricky bits
1556 # merge the tricky bits
1553 files = merge.keys()
1557 files = merge.keys()
1554 files.sort()
1558 files.sort()
1555 for f in files:
1559 for f in files:
1556 self.ui.status(_("merging %s\n") % f)
1560 self.ui.status(_("merging %s\n") % f)
1557 my, other, flag = merge[f]
1561 my, other, flag = merge[f]
1558 self.merge3(f, my, other)
1562 self.merge3(f, my, other)
1559 util.set_exec(self.wjoin(f), flag)
1563 util.set_exec(self.wjoin(f), flag)
1560 if moddirstate:
1564 if moddirstate:
1561 if branch_merge:
1565 if branch_merge:
1562 # We've done a branch merge, mark this file as merged
1566 # We've done a branch merge, mark this file as merged
1563 # so that we properly record the merger later
1567 # so that we properly record the merger later
1564 self.dirstate.update([f], 'm')
1568 self.dirstate.update([f], 'm')
1565 else:
1569 else:
1566 # We've update-merged a locally modified file, so
1570 # We've update-merged a locally modified file, so
1567 # we set the dirstate to emulate a normal checkout
1571 # we set the dirstate to emulate a normal checkout
1568 # of that file some time in the past. Thus our
1572 # of that file some time in the past. Thus our
1569 # merge will appear as a normal local file
1573 # merge will appear as a normal local file
1570 # modification.
1574 # modification.
1571 f_len = len(self.file(f).read(other))
1575 f_len = len(self.file(f).read(other))
1572 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1576 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1573
1577
1574 remove.sort()
1578 remove.sort()
1575 for f in remove:
1579 for f in remove:
1576 self.ui.note(_("removing %s\n") % f)
1580 self.ui.note(_("removing %s\n") % f)
1577 try:
1581 try:
1578 util.unlink(self.wjoin(f))
1582 util.unlink(self.wjoin(f))
1579 except OSError, inst:
1583 except OSError, inst:
1580 if inst.errno != errno.ENOENT:
1584 if inst.errno != errno.ENOENT:
1581 self.ui.warn(_("update failed to remove %s: %s!\n") %
1585 self.ui.warn(_("update failed to remove %s: %s!\n") %
1582 (f, inst.strerror))
1586 (f, inst.strerror))
1583 if moddirstate:
1587 if moddirstate:
1584 if branch_merge:
1588 if branch_merge:
1585 self.dirstate.update(remove, 'r')
1589 self.dirstate.update(remove, 'r')
1586 else:
1590 else:
1587 self.dirstate.forget(remove)
1591 self.dirstate.forget(remove)
1588
1592
1589 if moddirstate:
1593 if moddirstate:
1590 self.dirstate.setparents(p1, p2)
1594 self.dirstate.setparents(p1, p2)
1591
1595
1592 def merge3(self, fn, my, other):
1596 def merge3(self, fn, my, other):
1593 """perform a 3-way merge in the working directory"""
1597 """perform a 3-way merge in the working directory"""
1594
1598
1595 def temp(prefix, node):
1599 def temp(prefix, node):
1596 pre = "%s~%s." % (os.path.basename(fn), prefix)
1600 pre = "%s~%s." % (os.path.basename(fn), prefix)
1597 (fd, name) = tempfile.mkstemp("", pre)
1601 (fd, name) = tempfile.mkstemp("", pre)
1598 f = os.fdopen(fd, "wb")
1602 f = os.fdopen(fd, "wb")
1599 self.wwrite(fn, fl.read(node), f)
1603 self.wwrite(fn, fl.read(node), f)
1600 f.close()
1604 f.close()
1601 return name
1605 return name
1602
1606
1603 fl = self.file(fn)
1607 fl = self.file(fn)
1604 base = fl.ancestor(my, other)
1608 base = fl.ancestor(my, other)
1605 a = self.wjoin(fn)
1609 a = self.wjoin(fn)
1606 b = temp("base", base)
1610 b = temp("base", base)
1607 c = temp("other", other)
1611 c = temp("other", other)
1608
1612
1609 self.ui.note(_("resolving %s\n") % fn)
1613 self.ui.note(_("resolving %s\n") % fn)
1610 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1614 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1611 (fn, short(my), short(other), short(base)))
1615 (fn, short(my), short(other), short(base)))
1612
1616
1613 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1617 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1614 or "hgmerge")
1618 or "hgmerge")
1615 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1619 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1616 if r:
1620 if r:
1617 self.ui.warn(_("merging %s failed!\n") % fn)
1621 self.ui.warn(_("merging %s failed!\n") % fn)
1618
1622
1619 os.unlink(b)
1623 os.unlink(b)
1620 os.unlink(c)
1624 os.unlink(c)
1621
1625
1622 def verify(self):
1626 def verify(self):
1623 filelinkrevs = {}
1627 filelinkrevs = {}
1624 filenodes = {}
1628 filenodes = {}
1625 changesets = revisions = files = 0
1629 changesets = revisions = files = 0
1626 errors = [0]
1630 errors = [0]
1627 neededmanifests = {}
1631 neededmanifests = {}
1628
1632
1629 def err(msg):
1633 def err(msg):
1630 self.ui.warn(msg + "\n")
1634 self.ui.warn(msg + "\n")
1631 errors[0] += 1
1635 errors[0] += 1
1632
1636
1633 seen = {}
1637 seen = {}
1634 self.ui.status(_("checking changesets\n"))
1638 self.ui.status(_("checking changesets\n"))
1635 d = self.changelog.checksize()
1639 d = self.changelog.checksize()
1636 if d:
1640 if d:
1637 err(_("changeset data short %d bytes") % d)
1641 err(_("changeset data short %d bytes") % d)
1638 for i in range(self.changelog.count()):
1642 for i in range(self.changelog.count()):
1639 changesets += 1
1643 changesets += 1
1640 n = self.changelog.node(i)
1644 n = self.changelog.node(i)
1641 l = self.changelog.linkrev(n)
1645 l = self.changelog.linkrev(n)
1642 if l != i:
1646 if l != i:
1643 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1647 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1644 if n in seen:
1648 if n in seen:
1645 err(_("duplicate changeset at revision %d") % i)
1649 err(_("duplicate changeset at revision %d") % i)
1646 seen[n] = 1
1650 seen[n] = 1
1647
1651
1648 for p in self.changelog.parents(n):
1652 for p in self.changelog.parents(n):
1649 if p not in self.changelog.nodemap:
1653 if p not in self.changelog.nodemap:
1650 err(_("changeset %s has unknown parent %s") %
1654 err(_("changeset %s has unknown parent %s") %
1651 (short(n), short(p)))
1655 (short(n), short(p)))
1652 try:
1656 try:
1653 changes = self.changelog.read(n)
1657 changes = self.changelog.read(n)
1654 except KeyboardInterrupt:
1658 except KeyboardInterrupt:
1655 self.ui.warn(_("interrupted"))
1659 self.ui.warn(_("interrupted"))
1656 raise
1660 raise
1657 except Exception, inst:
1661 except Exception, inst:
1658 err(_("unpacking changeset %s: %s") % (short(n), inst))
1662 err(_("unpacking changeset %s: %s") % (short(n), inst))
1659
1663
1660 neededmanifests[changes[0]] = n
1664 neededmanifests[changes[0]] = n
1661
1665
1662 for f in changes[3]:
1666 for f in changes[3]:
1663 filelinkrevs.setdefault(f, []).append(i)
1667 filelinkrevs.setdefault(f, []).append(i)
1664
1668
1665 seen = {}
1669 seen = {}
1666 self.ui.status(_("checking manifests\n"))
1670 self.ui.status(_("checking manifests\n"))
1667 d = self.manifest.checksize()
1671 d = self.manifest.checksize()
1668 if d:
1672 if d:
1669 err(_("manifest data short %d bytes") % d)
1673 err(_("manifest data short %d bytes") % d)
1670 for i in range(self.manifest.count()):
1674 for i in range(self.manifest.count()):
1671 n = self.manifest.node(i)
1675 n = self.manifest.node(i)
1672 l = self.manifest.linkrev(n)
1676 l = self.manifest.linkrev(n)
1673
1677
1674 if l < 0 or l >= self.changelog.count():
1678 if l < 0 or l >= self.changelog.count():
1675 err(_("bad manifest link (%d) at revision %d") % (l, i))
1679 err(_("bad manifest link (%d) at revision %d") % (l, i))
1676
1680
1677 if n in neededmanifests:
1681 if n in neededmanifests:
1678 del neededmanifests[n]
1682 del neededmanifests[n]
1679
1683
1680 if n in seen:
1684 if n in seen:
1681 err(_("duplicate manifest at revision %d") % i)
1685 err(_("duplicate manifest at revision %d") % i)
1682
1686
1683 seen[n] = 1
1687 seen[n] = 1
1684
1688
1685 for p in self.manifest.parents(n):
1689 for p in self.manifest.parents(n):
1686 if p not in self.manifest.nodemap:
1690 if p not in self.manifest.nodemap:
1687 err(_("manifest %s has unknown parent %s") %
1691 err(_("manifest %s has unknown parent %s") %
1688 (short(n), short(p)))
1692 (short(n), short(p)))
1689
1693
1690 try:
1694 try:
1691 delta = mdiff.patchtext(self.manifest.delta(n))
1695 delta = mdiff.patchtext(self.manifest.delta(n))
1692 except KeyboardInterrupt:
1696 except KeyboardInterrupt:
1693 self.ui.warn(_("interrupted"))
1697 self.ui.warn(_("interrupted"))
1694 raise
1698 raise
1695 except Exception, inst:
1699 except Exception, inst:
1696 err(_("unpacking manifest %s: %s") % (short(n), inst))
1700 err(_("unpacking manifest %s: %s") % (short(n), inst))
1697
1701
1698 ff = [ l.split('\0') for l in delta.splitlines() ]
1702 ff = [ l.split('\0') for l in delta.splitlines() ]
1699 for f, fn in ff:
1703 for f, fn in ff:
1700 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1704 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1701
1705
1702 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1706 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1703
1707
1704 for m,c in neededmanifests.items():
1708 for m,c in neededmanifests.items():
1705 err(_("Changeset %s refers to unknown manifest %s") %
1709 err(_("Changeset %s refers to unknown manifest %s") %
1706 (short(m), short(c)))
1710 (short(m), short(c)))
1707 del neededmanifests
1711 del neededmanifests
1708
1712
1709 for f in filenodes:
1713 for f in filenodes:
1710 if f not in filelinkrevs:
1714 if f not in filelinkrevs:
1711 err(_("file %s in manifest but not in changesets") % f)
1715 err(_("file %s in manifest but not in changesets") % f)
1712
1716
1713 for f in filelinkrevs:
1717 for f in filelinkrevs:
1714 if f not in filenodes:
1718 if f not in filenodes:
1715 err(_("file %s in changeset but not in manifest") % f)
1719 err(_("file %s in changeset but not in manifest") % f)
1716
1720
1717 self.ui.status(_("checking files\n"))
1721 self.ui.status(_("checking files\n"))
1718 ff = filenodes.keys()
1722 ff = filenodes.keys()
1719 ff.sort()
1723 ff.sort()
1720 for f in ff:
1724 for f in ff:
1721 if f == "/dev/null": continue
1725 if f == "/dev/null": continue
1722 files += 1
1726 files += 1
1723 fl = self.file(f)
1727 fl = self.file(f)
1724 d = fl.checksize()
1728 d = fl.checksize()
1725 if d:
1729 if d:
1726 err(_("%s file data short %d bytes") % (f, d))
1730 err(_("%s file data short %d bytes") % (f, d))
1727
1731
1728 nodes = { nullid: 1 }
1732 nodes = { nullid: 1 }
1729 seen = {}
1733 seen = {}
1730 for i in range(fl.count()):
1734 for i in range(fl.count()):
1731 revisions += 1
1735 revisions += 1
1732 n = fl.node(i)
1736 n = fl.node(i)
1733
1737
1734 if n in seen:
1738 if n in seen:
1735 err(_("%s: duplicate revision %d") % (f, i))
1739 err(_("%s: duplicate revision %d") % (f, i))
1736 if n not in filenodes[f]:
1740 if n not in filenodes[f]:
1737 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1741 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1738 else:
1742 else:
1739 del filenodes[f][n]
1743 del filenodes[f][n]
1740
1744
1741 flr = fl.linkrev(n)
1745 flr = fl.linkrev(n)
1742 if flr not in filelinkrevs[f]:
1746 if flr not in filelinkrevs[f]:
1743 err(_("%s:%s points to unexpected changeset %d")
1747 err(_("%s:%s points to unexpected changeset %d")
1744 % (f, short(n), flr))
1748 % (f, short(n), flr))
1745 else:
1749 else:
1746 filelinkrevs[f].remove(flr)
1750 filelinkrevs[f].remove(flr)
1747
1751
1748 # verify contents
1752 # verify contents
1749 try:
1753 try:
1750 t = fl.read(n)
1754 t = fl.read(n)
1751 except KeyboardInterrupt:
1755 except KeyboardInterrupt:
1752 self.ui.warn(_("interrupted"))
1756 self.ui.warn(_("interrupted"))
1753 raise
1757 raise
1754 except Exception, inst:
1758 except Exception, inst:
1755 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1759 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1756
1760
1757 # verify parents
1761 # verify parents
1758 (p1, p2) = fl.parents(n)
1762 (p1, p2) = fl.parents(n)
1759 if p1 not in nodes:
1763 if p1 not in nodes:
1760 err(_("file %s:%s unknown parent 1 %s") %
1764 err(_("file %s:%s unknown parent 1 %s") %
1761 (f, short(n), short(p1)))
1765 (f, short(n), short(p1)))
1762 if p2 not in nodes:
1766 if p2 not in nodes:
1763 err(_("file %s:%s unknown parent 2 %s") %
1767 err(_("file %s:%s unknown parent 2 %s") %
1764 (f, short(n), short(p1)))
1768 (f, short(n), short(p1)))
1765 nodes[n] = 1
1769 nodes[n] = 1
1766
1770
1767 # cross-check
1771 # cross-check
1768 for node in filenodes[f]:
1772 for node in filenodes[f]:
1769 err(_("node %s in manifests not in %s") % (hex(node), f))
1773 err(_("node %s in manifests not in %s") % (hex(node), f))
1770
1774
1771 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1775 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1772 (files, changesets, revisions))
1776 (files, changesets, revisions))
1773
1777
1774 if errors[0]:
1778 if errors[0]:
1775 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1779 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1776 return 1
1780 return 1
@@ -1,890 +1,888 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
17
17
18 def hash(text, p1, p2):
18 def hash(text, p1, p2):
19 """generate a hash from the given text and its parent hashes
19 """generate a hash from the given text and its parent hashes
20
20
21 This hash combines both the current file contents and its history
21 This hash combines both the current file contents and its history
22 in a manner that makes it easy to distinguish nodes with the same
22 in a manner that makes it easy to distinguish nodes with the same
23 content in the revision graph.
23 content in the revision graph.
24 """
24 """
25 l = [p1, p2]
25 l = [p1, p2]
26 l.sort()
26 l.sort()
27 s = sha.new(l[0])
27 s = sha.new(l[0])
28 s.update(l[1])
28 s.update(l[1])
29 s.update(text)
29 s.update(text)
30 return s.digest()
30 return s.digest()
31
31
32 def compress(text):
32 def compress(text):
33 """ generate a possibly-compressed representation of text """
33 """ generate a possibly-compressed representation of text """
34 if not text: return ("", text)
34 if not text: return ("", text)
35 if len(text) < 44:
35 if len(text) < 44:
36 if text[0] == '\0': return ("", text)
36 if text[0] == '\0': return ("", text)
37 return ('u', text)
37 return ('u', text)
38 bin = zlib.compress(text)
38 bin = zlib.compress(text)
39 if len(bin) > len(text):
39 if len(bin) > len(text):
40 if text[0] == '\0': return ("", text)
40 if text[0] == '\0': return ("", text)
41 return ('u', text)
41 return ('u', text)
42 return ("", bin)
42 return ("", bin)
43
43
44 def decompress(bin):
44 def decompress(bin):
45 """ decompress the given input """
45 """ decompress the given input """
46 if not bin: return bin
46 if not bin: return bin
47 t = bin[0]
47 t = bin[0]
48 if t == '\0': return bin
48 if t == '\0': return bin
49 if t == 'x': return zlib.decompress(bin)
49 if t == 'x': return zlib.decompress(bin)
50 if t == 'u': return bin[1:]
50 if t == 'u': return bin[1:]
51 raise RevlogError(_("unknown compression type %s") % t)
51 raise RevlogError(_("unknown compression type %s") % t)
52
52
53 indexformat = ">4l20s20s20s"
53 indexformat = ">4l20s20s20s"
54
54
55 class lazyparser:
55 class lazyparser:
56 """
56 """
57 this class avoids the need to parse the entirety of large indices
57 this class avoids the need to parse the entirety of large indices
58
58
59 By default we parse and load 1000 entries at a time.
59 By default we parse and load 1000 entries at a time.
60
60
61 If no position is specified, we load the whole index, and replace
61 If no position is specified, we load the whole index, and replace
62 the lazy objects in revlog with the underlying objects for
62 the lazy objects in revlog with the underlying objects for
63 efficiency in cases where we look at most of the nodes.
63 efficiency in cases where we look at most of the nodes.
64 """
64 """
65 def __init__(self, data, revlog):
65 def __init__(self, data, revlog):
66 self.data = data
66 self.data = data
67 self.s = struct.calcsize(indexformat)
67 self.s = struct.calcsize(indexformat)
68 self.l = len(data)/self.s
68 self.l = len(data)/self.s
69 self.index = [None] * self.l
69 self.index = [None] * self.l
70 self.map = {nullid: -1}
70 self.map = {nullid: -1}
71 self.all = 0
71 self.all = 0
72 self.revlog = revlog
72 self.revlog = revlog
73
73
74 def trunc(self, pos):
74 def trunc(self, pos):
75 self.l = pos/self.s
75 self.l = pos/self.s
76
76
77 def load(self, pos=None):
77 def load(self, pos=None):
78 if self.all: return
78 if self.all: return
79 if pos is not None:
79 if pos is not None:
80 block = pos / 1000
80 block = pos / 1000
81 i = block * 1000
81 i = block * 1000
82 end = min(self.l, i + 1000)
82 end = min(self.l, i + 1000)
83 else:
83 else:
84 self.all = 1
84 self.all = 1
85 i = 0
85 i = 0
86 end = self.l
86 end = self.l
87 self.revlog.index = self.index
87 self.revlog.index = self.index
88 self.revlog.nodemap = self.map
88 self.revlog.nodemap = self.map
89
89
90 while i < end:
90 while i < end:
91 d = self.data[i * self.s: (i + 1) * self.s]
91 d = self.data[i * self.s: (i + 1) * self.s]
92 e = struct.unpack(indexformat, d)
92 e = struct.unpack(indexformat, d)
93 self.index[i] = e
93 self.index[i] = e
94 self.map[e[6]] = i
94 self.map[e[6]] = i
95 i += 1
95 i += 1
96
96
97 class lazyindex:
97 class lazyindex:
98 """a lazy version of the index array"""
98 """a lazy version of the index array"""
99 def __init__(self, parser):
99 def __init__(self, parser):
100 self.p = parser
100 self.p = parser
101 def __len__(self):
101 def __len__(self):
102 return len(self.p.index)
102 return len(self.p.index)
103 def load(self, pos):
103 def load(self, pos):
104 if pos < 0:
104 if pos < 0:
105 pos += len(self.p.index)
105 pos += len(self.p.index)
106 self.p.load(pos)
106 self.p.load(pos)
107 return self.p.index[pos]
107 return self.p.index[pos]
108 def __getitem__(self, pos):
108 def __getitem__(self, pos):
109 return self.p.index[pos] or self.load(pos)
109 return self.p.index[pos] or self.load(pos)
110 def __delitem__(self, pos):
110 def __delitem__(self, pos):
111 del self.p.index[pos]
111 del self.p.index[pos]
112 def append(self, e):
112 def append(self, e):
113 self.p.index.append(e)
113 self.p.index.append(e)
114 def trunc(self, pos):
114 def trunc(self, pos):
115 self.p.trunc(pos)
115 self.p.trunc(pos)
116
116
class lazymap:
    """a lazy version of the node map

    Presents a dict-like node -> rev mapping backed by a lazyparser,
    loading index entries on demand instead of parsing the whole index
    up front.
    """
    def __init__(self, parser):
        # parser is a lazyparser holding the raw index data and the
        # partially-populated map/index structures.
        self.p = parser
    def load(self, key):
        """Locate `key` in the raw index data and parse its entry."""
        if self.p.all: return
        # find the raw byte offset of the nodeid, then convert it to an
        # index position using the fixed record size self.p.s
        n = self.p.data.find(key)
        if n < 0:
            raise KeyError(key)
        pos = n / self.p.s
        self.p.load(pos)
    def __contains__(self, key):
        # membership requires the full map, so force a complete parse
        self.p.load()
        return key in self.p.map
    def __iter__(self):
        # nullid is always present even though it has no index entry
        yield nullid
        for i in xrange(self.p.l):
            try:
                yield self.p.index[i][6]
            except:
                # entry not parsed yet: load it, then retry
                self.p.load(i)
                yield self.p.index[i][6]
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            # not parsed yet: try a targeted load before giving up
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val
    def __delitem__(self, key):
        del self.p.map[key]
152
152
class RevlogError(Exception):
    """Raised on revlog integrity failures and failed node lookups."""
154
154
155 class revlog:
155 class revlog:
156 """
156 """
157 the underlying revision storage object
157 the underlying revision storage object
158
158
159 A revlog consists of two parts, an index and the revision data.
159 A revlog consists of two parts, an index and the revision data.
160
160
161 The index is a file with a fixed record size containing
161 The index is a file with a fixed record size containing
162 information on each revision, includings its nodeid (hash), the
162 information on each revision, includings its nodeid (hash), the
163 nodeids of its parents, the position and offset of its data within
163 nodeids of its parents, the position and offset of its data within
164 the data file, and the revision it's based on. Finally, each entry
164 the data file, and the revision it's based on. Finally, each entry
165 contains a linkrev entry that can serve as a pointer to external
165 contains a linkrev entry that can serve as a pointer to external
166 data.
166 data.
167
167
168 The revision data itself is a linear collection of data chunks.
168 The revision data itself is a linear collection of data chunks.
169 Each chunk represents a revision and is usually represented as a
169 Each chunk represents a revision and is usually represented as a
170 delta against the previous chunk. To bound lookup time, runs of
170 delta against the previous chunk. To bound lookup time, runs of
171 deltas are limited to about 2 times the length of the original
171 deltas are limited to about 2 times the length of the original
172 version data. This makes retrieval of a version proportional to
172 version data. This makes retrieval of a version proportional to
173 its size, or O(1) relative to the number of revisions.
173 its size, or O(1) relative to the number of revisions.
174
174
175 Both pieces of the revlog are written to in an append-only
175 Both pieces of the revlog are written to in an append-only
176 fashion, which means we never need to rewrite a file to insert or
176 fashion, which means we never need to rewrite a file to insert or
177 remove data, and can use some simple techniques to avoid the need
177 remove data, and can use some simple techniques to avoid the need
178 for locking while reading.
178 for locking while reading.
179 """
179 """
180 def __init__(self, opener, indexfile, datafile):
180 def __init__(self, opener, indexfile, datafile):
181 """
181 """
182 create a revlog object
182 create a revlog object
183
183
184 opener is a function that abstracts the file opening operation
184 opener is a function that abstracts the file opening operation
185 and can be used to implement COW semantics or the like.
185 and can be used to implement COW semantics or the like.
186 """
186 """
187 self.indexfile = indexfile
187 self.indexfile = indexfile
188 self.datafile = datafile
188 self.datafile = datafile
189 self.opener = opener
189 self.opener = opener
190 self.cache = None
190 self.cache = None
191
191
192 try:
192 try:
193 i = self.opener(self.indexfile).read()
193 i = self.opener(self.indexfile).read()
194 except IOError, inst:
194 except IOError, inst:
195 if inst.errno != errno.ENOENT:
195 if inst.errno != errno.ENOENT:
196 raise
196 raise
197 i = ""
197 i = ""
198
198
199 if len(i) > 10000:
199 if len(i) > 10000:
200 # big index, let's parse it on demand
200 # big index, let's parse it on demand
201 parser = lazyparser(i, self)
201 parser = lazyparser(i, self)
202 self.index = lazyindex(parser)
202 self.index = lazyindex(parser)
203 self.nodemap = lazymap(parser)
203 self.nodemap = lazymap(parser)
204 else:
204 else:
205 s = struct.calcsize(indexformat)
205 s = struct.calcsize(indexformat)
206 l = len(i) / s
206 l = len(i) / s
207 self.index = [None] * l
207 self.index = [None] * l
208 m = [None] * l
208 m = [None] * l
209
209
210 n = 0
210 n = 0
211 for f in xrange(0, len(i), s):
211 for f in xrange(0, len(i), s):
212 # offset, size, base, linkrev, p1, p2, nodeid
212 # offset, size, base, linkrev, p1, p2, nodeid
213 e = struct.unpack(indexformat, i[f:f + s])
213 e = struct.unpack(indexformat, i[f:f + s])
214 m[n] = (e[6], n)
214 m[n] = (e[6], n)
215 self.index[n] = e
215 self.index[n] = e
216 n += 1
216 n += 1
217
217
218 self.nodemap = dict(m)
218 self.nodemap = dict(m)
219 self.nodemap[nullid] = -1
219 self.nodemap[nullid] = -1
220
220
def tip(self): return self.node(len(self.index) - 1)
def count(self): return len(self.index)
# rev < 0 denotes the virtual null revision; note the and/or idiom
# only evaluates nullid when rev is actually negative
def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
def rev(self, node):
    """map a nodeid to its revision number, or raise RevlogError"""
    try:
        return self.nodemap[node]
    except KeyError:
        raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
# index entry layout: offset, size, base, linkrev, p1, p2, nodeid
def linkrev(self, node): return self.index[self.rev(node)][3]
def parents(self, node):
    """return the two parent nodeids of node (nullid for missing parents)"""
    if node == nullid: return (nullid, nullid)
    return self.index[self.rev(node)][4:6]
233
233
# accessors for the per-revision layout inside the data file:
# start/length give the compressed chunk's position, base is the
# revision the delta chain is rooted at
def start(self, rev): return self.index[rev][0]
def length(self, rev): return self.index[rev][1]
def end(self, rev): return self.start(rev) + self.length(rev)
def base(self, rev): return self.index[rev][2]
238
238
def reachable(self, rev, stop=None):
    """return a dict of all nodes reachable from `rev` via parent links

    NOTE(review): despite its name, `rev` is a nodeid here (it is passed
    to self.parents). Traversal does not descend below the revision of
    `stop` when given, and `stop` itself is not expanded.
    """
    reachable = {}
    visit = [rev]
    reachable[rev] = 1
    if stop:
        stopn = self.rev(stop)
    else:
        stopn = 0
    while visit:
        n = visit.pop(0)
        if n == stop:
            continue
        if n == nullid:
            continue
        for p in self.parents(n):
            # prune parents older than the stop revision
            if self.rev(p) < stopn:
                continue
            if p not in reachable:
                reachable[p] = 1
                visit.append(p)
    return reachable
260
260
def nodesbetween(self, roots=None, heads=None):
    """Return a tuple containing three elements. Elements 1 and 2 contain
    a final list bases and heads after all the unreachable ones have been
    pruned. Element 0 contains a topologically sorted list of all

    nodes that satisfy these constraints:
    1. All nodes must be descended from a node in roots (the nodes on
    roots are considered descended from themselves).
    2. All nodes must also be ancestors of a node in heads (the nodes in
    heads are considered to be their own ancestors).

    If roots is unspecified, nullid is assumed as the only root.
    If heads is unspecified, it is taken to be the output of the
    heads method (i.e. a list of all nodes in the repository that
    have no children)."""
    nonodes = ([], [], [])
    if roots is not None:
        roots = list(roots)
        if not roots:
            return nonodes
        lowestrev = min([self.rev(n) for n in roots])
    else:
        roots = [nullid] # Everybody's a descendent of nullid
        lowestrev = -1
    if (lowestrev == -1) and (heads is None):
        # We want _all_ the nodes!
        return ([self.node(r) for r in xrange(0, self.count())],
                [nullid], list(self.heads()))
    if heads is None:
        # All nodes are ancestors, so the latest ancestor is the last
        # node.
        highestrev = self.count() - 1
        # Set ancestors to None to signal that every node is an ancestor.
        ancestors = None
        # Set heads to an empty dictionary for later discovery of heads
        heads = {}
    else:
        heads = list(heads)
        if not heads:
            return nonodes
        ancestors = {}
        # Start at the top and keep marking parents until we're done.
        nodestotag = heads[:]
        # Turn heads into a dictionary so we can remove 'fake' heads.
        # Also, later we will be using it to filter out the heads we can't
        # find from roots.
        heads = dict.fromkeys(heads, 0)
        # Remember where the top was so we can use it as a limit later.
        highestrev = max([self.rev(n) for n in nodestotag])
        while nodestotag:
            # grab a node to tag
            n = nodestotag.pop()
            # Never tag nullid
            if n == nullid:
                continue
            # A node's revision number represents its place in a
            # topologically sorted list of nodes.
            r = self.rev(n)
            if r >= lowestrev:
                if n not in ancestors:
                    # If we are possibly a descendent of one of the roots
                    # and we haven't already been marked as an ancestor
                    ancestors[n] = 1 # Mark as ancestor
                    # Add non-nullid parents to list of nodes to tag.
                    nodestotag.extend([p for p in self.parents(n) if
                                       p != nullid])
                elif n in heads: # We've seen it before, is it a fake head?
                    # So it is, real heads should not be the ancestors of
                    # any other heads.
                    heads.pop(n)
        if not ancestors:
            return nonodes
        # Now that we have our set of ancestors, we want to remove any
        # roots that are not ancestors.

        # If one of the roots was nullid, everything is included anyway.
        if lowestrev > -1:
            # But, since we weren't, let's recompute the lowest rev to not
            # include roots that aren't ancestors.

            # Filter out roots that aren't ancestors of heads
            roots = [n for n in roots if n in ancestors]
            # Recompute the lowest revision
            if roots:
                lowestrev = min([self.rev(n) for n in roots])
            else:
                # No more roots?  Return empty list
                return nonodes
        else:
            # We are descending from nullid, and don't need to care about
            # any other roots.
            lowestrev = -1
            roots = [nullid]
    # Transform our roots list into a 'set' (i.e. a dictionary where the
    # values don't matter.
    descendents = dict.fromkeys(roots, 1)
    # Also, keep the original roots so we can filter out roots that aren't
    # 'real' roots (i.e. are descended from other roots).
    roots = descendents.copy()
    # Our topologically sorted list of output nodes.
    orderedout = []
    # Don't start at nullid since we don't want nullid in our output list,
    # and if nullid shows up in descedents, empty parents will look like
    # they're descendents.
    for r in xrange(max(lowestrev, 0), highestrev + 1):
        n = self.node(r)
        isdescendent = False
        if lowestrev == -1: # Everybody is a descendent of nullid
            isdescendent = True
        elif n in descendents:
            # n is already a descendent
            isdescendent = True
            # This check only needs to be done here because all the roots
            # will start being marked is descendents before the loop.
            if n in roots:
                # If n was a root, check if it's a 'real' root.
                p = tuple(self.parents(n))
                # If any of its parents are descendents, it's not a root.
                if (p[0] in descendents) or (p[1] in descendents):
                    roots.pop(n)
        else:
            p = tuple(self.parents(n))
            # A node is a descendent if either of its parents are
            # descendents. (We seeded the dependents list with the roots
            # up there, remember?)
            if (p[0] in descendents) or (p[1] in descendents):
                descendents[n] = 1
                isdescendent = True
        if isdescendent and ((ancestors is None) or (n in ancestors)):
            # Only include nodes that are both descendents and ancestors.
            orderedout.append(n)
            if (ancestors is not None) and (n in heads):
                # We're trying to figure out which heads are reachable
                # from roots.
                # Mark this head as having been reached
                heads[n] = 1
            elif ancestors is None:
                # Otherwise, we're trying to discover the heads.
                # Assume this is a head because if it isn't, the next step
                # will eventually remove it.
                heads[n] = 1
                # But, obviously its parents aren't.
                for p in self.parents(n):
                    heads.pop(p, None)
    heads = [n for n in heads.iterkeys() if heads[n] != 0]
    roots = roots.keys()
    assert orderedout
    assert roots
    assert heads
    return (orderedout, roots, heads)
411
411
def heads(self, start=nullid):
    """return the list of all nodes that have no children

    if start is specified, only heads that are descendants of
    start will be returned"""
    # walk forward from start's revision, propagating reachability;
    # a node stays in `heads` until one of its children displaces it
    reachable = {start: 1}
    heads = {start: 1}
    startrev = self.rev(start)

    for r in xrange(startrev + 1, self.count()):
        n = self.node(r)
        for pn in self.parents(n):
            if pn in reachable:
                # n descends from start, so it is a candidate head
                reachable[n] = 1
                heads[n] = 1
            if pn in heads:
                # pn has a child (n), so it is no longer a head
                del heads[pn]
    return heads.keys()
431
429
def children(self, node):
    """find the children of a given node"""
    c = []
    p = self.rev(node)
    # every child must have a higher revision number than its parent,
    # so only scan revisions after p
    for r in range(p + 1, self.count()):
        n = self.node(r)
        for pn in self.parents(n):
            if pn == node:
                c.append(n)
                continue
            elif pn == nullid:
                continue
    return c
445
443
def lookup(self, id):
    """locate a node based on revision number or subset of hex nodeid"""
    try:
        # first, try to interpret id as a (possibly negative) rev number
        rev = int(id)
        if str(rev) != id: raise ValueError
        if rev < 0: rev = self.count() + rev
        if rev < 0 or rev >= self.count(): raise ValueError
        return self.node(rev)
    except (ValueError, OverflowError):
        # otherwise, treat id as a hex nodeid prefix
        c = []
        for n in self.nodemap:
            if hex(n).startswith(id):
                c.append(n)
        if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
        if len(c) < 1: raise RevlogError(_("No match found"))
        return c[0]

    return None
464
462
def diff(self, a, b):
    """return a delta between two revisions"""
    return mdiff.textdiff(a, b)
468
466
def patches(self, t, pl):
    """apply a list of patches to a string"""
    return mdiff.patches(t, pl)
472
470
def delta(self, node):
    """return or calculate a delta between a node and its predecessor"""
    r = self.rev(node)
    b = self.base(r)
    if r == b:
        # r is stored as a full version, not a delta: compute the delta
        # against the previous revision's reconstructed text
        return self.diff(self.revision(self.node(r - 1)),
                         self.revision(node))
    else:
        # r is stored as a delta: read and decompress it directly
        f = self.opener(self.datafile)
        f.seek(self.start(r))
        data = f.read(self.length(r))
        return decompress(data)
485
483
def revision(self, node):
    """return an uncompressed revision of a given"""
    if node == nullid: return ""
    if self.cache and self.cache[0] == node: return self.cache[2]

    # look up what we need to read
    text = None
    rev = self.rev(node)
    start, length, base, link, p1, p2, node = self.index[rev]
    end = start + length
    if base != rev: start = self.start(base)

    # do we have useful data cached?
    if self.cache and self.cache[1] >= base and self.cache[1] < rev:
        base = self.cache[1]
        start = self.start(base + 1)
        text = self.cache[2]
        last = 0

    f = self.opener(self.datafile)
    f.seek(start)
    data = f.read(end - start)

    if text is None:
        # no cached base: decompress the full version at the chain base
        last = self.length(base)
        text = decompress(data[:last])

    # decompress each delta in the chain, then apply them all at once
    bins = []
    for r in xrange(base + 1, rev + 1):
        s = self.length(r)
        bins.append(decompress(data[last:last + s]))
        last = last + s

    text = mdiff.patches(text, bins)

    # verify the reconstructed text against the stored nodeid
    if node != hash(text, p1, p2):
        raise RevlogError(_("integrity check failed on %s:%d")
                          % (self.datafile, rev))

    self.cache = (node, rev, text)
    return text
527
525
def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
    """add a revision to the log

    text - the revision data to add
    transaction - the transaction object used for rollback
    link - the linkrev data to add
    p1, p2 - the parent nodeids of the revision
    d - an optional precomputed delta
    """
    if text is None: text = ""
    if p1 is None: p1 = self.tip()
    if p2 is None: p2 = nullid

    node = hash(text, p1, p2)

    if node in self.nodemap:
        # content-addressed: this exact revision already exists
        return node

    n = self.count()
    t = n - 1

    if n:
        base = self.base(t)
        start = self.start(base)
        end = self.end(t)
        if not d:
            prev = self.revision(self.tip())
            d = self.diff(prev, str(text))
        data = compress(d)
        l = len(data[1]) + len(data[0])
        dist = end - start + l

    # full versions are inserted when the needed deltas
    # become comparable to the uncompressed text
    if not n or dist > len(text) * 2:
        data = compress(text)
        l = len(data[1]) + len(data[0])
        base = n
    else:
        base = self.base(t)

    offset = 0
    if t >= 0:
        offset = self.end(t)

    # offset, size, base, linkrev, p1, p2, nodeid
    e = (offset, l, base, link, p1, p2, node)

    self.index.append(e)
    self.nodemap[node] = n
    entry = struct.pack(indexformat, *e)

    # register file lengths with the transaction for rollback, then
    # append the data chunk and the index entry
    transaction.add(self.datafile, e[0])
    f = self.opener(self.datafile, "a")
    if data[0]:
        f.write(data[0])
    f.write(data[1])
    transaction.add(self.indexfile, n * len(entry))
    self.opener(self.indexfile, "a").write(entry)

    self.cache = (node, n, text)
    return node
589
587
def ancestor(self, a, b):
    """calculate the least common ancestor of nodes a and b"""
    # calculate the distance of every node from root
    dist = {nullid: 0}
    for i in xrange(self.count()):
        n = self.node(i)
        p1, p2 = self.parents(n)
        dist[n] = max(dist[p1], dist[p2]) + 1

    # traverse ancestors in order of decreasing distance from root
    def ancestors(node):
        # we store negative distances because heap returns smallest member
        h = [(-dist[node], node)]
        seen = {}
        earliest = self.count()
        while h:
            d, n = heapq.heappop(h)
            if n not in seen:
                seen[n] = 1
                r = self.rev(n)
                yield (-d, n)
                for p in self.parents(n):
                    heapq.heappush(h, (-dist[p], p))

    def generations(node):
        # group ancestors into batches that share the same distance
        sg, s = None, {}
        for g,n in ancestors(node):
            if g != sg:
                if sg:
                    yield sg, s
                sg, s = g, {n:1}
            else:
                s[n] = 1
        yield sg, s

    x = generations(a)
    y = generations(b)
    gx = x.next()
    gy = y.next()

    # increment each ancestor list until it is closer to root than
    # the other, or they match
    while 1:
        #print "ancestor gen %s %s" % (gx[0], gy[0])
        if gx[0] == gy[0]:
            # find the intersection
            i = [ n for n in gx[1] if n in gy[1] ]
            if i:
                return i[0]
            else:
                #print "next"
                gy = y.next()
                gx = x.next()
        elif gx[0] < gy[0]:
            #print "next y"
            gy = y.next()
        else:
            #print "next x"
            gx = x.next()
649
647
650 def group(self, nodelist, lookup, infocollect = None):
648 def group(self, nodelist, lookup, infocollect = None):
651 """calculate a delta group
649 """calculate a delta group
652
650
653 Given a list of changeset revs, return a set of deltas and
651 Given a list of changeset revs, return a set of deltas and
654 metadata corresponding to nodes. the first delta is
652 metadata corresponding to nodes. the first delta is
655 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
653 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
656 have this parent as it has all history before these
654 have this parent as it has all history before these
657 changesets. parent is parent[0]
655 changesets. parent is parent[0]
658 """
656 """
659 revs = [self.rev(n) for n in nodelist]
657 revs = [self.rev(n) for n in nodelist]
660 needed = dict.fromkeys(revs, 1)
658 needed = dict.fromkeys(revs, 1)
661
659
662 # if we don't have any revisions touched by these changesets, bail
660 # if we don't have any revisions touched by these changesets, bail
663 if not revs:
661 if not revs:
664 yield struct.pack(">l", 0)
662 yield struct.pack(">l", 0)
665 return
663 return
666
664
667 # add the parent of the first rev
665 # add the parent of the first rev
668 p = self.parents(self.node(revs[0]))[0]
666 p = self.parents(self.node(revs[0]))[0]
669 revs.insert(0, self.rev(p))
667 revs.insert(0, self.rev(p))
670
668
671 # for each delta that isn't contiguous in the log, we need to
669 # for each delta that isn't contiguous in the log, we need to
672 # reconstruct the base, reconstruct the result, and then
670 # reconstruct the base, reconstruct the result, and then
673 # calculate the delta. We also need to do this where we've
671 # calculate the delta. We also need to do this where we've
674 # stored a full version and not a delta
672 # stored a full version and not a delta
675 for i in xrange(0, len(revs) - 1):
673 for i in xrange(0, len(revs) - 1):
676 a, b = revs[i], revs[i + 1]
674 a, b = revs[i], revs[i + 1]
677 if a + 1 != b or self.base(b) == b:
675 if a + 1 != b or self.base(b) == b:
678 for j in xrange(self.base(a), a + 1):
676 for j in xrange(self.base(a), a + 1):
679 needed[j] = 1
677 needed[j] = 1
680 for j in xrange(self.base(b), b + 1):
678 for j in xrange(self.base(b), b + 1):
681 needed[j] = 1
679 needed[j] = 1
682
680
683 # calculate spans to retrieve from datafile
681 # calculate spans to retrieve from datafile
684 needed = needed.keys()
682 needed = needed.keys()
685 needed.sort()
683 needed.sort()
686 spans = []
684 spans = []
687 oo = -1
685 oo = -1
688 ol = 0
686 ol = 0
689 for n in needed:
687 for n in needed:
690 if n < 0: continue
688 if n < 0: continue
691 o = self.start(n)
689 o = self.start(n)
692 l = self.length(n)
690 l = self.length(n)
693 if oo + ol == o: # can we merge with the previous?
691 if oo + ol == o: # can we merge with the previous?
694 nl = spans[-1][2]
692 nl = spans[-1][2]
695 nl.append((n, l))
693 nl.append((n, l))
696 ol += l
694 ol += l
697 spans[-1] = (oo, ol, nl)
695 spans[-1] = (oo, ol, nl)
698 else:
696 else:
699 oo = o
697 oo = o
700 ol = l
698 ol = l
701 spans.append((oo, ol, [(n, l)]))
699 spans.append((oo, ol, [(n, l)]))
702
700
703 # read spans in, divide up chunks
701 # read spans in, divide up chunks
704 chunks = {}
702 chunks = {}
705 for span in spans:
703 for span in spans:
706 # we reopen the file for each span to make http happy for now
704 # we reopen the file for each span to make http happy for now
707 f = self.opener(self.datafile)
705 f = self.opener(self.datafile)
708 f.seek(span[0])
706 f.seek(span[0])
709 data = f.read(span[1])
707 data = f.read(span[1])
710
708
711 # divide up the span
709 # divide up the span
712 pos = 0
710 pos = 0
713 for r, l in span[2]:
711 for r, l in span[2]:
714 chunks[r] = decompress(data[pos: pos + l])
712 chunks[r] = decompress(data[pos: pos + l])
715 pos += l
713 pos += l
716
714
717 # helper to reconstruct intermediate versions
715 # helper to reconstruct intermediate versions
718 def construct(text, base, rev):
716 def construct(text, base, rev):
719 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
717 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
720 return mdiff.patches(text, bins)
718 return mdiff.patches(text, bins)
721
719
722 # build deltas
720 # build deltas
723 deltas = []
721 deltas = []
724 for d in xrange(0, len(revs) - 1):
722 for d in xrange(0, len(revs) - 1):
725 a, b = revs[d], revs[d + 1]
723 a, b = revs[d], revs[d + 1]
726 n = self.node(b)
724 n = self.node(b)
727
725
728 if infocollect is not None:
726 if infocollect is not None:
729 infocollect(n)
727 infocollect(n)
730
728
731 # do we need to construct a new delta?
729 # do we need to construct a new delta?
732 if a + 1 != b or self.base(b) == b:
730 if a + 1 != b or self.base(b) == b:
733 if a >= 0:
731 if a >= 0:
734 base = self.base(a)
732 base = self.base(a)
735 ta = chunks[self.base(a)]
733 ta = chunks[self.base(a)]
736 ta = construct(ta, base, a)
734 ta = construct(ta, base, a)
737 else:
735 else:
738 ta = ""
736 ta = ""
739
737
740 base = self.base(b)
738 base = self.base(b)
741 if a > base:
739 if a > base:
742 base = a
740 base = a
743 tb = ta
741 tb = ta
744 else:
742 else:
745 tb = chunks[self.base(b)]
743 tb = chunks[self.base(b)]
746 tb = construct(tb, base, b)
744 tb = construct(tb, base, b)
747 d = self.diff(ta, tb)
745 d = self.diff(ta, tb)
748 else:
746 else:
749 d = chunks[b]
747 d = chunks[b]
750
748
751 p = self.parents(n)
749 p = self.parents(n)
752 meta = n + p[0] + p[1] + lookup(n)
750 meta = n + p[0] + p[1] + lookup(n)
753 l = struct.pack(">l", len(meta) + len(d) + 4)
751 l = struct.pack(">l", len(meta) + len(d) + 4)
754 yield l
752 yield l
755 yield meta
753 yield meta
756 yield d
754 yield d
757
755
758 yield struct.pack(">l", 0)
756 yield struct.pack(">l", 0)
759
757
760 def addgroup(self, revs, linkmapper, transaction, unique=0):
758 def addgroup(self, revs, linkmapper, transaction, unique=0):
761 """
759 """
762 add a delta group
760 add a delta group
763
761
764 given a set of deltas, add them to the revision log. the
762 given a set of deltas, add them to the revision log. the
765 first delta is against its parent, which should be in our
763 first delta is against its parent, which should be in our
766 log, the rest are against the previous delta.
764 log, the rest are against the previous delta.
767 """
765 """
768
766
769 #track the base of the current delta log
767 #track the base of the current delta log
770 r = self.count()
768 r = self.count()
771 t = r - 1
769 t = r - 1
772 node = nullid
770 node = nullid
773
771
774 base = prev = -1
772 base = prev = -1
775 start = end = measure = 0
773 start = end = measure = 0
776 if r:
774 if r:
777 start = self.start(self.base(t))
775 start = self.start(self.base(t))
778 end = self.end(t)
776 end = self.end(t)
779 measure = self.length(self.base(t))
777 measure = self.length(self.base(t))
780 base = self.base(t)
778 base = self.base(t)
781 prev = self.tip()
779 prev = self.tip()
782
780
783 transaction.add(self.datafile, end)
781 transaction.add(self.datafile, end)
784 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
782 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
785 dfh = self.opener(self.datafile, "a")
783 dfh = self.opener(self.datafile, "a")
786 ifh = self.opener(self.indexfile, "a")
784 ifh = self.opener(self.indexfile, "a")
787
785
788 # loop through our set of deltas
786 # loop through our set of deltas
789 chain = None
787 chain = None
790 for chunk in revs:
788 for chunk in revs:
791 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
789 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
792 link = linkmapper(cs)
790 link = linkmapper(cs)
793 if node in self.nodemap:
791 if node in self.nodemap:
794 # this can happen if two branches make the same change
792 # this can happen if two branches make the same change
795 # if unique:
793 # if unique:
796 # raise RevlogError(_("already have %s") % hex(node[:4]))
794 # raise RevlogError(_("already have %s") % hex(node[:4]))
797 chain = node
795 chain = node
798 continue
796 continue
799 delta = chunk[80:]
797 delta = chunk[80:]
800
798
801 for p in (p1, p2):
799 for p in (p1, p2):
802 if not p in self.nodemap:
800 if not p in self.nodemap:
803 raise RevlogError(_("unknown parent %s") % short(p1))
801 raise RevlogError(_("unknown parent %s") % short(p1))
804
802
805 if not chain:
803 if not chain:
806 # retrieve the parent revision of the delta chain
804 # retrieve the parent revision of the delta chain
807 chain = p1
805 chain = p1
808 if not chain in self.nodemap:
806 if not chain in self.nodemap:
809 raise RevlogError(_("unknown base %s") % short(chain[:4]))
807 raise RevlogError(_("unknown base %s") % short(chain[:4]))
810
808
811 # full versions are inserted when the needed deltas become
809 # full versions are inserted when the needed deltas become
812 # comparable to the uncompressed text or when the previous
810 # comparable to the uncompressed text or when the previous
813 # version is not the one we have a delta against. We use
811 # version is not the one we have a delta against. We use
814 # the size of the previous full rev as a proxy for the
812 # the size of the previous full rev as a proxy for the
815 # current size.
813 # current size.
816
814
817 if chain == prev:
815 if chain == prev:
818 tempd = compress(delta)
816 tempd = compress(delta)
819 cdelta = tempd[0] + tempd[1]
817 cdelta = tempd[0] + tempd[1]
820
818
821 if chain != prev or (end - start + len(cdelta)) > measure * 2:
819 if chain != prev or (end - start + len(cdelta)) > measure * 2:
822 # flush our writes here so we can read it in revision
820 # flush our writes here so we can read it in revision
823 dfh.flush()
821 dfh.flush()
824 ifh.flush()
822 ifh.flush()
825 text = self.revision(chain)
823 text = self.revision(chain)
826 text = self.patches(text, [delta])
824 text = self.patches(text, [delta])
827 chk = self.addrevision(text, transaction, link, p1, p2)
825 chk = self.addrevision(text, transaction, link, p1, p2)
828 if chk != node:
826 if chk != node:
829 raise RevlogError(_("consistency error adding group"))
827 raise RevlogError(_("consistency error adding group"))
830 measure = len(text)
828 measure = len(text)
831 else:
829 else:
832 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
830 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
833 self.index.append(e)
831 self.index.append(e)
834 self.nodemap[node] = r
832 self.nodemap[node] = r
835 dfh.write(cdelta)
833 dfh.write(cdelta)
836 ifh.write(struct.pack(indexformat, *e))
834 ifh.write(struct.pack(indexformat, *e))
837
835
838 t, r, chain, prev = r, r + 1, node, node
836 t, r, chain, prev = r, r + 1, node, node
839 start = self.start(self.base(t))
837 start = self.start(self.base(t))
840 end = self.end(t)
838 end = self.end(t)
841
839
842 dfh.close()
840 dfh.close()
843 ifh.close()
841 ifh.close()
844 return node
842 return node
845
843
846 def strip(self, rev, minlink):
844 def strip(self, rev, minlink):
847 if self.count() == 0 or rev >= self.count():
845 if self.count() == 0 or rev >= self.count():
848 return
846 return
849
847
850 # When stripping away a revision, we need to make sure it
848 # When stripping away a revision, we need to make sure it
851 # does not actually belong to an older changeset.
849 # does not actually belong to an older changeset.
852 # The minlink parameter defines the oldest revision
850 # The minlink parameter defines the oldest revision
853 # we're allowed to strip away.
851 # we're allowed to strip away.
854 while minlink > self.index[rev][3]:
852 while minlink > self.index[rev][3]:
855 rev += 1
853 rev += 1
856 if rev >= self.count():
854 if rev >= self.count():
857 return
855 return
858
856
859 # first truncate the files on disk
857 # first truncate the files on disk
860 end = self.start(rev)
858 end = self.start(rev)
861 self.opener(self.datafile, "a").truncate(end)
859 self.opener(self.datafile, "a").truncate(end)
862 end = rev * struct.calcsize(indexformat)
860 end = rev * struct.calcsize(indexformat)
863 self.opener(self.indexfile, "a").truncate(end)
861 self.opener(self.indexfile, "a").truncate(end)
864
862
865 # then reset internal state in memory to forget those revisions
863 # then reset internal state in memory to forget those revisions
866 self.cache = None
864 self.cache = None
867 for p in self.index[rev:]:
865 for p in self.index[rev:]:
868 del self.nodemap[p[6]]
866 del self.nodemap[p[6]]
869 del self.index[rev:]
867 del self.index[rev:]
870
868
871 # truncating the lazyindex also truncates the lazymap.
869 # truncating the lazyindex also truncates the lazymap.
872 if isinstance(self.index, lazyindex):
870 if isinstance(self.index, lazyindex):
873 self.index.trunc(end)
871 self.index.trunc(end)
874
872
875
873
876 def checksize(self):
874 def checksize(self):
877 expected = 0
875 expected = 0
878 if self.count():
876 if self.count():
879 expected = self.end(self.count() - 1)
877 expected = self.end(self.count() - 1)
880 try:
878 try:
881 f = self.opener(self.datafile)
879 f = self.opener(self.datafile)
882 f.seek(0, 2)
880 f.seek(0, 2)
883 actual = f.tell()
881 actual = f.tell()
884 return expected - actual
882 return expected - actual
885 except IOError, inst:
883 except IOError, inst:
886 if inst.errno == errno.ENOENT:
884 if inst.errno == errno.ENOENT:
887 return 0
885 return 0
888 raise
886 raise
889
887
890
888
General Comments 0
You need to be logged in to leave comments. Login now