This implements the nodesbetween method, and it removes the newer method...
Eric Hopper - r1457:518da3c3 default
@@ -1,2222 +1,2222 @@
# commands.py - command processing for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
demandload(globals(), "os re sys signal shutil imp urllib pdb")
demandload(globals(), "fancyopts ui hg util lock revlog")
demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
demandload(globals(), "errno socket version struct atexit sets bz2")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""

def filterfiles(filters, files):
    l = [x for x in files if x in filters]

    for t in filters:
        if t and t[-1] != "/":
            t += "/"
        l += [x for x in files if x.startswith(t)]
    return l

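A quick illustration of filterfiles (the file names are hypothetical): each filter is kept as an exact match first, then used as a directory prefix:

    filterfiles(['src'], ['src/a.c', 'src/lib/b.c', 'README'])
    # -> ['src/a.c', 'src/lib/b.c']: nothing matches 'src' exactly,
    # but both files match once 'src' is extended to the prefix 'src/'
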
def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def matchpats(repo, cwd, pats=[], opts={}, head=''):
    return util.matcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                        opts.get('exclude'), head)

def makewalk(repo, pats, opts, head=''):
    cwd = repo.getcwd()
    files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
    exact = dict(zip(files, files))
    def walk():
        for src, fn in repo.walk(files=files, match=matchfn):
            yield src, fn, util.pathto(cwd, fn), fn in exact
    return files, matchfn, walk()

def walk(repo, pats, opts, head=''):
    files, matchfn, results = makewalk(repo, pats, opts, head)
    for r in results:
        yield r

def walkchangerevs(ui, repo, cwd, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    they are interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange) pair. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    if repo.changelog.count() == 0:
        return [], False

    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
                                        pats, opts)
    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}
    slowpath = anypats
    window = 300
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog):
            for i in xrange(filelog.count() - 1, -1, -window):
                revs = []
                for j in xrange(max(0, i - window), i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file in files:
            filelog = repo.file(file)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i in xrange(repo.changelog.count() - 1, -1, -window):
                for j in xrange(max(0, i - window), i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        for i in xrange(0, len(revs), window):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange

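To make the three-tuple protocol described in the docstring above concrete, here is a minimal, hypothetical sketch of a caller consuming the (iterator, getchange) pair; the per-state bodies are placeholders only:

    changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
    for st, rev, data in changeiter:
        if st == 'window':
            pass           # new window: reset any per-window display state
        elif st == 'add':
            fns = data     # out-of-order pass: gather data for rev (matched file names)
        elif st == 'iter':
            changes = getchange(rev)  # in-order pass: display the revision now
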
revrangesep = ':'

def revrange(ui, repo, revs, revlog=None):
    """Yield revisions as strings from a list of revision specifications."""
    if revlog is None:
        revlog = repo.changelog
    revcount = revlog.count()
    def fix(val, defval):
        if not val:
            return defval
        try:
            num = int(val)
            if str(num) != val:
                raise ValueError
            if num < 0: num += revcount
            if num < 0: num = 0
            elif num >= revcount:
                raise ValueError
        except ValueError:
            try:
                num = repo.changelog.rev(repo.lookup(val))
            except KeyError:
                try:
                    num = revlog.rev(revlog.lookup(val))
                except KeyError:
                    raise util.Abort('invalid revision identifier %s', val)
        return num
    seen = {}
    for spec in revs:
        if spec.find(revrangesep) >= 0:
            start, end = spec.split(revrangesep, 1)
            start = fix(start, 0)
            end = fix(end, revcount - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen: continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = fix(spec, None)
            if rev in seen: continue
            seen[rev] = 1
            yield str(rev)

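A brief usage sketch (revision numbers hypothetical): each spec is either a single revision or a start:end range, ranges may run backwards, duplicates are suppressed, and every revision is yielded as a string:

    list(revrange(ui, repo, ['3:1', 'tip']))
    # -> ['3', '2', '1', ...]; '3:1' steps backwards because start > end,
    # and 'tip' falls through to repo.lookup() since it is not an integer
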
def make_filename(repo, r, pat, node=None,
                  total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(r.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort("invalid format spec '%%%s' in output file name",
                         inst.args[0])

def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
                              pathname),
                mode)

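For orientation, a small hypothetical example of the format specifiers expanded above ('%b' repository basename, '%R' revision number, '%n' zero-padded sequence number):

    # with repo.root == '/home/me/myrepo', node at revision 5, 12 patches total:
    make_filename(repo, repo.changelog, '%b-r%R-%n.patch',
                  node=node, total=12, seqno=3)
    # -> 'myrepo-r5-03.patch'
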
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False):
    if not changes:
        (c, a, d, u) = repo.changes(node1, node2, files, match=match)
    else:
        (c, a, d, u) = changes
    if files:
        c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))

    if not c and not a and not d:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        date2 = util.datestr()
        if not node1:
            node1 = repo.dirstate.parents()[0]
        def read(f):
            return repo.wfile(f).read()

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    for f in c:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
    for f in a:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
    for f in d:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))

def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
    """show a single changeset or file revision"""
    log = repo.changelog
    if changenode is None:
        changenode = log.node(rev)
    elif not rev:
        rev = log.rev(changenode)

    if ui.quiet:
        ui.write("%d:%s\n" % (rev, short(changenode)))
        return

    changes = log.read(changenode)
    date = util.datestr(changes[2])

    parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
               for p in log.parents(changenode)
               if ui.debugflag or p != nullid]
    if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
        parents = []

    if ui.verbose:
        ui.write("changeset: %d:%s\n" % (rev, hex(changenode)))
    else:
        ui.write("changeset: %d:%s\n" % (rev, short(changenode)))

    for tag in repo.nodetags(changenode):
        ui.status("tag: %s\n" % tag)
    for parent in parents:
        ui.write("parent: %d:%s\n" % parent)

    if brinfo and changenode in brinfo:
        br = brinfo[changenode]
        ui.write("branch: %s\n" % " ".join(br))

    ui.debug("manifest: %d:%s\n" % (repo.manifest.rev(changes[0]),
                                    hex(changes[0])))
    ui.status("user: %s\n" % changes[1])
    ui.status("date: %s\n" % date)

    if ui.debugflag:
        files = repo.changes(log.parents(changenode)[0], changenode)
        for key, value in zip(["files:", "files+:", "files-:"], files):
            if value:
                ui.note("%-12s %s\n" % (key, " ".join(value)))
    else:
        ui.note("files: %s\n" % " ".join(changes[3]))

    description = changes[4].strip()
    if description:
        if ui.verbose:
            ui.status("description:\n")
            ui.status(description)
            ui.status("\n\n")
        else:
            ui.status("summary: %s\n" % description.splitlines()[0])
    ui.status("\n")

def show_version(ui):
    """output version and copyright information"""
    ui.write("Mercurial Distributed SCM (version %s)\n"
             % version.get_version())
    ui.status(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )

def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands"""
    option_lists = []
    if cmd and cmd != 'shortlist':
        if with_version:
            show_version(ui)
            ui.write('\n')
        key, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            aliases = ', '.join(key.split('|')[1:])
            if aliases:
                ui.write("\naliases: %s\n" % aliases)

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status("Mercurial Distributed SCM\n")
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status('basic commands (use "hg help" '
                      'for the full list or option "-v" for details):\n\n')
        elif ui.verbose:
            ui.status('list of commands:\n\n')
        else:
            ui.status('list of commands (use "hg help -v" '
                      'to show aliases and global options):\n\n')

        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|")[0]
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            d = ""
            if e[0].__doc__:
                d = e[0].__doc__.splitlines(0)[0].rstrip()
            h[f] = d
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|", ", ")
                ui.write(" %s:\n %s\n" % (commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default and " (default: %s)" % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    '''add the specified files on the next commit'''
    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            if ui.verbose: ui.status('adding %s\n' % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status('adding %s\n' % rel)
            names.append(abs)
    repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files"""
    add, remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            add.append(abs)
            if ui.verbose or not exact:
                ui.status('adding ', rel, '\n')
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            remove.append(abs)
            if ui.verbose or not exact:
                ui.status('removing ', rel, '\n')
    repo.add(add)
    repo.remove(remove)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line"""
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    if not pats:
        raise util.Abort('at least one file name or pattern required')

    opmap = [['user', getname], ['number', str], ['changeset', getnode]]
    if not opts['user'] and not opts['changeset']:
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts):
        if abs not in mmap:
            ui.warn("warning: %s is not in the repository!\n" % rel)
            continue

        f = repo.file(abs)
        if not opts['text'] and util.binary(f.read(mmap[abs])):
            ui.write("%s: binary file\n" % rel)
            continue

        lines = f.annotate(mmap[abs])
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file"""
    f = open(fname, "wb")
    dest = ui.expandpath(dest)
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other)
    cg = repo.changegroup(o)

    try:
        f.write("HG10")
        z = bz2.BZ2Compressor(9)
        while 1:
            chunk = cg.read(4096)
            if not chunk:
                break
            f.write(z.compress(chunk))
        f.write(z.flush())
    except:
        os.unlink(fname)
        raise

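The resulting file layout follows directly from the code above: a four-byte "HG10" magic string followed by the bz2-compressed changegroup stream. A hypothetical sketch of reading it back for inspection:

    f = open(fname, 'rb')
    magic = f.read(4)                 # 'HG10', as written above
    data = bz2.decompress(f.read())   # the raw changegroup bytes
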
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files"""
    mf = {}
    if opts['rev']:
        change = repo.changelog.read(repo.lookup(opts['rev']))
        mf = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
        r = repo.file(abs)
        if opts['rev']:
            try:
                n = mf[abs]
            except (hg.RepoError, KeyError):
                try:
                    n = r.lookup(opts['rev'])
                except KeyError, inst:
                    raise util.Abort('cannot find file %s in rev %s',
                                     rel, opts['rev'])
        else:
            n = r.tip()
        fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
        fp.write(r.read(n))

def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository"""
    if dest is None:
        dest = os.path.basename(os.path.normpath(source))

    if os.path.exists(dest):
        raise util.Abort("destination '%s' already exists", dest)

    dest = os.path.realpath(dest)

    class Dircleanup:
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
            os.mkdir(dir_)
        def close(self):
            self.dir_ = None
        def __del__(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    if not os.path.exists(source):
        source = ui.expandpath(source)

    d = Dircleanup(dest)
    abspath = source
    other = hg.repository(ui, source)

    copy = False
    if other.dev() != -1:
        abspath = os.path.abspath(source)
        if not opts['pull']:
            copy = True

    if copy:
        try:
            # we use a lock here because if we race with commit, we
            # can end up with extra data in the cloned revlogs that's
            # not pointed to by changesets, thus causing verify to
            # fail
            l1 = lock.lock(os.path.join(source, ".hg", "lock"))
        except OSError:
            copy = False

    if copy:
        # we lock here to avoid premature writing to the target
        os.mkdir(os.path.join(dest, ".hg"))
        l2 = lock.lock(os.path.join(dest, ".hg", "lock"))

        files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
        for f in files.split():
            src = os.path.join(source, ".hg", f)
            dst = os.path.join(dest, ".hg", f)
            util.copyfiles(src, dst)

        repo = hg.repository(ui, dest)

    else:
        repo = hg.repository(ui, dest, create=1)
        repo.pull(other)

    f = repo.opener("hgrc", "w", text=True)
    f.write("[paths]\n")
    f.write("default = %s\n" % abspath)

    if not opts['noupdate']:
        update(ui, repo)

    d.close()

def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes"""
    if opts['text']:
        ui.warn("Warning: -t and --text is deprecated,"
                " please use -m or --message instead.\n")
    message = opts['message'] or opts['text']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort('options --message and --logfile are mutually '
                         'exclusive')
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort("can't read commit message '%s': %s" %
                             (logfile, inst.strerror))

    if opts['addremove']:
        addremove(ui, repo, *pats, **opts)
    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
    fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
                                    pats, opts)
    if pats:
        c, a, d, u = repo.changes(files=fns, match=match)
        files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
    else:
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match)
    except ValueError, inst:
        raise util.Abort(str(inst))

def docopy(ui, repo, pats, opts):
    if not pats:
        raise util.Abort('no source or destination specified')
    elif len(pats) == 1:
        raise util.Abort('no destination specified')
    pats = list(pats)
    dest = pats.pop()
    sources = []

    def okaytocopy(abs, rel, exact):
        reasons = {'?': 'is not managed',
                   'a': 'has been marked for add'}
        reason = reasons.get(repo.dirstate.state(abs))
        if reason:
            if exact: ui.warn('%s: not copying - file %s\n' % (rel, reason))
        else:
            return True

    for src, abs, rel, exact in walk(repo, pats, opts):
        if okaytocopy(abs, rel, exact):
            sources.append((abs, rel, exact))
    if not sources:
        raise util.Abort('no files to copy')

    cwd = repo.getcwd()
    absdest = util.canonpath(repo.root, cwd, dest)
    reldest = util.pathto(cwd, absdest)
    if os.path.exists(reldest):
        destisfile = not os.path.isdir(reldest)
    else:
        destisfile = len(sources) == 1 or repo.dirstate.state(absdest) != '?'

    if destisfile:
        if opts['parents']:
            raise util.Abort('with --parents, destination must be a directory')
        elif len(sources) > 1:
            raise util.Abort('with multiple sources, destination must be a '
                             'directory')
    errs, copied = 0, []
    for abs, rel, exact in sources:
        if opts['parents']:
            mydest = os.path.join(dest, rel)
        elif destisfile:
            mydest = reldest
        else:
            mydest = os.path.join(dest, os.path.basename(rel))
        myabsdest = util.canonpath(repo.root, cwd, mydest)
        myreldest = util.pathto(cwd, myabsdest)
        if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?':
            ui.warn('%s: not overwriting - file already managed\n' % myreldest)
            continue
        mydestdir = os.path.dirname(myreldest) or '.'
        if not opts['after']:
            try:
                if opts['parents']: os.makedirs(mydestdir)
                elif not destisfile: os.mkdir(mydestdir)
            except OSError, inst:
                if inst.errno != errno.EEXIST: raise
        if ui.verbose or not exact:
            ui.status('copying %s to %s\n' % (rel, myreldest))
        if not opts['after']:
            try:
                shutil.copyfile(rel, myreldest)
                shutil.copymode(rel, myreldest)
            except shutil.Error, inst:
                raise util.Abort(str(inst))
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn('%s: deleted in working copy\n' % rel)
                else:
                    ui.warn('%s: cannot copy - %s\n' % (rel, inst.strerror))
                errs += 1
                continue
        repo.copy(abs, myabsdest)
        copied.append((abs, rel, exact))
    if errs:
        ui.warn('(consider using --after)\n')
    return errs, copied

def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit"""
    errs, copied = docopy(ui, repo, pats, opts)
    return errs

def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    r = revlog.revlog(file, index, "")
    a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))

def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn("%s in state %s, but not in manifest1\n" % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn("%s in state %s, but also in manifest1\n" % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn("%s in state %s, but not in either manifest\n" %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn("%s in manifest1, but listed as state %s" % (f, state))
            errors += 1
    if errors:
        raise util.Abort(".hg/dirstate inconsistent with current parent's manifest")

def debugconfig(ui):
    """show combined config settings from all hgrc files"""
    try:
        repo = hg.repository(ui)
    except hg.RepoError:
        pass
    for section, name, value in ui.walkconfig():
        ui.write('%s.%s=%s\n' % (section, name, value))

def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    for file_ in keys:
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    time.strftime("%x %X",
                                  time.localtime(dc[file_][3])), file_))
    for f in repo.dirstate.copies:
        ui.write("copy: %s -> %s\n" % (repo.dirstate.copies[f], f))

def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    r = revlog.revlog(file, file_[:-2] + ".i", file_)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort('invalid revision identifier %s', rev)

def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(file, file_, "")
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in range(r.count()):
        e = r.index[i]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            i, e[0], e[1], e[2], e[3],
            short(e[6]), short(e[4]), short(e[5])))

def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(file, file_, "")
    ui.write("digraph G {\n")
    for i in range(r.count()):
        e = r.index[i]
        ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
        if e[5] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
    ui.write("}\n")

def debugrename(ui, repo, file, rev=None):
    """dump rename information"""
    r = repo.file(relpath(repo, [file])[0])
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            m = repo.manifest.read(change[0])
            n = m[relpath(repo, [file])[0]]
        except (hg.RepoError, KeyError):
            n = r.lookup(rev)
    else:
        n = r.tip()
    m = r.renamed(n)
    if m:
        ui.write("renamed from %s:%s\n" % (m[0], hex(m[1])))
    else:
        ui.write("not renamed\n")

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(walk(repo, pats, opts))
    if not items:
        return
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

def diff(ui, repo, *pats, **opts):
    """diff working directory (or selected files)"""
    node1, node2 = None, None
    revs = [repo.lookup(x) for x in opts['rev']]

    if len(revs) > 0:
        node1 = revs[0]
    if len(revs) > 1:
        node2 = revs[1]
    if len(revs) > 2:
        raise util.Abort("too many revisions to diff")

    fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)

    dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
           text=opts['text'])

def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
    node = repo.lookup(changeset)
    prev, other = repo.changelog.parents(node)
    change = repo.changelog.read(node)

    fp = make_file(repo, repo.changelog, opts['output'],
                   node=node, total=total, seqno=seqno,
                   revwidth=revwidth)
    if fp != sys.stdout:
        ui.note("%s\n" % fp.name)

    fp.write("# HG changeset patch\n")
    fp.write("# User %s\n" % change[1])
    fp.write("# Node ID %s\n" % hex(node))
    fp.write("# Parent %s\n" % hex(prev))
    if other != nullid:
        fp.write("# Parent %s\n" % hex(other))
    fp.write(change[4].rstrip())
    fp.write("\n\n")

    dodiff(fp, ui, repo, prev, node, text=opts['text'])
    if fp != sys.stdout:
        fp.close()

def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets"""
    if not changesets:
        raise util.Abort("export requires at least one changeset")
    seqno = 0
    revs = list(revrange(ui, repo, changesets))
    total = len(revs)
    revwidth = max(map(len, revs))
    ui.note(len(revs) > 1 and "Exporting patches:\n" or "Exporting patch:\n")
    for cset in revs:
        seqno += 1
        doexport(ui, repo, cset, seqno, total, revwidth, opts)

def forget(ui, repo, *pats, **opts):
    """don't add the specified files on the next commit"""
    forget = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if repo.dirstate.state(abs) == 'a':
            forget.append(abs)
            if ui.verbose or not exact:
                ui.status('forgetting ', rel, '\n')
    repo.forget(forget)

def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions"""
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

989 fcache = {}
989 fcache = {}
990 def getfile(fn):
990 def getfile(fn):
991 if fn not in fcache:
991 if fn not in fcache:
992 fcache[fn] = repo.file(fn)
992 fcache[fn] = repo.file(fn)
993 return fcache[fn]
993 return fcache[fn]
994
994
995 def matchlines(body):
995 def matchlines(body):
996 begin = 0
996 begin = 0
997 linenum = 0
997 linenum = 0
998 while True:
998 while True:
999 match = regexp.search(body, begin)
999 match = regexp.search(body, begin)
1000 if not match:
1000 if not match:
1001 break
1001 break
1002 mstart, mend = match.span()
1002 mstart, mend = match.span()
1003 linenum += body.count('\n', begin, mstart) + 1
1003 linenum += body.count('\n', begin, mstart) + 1
1004 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1004 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1005 lend = body.find('\n', mend)
1005 lend = body.find('\n', mend)
1006 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1006 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1007 begin = lend + 1
1007 begin = lend + 1
1008
1008
1009 class linestate:
1009 class linestate:
1010 def __init__(self, line, linenum, colstart, colend):
1010 def __init__(self, line, linenum, colstart, colend):
1011 self.line = line
1011 self.line = line
1012 self.linenum = linenum
1012 self.linenum = linenum
1013 self.colstart = colstart
1013 self.colstart = colstart
1014 self.colend = colend
1014 self.colend = colend
1015 def __eq__(self, other):
1015 def __eq__(self, other):
1016 return self.line == other.line
1016 return self.line == other.line
1017 def __hash__(self):
1017 def __hash__(self):
1018 return hash(self.line)
1018 return hash(self.line)
1019
1019
1020 matches = {}
1020 matches = {}
1021 def grepbody(fn, rev, body):
1021 def grepbody(fn, rev, body):
1022 matches[rev].setdefault(fn, {})
1022 matches[rev].setdefault(fn, {})
1023 m = matches[rev][fn]
1023 m = matches[rev][fn]
1024 for lnum, cstart, cend, line in matchlines(body):
1024 for lnum, cstart, cend, line in matchlines(body):
1025 s = linestate(line, lnum, cstart, cend)
1025 s = linestate(line, lnum, cstart, cend)
1026 m[s] = s
1026 m[s] = s
1027
1027
1028 prev = {}
1028 prev = {}
1029 ucache = {}
1029 ucache = {}
1030 def display(fn, rev, states, prevstates):
1030 def display(fn, rev, states, prevstates):
1031 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1031 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1032 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1032 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1033 counts = {'-': 0, '+': 0}
1033 counts = {'-': 0, '+': 0}
1034 filerevmatches = {}
1034 filerevmatches = {}
1035 for l in diff:
1035 for l in diff:
1036 if incrementing or not opts['all']:
1036 if incrementing or not opts['all']:
1037 change = ((l in prevstates) and '-') or '+'
1037 change = ((l in prevstates) and '-') or '+'
1038 r = rev
1038 r = rev
1039 else:
1039 else:
1040 change = ((l in states) and '-') or '+'
1040 change = ((l in states) and '-') or '+'
1041 r = prev[fn]
1041 r = prev[fn]
1042 cols = [fn, str(r)]
1042 cols = [fn, str(r)]
1043 if opts['line_number']: cols.append(str(l.linenum))
1043 if opts['line_number']: cols.append(str(l.linenum))
1044 if opts['all']: cols.append(change)
1044 if opts['all']: cols.append(change)
1045 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1045 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1046 ucache))
1046 ucache))
1047 if opts['files_with_matches']:
1047 if opts['files_with_matches']:
1048 c = (fn, rev)
1048 c = (fn, rev)
1049 if c in filerevmatches: continue
1049 if c in filerevmatches: continue
1050 filerevmatches[c] = 1
1050 filerevmatches[c] = 1
1051 else:
1051 else:
1052 cols.append(l.line)
1052 cols.append(l.line)
1053 ui.write(sep.join(cols), eol)
1053 ui.write(sep.join(cols), eol)
1054 counts[change] += 1
1054 counts[change] += 1
1055 return counts['+'], counts['-']
1055 return counts['+'], counts['-']
1056
1056
1057 fstate = {}
1057 fstate = {}
1058 skip = {}
1058 skip = {}
1059 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1059 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1060 count = 0
1060 count = 0
1061 incrementing = False
1061 incrementing = False
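# walkchangerevs drives the search as a small event stream: 'window'
# starts a new batch of revisions (rev is true when the window walks
# forward), 'add' asks us to grep the files touched by a revision, and
# 'iter' is the point at which that revision's results may be printed.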
1062 for st, rev, fns in changeiter:
1062 for st, rev, fns in changeiter:
1063 if st == 'window':
1063 if st == 'window':
1064 incrementing = rev
1064 incrementing = rev
1065 matches.clear()
1065 matches.clear()
1066 elif st == 'add':
1066 elif st == 'add':
1067 change = repo.changelog.read(repo.lookup(str(rev)))
1067 change = repo.changelog.read(repo.lookup(str(rev)))
1068 mf = repo.manifest.read(change[0])
1068 mf = repo.manifest.read(change[0])
1069 matches[rev] = {}
1069 matches[rev] = {}
1070 for fn in fns:
1070 for fn in fns:
1071 if fn in skip: continue
1071 if fn in skip: continue
1072 fstate.setdefault(fn, {})
1072 fstate.setdefault(fn, {})
1073 try:
1073 try:
1074 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1074 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1075 except KeyError:
1075 except KeyError:
1076 pass
1076 pass
1077 elif st == 'iter':
1077 elif st == 'iter':
1078 states = matches[rev].items()
1078 states = matches[rev].items()
1079 states.sort()
1079 states.sort()
1080 for fn, m in states:
1080 for fn, m in states:
1081 if fn in skip: continue
1081 if fn in skip: continue
1082 if incrementing or not opts['all'] or fstate[fn]:
1082 if incrementing or not opts['all'] or fstate[fn]:
1083 pos, neg = display(fn, rev, m, fstate[fn])
1083 pos, neg = display(fn, rev, m, fstate[fn])
1084 count += pos + neg
1084 count += pos + neg
1085 if pos and not opts['all']:
1085 if pos and not opts['all']:
1086 skip[fn] = True
1086 skip[fn] = True
1087 fstate[fn] = m
1087 fstate[fn] = m
1088 prev[fn] = rev
1088 prev[fn] = rev
1089
1089
1090 if not incrementing:
1090 if not incrementing:
1091 fstate = fstate.items()
1091 fstate = fstate.items()
1092 fstate.sort()
1092 fstate.sort()
1093 for fn, state in fstate:
1093 for fn, state in fstate:
1094 if fn in skip: continue
1094 if fn in skip: continue
1095 display(fn, rev, {}, state)
1095 display(fn, rev, {}, state)
1096 return (count == 0 and 1) or 0
1096 return (count == 0 and 1) or 0
1097
1097
1098 def heads(ui, repo, **opts):
1098 def heads(ui, repo, **opts):
1099 """show current repository heads"""
1099 """show current repository heads"""
1100 heads = repo.changelog.heads()
1100 heads = repo.changelog.heads()
1101 br = None
1101 br = None
1102 if opts['branches']:
1102 if opts['branches']:
1103 br = repo.branchlookup(heads)
1103 br = repo.branchlookup(heads)
1104 for n in heads:
1104 for n in heads:
1105 show_changeset(ui, repo, changenode=n, brinfo=br)
1105 show_changeset(ui, repo, changenode=n, brinfo=br)
1106
1106
1107 def identify(ui, repo):
1107 def identify(ui, repo):
1108 """print information about the working copy"""
1108 """print information about the working copy"""
1109 parents = [p for p in repo.dirstate.parents() if p != nullid]
1109 parents = [p for p in repo.dirstate.parents() if p != nullid]
1110 if not parents:
1110 if not parents:
1111 ui.write("unknown\n")
1111 ui.write("unknown\n")
1112 return
1112 return
1113
1113
1114 hexfunc = ui.verbose and hex or short
1114 hexfunc = ui.verbose and hex or short
1115 (c, a, d, u) = repo.changes()
1115 (c, a, d, u) = repo.changes()
1116 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1116 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1117 (c or a or d) and "+" or "")]
1117 (c or a or d) and "+" or "")]
1118
1118
1119 if not ui.quiet:
1119 if not ui.quiet:
1120 # multiple tags for a single parent separated by '/'
1120 # multiple tags for a single parent separated by '/'
1121 parenttags = ['/'.join(tags)
1121 parenttags = ['/'.join(tags)
1122 for tags in map(repo.nodetags, parents) if tags]
1122 for tags in map(repo.nodetags, parents) if tags]
1123 # tags for multiple parents separated by ' + '
1123 # tags for multiple parents separated by ' + '
1124 if parenttags:
1124 if parenttags:
1125 output.append(' + '.join(parenttags))
1125 output.append(' + '.join(parenttags))
1126
1126
1127 ui.write("%s\n" % ' '.join(output))
1127 ui.write("%s\n" % ' '.join(output))
1128
1128
1129 def import_(ui, repo, patch1, *patches, **opts):
1129 def import_(ui, repo, patch1, *patches, **opts):
1130 """import an ordered set of patches"""
1130 """import an ordered set of patches"""
1131 patches = (patch1,) + patches
1131 patches = (patch1,) + patches
1132
1132
1133 if not opts['force']:
1133 if not opts['force']:
1134 (c, a, d, u) = repo.changes()
1134 (c, a, d, u) = repo.changes()
1135 if c or a or d:
1135 if c or a or d:
1136 raise util.Abort("outstanding uncommitted changes")
1136 raise util.Abort("outstanding uncommitted changes")
1137
1137
1138 d = opts["base"]
1138 d = opts["base"]
1139 strip = opts["strip"]
1139 strip = opts["strip"]
1140
1140
1141 mailre = re.compile(r'(?:From |[\w-]+:)')
1141 mailre = re.compile(r'(?:From |[\w-]+:)')
1142 diffre = re.compile(r'(?:diff -|--- .*\s+\w+ \w+ +\d+ \d+:\d+:\d+ \d+)')
1142 diffre = re.compile(r'(?:diff -|--- .*\s+\w+ \w+ +\d+ \d+:\d+:\d+ \d+)')
1143
1143
1144 for patch in patches:
1144 for patch in patches:
1145 ui.status("applying %s\n" % patch)
1145 ui.status("applying %s\n" % patch)
1146 pf = os.path.join(d, patch)
1146 pf = os.path.join(d, patch)
1147
1147
1148 message = []
1148 message = []
1149 user = None
1149 user = None
1150 hgpatch = False
1150 hgpatch = False
1151 for line in file(pf):
1151 for line in file(pf):
1152 line = line.rstrip()
1152 line = line.rstrip()
1153 if (not message and not hgpatch and
1153 if (not message and not hgpatch and
1154 mailre.match(line) and not opts['force']):
1154 mailre.match(line) and not opts['force']):
1155 if len(line) > 35: line = line[:32] + '...'
1155 if len(line) > 35: line = line[:32] + '...'
1156 raise util.Abort('first line looks like a '
1156 raise util.Abort('first line looks like a '
1157 'mail header: ' + line)
1157 'mail header: ' + line)
1158 if diffre.match(line):
1158 if diffre.match(line):
1159 break
1159 break
1160 elif hgpatch:
1160 elif hgpatch:
1161 # parse values when importing the result of an hg export
1161 # parse values when importing the result of an hg export
1162 if line.startswith("# User "):
1162 if line.startswith("# User "):
1163 user = line[7:]
1163 user = line[7:]
1164 ui.debug('User: %s\n' % user)
1164 ui.debug('User: %s\n' % user)
1165 elif not line.startswith("# ") and line:
1165 elif not line.startswith("# ") and line:
1166 message.append(line)
1166 message.append(line)
1167 hgpatch = False
1167 hgpatch = False
1168 elif line == '# HG changeset patch':
1168 elif line == '# HG changeset patch':
1169 hgpatch = True
1169 hgpatch = True
1170 message = [] # We may have collected garbage
1170 message = [] # We may have collected garbage
1171 else:
1171 else:
1172 message.append(line)
1172 message.append(line)
1173
1173
1174 # make sure message isn't empty
1174 # make sure message isn't empty
1175 if not message:
1175 if not message:
1176 message = "imported patch %s\n" % patch
1176 message = "imported patch %s\n" % patch
1177 else:
1177 else:
1178 message = "%s\n" % '\n'.join(message)
1178 message = "%s\n" % '\n'.join(message)
1179 ui.debug('message:\n%s\n' % message)
1179 ui.debug('message:\n%s\n' % message)
1180
1180
1181 files = util.patch(strip, pf, ui)
1181 files = util.patch(strip, pf, ui)
1182
1182
1183 if len(files) > 0:
1183 if len(files) > 0:
1184 addremove(ui, repo, *files)
1184 addremove(ui, repo, *files)
1185 repo.commit(files, message, user)
1185 repo.commit(files, message, user)
1186
1186
1187 def incoming(ui, repo, source="default", **opts):
1187 def incoming(ui, repo, source="default", **opts):
1188 """show new changesets found in source"""
1188 """show new changesets found in source"""
1189 source = ui.expandpath(source)
1189 source = ui.expandpath(source)
1190 other = hg.repository(ui, source)
1190 other = hg.repository(ui, source)
1191 if not other.local():
1191 if not other.local():
1192 raise util.Abort("incoming doesn't work for remote repositories yet")
1192 raise util.Abort("incoming doesn't work for remote repositories yet")
1193 o = repo.findincoming(other)
1193 o = repo.findincoming(other)
1194 if not o:
1194 if not o:
1195 return
1195 return
1196 o = other.newer(o)
1196 o = other.changelog.nodesbetween(o)[0]
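# nodesbetween() returns a (nodes, roots, heads) tuple; element 0 is the
# list of changesets descended from the incoming roots found above, which
# is what the removed newer() helper used to compute.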
1197 for n in o:
1197 for n in o:
1198 show_changeset(ui, other, changenode=n)
1198 show_changeset(ui, other, changenode=n)
1199 if opts['patch']:
1199 if opts['patch']:
1200 prev = other.changelog.parents(n)[0]
1200 prev = other.changelog.parents(n)[0]
1201 dodiff(ui, ui, other, prev, n)
1201 dodiff(ui, ui, other, prev, n)
1202 ui.write("\n")
1202 ui.write("\n")
1203
1203
1204 def init(ui, dest="."):
1204 def init(ui, dest="."):
1205 """create a new repository in the given directory"""
1205 """create a new repository in the given directory"""
1206 if not os.path.exists(dest):
1206 if not os.path.exists(dest):
1207 os.mkdir(dest)
1207 os.mkdir(dest)
1208 hg.repository(ui, dest, create=1)
1208 hg.repository(ui, dest, create=1)
1209
1209
1210 def locate(ui, repo, *pats, **opts):
1210 def locate(ui, repo, *pats, **opts):
1211 """locate files matching specific patterns"""
1211 """locate files matching specific patterns"""
1212 end = opts['print0'] and '\0' or '\n'
1212 end = opts['print0'] and '\0' or '\n'
1213
1213
1214 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1214 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1215 if repo.dirstate.state(abs) == '?':
1215 if repo.dirstate.state(abs) == '?':
1216 continue
1216 continue
1217 if opts['fullpath']:
1217 if opts['fullpath']:
1218 ui.write(os.path.join(repo.root, abs), end)
1218 ui.write(os.path.join(repo.root, abs), end)
1219 else:
1219 else:
1220 ui.write(rel, end)
1220 ui.write(rel, end)
1221
1221
1222 def log(ui, repo, *pats, **opts):
1222 def log(ui, repo, *pats, **opts):
1223 """show revision history of entire repository or files"""
1223 """show revision history of entire repository or files"""
1224 class dui:
1224 class dui:
1225 # Implement and delegate some ui protocol. Save hunks of
1225 # Implement and delegate some ui protocol. Save hunks of
1226 # output for later display in the desired order.
1226 # output for later display in the desired order.
1227 def __init__(self, ui):
1227 def __init__(self, ui):
1228 self.ui = ui
1228 self.ui = ui
1229 self.hunk = {}
1229 self.hunk = {}
1230 def bump(self, rev):
1230 def bump(self, rev):
1231 self.rev = rev
1231 self.rev = rev
1232 self.hunk[rev] = []
1232 self.hunk[rev] = []
1233 def note(self, *args):
1233 def note(self, *args):
1234 if self.verbose:
1234 if self.verbose:
1235 self.write(*args)
1235 self.write(*args)
1236 def status(self, *args):
1236 def status(self, *args):
1237 if not self.quiet:
1237 if not self.quiet:
1238 self.write(*args)
1238 self.write(*args)
1239 def write(self, *args):
1239 def write(self, *args):
1240 self.hunk[self.rev].append(args)
1240 self.hunk[self.rev].append(args)
1241 def debug(self, *args):
1241 def debug(self, *args):
1242 if self.debugflag:
1242 if self.debugflag:
1243 self.write(*args)
1243 self.write(*args)
1244 def __getattr__(self, key):
1244 def __getattr__(self, key):
1245 return getattr(self.ui, key)
1245 return getattr(self.ui, key)
1246 cwd = repo.getcwd()
1246 cwd = repo.getcwd()
1247 if not pats and cwd:
1247 if not pats and cwd:
1248 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1248 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1249 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1249 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1250 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1250 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1251 pats, opts)
1251 pats, opts)
1252 for st, rev, fns in changeiter:
1252 for st, rev, fns in changeiter:
1253 if st == 'window':
1253 if st == 'window':
1254 du = dui(ui)
1254 du = dui(ui)
1255 elif st == 'add':
1255 elif st == 'add':
1256 du.bump(rev)
1256 du.bump(rev)
1257 br = None
1257 br = None
1258 if opts['branch']:
1258 if opts['branch']:
1259 br = repo.branchlookup([repo.changelog.node(rev)])
1259 br = repo.branchlookup([repo.changelog.node(rev)])
1260
1260
1261 if opts['keyword']:
1261 if opts['keyword']:
1262 changes = repo.changelog.read(repo.changelog.node(rev))
1262 changes = repo.changelog.read(repo.changelog.node(rev))
1263 miss = 0
1263 miss = 0
1264 for k in opts['keyword']:
1264 for k in opts['keyword']:
1265 if not (k in changes[1].lower() or
1265 if not (k in changes[1].lower() or
1266 k in changes[4].lower() or
1266 k in changes[4].lower() or
1267 k in " ".join(changes[3][:20]).lower()):
1267 k in " ".join(changes[3][:20]).lower()):
1268 miss = 1
1268 miss = 1
1269 break
1269 break
1270 if miss:
1270 if miss:
1271 continue
1271 continue
1272
1272
1273 show_changeset(du, repo, rev, brinfo=br)
1273 show_changeset(du, repo, rev, brinfo=br)
1274 if opts['patch']:
1274 if opts['patch']:
1275 changenode = repo.changelog.node(rev)
1275 changenode = repo.changelog.node(rev)
1276 prev, other = repo.changelog.parents(changenode)
1276 prev, other = repo.changelog.parents(changenode)
1277 dodiff(du, du, repo, prev, changenode, fns)
1277 dodiff(du, du, repo, prev, changenode, fns)
1278 du.write("\n\n")
1278 du.write("\n\n")
1279 elif st == 'iter':
1279 elif st == 'iter':
1280 for args in du.hunk[rev]:
1280 for args in du.hunk[rev]:
1281 ui.write(*args)
1281 ui.write(*args)
1282
1282
1283 def manifest(ui, repo, rev=None):
1283 def manifest(ui, repo, rev=None):
1284 """output the latest or given revision of the project manifest"""
1284 """output the latest or given revision of the project manifest"""
1285 if rev:
1285 if rev:
1286 try:
1286 try:
1287 # assume all revision numbers are for changesets
1287 # assume all revision numbers are for changesets
1288 n = repo.lookup(rev)
1288 n = repo.lookup(rev)
1289 change = repo.changelog.read(n)
1289 change = repo.changelog.read(n)
1290 n = change[0]
1290 n = change[0]
1291 except hg.RepoError:
1291 except hg.RepoError:
1292 n = repo.manifest.lookup(rev)
1292 n = repo.manifest.lookup(rev)
1293 else:
1293 else:
1294 n = repo.manifest.tip()
1294 n = repo.manifest.tip()
1295 m = repo.manifest.read(n)
1295 m = repo.manifest.read(n)
1296 mf = repo.manifest.readflags(n)
1296 mf = repo.manifest.readflags(n)
1297 files = m.keys()
1297 files = m.keys()
1298 files.sort()
1298 files.sort()
1299
1299
1300 for f in files:
1300 for f in files:
1301 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1301 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1302
1302
1303 def outgoing(ui, repo, dest="default-push", **opts):
1303 def outgoing(ui, repo, dest="default-push", **opts):
1304 """show changesets not found in destination"""
1304 """show changesets not found in destination"""
1305 dest = ui.expandpath(dest)
1305 dest = ui.expandpath(dest)
1306 other = hg.repository(ui, dest)
1306 other = hg.repository(ui, dest)
1307 o = repo.findoutgoing(other)
1307 o = repo.findoutgoing(other)
1308 o = repo.newer(o)
1308 o = repo.changelog.nodesbetween(o)[0]
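# same pattern as incoming above: take the node list (element 0) of
# nodesbetween(), here rooted at the changesets the destination lacks.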
1309 for n in o:
1309 for n in o:
1310 show_changeset(ui, repo, changenode=n)
1310 show_changeset(ui, repo, changenode=n)
1311 if opts['patch']:
1311 if opts['patch']:
1312 prev = repo.changelog.parents(n)[0]
1312 prev = repo.changelog.parents(n)[0]
1313 dodiff(ui, ui, repo, prev, n)
1313 dodiff(ui, ui, repo, prev, n)
1314 ui.write("\n")
1314 ui.write("\n")
1315
1315
1316 def parents(ui, repo, rev=None):
1316 def parents(ui, repo, rev=None):
1317 """show the parents of the working dir or revision"""
1317 """show the parents of the working dir or revision"""
1318 if rev:
1318 if rev:
1319 p = repo.changelog.parents(repo.lookup(rev))
1319 p = repo.changelog.parents(repo.lookup(rev))
1320 else:
1320 else:
1321 p = repo.dirstate.parents()
1321 p = repo.dirstate.parents()
1322
1322
1323 for n in p:
1323 for n in p:
1324 if n != nullid:
1324 if n != nullid:
1325 show_changeset(ui, repo, changenode=n)
1325 show_changeset(ui, repo, changenode=n)
1326
1326
1327 def paths(ui, search=None):
1327 def paths(ui, search=None):
1328 """show definition of symbolic path names"""
1328 """show definition of symbolic path names"""
1329 try:
1329 try:
1330 repo = hg.repository(ui=ui)
1330 repo = hg.repository(ui=ui)
1331 except hg.RepoError:
1331 except hg.RepoError:
1332 pass
1332 pass
1333
1333
1334 if search:
1334 if search:
1335 for name, path in ui.configitems("paths"):
1335 for name, path in ui.configitems("paths"):
1336 if name == search:
1336 if name == search:
1337 ui.write("%s\n" % path)
1337 ui.write("%s\n" % path)
1338 return
1338 return
1339 ui.warn("not found!\n")
1339 ui.warn("not found!\n")
1340 return 1
1340 return 1
1341 else:
1341 else:
1342 for name, path in ui.configitems("paths"):
1342 for name, path in ui.configitems("paths"):
1343 ui.write("%s = %s\n" % (name, path))
1343 ui.write("%s = %s\n" % (name, path))
1344
1344
1345 def pull(ui, repo, source="default", **opts):
1345 def pull(ui, repo, source="default", **opts):
1346 """pull changes from the specified source"""
1346 """pull changes from the specified source"""
1347 source = ui.expandpath(source)
1347 source = ui.expandpath(source)
1348 ui.status('pulling from %s\n' % (source))
1348 ui.status('pulling from %s\n' % (source))
1349
1349
1350 if opts['ssh']:
1350 if opts['ssh']:
1351 ui.setconfig("ui", "ssh", opts['ssh'])
1351 ui.setconfig("ui", "ssh", opts['ssh'])
1352 if opts['remotecmd']:
1352 if opts['remotecmd']:
1353 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1353 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1354
1354
1355 other = hg.repository(ui, source)
1355 other = hg.repository(ui, source)
1356 r = repo.pull(other)
1356 r = repo.pull(other)
1357 if not r:
1357 if not r:
1358 if opts['update']:
1358 if opts['update']:
1359 return update(ui, repo)
1359 return update(ui, repo)
1360 else:
1360 else:
1361 ui.status("(run 'hg update' to get a working copy)\n")
1361 ui.status("(run 'hg update' to get a working copy)\n")
1362
1362
1363 return r
1363 return r
1364
1364
1365 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1365 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1366 """push changes to the specified destination"""
1366 """push changes to the specified destination"""
1367 dest = ui.expandpath(dest)
1367 dest = ui.expandpath(dest)
1368 ui.status('pushing to %s\n' % (dest))
1368 ui.status('pushing to %s\n' % (dest))
1369
1369
1370 if ssh:
1370 if ssh:
1371 ui.setconfig("ui", "ssh", ssh)
1371 ui.setconfig("ui", "ssh", ssh)
1372 if remotecmd:
1372 if remotecmd:
1373 ui.setconfig("ui", "remotecmd", remotecmd)
1373 ui.setconfig("ui", "remotecmd", remotecmd)
1374
1374
1375 other = hg.repository(ui, dest)
1375 other = hg.repository(ui, dest)
1376 r = repo.push(other, force)
1376 r = repo.push(other, force)
1377 return r
1377 return r
1378
1378
1379 def rawcommit(ui, repo, *flist, **rc):
1379 def rawcommit(ui, repo, *flist, **rc):
1380 "raw commit interface"
1380 "raw commit interface"
1381 if rc['text']:
1381 if rc['text']:
1382 ui.warn("Warning: -t and --text is deprecated,"
1382 ui.warn("Warning: -t and --text is deprecated,"
1383 " please use -m or --message instead.\n")
1383 " please use -m or --message instead.\n")
1384 message = rc['message'] or rc['text']
1384 message = rc['message'] or rc['text']
1385 if not message and rc['logfile']:
1385 if not message and rc['logfile']:
1386 try:
1386 try:
1387 message = open(rc['logfile']).read()
1387 message = open(rc['logfile']).read()
1388 except IOError:
1388 except IOError:
1389 pass
1389 pass
1390 if not message and not rc['logfile']:
1390 if not message and not rc['logfile']:
1391 raise util.Abort("missing commit message")
1391 raise util.Abort("missing commit message")
1392
1392
1393 files = relpath(repo, list(flist))
1393 files = relpath(repo, list(flist))
1394 if rc['files']:
1394 if rc['files']:
1395 files += open(rc['files']).read().splitlines()
1395 files += open(rc['files']).read().splitlines()
1396
1396
1397 rc['parent'] = map(repo.lookup, rc['parent'])
1397 rc['parent'] = map(repo.lookup, rc['parent'])
1398
1398
1399 try:
1399 try:
1400 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1400 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1401 except ValueError, inst:
1401 except ValueError, inst:
1402 raise util.Abort(str(inst))
1402 raise util.Abort(str(inst))
1403
1403
1404 def recover(ui, repo):
1404 def recover(ui, repo):
1405 """roll back an interrupted transaction"""
1405 """roll back an interrupted transaction"""
1406 repo.recover()
1406 repo.recover()
1407
1407
1408 def remove(ui, repo, pat, *pats, **opts):
1408 def remove(ui, repo, pat, *pats, **opts):
1409 """remove the specified files on the next commit"""
1409 """remove the specified files on the next commit"""
1410 names = []
1410 names = []
1411 def okaytoremove(abs, rel, exact):
1411 def okaytoremove(abs, rel, exact):
1412 c, a, d, u = repo.changes(files = [abs])
1412 c, a, d, u = repo.changes(files = [abs])
1413 reason = None
1413 reason = None
1414 if c: reason = 'is modified'
1414 if c: reason = 'is modified'
1415 elif a: reason = 'has been marked for add'
1415 elif a: reason = 'has been marked for add'
1416 elif u: reason = 'is not managed'
1416 elif u: reason = 'is not managed'
1417 if reason:
1417 if reason:
1418 if exact: ui.warn('not removing %s: file %s\n' % (rel, reason))
1418 if exact: ui.warn('not removing %s: file %s\n' % (rel, reason))
1419 else:
1419 else:
1420 return True
1420 return True
1421 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1421 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1422 if okaytoremove(abs, rel, exact):
1422 if okaytoremove(abs, rel, exact):
1423 if ui.verbose or not exact: ui.status('removing %s\n' % rel)
1423 if ui.verbose or not exact: ui.status('removing %s\n' % rel)
1424 names.append(abs)
1424 names.append(abs)
1425 for name in names:
1425 for name in names:
1426 try:
1426 try:
1427 os.unlink(name)
1427 os.unlink(name)
1428 except OSError, inst:
1428 except OSError, inst:
1429 if inst.errno != errno.ENOENT: raise
1429 if inst.errno != errno.ENOENT: raise
1430 repo.remove(names)
1430 repo.remove(names)
1431
1431
1432 def rename(ui, repo, *pats, **opts):
1432 def rename(ui, repo, *pats, **opts):
1433 """rename files; equivalent of copy + remove"""
1433 """rename files; equivalent of copy + remove"""
1434 errs, copied = docopy(ui, repo, pats, opts)
1434 errs, copied = docopy(ui, repo, pats, opts)
1435 names = []
1435 names = []
1436 for abs, rel, exact in copied:
1436 for abs, rel, exact in copied:
1437 if ui.verbose or not exact: ui.status('removing %s\n' % rel)
1437 if ui.verbose or not exact: ui.status('removing %s\n' % rel)
1438 try:
1438 try:
1439 os.unlink(rel)
1439 os.unlink(rel)
1440 except OSError, inst:
1440 except OSError, inst:
1441 if inst.errno != errno.ENOENT: raise
1441 if inst.errno != errno.ENOENT: raise
1442 names.append(abs)
1442 names.append(abs)
1443 repo.remove(names)
1443 repo.remove(names)
1444 return errs
1444 return errs
1445
1445
1446 def revert(ui, repo, *names, **opts):
1446 def revert(ui, repo, *names, **opts):
1447 """revert modified files or dirs back to their unmodified states"""
1447 """revert modified files or dirs back to their unmodified states"""
1448 node = opts['rev'] and repo.lookup(opts['rev']) or \
1448 node = opts['rev'] and repo.lookup(opts['rev']) or \
1449 repo.dirstate.parents()[0]
1449 repo.dirstate.parents()[0]
1450 root = os.path.realpath(repo.root)
1450 root = os.path.realpath(repo.root)
1451
1451
1452 def trimpath(p):
1452 def trimpath(p):
1453 p = os.path.realpath(p)
1453 p = os.path.realpath(p)
1454 if p.startswith(root):
1454 if p.startswith(root):
1455 rest = p[len(root):]
1455 rest = p[len(root):]
1456 if not rest:
1456 if not rest:
1457 return rest
1457 return rest
1458 if rest.startswith(os.sep):
1458 if rest.startswith(os.sep):
1459 return rest[1:]
1459 return rest[1:]
1460 return p
1460 return p
1461
1461
1462 relnames = map(trimpath, names or [os.getcwd()])
1462 relnames = map(trimpath, names or [os.getcwd()])
1463 chosen = {}
1463 chosen = {}
1464
1464
1465 def choose(name):
1465 def choose(name):
1466 def body(name):
1466 def body(name):
1467 for r in relnames:
1467 for r in relnames:
1468 if not name.startswith(r):
1468 if not name.startswith(r):
1469 continue
1469 continue
1470 rest = name[len(r):]
1470 rest = name[len(r):]
1471 if not rest:
1471 if not rest:
1472 return r, True
1472 return r, True
1473 depth = rest.count(os.sep)
1473 depth = rest.count(os.sep)
1474 if not r:
1474 if not r:
1475 if depth == 0 or not opts['nonrecursive']:
1475 if depth == 0 or not opts['nonrecursive']:
1476 return r, True
1476 return r, True
1477 elif rest[0] == os.sep:
1477 elif rest[0] == os.sep:
1478 if depth == 1 or not opts['nonrecursive']:
1478 if depth == 1 or not opts['nonrecursive']:
1479 return r, True
1479 return r, True
1480 return None, False
1480 return None, False
1481 relname, ret = body(name)
1481 relname, ret = body(name)
1482 if ret:
1482 if ret:
1483 chosen[relname] = 1
1483 chosen[relname] = 1
1484 return ret
1484 return ret
1485
1485
1486 r = repo.update(node, False, True, choose, False)
1486 r = repo.update(node, False, True, choose, False)
1487 for n in relnames:
1487 for n in relnames:
1488 if n not in chosen:
1488 if n not in chosen:
1489 ui.warn('error: no matches for %s\n' % n)
1489 ui.warn('error: no matches for %s\n' % n)
1490 r = 1
1490 r = 1
1491 sys.stdout.flush()
1491 sys.stdout.flush()
1492 return r
1492 return r
1493
1493
1494 def root(ui, repo):
1494 def root(ui, repo):
1495 """print the root (top) of the current working dir"""
1495 """print the root (top) of the current working dir"""
1496 ui.write(repo.root + "\n")
1496 ui.write(repo.root + "\n")
1497
1497
1498 def serve(ui, repo, **opts):
1498 def serve(ui, repo, **opts):
1499 """export the repository via HTTP"""
1499 """export the repository via HTTP"""
1500
1500
1501 if opts["stdio"]:
1501 if opts["stdio"]:
1502 fin, fout = sys.stdin, sys.stdout
1502 fin, fout = sys.stdin, sys.stdout
1503 sys.stdout = sys.stderr
1503 sys.stdout = sys.stderr
1504
1504
1505 def getarg():
1505 def getarg():
1506 argline = fin.readline()[:-1]
1506 argline = fin.readline()[:-1]
1507 arg, l = argline.split()
1507 arg, l = argline.split()
1508 val = fin.read(int(l))
1508 val = fin.read(int(l))
1509 return arg, val
1509 return arg, val
1510 def respond(v):
1510 def respond(v):
1511 fout.write("%d\n" % len(v))
1511 fout.write("%d\n" % len(v))
1512 fout.write(v)
1512 fout.write(v)
1513 fout.flush()
1513 fout.flush()
1514
1514
1515 lock = None
1515 lock = None
1516
1516
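# --stdio command loop: each request is a command name on its own line;
# arguments arrive as "name length" framing read by getarg(), and every
# reply goes through respond(), which writes the payload length on one
# line followed by the payload itself.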
1517 while 1:
1517 while 1:
1518 cmd = fin.readline()[:-1]
1518 cmd = fin.readline()[:-1]
1519 if cmd == '':
1519 if cmd == '':
1520 return
1520 return
1521 if cmd == "heads":
1521 if cmd == "heads":
1522 h = repo.heads()
1522 h = repo.heads()
1523 respond(" ".join(map(hex, h)) + "\n")
1523 respond(" ".join(map(hex, h)) + "\n")
1524 if cmd == "lock":
1524 if cmd == "lock":
1525 lock = repo.lock()
1525 lock = repo.lock()
1526 respond("")
1526 respond("")
1527 if cmd == "unlock":
1527 if cmd == "unlock":
1528 if lock:
1528 if lock:
1529 lock.release()
1529 lock.release()
1530 lock = None
1530 lock = None
1531 respond("")
1531 respond("")
1532 elif cmd == "branches":
1532 elif cmd == "branches":
1533 arg, nodes = getarg()
1533 arg, nodes = getarg()
1534 nodes = map(bin, nodes.split(" "))
1534 nodes = map(bin, nodes.split(" "))
1535 r = []
1535 r = []
1536 for b in repo.branches(nodes):
1536 for b in repo.branches(nodes):
1537 r.append(" ".join(map(hex, b)) + "\n")
1537 r.append(" ".join(map(hex, b)) + "\n")
1538 respond("".join(r))
1538 respond("".join(r))
1539 elif cmd == "between":
1539 elif cmd == "between":
1540 arg, pairs = getarg()
1540 arg, pairs = getarg()
1541 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1541 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1542 r = []
1542 r = []
1543 for b in repo.between(pairs):
1543 for b in repo.between(pairs):
1544 r.append(" ".join(map(hex, b)) + "\n")
1544 r.append(" ".join(map(hex, b)) + "\n")
1545 respond("".join(r))
1545 respond("".join(r))
1546 elif cmd == "changegroup":
1546 elif cmd == "changegroup":
1547 nodes = []
1547 nodes = []
1548 arg, roots = getarg()
1548 arg, roots = getarg()
1549 nodes = map(bin, roots.split(" "))
1549 nodes = map(bin, roots.split(" "))
1550
1550
1551 cg = repo.changegroup(nodes)
1551 cg = repo.changegroup(nodes)
1552 while 1:
1552 while 1:
1553 d = cg.read(4096)
1553 d = cg.read(4096)
1554 if not d:
1554 if not d:
1555 break
1555 break
1556 fout.write(d)
1556 fout.write(d)
1557
1557
1558 fout.flush()
1558 fout.flush()
1559
1559
1560 elif cmd == "addchangegroup":
1560 elif cmd == "addchangegroup":
1561 if not lock:
1561 if not lock:
1562 respond("not locked")
1562 respond("not locked")
1563 continue
1563 continue
1564 respond("")
1564 respond("")
1565
1565
1566 r = repo.addchangegroup(fin)
1566 r = repo.addchangegroup(fin)
1567 respond("")
1567 respond("")
1568
1568
1569 optlist = "name templates style address port ipv6 accesslog errorlog"
1569 optlist = "name templates style address port ipv6 accesslog errorlog"
1570 for o in optlist.split():
1570 for o in optlist.split():
1571 if opts[o]:
1571 if opts[o]:
1572 ui.setconfig("web", o, opts[o])
1572 ui.setconfig("web", o, opts[o])
1573
1573
1574 try:
1574 try:
1575 httpd = hgweb.create_server(repo)
1575 httpd = hgweb.create_server(repo)
1576 except socket.error, inst:
1576 except socket.error, inst:
1577 raise util.Abort('cannot start server: ' + inst.args[1])
1577 raise util.Abort('cannot start server: ' + inst.args[1])
1578
1578
1579 if ui.verbose:
1579 if ui.verbose:
1580 addr, port = httpd.socket.getsockname()
1580 addr, port = httpd.socket.getsockname()
1581 if addr == '0.0.0.0':
1581 if addr == '0.0.0.0':
1582 addr = socket.gethostname()
1582 addr = socket.gethostname()
1583 else:
1583 else:
1584 try:
1584 try:
1585 addr = socket.gethostbyaddr(addr)[0]
1585 addr = socket.gethostbyaddr(addr)[0]
1586 except socket.error:
1586 except socket.error:
1587 pass
1587 pass
1588 if port != 80:
1588 if port != 80:
1589 ui.status('listening at http://%s:%d/\n' % (addr, port))
1589 ui.status('listening at http://%s:%d/\n' % (addr, port))
1590 else:
1590 else:
1591 ui.status('listening at http://%s/\n' % addr)
1591 ui.status('listening at http://%s/\n' % addr)
1592 httpd.serve_forever()
1592 httpd.serve_forever()
1593
1593
1594 def status(ui, repo, *pats, **opts):
1594 def status(ui, repo, *pats, **opts):
1595 '''show changed files in the working directory
1595 '''show changed files in the working directory
1596
1596
1597 M = modified
1597 M = modified
1598 A = added
1598 A = added
1599 R = removed
1599 R = removed
1600 ? = not tracked
1600 ? = not tracked
1601 '''
1601 '''
1602
1602
1603 cwd = repo.getcwd()
1603 cwd = repo.getcwd()
1604 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1604 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1605 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1605 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1606 for n in repo.changes(files=files, match=matchfn)]
1606 for n in repo.changes(files=files, match=matchfn)]
1607
1607
1608 changetypes = [('modified', 'M', c),
1608 changetypes = [('modified', 'M', c),
1609 ('added', 'A', a),
1609 ('added', 'A', a),
1610 ('removed', 'R', d),
1610 ('removed', 'R', d),
1611 ('unknown', '?', u)]
1611 ('unknown', '?', u)]
1612
1612
1613 end = opts['print0'] and '\0' or '\n'
1613 end = opts['print0'] and '\0' or '\n'
1614
1614
1615 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1615 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1616 or changetypes):
1616 or changetypes):
1617 if opts['no_status']:
1617 if opts['no_status']:
1618 format = "%%s%s" % end
1618 format = "%%s%s" % end
1619 else:
1619 else:
1620 format = "%s %%s%s" % (char, end);
1620 format = "%s %%s%s" % (char, end);
1621
1621
1622 for f in changes:
1622 for f in changes:
1623 ui.write(format % f)
1623 ui.write(format % f)
1624
1624
1625 def tag(ui, repo, name, rev=None, **opts):
1625 def tag(ui, repo, name, rev=None, **opts):
1626 """add a tag for the current tip or a given revision"""
1626 """add a tag for the current tip or a given revision"""
1627 if opts['text']:
1627 if opts['text']:
1628 ui.warn("Warning: -t and --text is deprecated,"
1628 ui.warn("Warning: -t and --text is deprecated,"
1629 " please use -m or --message instead.\n")
1629 " please use -m or --message instead.\n")
1630 if name == "tip":
1630 if name == "tip":
1631 raise util.Abort("the name 'tip' is reserved")
1631 raise util.Abort("the name 'tip' is reserved")
1632 if rev:
1632 if rev:
1633 r = hex(repo.lookup(rev))
1633 r = hex(repo.lookup(rev))
1634 else:
1634 else:
1635 r = hex(repo.changelog.tip())
1635 r = hex(repo.changelog.tip())
1636
1636
1637 if name.find(revrangesep) >= 0:
1637 if name.find(revrangesep) >= 0:
1638 raise util.Abort("'%s' cannot be used in a tag name" % revrangesep)
1638 raise util.Abort("'%s' cannot be used in a tag name" % revrangesep)
1639
1639
1640 if opts['local']:
1640 if opts['local']:
1641 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
1641 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
1642 return
1642 return
1643
1643
1644 (c, a, d, u) = repo.changes()
1644 (c, a, d, u) = repo.changes()
1645 for x in (c, a, d, u):
1645 for x in (c, a, d, u):
1646 if ".hgtags" in x:
1646 if ".hgtags" in x:
1647 raise util.Abort("working copy of .hgtags is changed "
1647 raise util.Abort("working copy of .hgtags is changed "
1648 "(please commit .hgtags manually)")
1648 "(please commit .hgtags manually)")
1649
1649
1650 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
1650 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
1651 if repo.dirstate.state(".hgtags") == '?':
1651 if repo.dirstate.state(".hgtags") == '?':
1652 repo.add([".hgtags"])
1652 repo.add([".hgtags"])
1653
1653
1654 message = (opts['message'] or opts['text'] or
1654 message = (opts['message'] or opts['text'] or
1655 "Added tag %s for changeset %s" % (name, r))
1655 "Added tag %s for changeset %s" % (name, r))
1656 try:
1656 try:
1657 repo.commit([".hgtags"], message, opts['user'], opts['date'])
1657 repo.commit([".hgtags"], message, opts['user'], opts['date'])
1658 except ValueError, inst:
1658 except ValueError, inst:
1659 raise util.Abort(str(inst))
1659 raise util.Abort(str(inst))
1660
1660
1661 def tags(ui, repo):
1661 def tags(ui, repo):
1662 """list repository tags"""
1662 """list repository tags"""
1663
1663
1664 l = repo.tagslist()
1664 l = repo.tagslist()
1665 l.reverse()
1665 l.reverse()
1666 for t, n in l:
1666 for t, n in l:
1667 try:
1667 try:
1668 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
1668 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
1669 except KeyError:
1669 except KeyError:
1670 r = " ?:?"
1670 r = " ?:?"
1671 ui.write("%-30s %s\n" % (t, r))
1671 ui.write("%-30s %s\n" % (t, r))
1672
1672
1673 def tip(ui, repo):
1673 def tip(ui, repo):
1674 """show the tip revision"""
1674 """show the tip revision"""
1675 n = repo.changelog.tip()
1675 n = repo.changelog.tip()
1676 show_changeset(ui, repo, changenode=n)
1676 show_changeset(ui, repo, changenode=n)
1677
1677
1678 def unbundle(ui, repo, fname):
1678 def unbundle(ui, repo, fname):
1679 """apply a changegroup file"""
1679 """apply a changegroup file"""
1680 f = urllib.urlopen(fname)
1680 f = urllib.urlopen(fname)
1681
1681
1682 if f.read(4) != "HG10":
1682 if f.read(4) != "HG10":
1683 raise util.Abort("%s: not a Mercurial bundle file" % fname)
1683 raise util.Abort("%s: not a Mercurial bundle file" % fname)
1684
1684
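# past the "HG10" header checked above, the bundle body is a
# bz2-compressed changegroup; decompress it lazily and hand it to
# addchangegroup() through a chunk buffer.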
1685 def bzgenerator(f):
1685 def bzgenerator(f):
1686 zd = bz2.BZ2Decompressor()
1686 zd = bz2.BZ2Decompressor()
1687 for chunk in f:
1687 for chunk in f:
1688 yield zd.decompress(chunk)
1688 yield zd.decompress(chunk)
1689 yield zd.flush()
1689 yield zd.flush()
1690
1690
1691 bzgen = bzgenerator(util.filechunkiter(f, 4096))
1691 bzgen = bzgenerator(util.filechunkiter(f, 4096))
1692 repo.addchangegroup(util.chunkbuffer(bzgen))
1692 repo.addchangegroup(util.chunkbuffer(bzgen))
1693
1693
1694 def undo(ui, repo):
1694 def undo(ui, repo):
1695 """undo the last commit or pull
1695 """undo the last commit or pull
1696
1696
1697 Roll back the last pull or commit transaction on the
1697 Roll back the last pull or commit transaction on the
1698 repository, restoring the project to its earlier state.
1698 repository, restoring the project to its earlier state.
1699
1699
1700 This command should be used with care. There is only one level of
1700 This command should be used with care. There is only one level of
1701 undo and there is no redo.
1701 undo and there is no redo.
1702
1702
1703 This command is not intended for use on public repositories. Once
1703 This command is not intended for use on public repositories. Once
1704 a change is visible for pull by other users, undoing it locally is
1704 a change is visible for pull by other users, undoing it locally is
1705 ineffective.
1705 ineffective.
1706 """
1706 """
1707 repo.undo()
1707 repo.undo()
1708
1708
1709 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
1709 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
1710 '''update or merge working directory
1710 '''update or merge working directory
1711
1711
1712 If there are no outstanding changes in the working directory and
1712 If there are no outstanding changes in the working directory and
1713 there is a linear relationship between the current version and the
1713 there is a linear relationship between the current version and the
1714 requested version, the result is the requested version.
1714 requested version, the result is the requested version.
1715
1715
1716 Otherwise the result is a merge between the contents of the
1716 Otherwise the result is a merge between the contents of the
1717 current working directory and the requested version. Files that
1717 current working directory and the requested version. Files that
1718 changed between either parent are marked as changed for the next
1718 changed between either parent are marked as changed for the next
1719 commit and a commit must be performed before any further updates
1719 commit and a commit must be performed before any further updates
1720 are allowed.
1720 are allowed.
1721 '''
1721 '''
1722 if branch:
1722 if branch:
1723 br = repo.branchlookup(branch=branch)
1723 br = repo.branchlookup(branch=branch)
1724 found = []
1724 found = []
1725 for x in br:
1725 for x in br:
1726 if branch in br[x]:
1726 if branch in br[x]:
1727 found.append(x)
1727 found.append(x)
1728 if len(found) > 1:
1728 if len(found) > 1:
1729 ui.warn("Found multiple heads for %s\n" % branch)
1729 ui.warn("Found multiple heads for %s\n" % branch)
1730 for x in found:
1730 for x in found:
1731 show_changeset(ui, repo, changenode=x, brinfo=br)
1731 show_changeset(ui, repo, changenode=x, brinfo=br)
1732 return 1
1732 return 1
1733 if len(found) == 1:
1733 if len(found) == 1:
1734 node = found[0]
1734 node = found[0]
1735 ui.warn("Using head %s for branch %s\n" % (short(node), branch))
1735 ui.warn("Using head %s for branch %s\n" % (short(node), branch))
1736 else:
1736 else:
1737 ui.warn("branch %s not found\n" % (branch))
1737 ui.warn("branch %s not found\n" % (branch))
1738 return 1
1738 return 1
1739 else:
1739 else:
1740 node = node and repo.lookup(node) or repo.changelog.tip()
1740 node = node and repo.lookup(node) or repo.changelog.tip()
1741 return repo.update(node, allow=merge, force=clean)
1741 return repo.update(node, allow=merge, force=clean)
1742
1742
1743 def verify(ui, repo):
1743 def verify(ui, repo):
1744 """verify the integrity of the repository"""
1744 """verify the integrity of the repository"""
1745 return repo.verify()
1745 return repo.verify()
1746
1746
1747 # Command options and aliases are listed here, alphabetically
1747 # Command options and aliases are listed here, alphabetically
1748
1748
1749 table = {
1749 table = {
1750 "^add":
1750 "^add":
1751 (add,
1751 (add,
1752 [('I', 'include', [], 'include path in search'),
1752 [('I', 'include', [], 'include path in search'),
1753 ('X', 'exclude', [], 'exclude path from search')],
1753 ('X', 'exclude', [], 'exclude path from search')],
1754 "hg add [OPTION]... [FILE]..."),
1754 "hg add [OPTION]... [FILE]..."),
1755 "addremove":
1755 "addremove":
1756 (addremove,
1756 (addremove,
1757 [('I', 'include', [], 'include path in search'),
1757 [('I', 'include', [], 'include path in search'),
1758 ('X', 'exclude', [], 'exclude path from search')],
1758 ('X', 'exclude', [], 'exclude path from search')],
1759 "hg addremove [OPTION]... [FILE]..."),
1759 "hg addremove [OPTION]... [FILE]..."),
1760 "^annotate":
1760 "^annotate":
1761 (annotate,
1761 (annotate,
1762 [('r', 'rev', '', 'revision'),
1762 [('r', 'rev', '', 'revision'),
1763 ('a', 'text', None, 'treat all files as text'),
1763 ('a', 'text', None, 'treat all files as text'),
1764 ('u', 'user', None, 'show user'),
1764 ('u', 'user', None, 'show user'),
1765 ('n', 'number', None, 'show revision number'),
1765 ('n', 'number', None, 'show revision number'),
1766 ('c', 'changeset', None, 'show changeset'),
1766 ('c', 'changeset', None, 'show changeset'),
1767 ('I', 'include', [], 'include path in search'),
1767 ('I', 'include', [], 'include path in search'),
1768 ('X', 'exclude', [], 'exclude path from search')],
1768 ('X', 'exclude', [], 'exclude path from search')],
1769 'hg annotate [OPTION]... FILE...'),
1769 'hg annotate [OPTION]... FILE...'),
1770 "bundle":
1770 "bundle":
1771 (bundle,
1771 (bundle,
1772 [],
1772 [],
1773 'hg bundle FILE DEST'),
1773 'hg bundle FILE DEST'),
1774 "cat":
1774 "cat":
1775 (cat,
1775 (cat,
1776 [('I', 'include', [], 'include path in search'),
1776 [('I', 'include', [], 'include path in search'),
1777 ('X', 'exclude', [], 'exclude path from search'),
1777 ('X', 'exclude', [], 'exclude path from search'),
1778 ('o', 'output', "", 'output to file'),
1778 ('o', 'output', "", 'output to file'),
1779 ('r', 'rev', '', 'revision')],
1779 ('r', 'rev', '', 'revision')],
1780 'hg cat [OPTION]... FILE...'),
1780 'hg cat [OPTION]... FILE...'),
1781 "^clone":
1781 "^clone":
1782 (clone,
1782 (clone,
1783 [('U', 'noupdate', None, 'skip update after cloning'),
1783 [('U', 'noupdate', None, 'skip update after cloning'),
1784 ('e', 'ssh', "", 'ssh command'),
1784 ('e', 'ssh', "", 'ssh command'),
1785 ('', 'pull', None, 'use pull protocol to copy metadata'),
1785 ('', 'pull', None, 'use pull protocol to copy metadata'),
1786 ('', 'remotecmd', "", 'remote hg command')],
1786 ('', 'remotecmd', "", 'remote hg command')],
1787 'hg clone [OPTION]... SOURCE [DEST]'),
1787 'hg clone [OPTION]... SOURCE [DEST]'),
1788 "^commit|ci":
1788 "^commit|ci":
1789 (commit,
1789 (commit,
1790 [('A', 'addremove', None, 'run add/remove during commit'),
1790 [('A', 'addremove', None, 'run add/remove during commit'),
1791 ('I', 'include', [], 'include path in search'),
1791 ('I', 'include', [], 'include path in search'),
1792 ('X', 'exclude', [], 'exclude path from search'),
1792 ('X', 'exclude', [], 'exclude path from search'),
1793 ('m', 'message', "", 'commit message'),
1793 ('m', 'message', "", 'commit message'),
1794 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1794 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1795 ('l', 'logfile', "", 'commit message file'),
1795 ('l', 'logfile', "", 'commit message file'),
1796 ('d', 'date', "", 'date code'),
1796 ('d', 'date', "", 'date code'),
1797 ('u', 'user', "", 'user')],
1797 ('u', 'user', "", 'user')],
1798 'hg commit [OPTION]... [FILE]...'),
1798 'hg commit [OPTION]... [FILE]...'),
1799 "copy|cp": (copy,
1799 "copy|cp": (copy,
1800 [('I', 'include', [], 'include path in search'),
1800 [('I', 'include', [], 'include path in search'),
1801 ('X', 'exclude', [], 'exclude path from search'),
1801 ('X', 'exclude', [], 'exclude path from search'),
1802 ('A', 'after', None, 'record a copy after it has happened'),
1802 ('A', 'after', None, 'record a copy after it has happened'),
1803 ('f', 'force', None, 'replace destination if it exists'),
1803 ('f', 'force', None, 'replace destination if it exists'),
1804 ('p', 'parents', None, 'append source path to dest')],
1804 ('p', 'parents', None, 'append source path to dest')],
1805 'hg copy [OPTION]... [SOURCE]... DEST'),
1805 'hg copy [OPTION]... [SOURCE]... DEST'),
1806 "debugancestor": (debugancestor, [], 'debugancestor INDEX REV1 REV2'),
1806 "debugancestor": (debugancestor, [], 'debugancestor INDEX REV1 REV2'),
1807 "debugcheckstate": (debugcheckstate, [], 'debugcheckstate'),
1807 "debugcheckstate": (debugcheckstate, [], 'debugcheckstate'),
1808 "debugconfig": (debugconfig, [], 'debugconfig'),
1808 "debugconfig": (debugconfig, [], 'debugconfig'),
1809 "debugstate": (debugstate, [], 'debugstate'),
1809 "debugstate": (debugstate, [], 'debugstate'),
1810 "debugdata": (debugdata, [], 'debugdata FILE REV'),
1810 "debugdata": (debugdata, [], 'debugdata FILE REV'),
1811 "debugindex": (debugindex, [], 'debugindex FILE'),
1811 "debugindex": (debugindex, [], 'debugindex FILE'),
1812 "debugindexdot": (debugindexdot, [], 'debugindexdot FILE'),
1812 "debugindexdot": (debugindexdot, [], 'debugindexdot FILE'),
1813 "debugrename": (debugrename, [], 'debugrename FILE [REV]'),
1813 "debugrename": (debugrename, [], 'debugrename FILE [REV]'),
1814 "debugwalk":
1814 "debugwalk":
1815 (debugwalk,
1815 (debugwalk,
1816 [('I', 'include', [], 'include path in search'),
1816 [('I', 'include', [], 'include path in search'),
1817 ('X', 'exclude', [], 'exclude path from search')],
1817 ('X', 'exclude', [], 'exclude path from search')],
1818 'debugwalk [OPTION]... [FILE]...'),
1818 'debugwalk [OPTION]... [FILE]...'),
1819 "^diff":
1819 "^diff":
1820 (diff,
1820 (diff,
1821 [('r', 'rev', [], 'revision'),
1821 [('r', 'rev', [], 'revision'),
1822 ('a', 'text', None, 'treat all files as text'),
1822 ('a', 'text', None, 'treat all files as text'),
1823 ('I', 'include', [], 'include path in search'),
1823 ('I', 'include', [], 'include path in search'),
1824 ('X', 'exclude', [], 'exclude path from search')],
1824 ('X', 'exclude', [], 'exclude path from search')],
1825 'hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...'),
1825 'hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...'),
1826 "^export":
1826 "^export":
1827 (export,
1827 (export,
1828 [('o', 'output', "", 'output to file'),
1828 [('o', 'output', "", 'output to file'),
1829 ('a', 'text', None, 'treat all files as text')],
1829 ('a', 'text', None, 'treat all files as text')],
1830 "hg export [-a] [-o OUTFILE] REV..."),
1830 "hg export [-a] [-o OUTFILE] REV..."),
1831 "forget":
1831 "forget":
1832 (forget,
1832 (forget,
1833 [('I', 'include', [], 'include path in search'),
1833 [('I', 'include', [], 'include path in search'),
1834 ('X', 'exclude', [], 'exclude path from search')],
1834 ('X', 'exclude', [], 'exclude path from search')],
1835 "hg forget [OPTION]... FILE..."),
1835 "hg forget [OPTION]... FILE..."),
1836 "grep":
1836 "grep":
1837 (grep,
1837 (grep,
1838 [('0', 'print0', None, 'end fields with NUL'),
1838 [('0', 'print0', None, 'end fields with NUL'),
1839 ('I', 'include', [], 'include path in search'),
1839 ('I', 'include', [], 'include path in search'),
1840 ('X', 'exclude', [], 'exclude path from search'),
1840 ('X', 'exclude', [], 'exclude path from search'),
1841 ('', 'all', None, 'print all revisions with matches'),
1841 ('', 'all', None, 'print all revisions with matches'),
1842 ('i', 'ignore-case', None, 'ignore case when matching'),
1842 ('i', 'ignore-case', None, 'ignore case when matching'),
1843 ('l', 'files-with-matches', None, 'print names of files and revs with matches'),
1843 ('l', 'files-with-matches', None, 'print names of files and revs with matches'),
1844 ('n', 'line-number', None, 'print line numbers'),
1844 ('n', 'line-number', None, 'print line numbers'),
1845 ('r', 'rev', [], 'search in revision rev'),
1845 ('r', 'rev', [], 'search in revision rev'),
1846 ('u', 'user', None, 'print user who made change')],
1846 ('u', 'user', None, 'print user who made change')],
1847 "hg grep [OPTION]... PATTERN [FILE]..."),
1847 "hg grep [OPTION]... PATTERN [FILE]..."),
1848 "heads":
1848 "heads":
1849 (heads,
1849 (heads,
1850 [('b', 'branches', None, 'find branch info')],
1850 [('b', 'branches', None, 'find branch info')],
1851 'hg heads [-b]'),
1851 'hg heads [-b]'),
1852 "help": (help_, [], 'hg help [COMMAND]'),
1852 "help": (help_, [], 'hg help [COMMAND]'),
1853 "identify|id": (identify, [], 'hg identify'),
1853 "identify|id": (identify, [], 'hg identify'),
1854 "import|patch":
1854 "import|patch":
1855 (import_,
1855 (import_,
1856 [('p', 'strip', 1, 'path strip'),
1856 [('p', 'strip', 1, 'path strip'),
1857 ('f', 'force', None, 'skip check for outstanding changes'),
1857 ('f', 'force', None, 'skip check for outstanding changes'),
1858 ('b', 'base', "", 'base path')],
1858 ('b', 'base', "", 'base path')],
1859 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
1859 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
1860 "incoming|in": (incoming,
1860 "incoming|in": (incoming,
1861 [('p', 'patch', None, 'show patch')],
1861 [('p', 'patch', None, 'show patch')],
1862 'hg incoming [-p] [SOURCE]'),
1862 'hg incoming [-p] [SOURCE]'),
1863 "^init": (init, [], 'hg init [DEST]'),
1863 "^init": (init, [], 'hg init [DEST]'),
1864 "locate":
1864 "locate":
1865 (locate,
1865 (locate,
1866 [('r', 'rev', '', 'revision'),
1866 [('r', 'rev', '', 'revision'),
1867 ('0', 'print0', None, 'end filenames with NUL'),
1867 ('0', 'print0', None, 'end filenames with NUL'),
1868 ('f', 'fullpath', None, 'print complete paths'),
1868 ('f', 'fullpath', None, 'print complete paths'),
1869 ('I', 'include', [], 'include path in search'),
1869 ('I', 'include', [], 'include path in search'),
1870 ('X', 'exclude', [], 'exclude path from search')],
1870 ('X', 'exclude', [], 'exclude path from search')],
1871 'hg locate [OPTION]... [PATTERN]...'),
1871 'hg locate [OPTION]... [PATTERN]...'),
1872 "^log|history":
1872 "^log|history":
1873 (log,
1873 (log,
1874 [('I', 'include', [], 'include path in search'),
1874 [('I', 'include', [], 'include path in search'),
1875 ('X', 'exclude', [], 'exclude path from search'),
1875 ('X', 'exclude', [], 'exclude path from search'),
1876 ('b', 'branch', None, 'show branches'),
1876 ('b', 'branch', None, 'show branches'),
1877 ('k', 'keyword', [], 'search for a keyword'),
1877 ('k', 'keyword', [], 'search for a keyword'),
1878 ('r', 'rev', [], 'revision'),
1878 ('r', 'rev', [], 'revision'),
1879 ('p', 'patch', None, 'show patch')],
1879 ('p', 'patch', None, 'show patch')],
1880 'hg log [-I] [-X] [-r REV]... [-p] [FILE]'),
1880 'hg log [-I] [-X] [-r REV]... [-p] [FILE]'),
1881 "manifest": (manifest, [], 'hg manifest [REV]'),
1881 "manifest": (manifest, [], 'hg manifest [REV]'),
1882 "outgoing|out": (outgoing,
1882 "outgoing|out": (outgoing,
1883 [('p', 'patch', None, 'show patch')],
1883 [('p', 'patch', None, 'show patch')],
1884 'hg outgoing [-p] [DEST]'),
1884 'hg outgoing [-p] [DEST]'),
1885 "parents": (parents, [], 'hg parents [REV]'),
1885 "parents": (parents, [], 'hg parents [REV]'),
1886 "paths": (paths, [], 'hg paths [NAME]'),
1886 "paths": (paths, [], 'hg paths [NAME]'),
1887 "^pull":
1887 "^pull":
1888 (pull,
1888 (pull,
1889 [('u', 'update', None, 'update working directory'),
1889 [('u', 'update', None, 'update working directory'),
1890 ('e', 'ssh', "", 'ssh command'),
1890 ('e', 'ssh', "", 'ssh command'),
1891 ('', 'remotecmd', "", 'remote hg command')],
1891 ('', 'remotecmd', "", 'remote hg command')],
1892 'hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]'),
1892 'hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]'),
1893 "^push":
1893 "^push":
1894 (push,
1894 (push,
1895 [('f', 'force', None, 'force push'),
1895 [('f', 'force', None, 'force push'),
1896 ('e', 'ssh', "", 'ssh command'),
1896 ('e', 'ssh', "", 'ssh command'),
1897 ('', 'remotecmd', "", 'remote hg command')],
1897 ('', 'remotecmd', "", 'remote hg command')],
1898 'hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]'),
1898 'hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]'),
1899 "rawcommit":
1899 "rawcommit":
1900 (rawcommit,
1900 (rawcommit,
1901 [('p', 'parent', [], 'parent'),
1901 [('p', 'parent', [], 'parent'),
1902 ('d', 'date', "", 'date code'),
1902 ('d', 'date', "", 'date code'),
1903 ('u', 'user', "", 'user'),
1903 ('u', 'user', "", 'user'),
1904 ('F', 'files', "", 'file list'),
1904 ('F', 'files', "", 'file list'),
1905 ('m', 'message', "", 'commit message'),
1905 ('m', 'message', "", 'commit message'),
1906 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1906 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1907 ('l', 'logfile', "", 'commit message file')],
1907 ('l', 'logfile', "", 'commit message file')],
1908 'hg rawcommit [OPTION]... [FILE]...'),
1908 'hg rawcommit [OPTION]... [FILE]...'),
1909 "recover": (recover, [], "hg recover"),
1909 "recover": (recover, [], "hg recover"),
1910 "^remove|rm": (remove,
1910 "^remove|rm": (remove,
1911 [('I', 'include', [], 'include path in search'),
1911 [('I', 'include', [], 'include path in search'),
1912 ('X', 'exclude', [], 'exclude path from search')],
1912 ('X', 'exclude', [], 'exclude path from search')],
1913 "hg remove [OPTION]... FILE..."),
1913 "hg remove [OPTION]... FILE..."),
1914 "rename|mv": (rename,
1914 "rename|mv": (rename,
1915 [('I', 'include', [], 'include path in search'),
1915 [('I', 'include', [], 'include path in search'),
1916 ('X', 'exclude', [], 'exclude path from search'),
1916 ('X', 'exclude', [], 'exclude path from search'),
1917 ('A', 'after', None, 'record a copy after it has happened'),
1917 ('A', 'after', None, 'record a copy after it has happened'),
1918 ('f', 'force', None, 'replace destination if it exists'),
1918 ('f', 'force', None, 'replace destination if it exists'),
1919 ('p', 'parents', None, 'append source path to dest')],
1919 ('p', 'parents', None, 'append source path to dest')],
1920 'hg rename [OPTION]... [SOURCE]... DEST'),
1920 'hg rename [OPTION]... [SOURCE]... DEST'),
1921 "^revert":
1921 "^revert":
1922 (revert,
1922 (revert,
1923 [("n", "nonrecursive", None, "don't recurse into subdirs"),
1923 [("n", "nonrecursive", None, "don't recurse into subdirs"),
1924 ("r", "rev", "", "revision")],
1924 ("r", "rev", "", "revision")],
1925 "hg revert [-n] [-r REV] [NAME]..."),
1925 "hg revert [-n] [-r REV] [NAME]..."),
1926 "root": (root, [], "hg root"),
1926 "root": (root, [], "hg root"),
1927 "^serve":
1927 "^serve":
1928 (serve,
1928 (serve,
1929 [('A', 'accesslog', '', 'access log file'),
1929 [('A', 'accesslog', '', 'access log file'),
1930 ('E', 'errorlog', '', 'error log file'),
1930 ('E', 'errorlog', '', 'error log file'),
1931 ('p', 'port', 0, 'listen port'),
1931 ('p', 'port', 0, 'listen port'),
1932 ('a', 'address', '', 'interface address'),
1932 ('a', 'address', '', 'interface address'),
1933 ('n', 'name', "", 'repository name'),
1933 ('n', 'name', "", 'repository name'),
1934 ('', 'stdio', None, 'for remote clients'),
1934 ('', 'stdio', None, 'for remote clients'),
1935 ('t', 'templates', "", 'template directory'),
1935 ('t', 'templates', "", 'template directory'),
1936 ('', 'style', "", 'template style'),
1936 ('', 'style', "", 'template style'),
1937 ('6', 'ipv6', None, 'use IPv6 in addition to IPv4')],
1937 ('6', 'ipv6', None, 'use IPv6 in addition to IPv4')],
1938 "hg serve [OPTION]..."),
1938 "hg serve [OPTION]..."),
1939 "^status":
1939 "^status":
1940 (status,
1940 (status,
1941 [('m', 'modified', None, 'show only modified files'),
1941 [('m', 'modified', None, 'show only modified files'),
1942 ('a', 'added', None, 'show only added files'),
1942 ('a', 'added', None, 'show only added files'),
1943 ('r', 'removed', None, 'show only removed files'),
1943 ('r', 'removed', None, 'show only removed files'),
1944 ('u', 'unknown', None, 'show only unknown (not tracked) files'),
1944 ('u', 'unknown', None, 'show only unknown (not tracked) files'),
1945 ('n', 'no-status', None, 'hide status prefix'),
1945 ('n', 'no-status', None, 'hide status prefix'),
1946 ('0', 'print0', None, 'end filenames with NUL'),
1946 ('0', 'print0', None, 'end filenames with NUL'),
1947 ('I', 'include', [], 'include path in search'),
1947 ('I', 'include', [], 'include path in search'),
1948 ('X', 'exclude', [], 'exclude path from search')],
1948 ('X', 'exclude', [], 'exclude path from search')],
1949 "hg status [OPTION]... [FILE]..."),
1949 "hg status [OPTION]... [FILE]..."),
1950 "tag":
1950 "tag":
1951 (tag,
1951 (tag,
1952 [('l', 'local', None, 'make the tag local'),
1952 [('l', 'local', None, 'make the tag local'),
1953 ('m', 'message', "", 'commit message'),
1953 ('m', 'message', "", 'commit message'),
1954 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1954 ('t', 'text', "", 'commit message (deprecated: use -m)'),
1955 ('d', 'date', "", 'date code'),
1955 ('d', 'date', "", 'date code'),
1956 ('u', 'user', "", 'user')],
1956 ('u', 'user', "", 'user')],
1957 'hg tag [OPTION]... NAME [REV]'),
1957 'hg tag [OPTION]... NAME [REV]'),
1958 "tags": (tags, [], 'hg tags'),
1958 "tags": (tags, [], 'hg tags'),
1959 "tip": (tip, [], 'hg tip'),
1959 "tip": (tip, [], 'hg tip'),
1960 "unbundle":
1960 "unbundle":
1961 (unbundle,
1961 (unbundle,
1962 [],
1962 [],
1963 'hg unbundle FILE'),
1963 'hg unbundle FILE'),
1964 "undo": (undo, [], 'hg undo'),
1964 "undo": (undo, [], 'hg undo'),
1965 "^update|up|checkout|co":
1965 "^update|up|checkout|co":
1966 (update,
1966 (update,
1967 [('b', 'branch', "", 'checkout the head of a specific branch'),
1967 [('b', 'branch', "", 'checkout the head of a specific branch'),
1968 ('m', 'merge', None, 'allow merging of conflicts'),
1968 ('m', 'merge', None, 'allow merging of conflicts'),
1969 ('C', 'clean', None, 'overwrite locally modified files')],
1969 ('C', 'clean', None, 'overwrite locally modified files')],
1970 'hg update [-b TAG] [-m] [-C] [REV]'),
1970 'hg update [-b TAG] [-m] [-C] [REV]'),
1971 "verify": (verify, [], 'hg verify'),
1971 "verify": (verify, [], 'hg verify'),
1972 "version": (show_version, [], 'hg version'),
1972 "version": (show_version, [], 'hg version'),
1973 }
1973 }
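
(Illustrative note, not part of the changeset.) Each value in the table above is a (function, option list, usage string) tuple; each option is a (short name, long name, default, help) tuple consumed by fancyopts, and a leading '^' on a key marks the command for the short help list. A hypothetical entry, as a sketch only:

    # hypothetical command-table entry, for illustration only
    def hello(ui, repo, **opts):
        ui.write("hello from %s\n" % repo.root)

    example_entry = {
        "^hello|hi":
        (hello,
         [('n', 'name', "", 'hypothetical option')],
         'hg hello [-n NAME]'),
    }
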
1974
1974
1975 globalopts = [
1975 globalopts = [
1976 ('R', 'repository', "", 'repository root directory'),
1976 ('R', 'repository', "", 'repository root directory'),
1977 ('', 'cwd', '', 'change working directory'),
1977 ('', 'cwd', '', 'change working directory'),
1978 ('y', 'noninteractive', None, 'run non-interactively'),
1978 ('y', 'noninteractive', None, 'run non-interactively'),
1979 ('q', 'quiet', None, 'quiet mode'),
1979 ('q', 'quiet', None, 'quiet mode'),
1980 ('v', 'verbose', None, 'verbose mode'),
1980 ('v', 'verbose', None, 'verbose mode'),
1981 ('', 'debug', None, 'debug mode'),
1981 ('', 'debug', None, 'debug mode'),
1982 ('', 'debugger', None, 'start debugger'),
1982 ('', 'debugger', None, 'start debugger'),
1983 ('', 'traceback', None, 'print traceback on exception'),
1983 ('', 'traceback', None, 'print traceback on exception'),
1984 ('', 'time', None, 'time how long the command takes'),
1984 ('', 'time', None, 'time how long the command takes'),
1985 ('', 'profile', None, 'profile'),
1985 ('', 'profile', None, 'profile'),
1986 ('', 'version', None, 'output version information and exit'),
1986 ('', 'version', None, 'output version information and exit'),
1987 ('h', 'help', None, 'display help and exit'),
1987 ('h', 'help', None, 'display help and exit'),
1988 ]
1988 ]
1989
1989
1990 norepo = ("clone init version help debugancestor debugconfig debugdata"
1990 norepo = ("clone init version help debugancestor debugconfig debugdata"
1991 " debugindex debugindexdot paths")
1991 " debugindex debugindexdot paths")
1992
1992
1993 def find(cmd):
1993 def find(cmd):
1994 for e in table.keys():
1994 for e in table.keys():
1995 if re.match("(%s)$" % e, cmd):
1995 if re.match("(%s)$" % e, cmd):
1996 return e, table[e]
1996 return e, table[e]
1997
1997
1998 raise UnknownCommand(cmd)
1998 raise UnknownCommand(cmd)
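
find() resolves aliases by matching the typed command against each table key as an anchored regular expression, so a key such as "identify|id" answers to either spelling. A quick sketch of the same match:

    # sketch of the alias matching used by find()
    import re
    key = "identify|id"                          # a key from the table above
    for cmd in ("identify", "id"):
        assert re.match("(%s)$" % key, cmd)      # both spellings hit this entry
    assert not re.match("(%s)$" % key, "ident")  # partial names do not match
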
1999
1999
2000 class SignalInterrupt(Exception):
2000 class SignalInterrupt(Exception):
2001 """Exception raised on SIGTERM and SIGHUP."""
2001 """Exception raised on SIGTERM and SIGHUP."""
2002
2002
2003 def catchterm(*args):
2003 def catchterm(*args):
2004 raise SignalInterrupt
2004 raise SignalInterrupt
2005
2005
2006 def run():
2006 def run():
2007 sys.exit(dispatch(sys.argv[1:]))
2007 sys.exit(dispatch(sys.argv[1:]))
2008
2008
2009 class ParseError(Exception):
2009 class ParseError(Exception):
2010 """Exception raised on errors in parsing the command line."""
2010 """Exception raised on errors in parsing the command line."""
2011
2011
2012 def parse(args):
2012 def parse(args):
2013 options = {}
2013 options = {}
2014 cmdoptions = {}
2014 cmdoptions = {}
2015
2015
2016 try:
2016 try:
2017 args = fancyopts.fancyopts(args, globalopts, options)
2017 args = fancyopts.fancyopts(args, globalopts, options)
2018 except fancyopts.getopt.GetoptError, inst:
2018 except fancyopts.getopt.GetoptError, inst:
2019 raise ParseError(None, inst)
2019 raise ParseError(None, inst)
2020
2020
2021 if args:
2021 if args:
2022 cmd, args = args[0], args[1:]
2022 cmd, args = args[0], args[1:]
2023 i = find(cmd)[1]
2023 i = find(cmd)[1]
2024 c = list(i[1])
2024 c = list(i[1])
2025 else:
2025 else:
2026 cmd = None
2026 cmd = None
2027 c = []
2027 c = []
2028
2028
2029 # combine global options into local
2029 # combine global options into local
2030 for o in globalopts:
2030 for o in globalopts:
2031 c.append((o[0], o[1], options[o[1]], o[3]))
2031 c.append((o[0], o[1], options[o[1]], o[3]))
2032
2032
2033 try:
2033 try:
2034 args = fancyopts.fancyopts(args, c, cmdoptions)
2034 args = fancyopts.fancyopts(args, c, cmdoptions)
2035 except fancyopts.getopt.GetoptError, inst:
2035 except fancyopts.getopt.GetoptError, inst:
2036 raise ParseError(cmd, inst)
2036 raise ParseError(cmd, inst)
2037
2037
2038 # separate global options back out
2038 # separate global options back out
2039 for o in globalopts:
2039 for o in globalopts:
2040 n = o[1]
2040 n = o[1]
2041 options[n] = cmdoptions[n]
2041 options[n] = cmdoptions[n]
2042 del cmdoptions[n]
2042 del cmdoptions[n]
2043
2043
2044 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2044 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
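
parse() therefore hands back the typed command name, the command function from the table, the positional arguments, and two separate option dicts. Roughly, for a hypothetical invocation (the exact default values filled in depend on fancyopts):

    # hg -v log -r tip -p doc/README    (hypothetical command line)
    # cmd        -> 'log'
    # func       -> log                      (from the table entry)
    # args       -> ['doc/README']
    # options    -> global options, e.g. {'verbose': ..., 'quiet': ..., ...}
    # cmdoptions -> command options, e.g. {'rev': ['tip'], 'patch': ..., ...}
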
2045
2045
2046 def dispatch(args):
2046 def dispatch(args):
2047 signal.signal(signal.SIGTERM, catchterm)
2047 signal.signal(signal.SIGTERM, catchterm)
2048 try:
2048 try:
2049 signal.signal(signal.SIGHUP, catchterm)
2049 signal.signal(signal.SIGHUP, catchterm)
2050 except AttributeError:
2050 except AttributeError:
2051 pass
2051 pass
2052
2052
2053 u = ui.ui()
2053 u = ui.ui()
2054 external = []
2054 external = []
2055 for x in u.extensions():
2055 for x in u.extensions():
2056 if x[1]:
2056 if x[1]:
2057 try:
2057 try:
2058 mod = imp.load_source(x[0], x[1])
2058 mod = imp.load_source(x[0], x[1])
2059 except:
2059 except:
2060 u.warn("*** failed to import extension %s\n" % x[1])
2060 u.warn("*** failed to import extension %s\n" % x[1])
2061 continue
2061 continue
2062 else:
2062 else:
2063 def importh(name):
2063 def importh(name):
2064 mod = __import__(name)
2064 mod = __import__(name)
2065 components = name.split('.')
2065 components = name.split('.')
2066 for comp in components[1:]:
2066 for comp in components[1:]:
2067 mod = getattr(mod, comp)
2067 mod = getattr(mod, comp)
2068 return mod
2068 return mod
2069 try:
2069 try:
2070 mod = importh(x[0])
2070 mod = importh(x[0])
2071 except:
2071 except:
2072 u.warn("failed to import extension %s\n" % x[0])
2072 u.warn("failed to import extension %s\n" % x[0])
2073 continue
2073 continue
2074
2074
2075 external.append(mod)
2075 external.append(mod)
2076 for x in external:
2076 for x in external:
2077 cmdtable = getattr(x, 'cmdtable', {})
2077 cmdtable = getattr(x, 'cmdtable', {})
2078 for t in cmdtable:
2078 for t in cmdtable:
2079 if t in table:
2079 if t in table:
2080 u.warn("module %s overrides %s\n" % (x.__name__, t))
2080 u.warn("module %s overrides %s\n" % (x.__name__, t))
2081 table.update(cmdtable)
2081 table.update(cmdtable)
2082
2082
2083 try:
2083 try:
2084 cmd, func, args, options, cmdoptions = parse(args)
2084 cmd, func, args, options, cmdoptions = parse(args)
2085 except ParseError, inst:
2085 except ParseError, inst:
2086 if inst.args[0]:
2086 if inst.args[0]:
2087 u.warn("hg %s: %s\n" % (inst.args[0], inst.args[1]))
2087 u.warn("hg %s: %s\n" % (inst.args[0], inst.args[1]))
2088 help_(u, inst.args[0])
2088 help_(u, inst.args[0])
2089 else:
2089 else:
2090 u.warn("hg: %s\n" % inst.args[1])
2090 u.warn("hg: %s\n" % inst.args[1])
2091 help_(u, 'shortlist')
2091 help_(u, 'shortlist')
2092 sys.exit(-1)
2092 sys.exit(-1)
2093 except UnknownCommand, inst:
2093 except UnknownCommand, inst:
2094 u.warn("hg: unknown command '%s'\n" % inst.args[0])
2094 u.warn("hg: unknown command '%s'\n" % inst.args[0])
2095 help_(u, 'shortlist')
2095 help_(u, 'shortlist')
2096 sys.exit(1)
2096 sys.exit(1)
2097
2097
2098 if options["time"]:
2098 if options["time"]:
2099 def get_times():
2099 def get_times():
2100 t = os.times()
2100 t = os.times()
2101 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2101 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2102 t = (t[0], t[1], t[2], t[3], time.clock())
2102 t = (t[0], t[1], t[2], t[3], time.clock())
2103 return t
2103 return t
2104 s = get_times()
2104 s = get_times()
2105 def print_time():
2105 def print_time():
2106 t = get_times()
2106 t = get_times()
2107 u.warn("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n" %
2107 u.warn("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n" %
2108 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2108 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2109 atexit.register(print_time)
2109 atexit.register(print_time)
2110
2110
2111 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2111 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2112 not options["noninteractive"])
2112 not options["noninteractive"])
2113
2113
2114 # enter the debugger before command execution
2114 # enter the debugger before command execution
2115 if options['debugger']:
2115 if options['debugger']:
2116 pdb.set_trace()
2116 pdb.set_trace()
2117
2117
2118 try:
2118 try:
2119 try:
2119 try:
2120 if options['help']:
2120 if options['help']:
2121 help_(u, cmd, options['version'])
2121 help_(u, cmd, options['version'])
2122 sys.exit(0)
2122 sys.exit(0)
2123 elif options['version']:
2123 elif options['version']:
2124 show_version(u)
2124 show_version(u)
2125 sys.exit(0)
2125 sys.exit(0)
2126 elif not cmd:
2126 elif not cmd:
2127 help_(u, 'shortlist')
2127 help_(u, 'shortlist')
2128 sys.exit(0)
2128 sys.exit(0)
2129
2129
2130 if options['cwd']:
2130 if options['cwd']:
2131 try:
2131 try:
2132 os.chdir(options['cwd'])
2132 os.chdir(options['cwd'])
2133 except OSError, inst:
2133 except OSError, inst:
2134 raise util.Abort('%s: %s' %
2134 raise util.Abort('%s: %s' %
2135 (options['cwd'], inst.strerror))
2135 (options['cwd'], inst.strerror))
2136
2136
2137 if cmd not in norepo.split():
2137 if cmd not in norepo.split():
2138 path = options["repository"] or ""
2138 path = options["repository"] or ""
2139 repo = hg.repository(ui=u, path=path)
2139 repo = hg.repository(ui=u, path=path)
2140 for x in external:
2140 for x in external:
2141 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2141 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2142 d = lambda: func(u, repo, *args, **cmdoptions)
2142 d = lambda: func(u, repo, *args, **cmdoptions)
2143 else:
2143 else:
2144 d = lambda: func(u, *args, **cmdoptions)
2144 d = lambda: func(u, *args, **cmdoptions)
2145
2145
2146 if options['profile']:
2146 if options['profile']:
2147 import hotshot, hotshot.stats
2147 import hotshot, hotshot.stats
2148 prof = hotshot.Profile("hg.prof")
2148 prof = hotshot.Profile("hg.prof")
2149 r = prof.runcall(d)
2149 r = prof.runcall(d)
2150 prof.close()
2150 prof.close()
2151 stats = hotshot.stats.load("hg.prof")
2151 stats = hotshot.stats.load("hg.prof")
2152 stats.strip_dirs()
2152 stats.strip_dirs()
2153 stats.sort_stats('time', 'calls')
2153 stats.sort_stats('time', 'calls')
2154 stats.print_stats(40)
2154 stats.print_stats(40)
2155 return r
2155 return r
2156 else:
2156 else:
2157 return d()
2157 return d()
2158 except:
2158 except:
2159 # enter the debugger when we hit an exception
2159 # enter the debugger when we hit an exception
2160 if options['debugger']:
2160 if options['debugger']:
2161 pdb.post_mortem(sys.exc_info()[2])
2161 pdb.post_mortem(sys.exc_info()[2])
2162 if options['traceback']:
2162 if options['traceback']:
2163 traceback.print_exc()
2163 traceback.print_exc()
2164 raise
2164 raise
2165 except hg.RepoError, inst:
2165 except hg.RepoError, inst:
2166 u.warn("abort: ", inst, "!\n")
2166 u.warn("abort: ", inst, "!\n")
2167 except revlog.RevlogError, inst:
2167 except revlog.RevlogError, inst:
2168 u.warn("abort: ", inst, "!\n")
2168 u.warn("abort: ", inst, "!\n")
2169 except SignalInterrupt:
2169 except SignalInterrupt:
2170 u.warn("killed!\n")
2170 u.warn("killed!\n")
2171 except KeyboardInterrupt:
2171 except KeyboardInterrupt:
2172 try:
2172 try:
2173 u.warn("interrupted!\n")
2173 u.warn("interrupted!\n")
2174 except IOError, inst:
2174 except IOError, inst:
2175 if inst.errno == errno.EPIPE:
2175 if inst.errno == errno.EPIPE:
2176 if u.debugflag:
2176 if u.debugflag:
2177 u.warn("\nbroken pipe\n")
2177 u.warn("\nbroken pipe\n")
2178 else:
2178 else:
2179 raise
2179 raise
2180 except IOError, inst:
2180 except IOError, inst:
2181 if hasattr(inst, "code"):
2181 if hasattr(inst, "code"):
2182 u.warn("abort: %s\n" % inst)
2182 u.warn("abort: %s\n" % inst)
2183 elif hasattr(inst, "reason"):
2183 elif hasattr(inst, "reason"):
2184 u.warn("abort: error: %s\n" % inst.reason[1])
2184 u.warn("abort: error: %s\n" % inst.reason[1])
2185 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2185 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2186 if u.debugflag:
2186 if u.debugflag:
2187 u.warn("broken pipe\n")
2187 u.warn("broken pipe\n")
2188 elif getattr(inst, "strerror", None):
2188 elif getattr(inst, "strerror", None):
2189 if getattr(inst, "filename", None):
2189 if getattr(inst, "filename", None):
2190 u.warn("abort: %s - %s\n" % (inst.strerror, inst.filename))
2190 u.warn("abort: %s - %s\n" % (inst.strerror, inst.filename))
2191 else:
2191 else:
2192 u.warn("abort: %s\n" % inst.strerror)
2192 u.warn("abort: %s\n" % inst.strerror)
2193 else:
2193 else:
2194 raise
2194 raise
2195 except OSError, inst:
2195 except OSError, inst:
2196 if hasattr(inst, "filename"):
2196 if hasattr(inst, "filename"):
2197 u.warn("abort: %s: %s\n" % (inst.strerror, inst.filename))
2197 u.warn("abort: %s: %s\n" % (inst.strerror, inst.filename))
2198 else:
2198 else:
2199 u.warn("abort: %s\n" % inst.strerror)
2199 u.warn("abort: %s\n" % inst.strerror)
2200 except util.Abort, inst:
2200 except util.Abort, inst:
2201 u.warn('abort: ', inst.args[0] % inst.args[1:], '\n')
2201 u.warn('abort: ', inst.args[0] % inst.args[1:], '\n')
2202 sys.exit(1)
2202 sys.exit(1)
2203 except TypeError, inst:
2203 except TypeError, inst:
2204 # was this an argument error?
2204 # was this an argument error?
2205 tb = traceback.extract_tb(sys.exc_info()[2])
2205 tb = traceback.extract_tb(sys.exc_info()[2])
2206 if len(tb) > 2: # no
2206 if len(tb) > 2: # no
2207 raise
2207 raise
2208 u.debug(inst, "\n")
2208 u.debug(inst, "\n")
2209 u.warn("%s: invalid arguments\n" % cmd)
2209 u.warn("%s: invalid arguments\n" % cmd)
2210 help_(u, cmd)
2210 help_(u, cmd)
2211 except UnknownCommand, inst:
2211 except UnknownCommand, inst:
2212 u.warn("hg: unknown command '%s'\n" % inst.args[0])
2212 u.warn("hg: unknown command '%s'\n" % inst.args[0])
2213 help_(u, 'shortlist')
2213 help_(u, 'shortlist')
2214 except SystemExit:
2214 except SystemExit:
2215 # don't catch this in the catch-all below
2215 # don't catch this in the catch-all below
2216 raise
2216 raise
2217 except:
2217 except:
2218 u.warn("** unknown exception encountered, details follow\n")
2218 u.warn("** unknown exception encountered, details follow\n")
2219 u.warn("** report bug details to mercurial@selenic.com\n")
2219 u.warn("** report bug details to mercurial@selenic.com\n")
2220 raise
2220 raise
2221
2221
2222 sys.exit(-1)
2222 sys.exit(-1)
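
dispatch() also loads user extensions before parsing: each configured module is imported (by file path with imp.load_source, otherwise by dotted name), any cmdtable it defines is merged into the command table, and its reposetup(ui, repo) is called once a repository is opened. A minimal module the loader would accept might look like this (hypothetical; assumes the usual [extensions] hgrc section to enable it):

    # hello.py -- hypothetical extension module, for illustration only
    def hello(ui, repo, **opts):
        """print a greeting from the repository root"""
        ui.write("hello from %s\n" % repo.root)

    def reposetup(ui, repo):
        ui.note("hello extension loaded\n")

    cmdtable = {
        "hello": (hello, [], "hg hello"),
    }
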
@@ -1,1446 +1,1420 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
12 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13
13
14 class localrepository:
14 class localrepository:
15 def __init__(self, ui, path=None, create=0):
15 def __init__(self, ui, path=None, create=0):
16 if not path:
16 if not path:
17 p = os.getcwd()
17 p = os.getcwd()
18 while not os.path.isdir(os.path.join(p, ".hg")):
18 while not os.path.isdir(os.path.join(p, ".hg")):
19 oldp = p
19 oldp = p
20 p = os.path.dirname(p)
20 p = os.path.dirname(p)
21 if p == oldp: raise repo.RepoError("no repo found")
21 if p == oldp: raise repo.RepoError("no repo found")
22 path = p
22 path = p
23 self.path = os.path.join(path, ".hg")
23 self.path = os.path.join(path, ".hg")
24
24
25 if not create and not os.path.isdir(self.path):
25 if not create and not os.path.isdir(self.path):
26 raise repo.RepoError("repository %s not found" % self.path)
26 raise repo.RepoError("repository %s not found" % self.path)
27
27
28 self.root = os.path.abspath(path)
28 self.root = os.path.abspath(path)
29 self.ui = ui
29 self.ui = ui
30 self.opener = util.opener(self.path)
30 self.opener = util.opener(self.path)
31 self.wopener = util.opener(self.root)
31 self.wopener = util.opener(self.root)
32 self.manifest = manifest.manifest(self.opener)
32 self.manifest = manifest.manifest(self.opener)
33 self.changelog = changelog.changelog(self.opener)
33 self.changelog = changelog.changelog(self.opener)
34 self.tagscache = None
34 self.tagscache = None
35 self.nodetagscache = None
35 self.nodetagscache = None
36 self.encodepats = None
36 self.encodepats = None
37 self.decodepats = None
37 self.decodepats = None
38
38
39 if create:
39 if create:
40 os.mkdir(self.path)
40 os.mkdir(self.path)
41 os.mkdir(self.join("data"))
41 os.mkdir(self.join("data"))
42
42
43 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
43 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 try:
44 try:
45 self.ui.readconfig(self.opener("hgrc"))
45 self.ui.readconfig(self.opener("hgrc"))
46 except IOError: pass
46 except IOError: pass
47
47
48 def hook(self, name, **args):
48 def hook(self, name, **args):
49 s = self.ui.config("hooks", name)
49 s = self.ui.config("hooks", name)
50 if s:
50 if s:
51 self.ui.note("running hook %s: %s\n" % (name, s))
51 self.ui.note("running hook %s: %s\n" % (name, s))
52 old = {}
52 old = {}
53 for k, v in args.items():
53 for k, v in args.items():
54 k = k.upper()
54 k = k.upper()
55 old[k] = os.environ.get(k, None)
55 old[k] = os.environ.get(k, None)
56 os.environ[k] = v
56 os.environ[k] = v
57
57
58 # Hooks run in the repository root
58 # Hooks run in the repository root
59 olddir = os.getcwd()
59 olddir = os.getcwd()
60 os.chdir(self.root)
60 os.chdir(self.root)
61 r = os.system(s)
61 r = os.system(s)
62 os.chdir(olddir)
62 os.chdir(olddir)
63
63
64 for k, v in old.items():
64 for k, v in old.items():
65 if v != None:
65 if v != None:
66 os.environ[k] = v
66 os.environ[k] = v
67 else:
67 else:
68 del os.environ[k]
68 del os.environ[k]
69
69
70 if r:
70 if r:
71 self.ui.warn("abort: %s hook failed with status %d!\n" %
71 self.ui.warn("abort: %s hook failed with status %d!\n" %
72 (name, r))
72 (name, r))
73 return False
73 return False
74 return True
74 return True
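
hook() resolves "hooks.<name>" from the configuration, exports the keyword arguments as uppercase environment variables, and runs the command through os.system() from the repository root; a non-zero exit status makes the caller abort. A hypothetical hgrc entry, matching how commit() below invokes the "commit" hook with node=hex(n):

    # hypothetical hgrc snippet, for illustration only
    #
    #   [hooks]
    #   precommit = /usr/local/bin/check-whitespace
    #   commit = echo "committed $NODE" >> $HOME/commit.log
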
75
75
76 def tags(self):
76 def tags(self):
77 '''return a mapping of tag to node'''
77 '''return a mapping of tag to node'''
78 if not self.tagscache:
78 if not self.tagscache:
79 self.tagscache = {}
79 self.tagscache = {}
80 def addtag(self, k, n):
80 def addtag(self, k, n):
81 try:
81 try:
82 bin_n = bin(n)
82 bin_n = bin(n)
83 except TypeError:
83 except TypeError:
84 bin_n = ''
84 bin_n = ''
85 self.tagscache[k.strip()] = bin_n
85 self.tagscache[k.strip()] = bin_n
86
86
87 try:
87 try:
88 # read each head of the tags file, ending with the tip
88 # read each head of the tags file, ending with the tip
89 # and add each tag found to the map, with "newer" ones
89 # and add each tag found to the map, with "newer" ones
90 # taking precedence
90 # taking precedence
91 fl = self.file(".hgtags")
91 fl = self.file(".hgtags")
92 h = fl.heads()
92 h = fl.heads()
93 h.reverse()
93 h.reverse()
94 for r in h:
94 for r in h:
95 for l in fl.read(r).splitlines():
95 for l in fl.read(r).splitlines():
96 if l:
96 if l:
97 n, k = l.split(" ", 1)
97 n, k = l.split(" ", 1)
98 addtag(self, k, n)
98 addtag(self, k, n)
99 except KeyError:
99 except KeyError:
100 pass
100 pass
101
101
102 try:
102 try:
103 f = self.opener("localtags")
103 f = self.opener("localtags")
104 for l in f:
104 for l in f:
105 n, k = l.split(" ", 1)
105 n, k = l.split(" ", 1)
106 addtag(self, k, n)
106 addtag(self, k, n)
107 except IOError:
107 except IOError:
108 pass
108 pass
109
109
110 self.tagscache['tip'] = self.changelog.tip()
110 self.tagscache['tip'] = self.changelog.tip()
111
111
112 return self.tagscache
112 return self.tagscache
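
tags() reads each head of the .hgtags filelog, ending with the tip so newer entries take precedence, then overlays .hg/localtags; both files use one "<hex changeset id> <tag name>" pair per line. A sketch of hypothetical contents:

    # .hgtags (or .hg/localtags) -- hypothetical example lines
    #   <40-digit hex node> 1.0
    #   <40-digit hex node> 1.1
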
113
113
114 def tagslist(self):
114 def tagslist(self):
115 '''return a list of tags ordered by revision'''
115 '''return a list of tags ordered by revision'''
116 l = []
116 l = []
117 for t, n in self.tags().items():
117 for t, n in self.tags().items():
118 try:
118 try:
119 r = self.changelog.rev(n)
119 r = self.changelog.rev(n)
120 except:
120 except:
121 r = -2 # sort to the beginning of the list if unknown
121 r = -2 # sort to the beginning of the list if unknown
122 l.append((r,t,n))
122 l.append((r,t,n))
123 l.sort()
123 l.sort()
124 return [(t,n) for r,t,n in l]
124 return [(t,n) for r,t,n in l]
125
125
126 def nodetags(self, node):
126 def nodetags(self, node):
127 '''return the tags associated with a node'''
127 '''return the tags associated with a node'''
128 if not self.nodetagscache:
128 if not self.nodetagscache:
129 self.nodetagscache = {}
129 self.nodetagscache = {}
130 for t,n in self.tags().items():
130 for t,n in self.tags().items():
131 self.nodetagscache.setdefault(n,[]).append(t)
131 self.nodetagscache.setdefault(n,[]).append(t)
132 return self.nodetagscache.get(node, [])
132 return self.nodetagscache.get(node, [])
133
133
134 def lookup(self, key):
134 def lookup(self, key):
135 try:
135 try:
136 return self.tags()[key]
136 return self.tags()[key]
137 except KeyError:
137 except KeyError:
138 try:
138 try:
139 return self.changelog.lookup(key)
139 return self.changelog.lookup(key)
140 except:
140 except:
141 raise repo.RepoError("unknown revision '%s'" % key)
141 raise repo.RepoError("unknown revision '%s'" % key)
142
142
143 def dev(self):
143 def dev(self):
144 return os.stat(self.path).st_dev
144 return os.stat(self.path).st_dev
145
145
146 def local(self):
146 def local(self):
147 return True
147 return True
148
148
149 def join(self, f):
149 def join(self, f):
150 return os.path.join(self.path, f)
150 return os.path.join(self.path, f)
151
151
152 def wjoin(self, f):
152 def wjoin(self, f):
153 return os.path.join(self.root, f)
153 return os.path.join(self.root, f)
154
154
155 def file(self, f):
155 def file(self, f):
156 if f[0] == '/': f = f[1:]
156 if f[0] == '/': f = f[1:]
157 return filelog.filelog(self.opener, f)
157 return filelog.filelog(self.opener, f)
158
158
159 def getcwd(self):
159 def getcwd(self):
160 return self.dirstate.getcwd()
160 return self.dirstate.getcwd()
161
161
162 def wfile(self, f, mode='r'):
162 def wfile(self, f, mode='r'):
163 return self.wopener(f, mode)
163 return self.wopener(f, mode)
164
164
165 def wread(self, filename):
165 def wread(self, filename):
166 if self.encodepats == None:
166 if self.encodepats == None:
167 l = []
167 l = []
168 for pat, cmd in self.ui.configitems("encode"):
168 for pat, cmd in self.ui.configitems("encode"):
169 mf = util.matcher("", "/", [pat], [], [])[1]
169 mf = util.matcher("", "/", [pat], [], [])[1]
170 l.append((mf, cmd))
170 l.append((mf, cmd))
171 self.encodepats = l
171 self.encodepats = l
172
172
173 data = self.wopener(filename, 'r').read()
173 data = self.wopener(filename, 'r').read()
174
174
175 for mf, cmd in self.encodepats:
175 for mf, cmd in self.encodepats:
176 if mf(filename):
176 if mf(filename):
177 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
177 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
178 data = util.filter(data, cmd)
178 data = util.filter(data, cmd)
179 break
179 break
180
180
181 return data
181 return data
182
182
183 def wwrite(self, filename, data, fd=None):
183 def wwrite(self, filename, data, fd=None):
184 if self.decodepats == None:
184 if self.decodepats == None:
185 l = []
185 l = []
186 for pat, cmd in self.ui.configitems("decode"):
186 for pat, cmd in self.ui.configitems("decode"):
187 mf = util.matcher("", "/", [pat], [], [])[1]
187 mf = util.matcher("", "/", [pat], [], [])[1]
188 l.append((mf, cmd))
188 l.append((mf, cmd))
189 self.decodepats = l
189 self.decodepats = l
190
190
191 for mf, cmd in self.decodepats:
191 for mf, cmd in self.decodepats:
192 if mf(filename):
192 if mf(filename):
193 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
193 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
194 data = util.filter(data, cmd)
194 data = util.filter(data, cmd)
195 break
195 break
196
196
197 if fd:
197 if fd:
198 return fd.write(data)
198 return fd.write(data)
199 return self.wopener(filename, 'w').write(data)
199 return self.wopener(filename, 'w').write(data)
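
wread() and wwrite() apply the [encode] and [decode] filter pairs from the configuration: the first pattern that matches the file name pipes the data through the configured command via util.filter(), on the way into and out of the repository respectively. A hypothetical configuration in that style:

    # hypothetical hgrc snippet, for illustration only
    #
    #   [encode]
    #   *.txt = dos2unix     # applied by wread() when reading the working copy
    #
    #   [decode]
    #   *.txt = unix2dos     # applied by wwrite() when writing the working copy
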
200
200
201 def transaction(self):
201 def transaction(self):
202 # save dirstate for undo
202 # save dirstate for undo
203 try:
203 try:
204 ds = self.opener("dirstate").read()
204 ds = self.opener("dirstate").read()
205 except IOError:
205 except IOError:
206 ds = ""
206 ds = ""
207 self.opener("journal.dirstate", "w").write(ds)
207 self.opener("journal.dirstate", "w").write(ds)
208
208
209 def after():
209 def after():
210 util.rename(self.join("journal"), self.join("undo"))
210 util.rename(self.join("journal"), self.join("undo"))
211 util.rename(self.join("journal.dirstate"),
211 util.rename(self.join("journal.dirstate"),
212 self.join("undo.dirstate"))
212 self.join("undo.dirstate"))
213
213
214 return transaction.transaction(self.ui.warn, self.opener,
214 return transaction.transaction(self.ui.warn, self.opener,
215 self.join("journal"), after)
215 self.join("journal"), after)
216
216
217 def recover(self):
217 def recover(self):
218 lock = self.lock()
218 lock = self.lock()
219 if os.path.exists(self.join("journal")):
219 if os.path.exists(self.join("journal")):
220 self.ui.status("rolling back interrupted transaction\n")
220 self.ui.status("rolling back interrupted transaction\n")
221 return transaction.rollback(self.opener, self.join("journal"))
221 return transaction.rollback(self.opener, self.join("journal"))
222 else:
222 else:
223 self.ui.warn("no interrupted transaction available\n")
223 self.ui.warn("no interrupted transaction available\n")
224
224
225 def undo(self):
225 def undo(self):
226 lock = self.lock()
226 lock = self.lock()
227 if os.path.exists(self.join("undo")):
227 if os.path.exists(self.join("undo")):
228 self.ui.status("rolling back last transaction\n")
228 self.ui.status("rolling back last transaction\n")
229 transaction.rollback(self.opener, self.join("undo"))
229 transaction.rollback(self.opener, self.join("undo"))
230 self.dirstate = None
230 self.dirstate = None
231 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
231 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
232 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
232 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
233 else:
233 else:
234 self.ui.warn("no undo information available\n")
234 self.ui.warn("no undo information available\n")
235
235
236 def lock(self, wait=1):
236 def lock(self, wait=1):
237 try:
237 try:
238 return lock.lock(self.join("lock"), 0)
238 return lock.lock(self.join("lock"), 0)
239 except lock.LockHeld, inst:
239 except lock.LockHeld, inst:
240 if wait:
240 if wait:
241 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
241 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
242 return lock.lock(self.join("lock"), wait)
242 return lock.lock(self.join("lock"), wait)
243 raise inst
243 raise inst
244
244
245 def rawcommit(self, files, text, user, date, p1=None, p2=None):
245 def rawcommit(self, files, text, user, date, p1=None, p2=None):
246 orig_parent = self.dirstate.parents()[0] or nullid
246 orig_parent = self.dirstate.parents()[0] or nullid
247 p1 = p1 or self.dirstate.parents()[0] or nullid
247 p1 = p1 or self.dirstate.parents()[0] or nullid
248 p2 = p2 or self.dirstate.parents()[1] or nullid
248 p2 = p2 or self.dirstate.parents()[1] or nullid
249 c1 = self.changelog.read(p1)
249 c1 = self.changelog.read(p1)
250 c2 = self.changelog.read(p2)
250 c2 = self.changelog.read(p2)
251 m1 = self.manifest.read(c1[0])
251 m1 = self.manifest.read(c1[0])
252 mf1 = self.manifest.readflags(c1[0])
252 mf1 = self.manifest.readflags(c1[0])
253 m2 = self.manifest.read(c2[0])
253 m2 = self.manifest.read(c2[0])
254 changed = []
254 changed = []
255
255
256 if orig_parent == p1:
256 if orig_parent == p1:
257 update_dirstate = 1
257 update_dirstate = 1
258 else:
258 else:
259 update_dirstate = 0
259 update_dirstate = 0
260
260
261 tr = self.transaction()
261 tr = self.transaction()
262 mm = m1.copy()
262 mm = m1.copy()
263 mfm = mf1.copy()
263 mfm = mf1.copy()
264 linkrev = self.changelog.count()
264 linkrev = self.changelog.count()
265 for f in files:
265 for f in files:
266 try:
266 try:
267 t = self.wread(f)
267 t = self.wread(f)
268 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
268 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
269 r = self.file(f)
269 r = self.file(f)
270 mfm[f] = tm
270 mfm[f] = tm
271
271
272 fp1 = m1.get(f, nullid)
272 fp1 = m1.get(f, nullid)
273 fp2 = m2.get(f, nullid)
273 fp2 = m2.get(f, nullid)
274
274
275 # is the same revision on two branches of a merge?
275 # is the same revision on two branches of a merge?
276 if fp2 == fp1:
276 if fp2 == fp1:
277 fp2 = nullid
277 fp2 = nullid
278
278
279 if fp2 != nullid:
279 if fp2 != nullid:
280 # is one parent an ancestor of the other?
280 # is one parent an ancestor of the other?
281 fpa = r.ancestor(fp1, fp2)
281 fpa = r.ancestor(fp1, fp2)
282 if fpa == fp1:
282 if fpa == fp1:
283 fp1, fp2 = fp2, nullid
283 fp1, fp2 = fp2, nullid
284 elif fpa == fp2:
284 elif fpa == fp2:
285 fp2 = nullid
285 fp2 = nullid
286
286
287 # is the file unmodified from the parent?
287 # is the file unmodified from the parent?
288 if t == r.read(fp1):
288 if t == r.read(fp1):
289 # record the proper existing parent in manifest
289 # record the proper existing parent in manifest
290 # no need to add a revision
290 # no need to add a revision
291 mm[f] = fp1
291 mm[f] = fp1
292 continue
292 continue
293
293
294 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
294 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
295 changed.append(f)
295 changed.append(f)
296 if update_dirstate:
296 if update_dirstate:
297 self.dirstate.update([f], "n")
297 self.dirstate.update([f], "n")
298 except IOError:
298 except IOError:
299 try:
299 try:
300 del mm[f]
300 del mm[f]
301 del mfm[f]
301 del mfm[f]
302 if update_dirstate:
302 if update_dirstate:
303 self.dirstate.forget([f])
303 self.dirstate.forget([f])
304 except:
304 except:
305 # deleted from p2?
305 # deleted from p2?
306 pass
306 pass
307
307
308 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
308 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
309 user = user or self.ui.username()
309 user = user or self.ui.username()
310 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
310 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
311 tr.close()
311 tr.close()
312 if update_dirstate:
312 if update_dirstate:
313 self.dirstate.setparents(n, nullid)
313 self.dirstate.setparents(n, nullid)
314
314
315 def commit(self, files = None, text = "", user = None, date = None,
315 def commit(self, files = None, text = "", user = None, date = None,
316 match = util.always, force=False):
316 match = util.always, force=False):
317 commit = []
317 commit = []
318 remove = []
318 remove = []
319 changed = []
319 changed = []
320
320
321 if files:
321 if files:
322 for f in files:
322 for f in files:
323 s = self.dirstate.state(f)
323 s = self.dirstate.state(f)
324 if s in 'nmai':
324 if s in 'nmai':
325 commit.append(f)
325 commit.append(f)
326 elif s == 'r':
326 elif s == 'r':
327 remove.append(f)
327 remove.append(f)
328 else:
328 else:
329 self.ui.warn("%s not tracked!\n" % f)
329 self.ui.warn("%s not tracked!\n" % f)
330 else:
330 else:
331 (c, a, d, u) = self.changes(match=match)
331 (c, a, d, u) = self.changes(match=match)
332 commit = c + a
332 commit = c + a
333 remove = d
333 remove = d
334
334
335 p1, p2 = self.dirstate.parents()
335 p1, p2 = self.dirstate.parents()
336 c1 = self.changelog.read(p1)
336 c1 = self.changelog.read(p1)
337 c2 = self.changelog.read(p2)
337 c2 = self.changelog.read(p2)
338 m1 = self.manifest.read(c1[0])
338 m1 = self.manifest.read(c1[0])
339 mf1 = self.manifest.readflags(c1[0])
339 mf1 = self.manifest.readflags(c1[0])
340 m2 = self.manifest.read(c2[0])
340 m2 = self.manifest.read(c2[0])
341
341
342 if not commit and not remove and not force and p2 == nullid:
342 if not commit and not remove and not force and p2 == nullid:
343 self.ui.status("nothing changed\n")
343 self.ui.status("nothing changed\n")
344 return None
344 return None
345
345
346 if not self.hook("precommit"):
346 if not self.hook("precommit"):
347 return None
347 return None
348
348
349 lock = self.lock()
349 lock = self.lock()
350 tr = self.transaction()
350 tr = self.transaction()
351
351
352 # check in files
352 # check in files
353 new = {}
353 new = {}
354 linkrev = self.changelog.count()
354 linkrev = self.changelog.count()
355 commit.sort()
355 commit.sort()
356 for f in commit:
356 for f in commit:
357 self.ui.note(f + "\n")
357 self.ui.note(f + "\n")
358 try:
358 try:
359 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
359 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
360 t = self.wread(f)
360 t = self.wread(f)
361 except IOError:
361 except IOError:
362 self.ui.warn("trouble committing %s!\n" % f)
362 self.ui.warn("trouble committing %s!\n" % f)
363 raise
363 raise
364
364
365 r = self.file(f)
365 r = self.file(f)
366
366
367 meta = {}
367 meta = {}
368 cp = self.dirstate.copied(f)
368 cp = self.dirstate.copied(f)
369 if cp:
369 if cp:
370 meta["copy"] = cp
370 meta["copy"] = cp
371 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
371 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
372 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
372 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
373 fp1, fp2 = nullid, nullid
373 fp1, fp2 = nullid, nullid
374 else:
374 else:
375 fp1 = m1.get(f, nullid)
375 fp1 = m1.get(f, nullid)
376 fp2 = m2.get(f, nullid)
376 fp2 = m2.get(f, nullid)
377
377
378 # is the same revision on two branches of a merge?
378 # is the same revision on two branches of a merge?
379 if fp2 == fp1:
379 if fp2 == fp1:
380 fp2 = nullid
380 fp2 = nullid
381
381
382 if fp2 != nullid:
382 if fp2 != nullid:
383 # is one parent an ancestor of the other?
383 # is one parent an ancestor of the other?
384 fpa = r.ancestor(fp1, fp2)
384 fpa = r.ancestor(fp1, fp2)
385 if fpa == fp1:
385 if fpa == fp1:
386 fp1, fp2 = fp2, nullid
386 fp1, fp2 = fp2, nullid
387 elif fpa == fp2:
387 elif fpa == fp2:
388 fp2 = nullid
388 fp2 = nullid
389
389
390 # is the file unmodified from the parent?
390 # is the file unmodified from the parent?
391 if not meta and t == r.read(fp1):
391 if not meta and t == r.read(fp1):
392 # record the proper existing parent in manifest
392 # record the proper existing parent in manifest
393 # no need to add a revision
393 # no need to add a revision
394 new[f] = fp1
394 new[f] = fp1
395 continue
395 continue
396
396
397 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
397 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
398 # remember what we've added so that we can later calculate
398 # remember what we've added so that we can later calculate
399 # the files to pull from a set of changesets
399 # the files to pull from a set of changesets
400 changed.append(f)
400 changed.append(f)
401
401
402 # update manifest
402 # update manifest
403 m1.update(new)
403 m1.update(new)
404 for f in remove:
404 for f in remove:
405 if f in m1:
405 if f in m1:
406 del m1[f]
406 del m1[f]
407 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
407 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
408 (new, remove))
408 (new, remove))
409
409
410 # add changeset
410 # add changeset
411 new = new.keys()
411 new = new.keys()
412 new.sort()
412 new.sort()
413
413
414 if not text:
414 if not text:
415 edittext = ""
415 edittext = ""
416 if p2 != nullid:
416 if p2 != nullid:
417 edittext += "HG: branch merge\n"
417 edittext += "HG: branch merge\n"
418 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
418 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
419 edittext += "".join(["HG: changed %s\n" % f for f in changed])
419 edittext += "".join(["HG: changed %s\n" % f for f in changed])
420 edittext += "".join(["HG: removed %s\n" % f for f in remove])
420 edittext += "".join(["HG: removed %s\n" % f for f in remove])
421 if not changed and not remove:
421 if not changed and not remove:
422 edittext += "HG: no files changed\n"
422 edittext += "HG: no files changed\n"
423 edittext = self.ui.edit(edittext)
423 edittext = self.ui.edit(edittext)
424 if not edittext.rstrip():
424 if not edittext.rstrip():
425 return None
425 return None
426 text = edittext
426 text = edittext
427
427
428 user = user or self.ui.username()
428 user = user or self.ui.username()
429 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
429 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
430 tr.close()
430 tr.close()
431
431
432 self.dirstate.setparents(n)
432 self.dirstate.setparents(n)
433 self.dirstate.update(new, "n")
433 self.dirstate.update(new, "n")
434 self.dirstate.forget(remove)
434 self.dirstate.forget(remove)
435
435
436 if not self.hook("commit", node=hex(n)):
436 if not self.hook("commit", node=hex(n)):
437 return None
437 return None
438 return n
438 return n
439
439
440 def walk(self, node=None, files=[], match=util.always):
440 def walk(self, node=None, files=[], match=util.always):
441 if node:
441 if node:
442 for fn in self.manifest.read(self.changelog.read(node)[0]):
442 for fn in self.manifest.read(self.changelog.read(node)[0]):
443 if match(fn): yield 'm', fn
443 if match(fn): yield 'm', fn
444 else:
444 else:
445 for src, fn in self.dirstate.walk(files, match):
445 for src, fn in self.dirstate.walk(files, match):
446 yield src, fn
446 yield src, fn
447
447
448 def changes(self, node1 = None, node2 = None, files = [],
448 def changes(self, node1 = None, node2 = None, files = [],
449 match = util.always):
449 match = util.always):
450 mf2, u = None, []
450 mf2, u = None, []
451
451
452 def fcmp(fn, mf):
452 def fcmp(fn, mf):
453 t1 = self.wread(fn)
453 t1 = self.wread(fn)
454 t2 = self.file(fn).read(mf.get(fn, nullid))
454 t2 = self.file(fn).read(mf.get(fn, nullid))
455 return cmp(t1, t2)
455 return cmp(t1, t2)
456
456
457 def mfmatches(node):
457 def mfmatches(node):
458 mf = dict(self.manifest.read(node))
458 mf = dict(self.manifest.read(node))
459 for fn in mf.keys():
459 for fn in mf.keys():
460 if not match(fn):
460 if not match(fn):
461 del mf[fn]
461 del mf[fn]
462 return mf
462 return mf
463
463
464 # are we comparing the working directory?
464 # are we comparing the working directory?
465 if not node2:
465 if not node2:
466 l, c, a, d, u = self.dirstate.changes(files, match)
466 l, c, a, d, u = self.dirstate.changes(files, match)
467
467
468 # are we comparing working dir against its parent?
468 # are we comparing working dir against its parent?
469 if not node1:
469 if not node1:
470 if l:
470 if l:
471 # do a full compare of any files that might have changed
471 # do a full compare of any files that might have changed
472 change = self.changelog.read(self.dirstate.parents()[0])
472 change = self.changelog.read(self.dirstate.parents()[0])
473 mf2 = mfmatches(change[0])
473 mf2 = mfmatches(change[0])
474 for f in l:
474 for f in l:
475 if fcmp(f, mf2):
475 if fcmp(f, mf2):
476 c.append(f)
476 c.append(f)
477
477
478 for l in c, a, d, u:
478 for l in c, a, d, u:
479 l.sort()
479 l.sort()
480
480
481 return (c, a, d, u)
481 return (c, a, d, u)
482
482
483 # are we comparing working dir against non-tip?
483 # are we comparing working dir against non-tip?
484 # generate a pseudo-manifest for the working dir
484 # generate a pseudo-manifest for the working dir
485 if not node2:
485 if not node2:
486 if not mf2:
486 if not mf2:
487 change = self.changelog.read(self.dirstate.parents()[0])
487 change = self.changelog.read(self.dirstate.parents()[0])
488 mf2 = mfmatches(change[0])
488 mf2 = mfmatches(change[0])
489 for f in a + c + l:
489 for f in a + c + l:
490 mf2[f] = ""
490 mf2[f] = ""
491 for f in d:
491 for f in d:
492 if f in mf2: del mf2[f]
492 if f in mf2: del mf2[f]
493 else:
493 else:
494 change = self.changelog.read(node2)
494 change = self.changelog.read(node2)
495 mf2 = mfmatches(change[0])
495 mf2 = mfmatches(change[0])
496
496
497 # flush lists from dirstate before comparing manifests
497 # flush lists from dirstate before comparing manifests
498 c, a = [], []
498 c, a = [], []
499
499
500 change = self.changelog.read(node1)
500 change = self.changelog.read(node1)
501 mf1 = mfmatches(change[0])
501 mf1 = mfmatches(change[0])
502
502
503 for fn in mf2:
503 for fn in mf2:
504 if mf1.has_key(fn):
504 if mf1.has_key(fn):
505 if mf1[fn] != mf2[fn]:
505 if mf1[fn] != mf2[fn]:
506 if mf2[fn] != "" or fcmp(fn, mf1):
506 if mf2[fn] != "" or fcmp(fn, mf1):
507 c.append(fn)
507 c.append(fn)
508 del mf1[fn]
508 del mf1[fn]
509 else:
509 else:
510 a.append(fn)
510 a.append(fn)
511
511
512 d = mf1.keys()
512 d = mf1.keys()
513
513
514 for l in c, a, d, u:
514 for l in c, a, d, u:
515 l.sort()
515 l.sort()
516
516
517 return (c, a, d, u)
517 return (c, a, d, u)
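
changes() always returns four sorted lists; callers in this file read them as (changed, added, deleted, unknown). A sketch mirroring how commit() above consumes the result:

    # sketch only: consuming the changes() tuple the way commit() does
    c, a, d, u = repo.changes(match=util.always)
    tocommit = c + a        # modified and newly added files
    toremove = d            # files deleted from the working directory
    # u holds files present in the working directory but not tracked
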
518
518
519 def add(self, list):
519 def add(self, list):
520 for f in list:
520 for f in list:
521 p = self.wjoin(f)
521 p = self.wjoin(f)
522 if not os.path.exists(p):
522 if not os.path.exists(p):
523 self.ui.warn("%s does not exist!\n" % f)
523 self.ui.warn("%s does not exist!\n" % f)
524 elif not os.path.isfile(p):
524 elif not os.path.isfile(p):
525 self.ui.warn("%s not added: only files supported currently\n" % f)
525 self.ui.warn("%s not added: only files supported currently\n" % f)
526 elif self.dirstate.state(f) in 'an':
526 elif self.dirstate.state(f) in 'an':
527 self.ui.warn("%s already tracked!\n" % f)
527 self.ui.warn("%s already tracked!\n" % f)
528 else:
528 else:
529 self.dirstate.update([f], "a")
529 self.dirstate.update([f], "a")
530
530
531 def forget(self, list):
531 def forget(self, list):
532 for f in list:
532 for f in list:
533 if self.dirstate.state(f) not in 'ai':
533 if self.dirstate.state(f) not in 'ai':
534 self.ui.warn("%s not added!\n" % f)
534 self.ui.warn("%s not added!\n" % f)
535 else:
535 else:
536 self.dirstate.forget([f])
536 self.dirstate.forget([f])
537
537
538 def remove(self, list):
538 def remove(self, list):
539 for f in list:
539 for f in list:
540 p = self.wjoin(f)
540 p = self.wjoin(f)
541 if os.path.exists(p):
541 if os.path.exists(p):
542 self.ui.warn("%s still exists!\n" % f)
542 self.ui.warn("%s still exists!\n" % f)
543 elif self.dirstate.state(f) == 'a':
543 elif self.dirstate.state(f) == 'a':
544 self.ui.warn("%s never committed!\n" % f)
544 self.ui.warn("%s never committed!\n" % f)
545 self.dirstate.forget([f])
545 self.dirstate.forget([f])
546 elif f not in self.dirstate:
546 elif f not in self.dirstate:
547 self.ui.warn("%s not tracked!\n" % f)
547 self.ui.warn("%s not tracked!\n" % f)
548 else:
548 else:
549 self.dirstate.update([f], "r")
549 self.dirstate.update([f], "r")
550
550
551 def copy(self, source, dest):
551 def copy(self, source, dest):
552 p = self.wjoin(dest)
552 p = self.wjoin(dest)
553 if not os.path.exists(p):
553 if not os.path.exists(p):
554 self.ui.warn("%s does not exist!\n" % dest)
554 self.ui.warn("%s does not exist!\n" % dest)
555 elif not os.path.isfile(p):
555 elif not os.path.isfile(p):
556 self.ui.warn("copy failed: %s is not a file\n" % dest)
556 self.ui.warn("copy failed: %s is not a file\n" % dest)
557 else:
557 else:
558 if self.dirstate.state(dest) == '?':
558 if self.dirstate.state(dest) == '?':
559 self.dirstate.update([dest], "a")
559 self.dirstate.update([dest], "a")
560 self.dirstate.copy(source, dest)
560 self.dirstate.copy(source, dest)
561
561
562 def heads(self):
562 def heads(self):
563 return self.changelog.heads()
563 return self.changelog.heads()
564
564
565 # branchlookup returns a dict giving a list of branches for
565 # branchlookup returns a dict giving a list of branches for
566 # each head. A branch is defined as the tag of a node or
566 # each head. A branch is defined as the tag of a node or
567 # the branch of the node's parents. If a node has multiple
567 # the branch of the node's parents. If a node has multiple
568 # branch tags, tags are eliminated if they are visible from other
568 # branch tags, tags are eliminated if they are visible from other
569 # branch tags.
569 # branch tags.
570 #
570 #
571 # So, for this graph: a->b->c->d->e
571 # So, for this graph: a->b->c->d->e
572 # \ /
572 # \ /
573 # aa -----/
573 # aa -----/
574 # a has tag 2.6.12
574 # a has tag 2.6.12
575 # d has tag 2.6.13
575 # d has tag 2.6.13
576 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
576 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
577 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
577 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
578 # from the list.
578 # from the list.
579 #
579 #
580 # It is possible that more than one head will have the same branch tag.
580 # It is possible that more than one head will have the same branch tag.
581 # callers need to check the result for multiple heads under the same
581 # callers need to check the result for multiple heads under the same
582 # branch tag if that is a problem for them (ie checkout of a specific
582 # branch tag if that is a problem for them (ie checkout of a specific
583 # branch).
583 # branch).
584 #
584 #
585 # passing in a specific branch will limit the depth of the search
585 # passing in a specific branch will limit the depth of the search
586 # through the parents. It won't limit the branches returned in the
586 # through the parents. It won't limit the branches returned in the
587 # result though.
587 # result though.
588 def branchlookup(self, heads=None, branch=None):
588 def branchlookup(self, heads=None, branch=None):
589 if not heads:
589 if not heads:
590 heads = self.heads()
590 heads = self.heads()
591 headt = [ h for h in heads ]
591 headt = [ h for h in heads ]
592 chlog = self.changelog
592 chlog = self.changelog
593 branches = {}
593 branches = {}
594 merges = []
594 merges = []
595 seenmerge = {}
595 seenmerge = {}
596
596
597 # traverse the tree once for each head, recording in the branches
597 # traverse the tree once for each head, recording in the branches
598 # dict which tags are visible from this head. The branches
598 # dict which tags are visible from this head. The branches
599 # dict also records which tags are visible from each tag
599 # dict also records which tags are visible from each tag
600 # while we traverse.
600 # while we traverse.
601 while headt or merges:
601 while headt or merges:
602 if merges:
602 if merges:
603 n, found = merges.pop()
603 n, found = merges.pop()
604 visit = [n]
604 visit = [n]
605 else:
605 else:
606 h = headt.pop()
606 h = headt.pop()
607 visit = [h]
607 visit = [h]
608 found = [h]
608 found = [h]
609 seen = {}
609 seen = {}
610 while visit:
610 while visit:
611 n = visit.pop()
611 n = visit.pop()
612 if n in seen:
612 if n in seen:
613 continue
613 continue
614 pp = chlog.parents(n)
614 pp = chlog.parents(n)
615 tags = self.nodetags(n)
615 tags = self.nodetags(n)
616 if tags:
616 if tags:
617 for x in tags:
617 for x in tags:
618 if x == 'tip':
618 if x == 'tip':
619 continue
619 continue
620 for f in found:
620 for f in found:
621 branches.setdefault(f, {})[n] = 1
621 branches.setdefault(f, {})[n] = 1
622 branches.setdefault(n, {})[n] = 1
622 branches.setdefault(n, {})[n] = 1
623 break
623 break
624 if n not in found:
624 if n not in found:
625 found.append(n)
625 found.append(n)
626 if branch in tags:
626 if branch in tags:
627 continue
627 continue
628 seen[n] = 1
628 seen[n] = 1
629 if pp[1] != nullid and n not in seenmerge:
629 if pp[1] != nullid and n not in seenmerge:
630 merges.append((pp[1], [x for x in found]))
630 merges.append((pp[1], [x for x in found]))
631 seenmerge[n] = 1
631 seenmerge[n] = 1
632 if pp[0] != nullid:
632 if pp[0] != nullid:
633 visit.append(pp[0])
633 visit.append(pp[0])
634 # traverse the branches dict, eliminating branch tags from each
634 # traverse the branches dict, eliminating branch tags from each
635 # head that are visible from another branch tag for that head.
635 # head that are visible from another branch tag for that head.
636 out = {}
636 out = {}
637 viscache = {}
637 viscache = {}
638 for h in heads:
638 for h in heads:
639 def visible(node):
639 def visible(node):
640 if node in viscache:
640 if node in viscache:
641 return viscache[node]
641 return viscache[node]
642 ret = {}
642 ret = {}
643 visit = [node]
643 visit = [node]
644 while visit:
644 while visit:
645 x = visit.pop()
645 x = visit.pop()
646 if x in viscache:
646 if x in viscache:
647 ret.update(viscache[x])
647 ret.update(viscache[x])
648 elif x not in ret:
648 elif x not in ret:
649 ret[x] = 1
649 ret[x] = 1
650 if x in branches:
650 if x in branches:
651 visit[len(visit):] = branches[x].keys()
651 visit[len(visit):] = branches[x].keys()
652 viscache[node] = ret
652 viscache[node] = ret
653 return ret
653 return ret
654 if h not in branches:
654 if h not in branches:
655 continue
655 continue
656 # O(n^2), but somewhat limited. This only searches the
656 # O(n^2), but somewhat limited. This only searches the
657 # tags visible from a specific head, not all the tags in the
657 # tags visible from a specific head, not all the tags in the
658 # whole repo.
658 # whole repo.
659 for b in branches[h]:
659 for b in branches[h]:
660 vis = False
660 vis = False
661 for bb in branches[h].keys():
661 for bb in branches[h].keys():
662 if b != bb:
662 if b != bb:
663 if b in visible(bb):
663 if b in visible(bb):
664 vis = True
664 vis = True
665 break
665 break
666 if not vis:
666 if not vis:
667 l = out.setdefault(h, [])
667 l = out.setdefault(h, [])
668 l[len(l):] = self.nodetags(b)
668 l[len(l):] = self.nodetags(b)
669 return out
669 return out
670
670
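As the comment block above describes, branchlookup keeps a branch tag for a head only if no other tag of that head can already see it. A rough standalone sketch of that pruning step on toy data (hypothetical names, not the repository API):

    # Toy data: 'branches' maps a head to the branch-tag nodes seen from it;
    # 'visible' maps a tag to the set of tags reachable from it.
    branches = {'e': ['2.6.12', '2.6.13']}
    visible = {'2.6.13': {'2.6.13', '2.6.12'}, '2.6.12': {'2.6.12'}}

    out = {}
    for h, tags in branches.items():
        for b in tags:
            # keep b only if no other tag of this head can see it
            if not any(b in visible[bb] for bb in tags if bb != b):
                out.setdefault(h, []).append(b)

    print(out)  # {'e': ['2.6.13']}; 2.6.12 is pruned, as in the graph example above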
671 def branches(self, nodes):
671 def branches(self, nodes):
672 if not nodes: nodes = [self.changelog.tip()]
672 if not nodes: nodes = [self.changelog.tip()]
673 b = []
673 b = []
674 for n in nodes:
674 for n in nodes:
675 t = n
675 t = n
676 while n:
676 while n:
677 p = self.changelog.parents(n)
677 p = self.changelog.parents(n)
678 if p[1] != nullid or p[0] == nullid:
678 if p[1] != nullid or p[0] == nullid:
679 b.append((t, n, p[0], p[1]))
679 b.append((t, n, p[0], p[1]))
680 break
680 break
681 n = p[0]
681 n = p[0]
682 return b
682 return b
683
683
684 def between(self, pairs):
684 def between(self, pairs):
685 r = []
685 r = []
686
686
687 for top, bottom in pairs:
687 for top, bottom in pairs:
688 n, l, i = top, [], 0
688 n, l, i = top, [], 0
689 f = 1
689 f = 1
690
690
691 while n != bottom:
691 while n != bottom:
692 p = self.changelog.parents(n)[0]
692 p = self.changelog.parents(n)[0]
693 if i == f:
693 if i == f:
694 l.append(n)
694 l.append(n)
695 f = f * 2
695 f = f * 2
696 n = p
696 n = p
697 i += 1
697 i += 1
698
698
699 r.append(l)
699 r.append(l)
700
700
701 return r
701 return r
702
702
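between() walks first parents from top toward bottom and records the nodes found at exponentially growing distances (1, 2, 4, 8, ...); findincoming later uses these samples to narrow a branch by binary search. A minimal sketch of the same sampling over a plain list standing in for a first-parent chain (made-up data):

    # Sample positions 1, 2, 4, 8, ... along a chain from 'top' down to 'bottom',
    # mirroring the i == f / f *= 2 loop in between() above.
    chain = list(range(20, 0, -1))   # pretend node 20 is 'top', node 1 is 'bottom'

    samples, f, i = [], 1, 0
    for n in chain[:-1]:             # stop before 'bottom'
        if i == f:
            samples.append(n)
            f *= 2
        i += 1

    print(samples)                   # [19, 18, 16, 12, 4]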
703 def newer(self, nodes):
704 m = {}
705 nl = []
706 pm = {}
707 cl = self.changelog
708 t = l = cl.count()
709
710 # find the lowest numbered node
711 for n in nodes:
712 l = min(l, cl.rev(n))
713 m[n] = 1
714
715 for i in xrange(l, t):
716 n = cl.node(i)
717 if n in m: # explicitly listed
718 pm[n] = 1
719 nl.append(n)
720 continue
721 for p in cl.parents(n):
722 if p in pm: # parent listed
723 pm[n] = 1
724 nl.append(n)
725 break
726
727 return nl
728
729 def findincoming(self, remote, base=None, heads=None):
703 def findincoming(self, remote, base=None, heads=None):
730 m = self.changelog.nodemap
704 m = self.changelog.nodemap
731 search = []
705 search = []
732 fetch = {}
706 fetch = {}
733 seen = {}
707 seen = {}
734 seenbranch = {}
708 seenbranch = {}
735 if base == None:
709 if base == None:
736 base = {}
710 base = {}
737
711
738 # assume we're closer to the tip than the root
712 # assume we're closer to the tip than the root
739 # and start by examining the heads
713 # and start by examining the heads
740 self.ui.status("searching for changes\n")
714 self.ui.status("searching for changes\n")
741
715
742 if not heads:
716 if not heads:
743 heads = remote.heads()
717 heads = remote.heads()
744
718
745 unknown = []
719 unknown = []
746 for h in heads:
720 for h in heads:
747 if h not in m:
721 if h not in m:
748 unknown.append(h)
722 unknown.append(h)
749 else:
723 else:
750 base[h] = 1
724 base[h] = 1
751
725
752 if not unknown:
726 if not unknown:
753 return None
727 return None
754
728
755 rep = {}
729 rep = {}
756 reqcnt = 0
730 reqcnt = 0
757
731
758 # search through remote branches
732 # search through remote branches
759 # a 'branch' here is a linear segment of history, with four parts:
733 # a 'branch' here is a linear segment of history, with four parts:
760 # head, root, first parent, second parent
734 # head, root, first parent, second parent
761 # (a branch always has two parents (or none) by definition)
735 # (a branch always has two parents (or none) by definition)
762 unknown = remote.branches(unknown)
736 unknown = remote.branches(unknown)
763 while unknown:
737 while unknown:
764 r = []
738 r = []
765 while unknown:
739 while unknown:
766 n = unknown.pop(0)
740 n = unknown.pop(0)
767 if n[0] in seen:
741 if n[0] in seen:
768 continue
742 continue
769
743
770 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
744 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
771 if n[0] == nullid:
745 if n[0] == nullid:
772 break
746 break
773 if n in seenbranch:
747 if n in seenbranch:
774 self.ui.debug("branch already found\n")
748 self.ui.debug("branch already found\n")
775 continue
749 continue
776 if n[1] and n[1] in m: # do we know the base?
750 if n[1] and n[1] in m: # do we know the base?
777 self.ui.debug("found incomplete branch %s:%s\n"
751 self.ui.debug("found incomplete branch %s:%s\n"
778 % (short(n[0]), short(n[1])))
752 % (short(n[0]), short(n[1])))
779 search.append(n) # schedule branch range for scanning
753 search.append(n) # schedule branch range for scanning
780 seenbranch[n] = 1
754 seenbranch[n] = 1
781 else:
755 else:
782 if n[1] not in seen and n[1] not in fetch:
756 if n[1] not in seen and n[1] not in fetch:
783 if n[2] in m and n[3] in m:
757 if n[2] in m and n[3] in m:
784 self.ui.debug("found new changeset %s\n" %
758 self.ui.debug("found new changeset %s\n" %
785 short(n[1]))
759 short(n[1]))
786 fetch[n[1]] = 1 # earliest unknown
760 fetch[n[1]] = 1 # earliest unknown
787 base[n[2]] = 1 # latest known
761 base[n[2]] = 1 # latest known
788 continue
762 continue
789
763
790 for a in n[2:4]:
764 for a in n[2:4]:
791 if a not in rep:
765 if a not in rep:
792 r.append(a)
766 r.append(a)
793 rep[a] = 1
767 rep[a] = 1
794
768
795 seen[n[0]] = 1
769 seen[n[0]] = 1
796
770
797 if r:
771 if r:
798 reqcnt += 1
772 reqcnt += 1
799 self.ui.debug("request %d: %s\n" %
773 self.ui.debug("request %d: %s\n" %
800 (reqcnt, " ".join(map(short, r))))
774 (reqcnt, " ".join(map(short, r))))
801 for p in range(0, len(r), 10):
775 for p in range(0, len(r), 10):
802 for b in remote.branches(r[p:p+10]):
776 for b in remote.branches(r[p:p+10]):
803 self.ui.debug("received %s:%s\n" %
777 self.ui.debug("received %s:%s\n" %
804 (short(b[0]), short(b[1])))
778 (short(b[0]), short(b[1])))
805 if b[0] in m:
779 if b[0] in m:
806 self.ui.debug("found base node %s\n" % short(b[0]))
780 self.ui.debug("found base node %s\n" % short(b[0]))
807 base[b[0]] = 1
781 base[b[0]] = 1
808 elif b[0] not in seen:
782 elif b[0] not in seen:
809 unknown.append(b)
783 unknown.append(b)
810
784
811 # do binary search on the branches we found
785 # do binary search on the branches we found
812 while search:
786 while search:
813 n = search.pop(0)
787 n = search.pop(0)
814 reqcnt += 1
788 reqcnt += 1
815 l = remote.between([(n[0], n[1])])[0]
789 l = remote.between([(n[0], n[1])])[0]
816 l.append(n[1])
790 l.append(n[1])
817 p = n[0]
791 p = n[0]
818 f = 1
792 f = 1
819 for i in l:
793 for i in l:
820 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
794 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
821 if i in m:
795 if i in m:
822 if f <= 2:
796 if f <= 2:
823 self.ui.debug("found new branch changeset %s\n" %
797 self.ui.debug("found new branch changeset %s\n" %
824 short(p))
798 short(p))
825 fetch[p] = 1
799 fetch[p] = 1
826 base[i] = 1
800 base[i] = 1
827 else:
801 else:
828 self.ui.debug("narrowed branch search to %s:%s\n"
802 self.ui.debug("narrowed branch search to %s:%s\n"
829 % (short(p), short(i)))
803 % (short(p), short(i)))
830 search.append((p, i))
804 search.append((p, i))
831 break
805 break
832 p, f = i, f * 2
806 p, f = i, f * 2
833
807
834 # sanity check our fetch list
808 # sanity check our fetch list
835 for f in fetch.keys():
809 for f in fetch.keys():
836 if f in m:
810 if f in m:
837 raise repo.RepoError("already have changeset " + short(f[:4]))
811 raise repo.RepoError("already have changeset " + short(f[:4]))
838
812
839 if base.keys() == [nullid]:
813 if base.keys() == [nullid]:
840 self.ui.warn("warning: pulling from an unrelated repository!\n")
814 self.ui.warn("warning: pulling from an unrelated repository!\n")
841
815
842 self.ui.note("found new changesets starting at " +
816 self.ui.note("found new changesets starting at " +
843 " ".join([short(f) for f in fetch]) + "\n")
817 " ".join([short(f) for f in fetch]) + "\n")
844
818
845 self.ui.debug("%d total queries\n" % reqcnt)
819 self.ui.debug("%d total queries\n" % reqcnt)
846
820
847 return fetch.keys()
821 return fetch.keys()
848
822
849 def findoutgoing(self, remote, base=None, heads=None):
823 def findoutgoing(self, remote, base=None, heads=None):
850 if base == None:
824 if base == None:
851 base = {}
825 base = {}
852 self.findincoming(remote, base, heads)
826 self.findincoming(remote, base, heads)
853
827
854 self.ui.debug("common changesets up to "
828 self.ui.debug("common changesets up to "
855 + " ".join(map(short, base.keys())) + "\n")
829 + " ".join(map(short, base.keys())) + "\n")
856
830
857 remain = dict.fromkeys(self.changelog.nodemap)
831 remain = dict.fromkeys(self.changelog.nodemap)
858
832
859 # prune everything remote has from the tree
833 # prune everything remote has from the tree
860 del remain[nullid]
834 del remain[nullid]
861 remove = base.keys()
835 remove = base.keys()
862 while remove:
836 while remove:
863 n = remove.pop(0)
837 n = remove.pop(0)
864 if n in remain:
838 if n in remain:
865 del remain[n]
839 del remain[n]
866 for p in self.changelog.parents(n):
840 for p in self.changelog.parents(n):
867 remove.append(p)
841 remove.append(p)
868
842
869 # find every node whose parents have been pruned
843 # find every node whose parents have been pruned
870 subset = []
844 subset = []
871 for n in remain:
845 for n in remain:
872 p1, p2 = self.changelog.parents(n)
846 p1, p2 = self.changelog.parents(n)
873 if p1 not in remain and p2 not in remain:
847 if p1 not in remain and p2 not in remain:
874 subset.append(n)
848 subset.append(n)
875
849
876 # this is the set of all roots we have to push
850 # this is the set of all roots we have to push
877 return subset
851 return subset
878
852
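findoutgoing above takes every local node, walks ancestors of the common base nodes to prune them, and returns the roots of whatever remains. The same idea on a toy parent map (hypothetical node names, not the changelog API):

    # DAG as a child -> (parent1, parent2) map; None plays the role of nullid.
    parents = {
        'a': (None, None),
        'b': ('a', None),
        'c': ('b', None),
        'd': ('c', None),
    }
    base = ['b']                       # nodes the remote already has

    remain = set(parents)              # every local node
    remove = list(base)                # prune base and all of its ancestors
    while remove:
        n = remove.pop(0)
        if n in remain:
            remain.discard(n)
            remove.extend(p for p in parents[n] if p is not None)

    # roots of the remainder: nodes whose parents were all pruned
    subset = [n for n in remain
              if all(p not in remain for p in parents[n])]
    print(sorted(remain), subset)      # ['c', 'd'] ['c']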
879 def pull(self, remote):
853 def pull(self, remote):
880 lock = self.lock()
854 lock = self.lock()
881
855
882 # if we have an empty repo, fetch everything
856 # if we have an empty repo, fetch everything
883 if self.changelog.tip() == nullid:
857 if self.changelog.tip() == nullid:
884 self.ui.status("requesting all changes\n")
858 self.ui.status("requesting all changes\n")
885 fetch = [nullid]
859 fetch = [nullid]
886 else:
860 else:
887 fetch = self.findincoming(remote)
861 fetch = self.findincoming(remote)
888
862
889 if not fetch:
863 if not fetch:
890 self.ui.status("no changes found\n")
864 self.ui.status("no changes found\n")
891 return 1
865 return 1
892
866
893 cg = remote.changegroup(fetch)
867 cg = remote.changegroup(fetch)
894 return self.addchangegroup(cg)
868 return self.addchangegroup(cg)
895
869
896 def push(self, remote, force=False):
870 def push(self, remote, force=False):
897 lock = remote.lock()
871 lock = remote.lock()
898
872
899 base = {}
873 base = {}
900 heads = remote.heads()
874 heads = remote.heads()
901 inc = self.findincoming(remote, base, heads)
875 inc = self.findincoming(remote, base, heads)
902 if not force and inc:
876 if not force and inc:
903 self.ui.warn("abort: unsynced remote changes!\n")
877 self.ui.warn("abort: unsynced remote changes!\n")
904 self.ui.status("(did you forget to sync? use push -f to force)\n")
878 self.ui.status("(did you forget to sync? use push -f to force)\n")
905 return 1
879 return 1
906
880
907 update = self.findoutgoing(remote, base)
881 update = self.findoutgoing(remote, base)
908 if not update:
882 if not update:
909 self.ui.status("no changes found\n")
883 self.ui.status("no changes found\n")
910 return 1
884 return 1
911 elif not force:
885 elif not force:
912 if len(heads) < len(self.changelog.heads()):
886 if len(heads) < len(self.changelog.heads()):
913 self.ui.warn("abort: push creates new remote branches!\n")
887 self.ui.warn("abort: push creates new remote branches!\n")
914 self.ui.status("(did you forget to merge?" +
888 self.ui.status("(did you forget to merge?" +
915 " use push -f to force)\n")
889 " use push -f to force)\n")
916 return 1
890 return 1
917
891
918 cg = self.changegroup(update)
892 cg = self.changegroup(update)
919 return remote.addchangegroup(cg)
893 return remote.addchangegroup(cg)
920
894
921 def changegroup(self, basenodes):
895 def changegroup(self, basenodes):
922 genread = util.chunkbuffer
896 genread = util.chunkbuffer
923
897
924 def gengroup():
898 def gengroup():
925 nodes = self.newer(basenodes)
899 nodes = self.changelog.nodesbetween(basenodes)[0]
926
900
927 # construct the link map
901 # construct the link map
928 linkmap = {}
902 linkmap = {}
929 for n in nodes:
903 for n in nodes:
930 linkmap[self.changelog.rev(n)] = n
904 linkmap[self.changelog.rev(n)] = n
931
905
932 # construct a list of all changed files
906 # construct a list of all changed files
933 changed = {}
907 changed = {}
934 for n in nodes:
908 for n in nodes:
935 c = self.changelog.read(n)
909 c = self.changelog.read(n)
936 for f in c[3]:
910 for f in c[3]:
937 changed[f] = 1
911 changed[f] = 1
938 changed = changed.keys()
912 changed = changed.keys()
939 changed.sort()
913 changed.sort()
940
914
941 # the changegroup is changesets + manifests + all file revs
915 # the changegroup is changesets + manifests + all file revs
942 revs = [ self.changelog.rev(n) for n in nodes ]
916 revs = [ self.changelog.rev(n) for n in nodes ]
943
917
944 for y in self.changelog.group(linkmap): yield y
918 for y in self.changelog.group(linkmap): yield y
945 for y in self.manifest.group(linkmap): yield y
919 for y in self.manifest.group(linkmap): yield y
946 for f in changed:
920 for f in changed:
947 yield struct.pack(">l", len(f) + 4) + f
921 yield struct.pack(">l", len(f) + 4) + f
948 g = self.file(f).group(linkmap)
922 g = self.file(f).group(linkmap)
949 for y in g:
923 for y in g:
950 yield y
924 yield y
951
925
952 yield struct.pack(">l", 0)
926 yield struct.pack(">l", 0)
953
927
954 return genread(gengroup())
928 return genread(gengroup())
955
929
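The changegroup stream frames each file entry as a 4-byte big-endian length, which counts the length field itself, followed by the payload; a length of zero ends the group, which is how addchangegroup's getchunk below reads it back. A minimal in-memory sketch of that framing (not the real wire plumbing):

    import struct
    from io import BytesIO

    def writechunk(data):
        # length prefix includes its own four bytes, as in changegroup() above
        return struct.pack(">l", len(data) + 4) + data

    def readchunk(source):
        d = source.read(4)
        if not d:
            return b""
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            return b""                 # zero/empty chunk ends the group
        return source.read(l - 4)

    stream = BytesIO(writechunk(b"hello") + struct.pack(">l", 0))
    print(readchunk(stream))           # b'hello'
    print(readchunk(stream))           # b'' marks end of group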
956 def addchangegroup(self, source):
930 def addchangegroup(self, source):
957
931
958 def getchunk():
932 def getchunk():
959 d = source.read(4)
933 d = source.read(4)
960 if not d: return ""
934 if not d: return ""
961 l = struct.unpack(">l", d)[0]
935 l = struct.unpack(">l", d)[0]
962 if l <= 4: return ""
936 if l <= 4: return ""
963 d = source.read(l - 4)
937 d = source.read(l - 4)
964 if len(d) < l - 4:
938 if len(d) < l - 4:
965 raise repo.RepoError("premature EOF reading chunk" +
939 raise repo.RepoError("premature EOF reading chunk" +
966 " (got %d bytes, expected %d)"
940 " (got %d bytes, expected %d)"
967 % (len(d), l - 4))
941 % (len(d), l - 4))
968 return d
942 return d
969
943
970 def getgroup():
944 def getgroup():
971 while 1:
945 while 1:
972 c = getchunk()
946 c = getchunk()
973 if not c: break
947 if not c: break
974 yield c
948 yield c
975
949
976 def csmap(x):
950 def csmap(x):
977 self.ui.debug("add changeset %s\n" % short(x))
951 self.ui.debug("add changeset %s\n" % short(x))
978 return self.changelog.count()
952 return self.changelog.count()
979
953
980 def revmap(x):
954 def revmap(x):
981 return self.changelog.rev(x)
955 return self.changelog.rev(x)
982
956
983 if not source: return
957 if not source: return
984 changesets = files = revisions = 0
958 changesets = files = revisions = 0
985
959
986 tr = self.transaction()
960 tr = self.transaction()
987
961
988 oldheads = len(self.changelog.heads())
962 oldheads = len(self.changelog.heads())
989
963
990 # pull off the changeset group
964 # pull off the changeset group
991 self.ui.status("adding changesets\n")
965 self.ui.status("adding changesets\n")
992 co = self.changelog.tip()
966 co = self.changelog.tip()
993 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
967 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
994 cnr, cor = map(self.changelog.rev, (cn, co))
968 cnr, cor = map(self.changelog.rev, (cn, co))
995 if cn == nullid:
969 if cn == nullid:
996 cnr = cor
970 cnr = cor
997 changesets = cnr - cor
971 changesets = cnr - cor
998
972
999 # pull off the manifest group
973 # pull off the manifest group
1000 self.ui.status("adding manifests\n")
974 self.ui.status("adding manifests\n")
1001 mm = self.manifest.tip()
975 mm = self.manifest.tip()
1002 mo = self.manifest.addgroup(getgroup(), revmap, tr)
976 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1003
977
1004 # process the files
978 # process the files
1005 self.ui.status("adding file changes\n")
979 self.ui.status("adding file changes\n")
1006 while 1:
980 while 1:
1007 f = getchunk()
981 f = getchunk()
1008 if not f: break
982 if not f: break
1009 self.ui.debug("adding %s revisions\n" % f)
983 self.ui.debug("adding %s revisions\n" % f)
1010 fl = self.file(f)
984 fl = self.file(f)
1011 o = fl.count()
985 o = fl.count()
1012 n = fl.addgroup(getgroup(), revmap, tr)
986 n = fl.addgroup(getgroup(), revmap, tr)
1013 revisions += fl.count() - o
987 revisions += fl.count() - o
1014 files += 1
988 files += 1
1015
989
1016 newheads = len(self.changelog.heads())
990 newheads = len(self.changelog.heads())
1017 heads = ""
991 heads = ""
1018 if oldheads and newheads > oldheads:
992 if oldheads and newheads > oldheads:
1019 heads = " (+%d heads)" % (newheads - oldheads)
993 heads = " (+%d heads)" % (newheads - oldheads)
1020
994
1021 self.ui.status(("added %d changesets" +
995 self.ui.status(("added %d changesets" +
1022 " with %d changes to %d files%s\n")
996 " with %d changes to %d files%s\n")
1023 % (changesets, revisions, files, heads))
997 % (changesets, revisions, files, heads))
1024
998
1025 tr.close()
999 tr.close()
1026
1000
1027 if changesets > 0:
1001 if changesets > 0:
1028 if not self.hook("changegroup",
1002 if not self.hook("changegroup",
1029 node=hex(self.changelog.node(cor+1))):
1003 node=hex(self.changelog.node(cor+1))):
1030 self.ui.warn("abort: changegroup hook returned failure!\n")
1004 self.ui.warn("abort: changegroup hook returned failure!\n")
1031 return 1
1005 return 1
1032
1006
1033 for i in range(cor + 1, cnr + 1):
1007 for i in range(cor + 1, cnr + 1):
1034 self.hook("commit", node=hex(self.changelog.node(i)))
1008 self.hook("commit", node=hex(self.changelog.node(i)))
1035
1009
1036 return
1010 return
1037
1011
1038 def update(self, node, allow=False, force=False, choose=None,
1012 def update(self, node, allow=False, force=False, choose=None,
1039 moddirstate=True):
1013 moddirstate=True):
1040 pl = self.dirstate.parents()
1014 pl = self.dirstate.parents()
1041 if not force and pl[1] != nullid:
1015 if not force and pl[1] != nullid:
1042 self.ui.warn("aborting: outstanding uncommitted merges\n")
1016 self.ui.warn("aborting: outstanding uncommitted merges\n")
1043 return 1
1017 return 1
1044
1018
1045 p1, p2 = pl[0], node
1019 p1, p2 = pl[0], node
1046 pa = self.changelog.ancestor(p1, p2)
1020 pa = self.changelog.ancestor(p1, p2)
1047 m1n = self.changelog.read(p1)[0]
1021 m1n = self.changelog.read(p1)[0]
1048 m2n = self.changelog.read(p2)[0]
1022 m2n = self.changelog.read(p2)[0]
1049 man = self.manifest.ancestor(m1n, m2n)
1023 man = self.manifest.ancestor(m1n, m2n)
1050 m1 = self.manifest.read(m1n)
1024 m1 = self.manifest.read(m1n)
1051 mf1 = self.manifest.readflags(m1n)
1025 mf1 = self.manifest.readflags(m1n)
1052 m2 = self.manifest.read(m2n)
1026 m2 = self.manifest.read(m2n)
1053 mf2 = self.manifest.readflags(m2n)
1027 mf2 = self.manifest.readflags(m2n)
1054 ma = self.manifest.read(man)
1028 ma = self.manifest.read(man)
1055 mfa = self.manifest.readflags(man)
1029 mfa = self.manifest.readflags(man)
1056
1030
1057 (c, a, d, u) = self.changes()
1031 (c, a, d, u) = self.changes()
1058
1032
1059 # is this a jump, or a merge? i.e. is there a linear path
1033 # is this a jump, or a merge? i.e. is there a linear path
1060 # from p1 to p2?
1034 # from p1 to p2?
1061 linear_path = (pa == p1 or pa == p2)
1035 linear_path = (pa == p1 or pa == p2)
1062
1036
1063 # resolve the manifest to determine which files
1037 # resolve the manifest to determine which files
1064 # we care about merging
1038 # we care about merging
1065 self.ui.note("resolving manifests\n")
1039 self.ui.note("resolving manifests\n")
1066 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1040 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1067 (force, allow, moddirstate, linear_path))
1041 (force, allow, moddirstate, linear_path))
1068 self.ui.debug(" ancestor %s local %s remote %s\n" %
1042 self.ui.debug(" ancestor %s local %s remote %s\n" %
1069 (short(man), short(m1n), short(m2n)))
1043 (short(man), short(m1n), short(m2n)))
1070
1044
1071 merge = {}
1045 merge = {}
1072 get = {}
1046 get = {}
1073 remove = []
1047 remove = []
1074
1048
1075 # construct a working dir manifest
1049 # construct a working dir manifest
1076 mw = m1.copy()
1050 mw = m1.copy()
1077 mfw = mf1.copy()
1051 mfw = mf1.copy()
1078 umap = dict.fromkeys(u)
1052 umap = dict.fromkeys(u)
1079
1053
1080 for f in a + c + u:
1054 for f in a + c + u:
1081 mw[f] = ""
1055 mw[f] = ""
1082 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1056 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1083
1057
1084 for f in d:
1058 for f in d:
1085 if f in mw: del mw[f]
1059 if f in mw: del mw[f]
1086
1060
1087 # If we're jumping between revisions (as opposed to merging),
1061 # If we're jumping between revisions (as opposed to merging),
1088 # and if neither the working directory nor the target rev has
1062 # and if neither the working directory nor the target rev has
1089 # the file, then we need to remove it from the dirstate, to
1063 # the file, then we need to remove it from the dirstate, to
1090 # prevent the dirstate from listing the file when it is no
1064 # prevent the dirstate from listing the file when it is no
1091 # longer in the manifest.
1065 # longer in the manifest.
1092 if moddirstate and linear_path and f not in m2:
1066 if moddirstate and linear_path and f not in m2:
1093 self.dirstate.forget((f,))
1067 self.dirstate.forget((f,))
1094
1068
1095 # Compare manifests
1069 # Compare manifests
1096 for f, n in mw.iteritems():
1070 for f, n in mw.iteritems():
1097 if choose and not choose(f): continue
1071 if choose and not choose(f): continue
1098 if f in m2:
1072 if f in m2:
1099 s = 0
1073 s = 0
1100
1074
1101 # is the wfile new since m1, and match m2?
1075 # is the wfile new since m1, and match m2?
1102 if f not in m1:
1076 if f not in m1:
1103 t1 = self.wread(f)
1077 t1 = self.wread(f)
1104 t2 = self.file(f).read(m2[f])
1078 t2 = self.file(f).read(m2[f])
1105 if cmp(t1, t2) == 0:
1079 if cmp(t1, t2) == 0:
1106 n = m2[f]
1080 n = m2[f]
1107 del t1, t2
1081 del t1, t2
1108
1082
1109 # are files different?
1083 # are files different?
1110 if n != m2[f]:
1084 if n != m2[f]:
1111 a = ma.get(f, nullid)
1085 a = ma.get(f, nullid)
1112 # are both different from the ancestor?
1086 # are both different from the ancestor?
1113 if n != a and m2[f] != a:
1087 if n != a and m2[f] != a:
1114 self.ui.debug(" %s versions differ, resolve\n" % f)
1088 self.ui.debug(" %s versions differ, resolve\n" % f)
1115 # merge executable bits
1089 # merge executable bits
1116 # "if we changed or they changed, change in merge"
1090 # "if we changed or they changed, change in merge"
1117 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1091 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1118 mode = ((a^b) | (a^c)) ^ a
1092 mode = ((a^b) | (a^c)) ^ a
1119 merge[f] = (m1.get(f, nullid), m2[f], mode)
1093 merge[f] = (m1.get(f, nullid), m2[f], mode)
1120 s = 1
1094 s = 1
1121 # are we clobbering?
1095 # are we clobbering?
1122 # is remote's version newer?
1096 # is remote's version newer?
1123 # or are we going back in time?
1097 # or are we going back in time?
1124 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1098 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1125 self.ui.debug(" remote %s is newer, get\n" % f)
1099 self.ui.debug(" remote %s is newer, get\n" % f)
1126 get[f] = m2[f]
1100 get[f] = m2[f]
1127 s = 1
1101 s = 1
1128 elif f in umap:
1102 elif f in umap:
1129 # this unknown file is the same as the checkout
1103 # this unknown file is the same as the checkout
1130 get[f] = m2[f]
1104 get[f] = m2[f]
1131
1105
1132 if not s and mfw[f] != mf2[f]:
1106 if not s and mfw[f] != mf2[f]:
1133 if force:
1107 if force:
1134 self.ui.debug(" updating permissions for %s\n" % f)
1108 self.ui.debug(" updating permissions for %s\n" % f)
1135 util.set_exec(self.wjoin(f), mf2[f])
1109 util.set_exec(self.wjoin(f), mf2[f])
1136 else:
1110 else:
1137 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1111 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1138 mode = ((a^b) | (a^c)) ^ a
1112 mode = ((a^b) | (a^c)) ^ a
1139 if mode != b:
1113 if mode != b:
1140 self.ui.debug(" updating permissions for %s\n" % f)
1114 self.ui.debug(" updating permissions for %s\n" % f)
1141 util.set_exec(self.wjoin(f), mode)
1115 util.set_exec(self.wjoin(f), mode)
1142 del m2[f]
1116 del m2[f]
1143 elif f in ma:
1117 elif f in ma:
1144 if n != ma[f]:
1118 if n != ma[f]:
1145 r = "d"
1119 r = "d"
1146 if not force and (linear_path or allow):
1120 if not force and (linear_path or allow):
1147 r = self.ui.prompt(
1121 r = self.ui.prompt(
1148 (" local changed %s which remote deleted\n" % f) +
1122 (" local changed %s which remote deleted\n" % f) +
1149 "(k)eep or (d)elete?", "[kd]", "k")
1123 "(k)eep or (d)elete?", "[kd]", "k")
1150 if r == "d":
1124 if r == "d":
1151 remove.append(f)
1125 remove.append(f)
1152 else:
1126 else:
1153 self.ui.debug("other deleted %s\n" % f)
1127 self.ui.debug("other deleted %s\n" % f)
1154 remove.append(f) # other deleted it
1128 remove.append(f) # other deleted it
1155 else:
1129 else:
1156 # file is created on branch or in working directory
1130 # file is created on branch or in working directory
1157 if force and f not in umap:
1131 if force and f not in umap:
1158 self.ui.debug("remote deleted %s, clobbering\n" % f)
1132 self.ui.debug("remote deleted %s, clobbering\n" % f)
1159 remove.append(f)
1133 remove.append(f)
1160 elif n == m1.get(f, nullid): # same as parent
1134 elif n == m1.get(f, nullid): # same as parent
1161 if p2 == pa: # going backwards?
1135 if p2 == pa: # going backwards?
1162 self.ui.debug("remote deleted %s\n" % f)
1136 self.ui.debug("remote deleted %s\n" % f)
1163 remove.append(f)
1137 remove.append(f)
1164 else:
1138 else:
1165 self.ui.debug("local modified %s, keeping\n" % f)
1139 self.ui.debug("local modified %s, keeping\n" % f)
1166 else:
1140 else:
1167 self.ui.debug("working dir created %s, keeping\n" % f)
1141 self.ui.debug("working dir created %s, keeping\n" % f)
1168
1142
1169 for f, n in m2.iteritems():
1143 for f, n in m2.iteritems():
1170 if choose and not choose(f): continue
1144 if choose and not choose(f): continue
1171 if f[0] == "/": continue
1145 if f[0] == "/": continue
1172 if f in ma and n != ma[f]:
1146 if f in ma and n != ma[f]:
1173 r = "k"
1147 r = "k"
1174 if not force and (linear_path or allow):
1148 if not force and (linear_path or allow):
1175 r = self.ui.prompt(
1149 r = self.ui.prompt(
1176 ("remote changed %s which local deleted\n" % f) +
1150 ("remote changed %s which local deleted\n" % f) +
1177 "(k)eep or (d)elete?", "[kd]", "k")
1151 "(k)eep or (d)elete?", "[kd]", "k")
1178 if r == "k": get[f] = n
1152 if r == "k": get[f] = n
1179 elif f not in ma:
1153 elif f not in ma:
1180 self.ui.debug("remote created %s\n" % f)
1154 self.ui.debug("remote created %s\n" % f)
1181 get[f] = n
1155 get[f] = n
1182 else:
1156 else:
1183 if force or p2 == pa: # going backwards?
1157 if force or p2 == pa: # going backwards?
1184 self.ui.debug("local deleted %s, recreating\n" % f)
1158 self.ui.debug("local deleted %s, recreating\n" % f)
1185 get[f] = n
1159 get[f] = n
1186 else:
1160 else:
1187 self.ui.debug("local deleted %s\n" % f)
1161 self.ui.debug("local deleted %s\n" % f)
1188
1162
1189 del mw, m1, m2, ma
1163 del mw, m1, m2, ma
1190
1164
1191 if force:
1165 if force:
1192 for f in merge:
1166 for f in merge:
1193 get[f] = merge[f][1]
1167 get[f] = merge[f][1]
1194 merge = {}
1168 merge = {}
1195
1169
1196 if linear_path or force:
1170 if linear_path or force:
1197 # we don't need to do any magic, just jump to the new rev
1171 # we don't need to do any magic, just jump to the new rev
1198 branch_merge = False
1172 branch_merge = False
1199 p1, p2 = p2, nullid
1173 p1, p2 = p2, nullid
1200 else:
1174 else:
1201 if not allow:
1175 if not allow:
1202 self.ui.status("this update spans a branch" +
1176 self.ui.status("this update spans a branch" +
1203 " affecting the following files:\n")
1177 " affecting the following files:\n")
1204 fl = merge.keys() + get.keys()
1178 fl = merge.keys() + get.keys()
1205 fl.sort()
1179 fl.sort()
1206 for f in fl:
1180 for f in fl:
1207 cf = ""
1181 cf = ""
1208 if f in merge: cf = " (resolve)"
1182 if f in merge: cf = " (resolve)"
1209 self.ui.status(" %s%s\n" % (f, cf))
1183 self.ui.status(" %s%s\n" % (f, cf))
1210 self.ui.warn("aborting update spanning branches!\n")
1184 self.ui.warn("aborting update spanning branches!\n")
1211 self.ui.status("(use update -m to merge across branches" +
1185 self.ui.status("(use update -m to merge across branches" +
1212 " or -C to lose changes)\n")
1186 " or -C to lose changes)\n")
1213 return 1
1187 return 1
1214 branch_merge = True
1188 branch_merge = True
1215
1189
1216 if moddirstate:
1190 if moddirstate:
1217 self.dirstate.setparents(p1, p2)
1191 self.dirstate.setparents(p1, p2)
1218
1192
1219 # get the files we don't need to change
1193 # get the files we don't need to change
1220 files = get.keys()
1194 files = get.keys()
1221 files.sort()
1195 files.sort()
1222 for f in files:
1196 for f in files:
1223 if f[0] == "/": continue
1197 if f[0] == "/": continue
1224 self.ui.note("getting %s\n" % f)
1198 self.ui.note("getting %s\n" % f)
1225 t = self.file(f).read(get[f])
1199 t = self.file(f).read(get[f])
1226 try:
1200 try:
1227 self.wwrite(f, t)
1201 self.wwrite(f, t)
1228 except IOError, e:
1202 except IOError, e:
1229 if e.errno != errno.ENOENT:
1203 if e.errno != errno.ENOENT:
1230 raise
1204 raise
1231 os.makedirs(os.path.dirname(self.wjoin(f)))
1205 os.makedirs(os.path.dirname(self.wjoin(f)))
1232 self.wwrite(f, t)
1206 self.wwrite(f, t)
1233 util.set_exec(self.wjoin(f), mf2[f])
1207 util.set_exec(self.wjoin(f), mf2[f])
1234 if moddirstate:
1208 if moddirstate:
1235 if branch_merge:
1209 if branch_merge:
1236 self.dirstate.update([f], 'n', st_mtime=-1)
1210 self.dirstate.update([f], 'n', st_mtime=-1)
1237 else:
1211 else:
1238 self.dirstate.update([f], 'n')
1212 self.dirstate.update([f], 'n')
1239
1213
1240 # merge the tricky bits
1214 # merge the tricky bits
1241 files = merge.keys()
1215 files = merge.keys()
1242 files.sort()
1216 files.sort()
1243 for f in files:
1217 for f in files:
1244 self.ui.status("merging %s\n" % f)
1218 self.ui.status("merging %s\n" % f)
1245 my, other, flag = merge[f]
1219 my, other, flag = merge[f]
1246 self.merge3(f, my, other)
1220 self.merge3(f, my, other)
1247 util.set_exec(self.wjoin(f), flag)
1221 util.set_exec(self.wjoin(f), flag)
1248 if moddirstate:
1222 if moddirstate:
1249 if branch_merge:
1223 if branch_merge:
1250 # We've done a branch merge, mark this file as merged
1224 # We've done a branch merge, mark this file as merged
1251 # so that we properly record the merger later
1225 # so that we properly record the merger later
1252 self.dirstate.update([f], 'm')
1226 self.dirstate.update([f], 'm')
1253 else:
1227 else:
1254 # We've update-merged a locally modified file, so
1228 # We've update-merged a locally modified file, so
1255 # we set the dirstate to emulate a normal checkout
1229 # we set the dirstate to emulate a normal checkout
1256 # of that file some time in the past. Thus our
1230 # of that file some time in the past. Thus our
1257 # merge will appear as a normal local file
1231 # merge will appear as a normal local file
1258 # modification.
1232 # modification.
1259 f_len = len(self.file(f).read(other))
1233 f_len = len(self.file(f).read(other))
1260 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1234 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1261
1235
1262 remove.sort()
1236 remove.sort()
1263 for f in remove:
1237 for f in remove:
1264 self.ui.note("removing %s\n" % f)
1238 self.ui.note("removing %s\n" % f)
1265 try:
1239 try:
1266 os.unlink(self.wjoin(f))
1240 os.unlink(self.wjoin(f))
1267 except OSError, inst:
1241 except OSError, inst:
1268 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1242 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1269 # try removing directories that might now be empty
1243 # try removing directories that might now be empty
1270 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1244 try: os.removedirs(os.path.dirname(self.wjoin(f)))
1271 except: pass
1245 except: pass
1272 if moddirstate:
1246 if moddirstate:
1273 if branch_merge:
1247 if branch_merge:
1274 self.dirstate.update(remove, 'r')
1248 self.dirstate.update(remove, 'r')
1275 else:
1249 else:
1276 self.dirstate.forget(remove)
1250 self.dirstate.forget(remove)
1277
1251
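update() above merges the executable bit with mode = ((a^b) | (a^c)) ^ a, read as "if we changed it or they changed it, it changes in the merge". A quick standalone check of that identity over all bit combinations:

    # a = ancestor's exec bit, b = working copy's, c = other side's
    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                mode = ((a ^ b) | (a ^ c)) ^ a
                # the bit moves away from the ancestor iff either side changed it
                expect = b if b != a else c
                assert mode == expect
                print(a, b, c, "->", mode)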
1278 def merge3(self, fn, my, other):
1252 def merge3(self, fn, my, other):
1279 """perform a 3-way merge in the working directory"""
1253 """perform a 3-way merge in the working directory"""
1280
1254
1281 def temp(prefix, node):
1255 def temp(prefix, node):
1282 pre = "%s~%s." % (os.path.basename(fn), prefix)
1256 pre = "%s~%s." % (os.path.basename(fn), prefix)
1283 (fd, name) = tempfile.mkstemp("", pre)
1257 (fd, name) = tempfile.mkstemp("", pre)
1284 f = os.fdopen(fd, "wb")
1258 f = os.fdopen(fd, "wb")
1285 self.wwrite(fn, fl.read(node), f)
1259 self.wwrite(fn, fl.read(node), f)
1286 f.close()
1260 f.close()
1287 return name
1261 return name
1288
1262
1289 fl = self.file(fn)
1263 fl = self.file(fn)
1290 base = fl.ancestor(my, other)
1264 base = fl.ancestor(my, other)
1291 a = self.wjoin(fn)
1265 a = self.wjoin(fn)
1292 b = temp("base", base)
1266 b = temp("base", base)
1293 c = temp("other", other)
1267 c = temp("other", other)
1294
1268
1295 self.ui.note("resolving %s\n" % fn)
1269 self.ui.note("resolving %s\n" % fn)
1296 self.ui.debug("file %s: my %s other %s ancestor %s\n" %
1270 self.ui.debug("file %s: my %s other %s ancestor %s\n" %
1297 (fn, short(my), short(other), short(base)))
1271 (fn, short(my), short(other), short(base)))
1298
1272
1299 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1273 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1300 or "hgmerge")
1274 or "hgmerge")
1301 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1275 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1302 if r:
1276 if r:
1303 self.ui.warn("merging %s failed!\n" % fn)
1277 self.ui.warn("merging %s failed!\n" % fn)
1304
1278
1305 os.unlink(b)
1279 os.unlink(b)
1306 os.unlink(c)
1280 os.unlink(c)
1307
1281
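merge3 above picks its external merge tool by precedence: the HGMERGE environment variable, then the ui.merge configuration value, then the hgmerge script. A tiny sketch of just that selection (the config argument here stands in for self.ui.config("ui", "merge") and is hypothetical):

    import os

    def pickmergetool(uiconfig):
        # uiconfig may be None when ui.merge is not configured
        return os.environ.get("HGMERGE") or uiconfig or "hgmerge"

    print(pickmergetool(None))         # "hgmerge" unless $HGMERGE is set
    print(pickmergetool("kdiff3"))     # "kdiff3" unless $HGMERGE overrides it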
1308 def verify(self):
1282 def verify(self):
1309 filelinkrevs = {}
1283 filelinkrevs = {}
1310 filenodes = {}
1284 filenodes = {}
1311 changesets = revisions = files = 0
1285 changesets = revisions = files = 0
1312 errors = [0]
1286 errors = [0]
1313 neededmanifests = {}
1287 neededmanifests = {}
1314
1288
1315 def err(msg):
1289 def err(msg):
1316 self.ui.warn(msg + "\n")
1290 self.ui.warn(msg + "\n")
1317 errors[0] += 1
1291 errors[0] += 1
1318
1292
1319 seen = {}
1293 seen = {}
1320 self.ui.status("checking changesets\n")
1294 self.ui.status("checking changesets\n")
1321 for i in range(self.changelog.count()):
1295 for i in range(self.changelog.count()):
1322 changesets += 1
1296 changesets += 1
1323 n = self.changelog.node(i)
1297 n = self.changelog.node(i)
1324 l = self.changelog.linkrev(n)
1298 l = self.changelog.linkrev(n)
1325 if l != i:
1299 if l != i:
1326 err("incorrect link (%d) for changeset revision %d" % (l, i))
1300 err("incorrect link (%d) for changeset revision %d" % (l, i))
1327 if n in seen:
1301 if n in seen:
1328 err("duplicate changeset at revision %d" % i)
1302 err("duplicate changeset at revision %d" % i)
1329 seen[n] = 1
1303 seen[n] = 1
1330
1304
1331 for p in self.changelog.parents(n):
1305 for p in self.changelog.parents(n):
1332 if p not in self.changelog.nodemap:
1306 if p not in self.changelog.nodemap:
1333 err("changeset %s has unknown parent %s" %
1307 err("changeset %s has unknown parent %s" %
1334 (short(n), short(p)))
1308 (short(n), short(p)))
1335 try:
1309 try:
1336 changes = self.changelog.read(n)
1310 changes = self.changelog.read(n)
1337 except Exception, inst:
1311 except Exception, inst:
1338 err("unpacking changeset %s: %s" % (short(n), inst))
1312 err("unpacking changeset %s: %s" % (short(n), inst))
1339
1313
1340 neededmanifests[changes[0]] = n
1314 neededmanifests[changes[0]] = n
1341
1315
1342 for f in changes[3]:
1316 for f in changes[3]:
1343 filelinkrevs.setdefault(f, []).append(i)
1317 filelinkrevs.setdefault(f, []).append(i)
1344
1318
1345 seen = {}
1319 seen = {}
1346 self.ui.status("checking manifests\n")
1320 self.ui.status("checking manifests\n")
1347 for i in range(self.manifest.count()):
1321 for i in range(self.manifest.count()):
1348 n = self.manifest.node(i)
1322 n = self.manifest.node(i)
1349 l = self.manifest.linkrev(n)
1323 l = self.manifest.linkrev(n)
1350
1324
1351 if l < 0 or l >= self.changelog.count():
1325 if l < 0 or l >= self.changelog.count():
1352 err("bad manifest link (%d) at revision %d" % (l, i))
1326 err("bad manifest link (%d) at revision %d" % (l, i))
1353
1327
1354 if n in neededmanifests:
1328 if n in neededmanifests:
1355 del neededmanifests[n]
1329 del neededmanifests[n]
1356
1330
1357 if n in seen:
1331 if n in seen:
1358 err("duplicate manifest at revision %d" % i)
1332 err("duplicate manifest at revision %d" % i)
1359
1333
1360 seen[n] = 1
1334 seen[n] = 1
1361
1335
1362 for p in self.manifest.parents(n):
1336 for p in self.manifest.parents(n):
1363 if p not in self.manifest.nodemap:
1337 if p not in self.manifest.nodemap:
1364 err("manifest %s has unknown parent %s" %
1338 err("manifest %s has unknown parent %s" %
1365 (short(n), short(p)))
1339 (short(n), short(p)))
1366
1340
1367 try:
1341 try:
1368 delta = mdiff.patchtext(self.manifest.delta(n))
1342 delta = mdiff.patchtext(self.manifest.delta(n))
1369 except KeyboardInterrupt:
1343 except KeyboardInterrupt:
1370 self.ui.warn("interrupted")
1344 self.ui.warn("interrupted")
1371 raise
1345 raise
1372 except Exception, inst:
1346 except Exception, inst:
1373 err("unpacking manifest %s: %s" % (short(n), inst))
1347 err("unpacking manifest %s: %s" % (short(n), inst))
1374
1348
1375 ff = [ l.split('\0') for l in delta.splitlines() ]
1349 ff = [ l.split('\0') for l in delta.splitlines() ]
1376 for f, fn in ff:
1350 for f, fn in ff:
1377 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1351 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1378
1352
1379 self.ui.status("crosschecking files in changesets and manifests\n")
1353 self.ui.status("crosschecking files in changesets and manifests\n")
1380
1354
1381 for m,c in neededmanifests.items():
1355 for m,c in neededmanifests.items():
1382 err("Changeset %s refers to unknown manifest %s" %
1356 err("Changeset %s refers to unknown manifest %s" %
1383 (short(m), short(c)))
1357 (short(m), short(c)))
1384 del neededmanifests
1358 del neededmanifests
1385
1359
1386 for f in filenodes:
1360 for f in filenodes:
1387 if f not in filelinkrevs:
1361 if f not in filelinkrevs:
1388 err("file %s in manifest but not in changesets" % f)
1362 err("file %s in manifest but not in changesets" % f)
1389
1363
1390 for f in filelinkrevs:
1364 for f in filelinkrevs:
1391 if f not in filenodes:
1365 if f not in filenodes:
1392 err("file %s in changeset but not in manifest" % f)
1366 err("file %s in changeset but not in manifest" % f)
1393
1367
1394 self.ui.status("checking files\n")
1368 self.ui.status("checking files\n")
1395 ff = filenodes.keys()
1369 ff = filenodes.keys()
1396 ff.sort()
1370 ff.sort()
1397 for f in ff:
1371 for f in ff:
1398 if f == "/dev/null": continue
1372 if f == "/dev/null": continue
1399 files += 1
1373 files += 1
1400 fl = self.file(f)
1374 fl = self.file(f)
1401 nodes = { nullid: 1 }
1375 nodes = { nullid: 1 }
1402 seen = {}
1376 seen = {}
1403 for i in range(fl.count()):
1377 for i in range(fl.count()):
1404 revisions += 1
1378 revisions += 1
1405 n = fl.node(i)
1379 n = fl.node(i)
1406
1380
1407 if n in seen:
1381 if n in seen:
1408 err("%s: duplicate revision %d" % (f, i))
1382 err("%s: duplicate revision %d" % (f, i))
1409 if n not in filenodes[f]:
1383 if n not in filenodes[f]:
1410 err("%s: %d:%s not in manifests" % (f, i, short(n)))
1384 err("%s: %d:%s not in manifests" % (f, i, short(n)))
1411 else:
1385 else:
1412 del filenodes[f][n]
1386 del filenodes[f][n]
1413
1387
1414 flr = fl.linkrev(n)
1388 flr = fl.linkrev(n)
1415 if flr not in filelinkrevs[f]:
1389 if flr not in filelinkrevs[f]:
1416 err("%s:%s points to unexpected changeset %d"
1390 err("%s:%s points to unexpected changeset %d"
1417 % (f, short(n), flr))
1391 % (f, short(n), flr))
1418 else:
1392 else:
1419 filelinkrevs[f].remove(flr)
1393 filelinkrevs[f].remove(flr)
1420
1394
1421 # verify contents
1395 # verify contents
1422 try:
1396 try:
1423 t = fl.read(n)
1397 t = fl.read(n)
1424 except Exception, inst:
1398 except Exception, inst:
1425 err("unpacking file %s %s: %s" % (f, short(n), inst))
1399 err("unpacking file %s %s: %s" % (f, short(n), inst))
1426
1400
1427 # verify parents
1401 # verify parents
1428 (p1, p2) = fl.parents(n)
1402 (p1, p2) = fl.parents(n)
1429 if p1 not in nodes:
1403 if p1 not in nodes:
1430 err("file %s:%s unknown parent 1 %s" %
1404 err("file %s:%s unknown parent 1 %s" %
1431 (f, short(n), short(p1)))
1405 (f, short(n), short(p1)))
1432 if p2 not in nodes:
1406 if p2 not in nodes:
1433 err("file %s:%s unknown parent 2 %s" %
1407 err("file %s:%s unknown parent 2 %s" %
1434 (f, short(n), short(p2)))
1408 (f, short(n), short(p2)))
1435 nodes[n] = 1
1409 nodes[n] = 1
1436
1410
1437 # cross-check
1411 # cross-check
1438 for node in filenodes[f]:
1412 for node in filenodes[f]:
1439 err("node %s in manifests not in %s" % (hex(node), f))
1413 err("node %s in manifests not in %s" % (hex(node), f))
1440
1414
1441 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1415 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1442 (files, changesets, revisions))
1416 (files, changesets, revisions))
1443
1417
1444 if errors[0]:
1418 if errors[0]:
1445 self.ui.warn("%d integrity errors encountered!\n" % errors[0])
1419 self.ui.warn("%d integrity errors encountered!\n" % errors[0])
1446 return 1
1420 return 1
@@ -1,674 +1,817 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from demandload import demandload
14 from demandload import demandload
15 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
15 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16
16
17 def hash(text, p1, p2):
17 def hash(text, p1, p2):
18 """generate a hash from the given text and its parent hashes
18 """generate a hash from the given text and its parent hashes
19
19
20 This hash combines both the current file contents and its history
20 This hash combines both the current file contents and its history
21 in a manner that makes it easy to distinguish nodes with the same
21 in a manner that makes it easy to distinguish nodes with the same
22 content in the revision graph.
22 content in the revision graph.
23 """
23 """
24 l = [p1, p2]
24 l = [p1, p2]
25 l.sort()
25 l.sort()
26 s = sha.new(l[0])
26 s = sha.new(l[0])
27 s.update(l[1])
27 s.update(l[1])
28 s.update(text)
28 s.update(text)
29 return s.digest()
29 return s.digest()
30
30
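hash() above mixes the sorted parent ids into the SHA-1 of the text, so identical contents with different histories get different nodeids. A small sketch computing ids for a root revision and its child, using hashlib in place of the old sha module and assuming nullid is the usual 20 zero bytes (that value comes from the node module, not this file):

    import hashlib

    nullid = b"\0" * 20                 # assumed value of node.nullid

    def hashrev(text, p1, p2):
        # same recipe as hash() above: sorted parents, then the text
        l = sorted([p1, p2])
        s = hashlib.sha1(l[0])
        s.update(l[1])
        s.update(text)
        return s.digest()

    n1 = hashrev(b"hello\n", nullid, nullid)
    n2 = hashrev(b"hello\n", n1, nullid)     # same text, different parent
    print(n1 != n2)                          # True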
31 def compress(text):
31 def compress(text):
32 """ generate a possibly-compressed representation of text """
32 """ generate a possibly-compressed representation of text """
33 if not text: return text
33 if not text: return text
34 if len(text) < 44:
34 if len(text) < 44:
35 if text[0] == '\0': return text
35 if text[0] == '\0': return text
36 return 'u' + text
36 return 'u' + text
37 bin = zlib.compress(text)
37 bin = zlib.compress(text)
38 if len(bin) > len(text):
38 if len(bin) > len(text):
39 if text[0] == '\0': return text
39 if text[0] == '\0': return text
40 return 'u' + text
40 return 'u' + text
41 return bin
41 return bin
42
42
43 def decompress(bin):
43 def decompress(bin):
44 """ decompress the given input """
44 """ decompress the given input """
45 if not bin: return bin
45 if not bin: return bin
46 t = bin[0]
46 t = bin[0]
47 if t == '\0': return bin
47 if t == '\0': return bin
48 if t == 'x': return zlib.decompress(bin)
48 if t == 'x': return zlib.decompress(bin)
49 if t == 'u': return bin[1:]
49 if t == 'u': return bin[1:]
50 raise RevlogError("unknown compression type %s" % t)
50 raise RevlogError("unknown compression type %s" % t)
51
51
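compress() stores empty or NUL-prefixed text as-is, short or incompressible text with a leading 'u', and everything else as zlib output, which starts with 'x'; decompress() dispatches on that first byte. A quick round-trip sketch of the same tagging scheme (bytes literals, simplified from the functions above):

    import zlib

    def roundtrip(text):
        # mimic compress(): tiny or incompressible data is stored with a 'u' tag
        if not text or text.startswith(b"\0"):
            stored = text
        elif len(text) < 44:
            stored = b"u" + text
        else:
            bin = zlib.compress(text)
            stored = bin if len(bin) <= len(text) else b"u" + text
        # mimic decompress(): dispatch on the first byte
        if not stored or stored.startswith(b"\0"):
            return stored
        if stored.startswith(b"x"):
            return zlib.decompress(stored)
        return stored[1:]               # strip the 'u' tag

    for t in (b"", b"short text", b"abc" * 100):
        assert roundtrip(t) == t
    print("ok")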
52 indexformat = ">4l20s20s20s"
52 indexformat = ">4l20s20s20s"
53
53
54 class lazyparser:
54 class lazyparser:
55 """
55 """
56 this class avoids the need to parse the entirety of large indices
56 this class avoids the need to parse the entirety of large indices
57
57
58 By default we parse and load 1000 entries at a time.
58 By default we parse and load 1000 entries at a time.
59
59
60 If no position is specified, we load the whole index, and replace
60 If no position is specified, we load the whole index, and replace
61 the lazy objects in revlog with the underlying objects for
61 the lazy objects in revlog with the underlying objects for
62 efficiency in cases where we look at most of the nodes.
62 efficiency in cases where we look at most of the nodes.
63 """
63 """
64 def __init__(self, data, revlog):
64 def __init__(self, data, revlog):
65 self.data = data
65 self.data = data
66 self.s = struct.calcsize(indexformat)
66 self.s = struct.calcsize(indexformat)
67 self.l = len(data)/self.s
67 self.l = len(data)/self.s
68 self.index = [None] * self.l
68 self.index = [None] * self.l
69 self.map = {nullid: -1}
69 self.map = {nullid: -1}
70 self.all = 0
70 self.all = 0
71 self.revlog = revlog
71 self.revlog = revlog
72
72
73 def load(self, pos=None):
73 def load(self, pos=None):
74 if self.all: return
74 if self.all: return
75 if pos is not None:
75 if pos is not None:
76 block = pos / 1000
76 block = pos / 1000
77 i = block * 1000
77 i = block * 1000
78 end = min(self.l, i + 1000)
78 end = min(self.l, i + 1000)
79 else:
79 else:
80 self.all = 1
80 self.all = 1
81 i = 0
81 i = 0
82 end = self.l
82 end = self.l
83 self.revlog.index = self.index
83 self.revlog.index = self.index
84 self.revlog.nodemap = self.map
84 self.revlog.nodemap = self.map
85
85
86 while i < end:
86 while i < end:
87 d = self.data[i * self.s: (i + 1) * self.s]
87 d = self.data[i * self.s: (i + 1) * self.s]
88 e = struct.unpack(indexformat, d)
88 e = struct.unpack(indexformat, d)
89 self.index[i] = e
89 self.index[i] = e
90 self.map[e[6]] = i
90 self.map[e[6]] = i
91 i += 1
91 i += 1
92
92
93 class lazyindex:
93 class lazyindex:
94 """a lazy version of the index array"""
94 """a lazy version of the index array"""
95 def __init__(self, parser):
95 def __init__(self, parser):
96 self.p = parser
96 self.p = parser
97 def __len__(self):
97 def __len__(self):
98 return len(self.p.index)
98 return len(self.p.index)
99 def load(self, pos):
99 def load(self, pos):
100 self.p.load(pos)
100 self.p.load(pos)
101 return self.p.index[pos]
101 return self.p.index[pos]
102 def __getitem__(self, pos):
102 def __getitem__(self, pos):
103 return self.p.index[pos] or self.load(pos)
103 return self.p.index[pos] or self.load(pos)
104 def append(self, e):
104 def append(self, e):
105 self.p.index.append(e)
105 self.p.index.append(e)
106
106
107 class lazymap:
107 class lazymap:
108 """a lazy version of the node map"""
108 """a lazy version of the node map"""
109 def __init__(self, parser):
109 def __init__(self, parser):
110 self.p = parser
110 self.p = parser
111 def load(self, key):
111 def load(self, key):
112 if self.p.all: return
112 if self.p.all: return
113 n = self.p.data.find(key)
113 n = self.p.data.find(key)
114 if n < 0:
114 if n < 0:
115 raise KeyError(key)
115 raise KeyError(key)
116 pos = n / self.p.s
116 pos = n / self.p.s
117 self.p.load(pos)
117 self.p.load(pos)
118 def __contains__(self, key):
118 def __contains__(self, key):
119 self.p.load()
119 self.p.load()
120 return key in self.p.map
120 return key in self.p.map
121 def __iter__(self):
121 def __iter__(self):
122 yield nullid
122 yield nullid
123 for i in xrange(self.p.l):
123 for i in xrange(self.p.l):
124 try:
124 try:
125 yield self.p.index[i][6]
125 yield self.p.index[i][6]
126 except:
126 except:
127 self.p.load(i)
127 self.p.load(i)
128 yield self.p.index[i][6]
128 yield self.p.index[i][6]
129 def __getitem__(self, key):
129 def __getitem__(self, key):
130 try:
130 try:
131 return self.p.map[key]
131 return self.p.map[key]
132 except KeyError:
132 except KeyError:
133 try:
133 try:
134 self.load(key)
134 self.load(key)
135 return self.p.map[key]
135 return self.p.map[key]
136 except KeyError:
136 except KeyError:
137 raise KeyError("node " + hex(key))
137 raise KeyError("node " + hex(key))
138 def __setitem__(self, key, val):
138 def __setitem__(self, key, val):
139 self.p.map[key] = val
139 self.p.map[key] = val
140
140
141 class RevlogError(Exception): pass
141 class RevlogError(Exception): pass
142
142
143 class revlog:
143 class revlog:
144 """
144 """
145 the underlying revision storage object
145 the underlying revision storage object
146
146
147 A revlog consists of two parts, an index and the revision data.
147 A revlog consists of two parts, an index and the revision data.
148
148
149 The index is a file with a fixed record size containing
149 The index is a file with a fixed record size containing
150 information on each revision, including its nodeid (hash), the
150 information on each revision, including its nodeid (hash), the
151 nodeids of its parents, the position and offset of its data within
151 nodeids of its parents, the position and offset of its data within
152 the data file, and the revision it's based on. Finally, each entry
152 the data file, and the revision it's based on. Finally, each entry
153 contains a linkrev entry that can serve as a pointer to external
153 contains a linkrev entry that can serve as a pointer to external
154 data.
154 data.
155
155
156 The revision data itself is a linear collection of data chunks.
156 The revision data itself is a linear collection of data chunks.
157 Each chunk represents a revision and is usually represented as a
157 Each chunk represents a revision and is usually represented as a
158 delta against the previous chunk. To bound lookup time, runs of
158 delta against the previous chunk. To bound lookup time, runs of
159 deltas are limited to about 2 times the length of the original
159 deltas are limited to about 2 times the length of the original
160 version data. This makes retrieval of a version proportional to
160 version data. This makes retrieval of a version proportional to
161 its size, or O(1) relative to the number of revisions.
161 its size, or O(1) relative to the number of revisions.
162
162
163 Both pieces of the revlog are written to in an append-only
163 Both pieces of the revlog are written to in an append-only
164 fashion, which means we never need to rewrite a file to insert or
164 fashion, which means we never need to rewrite a file to insert or
165 remove data, and can use some simple techniques to avoid the need
165 remove data, and can use some simple techniques to avoid the need
166 for locking while reading.
166 for locking while reading.
167 """
167 """
168 def __init__(self, opener, indexfile, datafile):
168 def __init__(self, opener, indexfile, datafile):
169 """
169 """
170 create a revlog object
170 create a revlog object
171
171
172 opener is a function that abstracts the file opening operation
172 opener is a function that abstracts the file opening operation
173 and can be used to implement COW semantics or the like.
173 and can be used to implement COW semantics or the like.
174 """
174 """
175 self.indexfile = indexfile
175 self.indexfile = indexfile
176 self.datafile = datafile
176 self.datafile = datafile
177 self.opener = opener
177 self.opener = opener
178 self.cache = None
178 self.cache = None
179
179
180 try:
180 try:
181 i = self.opener(self.indexfile).read()
181 i = self.opener(self.indexfile).read()
182 except IOError, inst:
182 except IOError, inst:
183 if inst.errno != errno.ENOENT:
183 if inst.errno != errno.ENOENT:
184 raise
184 raise
185 i = ""
185 i = ""
186
186
187 if len(i) > 10000:
187 if len(i) > 10000:
188 # big index, let's parse it on demand
188 # big index, let's parse it on demand
189 parser = lazyparser(i, self)
189 parser = lazyparser(i, self)
190 self.index = lazyindex(parser)
190 self.index = lazyindex(parser)
191 self.nodemap = lazymap(parser)
191 self.nodemap = lazymap(parser)
192 else:
192 else:
193 s = struct.calcsize(indexformat)
193 s = struct.calcsize(indexformat)
194 l = len(i) / s
194 l = len(i) / s
195 self.index = [None] * l
195 self.index = [None] * l
196 m = [None] * l
196 m = [None] * l
197
197
198 n = 0
198 n = 0
199 for f in xrange(0, len(i), s):
199 for f in xrange(0, len(i), s):
200 # offset, size, base, linkrev, p1, p2, nodeid
200 # offset, size, base, linkrev, p1, p2, nodeid
201 e = struct.unpack(indexformat, i[f:f + s])
201 e = struct.unpack(indexformat, i[f:f + s])
202 m[n] = (e[6], n)
202 m[n] = (e[6], n)
203 self.index[n] = e
203 self.index[n] = e
204 n += 1
204 n += 1
205
205
206 self.nodemap = dict(m)
206 self.nodemap = dict(m)
207 self.nodemap[nullid] = -1
207 self.nodemap[nullid] = -1
208
208
209 def tip(self): return self.node(len(self.index) - 1)
209 def tip(self): return self.node(len(self.index) - 1)
210 def count(self): return len(self.index)
210 def count(self): return len(self.index)
211 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
211 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
212 def rev(self, node):
212 def rev(self, node):
213 try:
213 try:
214 return self.nodemap[node]
214 return self.nodemap[node]
215 except KeyError:
215 except KeyError:
216 raise RevlogError('%s: no node %s' % (self.indexfile, hex(node)))
216 raise RevlogError('%s: no node %s' % (self.indexfile, hex(node)))
217 def linkrev(self, node): return self.index[self.rev(node)][3]
217 def linkrev(self, node): return self.index[self.rev(node)][3]
218 def parents(self, node):
218 def parents(self, node):
219 if node == nullid: return (nullid, nullid)
219 if node == nullid: return (nullid, nullid)
220 return self.index[self.rev(node)][4:6]
220 return self.index[self.rev(node)][4:6]
221
221
222 def start(self, rev): return self.index[rev][0]
222 def start(self, rev): return self.index[rev][0]
223 def length(self, rev): return self.index[rev][1]
223 def length(self, rev): return self.index[rev][1]
224 def end(self, rev): return self.start(rev) + self.length(rev)
224 def end(self, rev): return self.start(rev) + self.length(rev)
225 def base(self, rev): return self.index[rev][2]
225 def base(self, rev): return self.index[rev][2]
226
226
227 def reachable(self, rev, stop=None):
227 def reachable(self, rev, stop=None):
228 reachable = {}
228 reachable = {}
229 visit = [rev]
229 visit = [rev]
230 reachable[rev] = 1
230 reachable[rev] = 1
231 if stop:
231 if stop:
232 stopn = self.rev(stop)
232 stopn = self.rev(stop)
233 else:
233 else:
234 stopn = 0
234 stopn = 0
235 while visit:
235 while visit:
236 n = visit.pop(0)
236 n = visit.pop(0)
237 if n == stop:
237 if n == stop:
238 continue
238 continue
239 if n == nullid:
239 if n == nullid:
240 continue
240 continue
241 for p in self.parents(n):
241 for p in self.parents(n):
242 if self.rev(p) < stopn:
242 if self.rev(p) < stopn:
243 continue
243 continue
244 if p not in reachable:
244 if p not in reachable:
245 reachable[p] = 1
245 reachable[p] = 1
246 visit.append(p)
246 visit.append(p)
247 return reachable
247 return reachable
248
248
249 def nodesbetween(self, roots=None, heads=None):
250 """Return a tuple containing three elements. Elements 1 and 2 contain
251 a final list of bases and heads after all the unreachable ones have
252 been pruned. Element 0 contains a topologically sorted list of all
253 nodes that satisfy these constraints:
255 1. All nodes must be descended from a node in roots (the nodes in
256 roots are considered descended from themselves).
257 2. All nodes must also be ancestors of a node in heads (the nodes in
258 heads are considered to be their own ancestors).
259
260 If roots is unspecified, nullid is assumed as the only root.
261 If heads is unspecified, it is taken to be the output of the
262 heads method (i.e. a list of all nodes in the repository that
263 have no children)."""
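# Editorial example (a sketch, not in the original source): for a linear
# history rev 0 -> 1 -> 2, nodesbetween([self.node(1)]) with the default
# heads would return
#     ([node(1), node(2)],  # element 0: topologically sorted nodes
#      [node(1)],           # element 1: surviving roots
#      [node(2)])           # element 2: reachable heads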
264 if roots is not None:
265 roots = list(roots)
266 lowestrev = min([self.rev(n) for n in roots])
267 else:
268 roots = [nullid] # Everybody's a descendent of nullid
269 lowestrev = -1
270 if (lowestrev == -1) and (heads is None):
271 # We want _all_ the nodes!
272 return ([self.node(r) for r in xrange(0, self.count())],
273 [nullid], list(self.heads()))
274 if heads is None:
275 # All nodes are ancestors, so the latest ancestor is the last
276 # node.
277 highestrev = self.count() - 1
278 # Set ancestors to None to signal that every node is an ancestor.
279 ancestors = None
280 # Set heads to an empty dictionary for later discovery of heads
281 heads = {}
282 else:
283 ancestors = {}
284 # Start at the top and keep marking parents until we're done.
285 nodestotag = list(heads)
286 # Turn heads into a dictionary so we can remove 'fake' heads.
287 # Also, later we will use it to filter out heads that can't be
288 # reached from the roots.
289 heads = dict.fromkeys(heads, 0)
290 # Remember where the top was so we can use it as a limit later.
291 highestrev = max([self.rev(n) for n in nodestotag])
292 while nodestotag:
293 # grab a node to tag
294 n = nodestotag.pop()
295 # Never tag nullid
296 if n == nullid:
297 continue
298 # A node's revision number represents its place in a
299 # topologically sorted list of nodes.
300 r = self.rev(n)
301 if r >= lowestrev:
302 if n not in ancestors:
303 # If we are possibly a descendent of one of the roots
304 # and we haven't already been marked as an ancestor
305 ancestors[n] = 1 # Mark as ancestor
306 # Add non-nullid parents to list of nodes to tag.
307 nodestotag.extend([p for p in self.parents(n) if
308 p != nullid])
309 elif n in heads: # We've seen it before, is it a fake head?
310 # So it is; real heads should not be ancestors of
311 # any other heads.
312 heads.pop(n)
313 # Now that we have our set of ancestors, we want to remove any
314 # roots that are not ancestors.
315
316 # If one of the roots was nullid, everything is included anyway.
317 if lowestrev > -1:
318 # But, since none of them was, recompute the lowest rev so it
319 # doesn't include roots that aren't ancestors.
320
321 # Filter out roots that aren't ancestors of heads
322 roots = [n for n in roots if n in ancestors]
323 # Recompute the lowest revision
324 if roots:
325 lowestrev = min([self.rev(n) for n in roots])
326 else:
327 # No more roots? Return empty list
328 return ([], [], [])
329 else:
330 # We are descending from nullid, and don't need to care about
331 # any other roots.
332 lowestrev = -1
333 roots = [nullid]
334 # Transform our roots list into a 'set' (i.e. a dictionary where the
335 # values don't matter).
336 descendents = dict.fromkeys(roots, 1)
337 # Also, keep the original roots so we can filter out roots that aren't
338 # 'real' roots (i.e. are descended from other roots).
339 roots = descendents.copy()
340 # Our topologically sorted list of output nodes.
341 orderedout = []
342 # Don't start at nullid since we don't want nullid in our output list,
343 # and if nullid shows up in descendents, empty parents will look like
344 # they're descendents.
345 for r in xrange(max(lowestrev, 0), highestrev + 1):
346 n = self.node(r)
347 isdescendent = False
348 if lowestrev == -1: # Everybody is a descendent of nullid
349 isdescendent = True
350 elif n in descendents:
351 # n is already a descendent
352 isdescendent = True
353 # This check only needs to be done here because all the roots
354 # will start being marked as descendents before the loop.
355 if n in roots:
356 # If n was a root, check if it's a 'real' root.
357 p = tuple(self.parents(n))
358 # If any of its parents are descendents, it's not a root.
359 if (p[0] in descendents) or (p[1] in descendents):
360 roots.pop(n)
361 else:
362 p = tuple(self.parents(n))
363 # A node is a descendent if either of its parents is a
364 # descendent. (We seeded the descendents dict with the roots
365 # up there, remember?)
366 if (p[0] in descendents) or (p[1] in descendents):
367 descendents[n] = 1
368 isdescendent = True
369 if isdescendent and ((ancestors is None) or (n in ancestors)):
370 # Only include nodes that are both descendents and ancestors.
371 orderedout.append(n)
372 if (ancestors is not None) and (n in heads):
373 # We're trying to figure out which heads are reachable
374 # from roots.
375 # Mark this head as having been reached
376 heads[n] = 1
377 elif ancestors is None:
378 # Otherwise, we're trying to discover the heads.
379 # Assume this is a head because if it isn't, the next step
380 # will eventually remove it.
381 heads[n] = 1
382 # But, obviously its parents aren't.
383 for p in self.parents(n):
384 heads.pop(p, None)
385 heads = [n for n in heads.iterkeys() if heads[n] != 0]
386 roots = roots.keys()
387 assert orderedout
388 assert roots
389 assert heads
390 return (orderedout, roots, heads)
391
249 def heads(self, stop=None):
392 def heads(self, stop=None):
250 """return the list of all nodes that have no children"""
393 """return the list of all nodes that have no children"""
251 p = {}
394 p = {}
252 h = []
395 h = []
253 stoprev = 0
396 stoprev = 0
254 if stop and stop in self.nodemap:
397 if stop and stop in self.nodemap:
255 stoprev = self.rev(stop)
398 stoprev = self.rev(stop)
256
399
257 for r in range(self.count() - 1, -1, -1):
400 for r in range(self.count() - 1, -1, -1):
258 n = self.node(r)
401 n = self.node(r)
259 if n not in p:
402 if n not in p:
260 h.append(n)
403 h.append(n)
261 if n == stop:
404 if n == stop:
262 break
405 break
263 if r < stoprev:
406 if r < stoprev:
264 break
407 break
265 for pn in self.parents(n):
408 for pn in self.parents(n):
266 p[pn] = 1
409 p[pn] = 1
267 return h
410 return h
268
411
269 def children(self, node):
412 def children(self, node):
270 """find the children of a given node"""
413 """find the children of a given node"""
271 c = []
414 c = []
272 p = self.rev(node)
415 p = self.rev(node)
273 for r in range(p + 1, self.count()):
416 for r in range(p + 1, self.count()):
274 n = self.node(r)
417 n = self.node(r)
275 for pn in self.parents(n):
418 for pn in self.parents(n):
276 if pn == node:
419 if pn == node:
277 c.append(n)
420 c.append(n)
278 continue
421 continue
279 elif pn == nullid:
422 elif pn == nullid:
280 continue
423 continue
281 return c
424 return c
282
425
283 def lookup(self, id):
426 def lookup(self, id):
284 """locate a node based on revision number or subset of hex nodeid"""
427 """locate a node based on revision number or subset of hex nodeid"""
285 try:
428 try:
286 rev = int(id)
429 rev = int(id)
287 if str(rev) != id: raise ValueError
430 if str(rev) != id: raise ValueError
288 if rev < 0: rev = self.count() + rev
431 if rev < 0: rev = self.count() + rev
289 if rev < 0 or rev >= self.count(): raise ValueError
432 if rev < 0 or rev >= self.count(): raise ValueError
290 return self.node(rev)
433 return self.node(rev)
291 except (ValueError, OverflowError):
434 except (ValueError, OverflowError):
292 c = []
435 c = []
293 for n in self.nodemap:
436 for n in self.nodemap:
294 if hex(n).startswith(id):
437 if hex(n).startswith(id):
295 c.append(n)
438 c.append(n)
296 if len(c) > 1: raise KeyError("Ambiguous identifier")
439 if len(c) > 1: raise KeyError("Ambiguous identifier")
297 if len(c) < 1: raise KeyError("No match found")
440 if len(c) < 1: raise KeyError("No match found")
298 return c[0]
441 return c[0]
299
442
300 return None
443 return None
301
444
302 def diff(self, a, b):
445 def diff(self, a, b):
303 """return a delta between two revisions"""
446 """return a delta between two revisions"""
304 return mdiff.textdiff(a, b)
447 return mdiff.textdiff(a, b)
305
448
306 def patches(self, t, pl):
449 def patches(self, t, pl):
307 """apply a list of patches to a string"""
450 """apply a list of patches to a string"""
308 return mdiff.patches(t, pl)
451 return mdiff.patches(t, pl)
309
452
310 def delta(self, node):
453 def delta(self, node):
311 """return or calculate a delta between a node and its predecessor"""
454 """return or calculate a delta between a node and its predecessor"""
312 r = self.rev(node)
455 r = self.rev(node)
313 b = self.base(r)
456 b = self.base(r)
314 if r == b:
457 if r == b:
315 return self.diff(self.revision(self.node(r - 1)),
458 return self.diff(self.revision(self.node(r - 1)),
316 self.revision(node))
459 self.revision(node))
317 else:
460 else:
318 f = self.opener(self.datafile)
461 f = self.opener(self.datafile)
319 f.seek(self.start(r))
462 f.seek(self.start(r))
320 data = f.read(self.length(r))
463 data = f.read(self.length(r))
321 return decompress(data)
464 return decompress(data)
322
465
323 def revision(self, node):
466 def revision(self, node):
324 """return an uncompressed revision of a given"""
467 """return an uncompressed revision of a given"""
325 if node == nullid: return ""
468 if node == nullid: return ""
326 if self.cache and self.cache[0] == node: return self.cache[2]
469 if self.cache and self.cache[0] == node: return self.cache[2]
327
470
328 # look up what we need to read
471 # look up what we need to read
329 text = None
472 text = None
330 rev = self.rev(node)
473 rev = self.rev(node)
331 start, length, base, link, p1, p2, node = self.index[rev]
474 start, length, base, link, p1, p2, node = self.index[rev]
332 end = start + length
475 end = start + length
333 if base != rev: start = self.start(base)
476 if base != rev: start = self.start(base)
334
477
335 # do we have useful data cached?
478 # do we have useful data cached?
336 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
479 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
337 base = self.cache[1]
480 base = self.cache[1]
338 start = self.start(base + 1)
481 start = self.start(base + 1)
339 text = self.cache[2]
482 text = self.cache[2]
340 last = 0
483 last = 0
341
484
342 f = self.opener(self.datafile)
485 f = self.opener(self.datafile)
343 f.seek(start)
486 f.seek(start)
344 data = f.read(end - start)
487 data = f.read(end - start)
345
488
346 if text is None:
489 if text is None:
347 last = self.length(base)
490 last = self.length(base)
348 text = decompress(data[:last])
491 text = decompress(data[:last])
349
492
350 bins = []
493 bins = []
351 for r in xrange(base + 1, rev + 1):
494 for r in xrange(base + 1, rev + 1):
352 s = self.length(r)
495 s = self.length(r)
353 bins.append(decompress(data[last:last + s]))
496 bins.append(decompress(data[last:last + s]))
354 last = last + s
497 last = last + s
355
498
356 text = mdiff.patches(text, bins)
499 text = mdiff.patches(text, bins)
357
500
358 if node != hash(text, p1, p2):
501 if node != hash(text, p1, p2):
359 raise RevlogError("integrity check failed on %s:%d"
502 raise RevlogError("integrity check failed on %s:%d"
360 % (self.datafile, rev))
503 % (self.datafile, rev))
361
504
362 self.cache = (node, rev, text)
505 self.cache = (node, rev, text)
363 return text
506 return text
364
507
365 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
508 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
366 """add a revision to the log
509 """add a revision to the log
367
510
368 text - the revision data to add
511 text - the revision data to add
369 transaction - the transaction object used for rollback
512 transaction - the transaction object used for rollback
370 link - the linkrev data to add
513 link - the linkrev data to add
371 p1, p2 - the parent nodeids of the revision
514 p1, p2 - the parent nodeids of the revision
372 d - an optional precomputed delta
515 d - an optional precomputed delta
373 """
516 """
374 if text is None: text = ""
517 if text is None: text = ""
375 if p1 is None: p1 = self.tip()
518 if p1 is None: p1 = self.tip()
376 if p2 is None: p2 = nullid
519 if p2 is None: p2 = nullid
377
520
378 node = hash(text, p1, p2)
521 node = hash(text, p1, p2)
379
522
380 if node in self.nodemap:
523 if node in self.nodemap:
381 return node
524 return node
382
525
383 n = self.count()
526 n = self.count()
384 t = n - 1
527 t = n - 1
385
528
386 if n:
529 if n:
387 base = self.base(t)
530 base = self.base(t)
388 start = self.start(base)
531 start = self.start(base)
389 end = self.end(t)
532 end = self.end(t)
390 if not d:
533 if not d:
391 prev = self.revision(self.tip())
534 prev = self.revision(self.tip())
392 d = self.diff(prev, text)
535 d = self.diff(prev, text)
393 data = compress(d)
536 data = compress(d)
394 dist = end - start + len(data)
537 dist = end - start + len(data)
395
538
396 # full versions are inserted when the needed deltas
539 # full versions are inserted when the needed deltas
397 # become comparable to the uncompressed text
540 # become comparable to the uncompressed text
398 if not n or dist > len(text) * 2:
541 if not n or dist > len(text) * 2:
399 data = compress(text)
542 data = compress(text)
400 base = n
543 base = n
401 else:
544 else:
402 base = self.base(t)
545 base = self.base(t)
403
546
404 offset = 0
547 offset = 0
405 if t >= 0:
548 if t >= 0:
406 offset = self.end(t)
549 offset = self.end(t)
407
550
408 e = (offset, len(data), base, link, p1, p2, node)
551 e = (offset, len(data), base, link, p1, p2, node)
409
552
410 self.index.append(e)
553 self.index.append(e)
411 self.nodemap[node] = n
554 self.nodemap[node] = n
412 entry = struct.pack(indexformat, *e)
555 entry = struct.pack(indexformat, *e)
413
556
414 transaction.add(self.datafile, e[0])
557 transaction.add(self.datafile, e[0])
415 self.opener(self.datafile, "a").write(data)
558 self.opener(self.datafile, "a").write(data)
416 transaction.add(self.indexfile, n * len(entry))
559 transaction.add(self.indexfile, n * len(entry))
417 self.opener(self.indexfile, "a").write(entry)
560 self.opener(self.indexfile, "a").write(entry)
418
561
419 self.cache = (node, n, text)
562 self.cache = (node, n, text)
420 return node
563 return node
421
564
422 def ancestor(self, a, b):
565 def ancestor(self, a, b):
423 """calculate the least common ancestor of nodes a and b"""
566 """calculate the least common ancestor of nodes a and b"""
424 # calculate the distance of every node from root
567 # calculate the distance of every node from root
425 dist = {nullid: 0}
568 dist = {nullid: 0}
426 for i in xrange(self.count()):
569 for i in xrange(self.count()):
427 n = self.node(i)
570 n = self.node(i)
428 p1, p2 = self.parents(n)
571 p1, p2 = self.parents(n)
429 dist[n] = max(dist[p1], dist[p2]) + 1
572 dist[n] = max(dist[p1], dist[p2]) + 1
430
573
431 # traverse ancestors in order of decreasing distance from root
574 # traverse ancestors in order of decreasing distance from root
432 def ancestors(node):
575 def ancestors(node):
433 # we store negative distances because heap returns smallest member
576 # we store negative distances because heap returns smallest member
434 h = [(-dist[node], node)]
577 h = [(-dist[node], node)]
435 seen = {}
578 seen = {}
436 earliest = self.count()
579 earliest = self.count()
437 while h:
580 while h:
438 d, n = heapq.heappop(h)
581 d, n = heapq.heappop(h)
439 if n not in seen:
582 if n not in seen:
440 seen[n] = 1
583 seen[n] = 1
441 r = self.rev(n)
584 r = self.rev(n)
442 yield (-d, n)
585 yield (-d, n)
443 for p in self.parents(n):
586 for p in self.parents(n):
444 heapq.heappush(h, (-dist[p], p))
587 heapq.heappush(h, (-dist[p], p))
445
588
446 def generations(node):
589 def generations(node):
447 sg, s = None, {}
590 sg, s = None, {}
448 for g,n in ancestors(node):
591 for g,n in ancestors(node):
449 if g != sg:
592 if g != sg:
450 if sg:
593 if sg:
451 yield sg, s
594 yield sg, s
452 sg, s = g, {n:1}
595 sg, s = g, {n:1}
453 else:
596 else:
454 s[n] = 1
597 s[n] = 1
455 yield sg, s
598 yield sg, s
456
599
457 x = generations(a)
600 x = generations(a)
458 y = generations(b)
601 y = generations(b)
459 gx = x.next()
602 gx = x.next()
460 gy = y.next()
603 gy = y.next()
461
604
462 # increment each ancestor list until it is closer to root than
605 # increment each ancestor list until it is closer to root than
463 # the other, or they match
606 # the other, or they match
464 while 1:
607 while 1:
465 #print "ancestor gen %s %s" % (gx[0], gy[0])
608 #print "ancestor gen %s %s" % (gx[0], gy[0])
466 if gx[0] == gy[0]:
609 if gx[0] == gy[0]:
467 # find the intersection
610 # find the intersection
468 i = [ n for n in gx[1] if n in gy[1] ]
611 i = [ n for n in gx[1] if n in gy[1] ]
469 if i:
612 if i:
470 return i[0]
613 return i[0]
471 else:
614 else:
472 #print "next"
615 #print "next"
473 gy = y.next()
616 gy = y.next()
474 gx = x.next()
617 gx = x.next()
475 elif gx[0] < gy[0]:
618 elif gx[0] < gy[0]:
476 #print "next y"
619 #print "next y"
477 gy = y.next()
620 gy = y.next()
478 else:
621 else:
479 #print "next x"
622 #print "next x"
480 gx = x.next()
623 gx = x.next()
481
624
482 def group(self, linkmap):
625 def group(self, linkmap):
483 """calculate a delta group
626 """calculate a delta group
484
627
485 Given a list of changeset revs, return a set of deltas and
628 Given a list of changeset revs, return a set of deltas and
486 metadata corresponding to nodes. The first delta is
629 metadata corresponding to nodes. The first delta is
487 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
630 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
488 have this parent as it has all history before these
631 have this parent as it has all history before these
489 changesets. The parent is parent[0].
632 changesets. The parent is parent[0].
490 """
633 """
491 revs = []
634 revs = []
492 needed = {}
635 needed = {}
493
636
494 # find file nodes/revs that match changeset revs
637 # find file nodes/revs that match changeset revs
495 for i in xrange(0, self.count()):
638 for i in xrange(0, self.count()):
496 if self.index[i][3] in linkmap:
639 if self.index[i][3] in linkmap:
497 revs.append(i)
640 revs.append(i)
498 needed[i] = 1
641 needed[i] = 1
499
642
500 # if we don't have any revisions touched by these changesets, bail
643 # if we don't have any revisions touched by these changesets, bail
501 if not revs:
644 if not revs:
502 yield struct.pack(">l", 0)
645 yield struct.pack(">l", 0)
503 return
646 return
504
647
505 # add the parent of the first rev
648 # add the parent of the first rev
506 p = self.parents(self.node(revs[0]))[0]
649 p = self.parents(self.node(revs[0]))[0]
507 revs.insert(0, self.rev(p))
650 revs.insert(0, self.rev(p))
508
651
509 # for each delta that isn't contiguous in the log, we need to
652 # for each delta that isn't contiguous in the log, we need to
510 # reconstruct the base, reconstruct the result, and then
653 # reconstruct the base, reconstruct the result, and then
511 # calculate the delta. We also need to do this where we've
654 # calculate the delta. We also need to do this where we've
512 # stored a full version and not a delta
655 # stored a full version and not a delta
513 for i in xrange(0, len(revs) - 1):
656 for i in xrange(0, len(revs) - 1):
514 a, b = revs[i], revs[i + 1]
657 a, b = revs[i], revs[i + 1]
515 if a + 1 != b or self.base(b) == b:
658 if a + 1 != b or self.base(b) == b:
516 for j in xrange(self.base(a), a + 1):
659 for j in xrange(self.base(a), a + 1):
517 needed[j] = 1
660 needed[j] = 1
518 for j in xrange(self.base(b), b + 1):
661 for j in xrange(self.base(b), b + 1):
519 needed[j] = 1
662 needed[j] = 1
520
663
521 # calculate spans to retrieve from datafile
664 # calculate spans to retrieve from datafile
522 needed = needed.keys()
665 needed = needed.keys()
523 needed.sort()
666 needed.sort()
524 spans = []
667 spans = []
525 oo = -1
668 oo = -1
526 ol = 0
669 ol = 0
527 for n in needed:
670 for n in needed:
528 if n < 0: continue
671 if n < 0: continue
529 o = self.start(n)
672 o = self.start(n)
530 l = self.length(n)
673 l = self.length(n)
531 if oo + ol == o: # can we merge with the previous?
674 if oo + ol == o: # can we merge with the previous?
532 nl = spans[-1][2]
675 nl = spans[-1][2]
533 nl.append((n, l))
676 nl.append((n, l))
534 ol += l
677 ol += l
535 spans[-1] = (oo, ol, nl)
678 spans[-1] = (oo, ol, nl)
536 else:
679 else:
537 oo = o
680 oo = o
538 ol = l
681 ol = l
539 spans.append((oo, ol, [(n, l)]))
682 spans.append((oo, ol, [(n, l)]))
540
683
541 # read spans in, divide up chunks
684 # read spans in, divide up chunks
542 chunks = {}
685 chunks = {}
543 for span in spans:
686 for span in spans:
544 # we reopen the file for each span to make http happy for now
687 # we reopen the file for each span to make http happy for now
545 f = self.opener(self.datafile)
688 f = self.opener(self.datafile)
546 f.seek(span[0])
689 f.seek(span[0])
547 data = f.read(span[1])
690 data = f.read(span[1])
548
691
549 # divide up the span
692 # divide up the span
550 pos = 0
693 pos = 0
551 for r, l in span[2]:
694 for r, l in span[2]:
552 chunks[r] = decompress(data[pos: pos + l])
695 chunks[r] = decompress(data[pos: pos + l])
553 pos += l
696 pos += l
554
697
555 # helper to reconstruct intermediate versions
698 # helper to reconstruct intermediate versions
556 def construct(text, base, rev):
699 def construct(text, base, rev):
557 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
700 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
558 return mdiff.patches(text, bins)
701 return mdiff.patches(text, bins)
559
702
560 # build deltas
703 # build deltas
561 deltas = []
704 deltas = []
562 for d in xrange(0, len(revs) - 1):
705 for d in xrange(0, len(revs) - 1):
563 a, b = revs[d], revs[d + 1]
706 a, b = revs[d], revs[d + 1]
564 n = self.node(b)
707 n = self.node(b)
565
708
566 # do we need to construct a new delta?
709 # do we need to construct a new delta?
567 if a + 1 != b or self.base(b) == b:
710 if a + 1 != b or self.base(b) == b:
568 if a >= 0:
711 if a >= 0:
569 base = self.base(a)
712 base = self.base(a)
570 ta = chunks[self.base(a)]
713 ta = chunks[self.base(a)]
571 ta = construct(ta, base, a)
714 ta = construct(ta, base, a)
572 else:
715 else:
573 ta = ""
716 ta = ""
574
717
575 base = self.base(b)
718 base = self.base(b)
576 if a > base:
719 if a > base:
577 base = a
720 base = a
578 tb = ta
721 tb = ta
579 else:
722 else:
580 tb = chunks[self.base(b)]
723 tb = chunks[self.base(b)]
581 tb = construct(tb, base, b)
724 tb = construct(tb, base, b)
582 d = self.diff(ta, tb)
725 d = self.diff(ta, tb)
583 else:
726 else:
584 d = chunks[b]
727 d = chunks[b]
585
728
586 p = self.parents(n)
729 p = self.parents(n)
587 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
730 meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
588 l = struct.pack(">l", len(meta) + len(d) + 4)
731 l = struct.pack(">l", len(meta) + len(d) + 4)
589 yield l
732 yield l
590 yield meta
733 yield meta
591 yield d
734 yield d
592
735
593 yield struct.pack(">l", 0)
736 yield struct.pack(">l", 0)
594
737
595 def addgroup(self, revs, linkmapper, transaction, unique=0):
738 def addgroup(self, revs, linkmapper, transaction, unique=0):
596 """
739 """
597 add a delta group
740 add a delta group
598
741
599 Given a set of deltas, add them to the revision log. The
742 Given a set of deltas, add them to the revision log. The
600 first delta is against its parent, which should be in our
743 first delta is against its parent, which should be in our
601 log; the rest are against the previous delta.
744 log; the rest are against the previous delta.
602 """
745 """
603
746
604 # track the base of the current delta log
747 # track the base of the current delta log
605 r = self.count()
748 r = self.count()
606 t = r - 1
749 t = r - 1
607 node = nullid
750 node = nullid
608
751
609 base = prev = -1
752 base = prev = -1
610 start = end = measure = 0
753 start = end = measure = 0
611 if r:
754 if r:
612 start = self.start(self.base(t))
755 start = self.start(self.base(t))
613 end = self.end(t)
756 end = self.end(t)
614 measure = self.length(self.base(t))
757 measure = self.length(self.base(t))
615 base = self.base(t)
758 base = self.base(t)
616 prev = self.tip()
759 prev = self.tip()
617
760
618 transaction.add(self.datafile, end)
761 transaction.add(self.datafile, end)
619 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
762 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
620 dfh = self.opener(self.datafile, "a")
763 dfh = self.opener(self.datafile, "a")
621 ifh = self.opener(self.indexfile, "a")
764 ifh = self.opener(self.indexfile, "a")
622
765
623 # loop through our set of deltas
766 # loop through our set of deltas
624 chain = None
767 chain = None
625 for chunk in revs:
768 for chunk in revs:
626 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
769 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
627 link = linkmapper(cs)
770 link = linkmapper(cs)
628 if node in self.nodemap:
771 if node in self.nodemap:
629 # this can happen if two branches make the same change
772 # this can happen if two branches make the same change
630 # if unique:
773 # if unique:
631 # raise RevlogError("already have %s" % hex(node[:4]))
774 # raise RevlogError("already have %s" % hex(node[:4]))
632 chain = node
775 chain = node
633 continue
776 continue
634 delta = chunk[80:]
777 delta = chunk[80:]
635
778
636 if not chain:
779 if not chain:
637 # retrieve the parent revision of the delta chain
780 # retrieve the parent revision of the delta chain
638 chain = p1
781 chain = p1
639 if not chain in self.nodemap:
782 if not chain in self.nodemap:
640 raise RevlogError("unknown base %s" % short(chain[:4]))
783 raise RevlogError("unknown base %s" % short(chain[:4]))
641
784
642 # full versions are inserted when the needed deltas become
785 # full versions are inserted when the needed deltas become
643 # comparable to the uncompressed text or when the previous
786 # comparable to the uncompressed text or when the previous
644 # version is not the one we have a delta against. We use
787 # version is not the one we have a delta against. We use
645 # the size of the previous full rev as a proxy for the
788 # the size of the previous full rev as a proxy for the
646 # current size.
789 # current size.
647
790
648 if chain == prev:
791 if chain == prev:
649 cdelta = compress(delta)
792 cdelta = compress(delta)
650
793
651 if chain != prev or (end - start + len(cdelta)) > measure * 2:
794 if chain != prev or (end - start + len(cdelta)) > measure * 2:
652 # flush our writes here so we can read it in revision
795 # flush our writes here so we can read it in revision
653 dfh.flush()
796 dfh.flush()
654 ifh.flush()
797 ifh.flush()
655 text = self.revision(chain)
798 text = self.revision(chain)
656 text = self.patches(text, [delta])
799 text = self.patches(text, [delta])
657 chk = self.addrevision(text, transaction, link, p1, p2)
800 chk = self.addrevision(text, transaction, link, p1, p2)
658 if chk != node:
801 if chk != node:
659 raise RevlogError("consistency error adding group")
802 raise RevlogError("consistency error adding group")
660 measure = len(text)
803 measure = len(text)
661 else:
804 else:
662 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
805 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
663 self.index.append(e)
806 self.index.append(e)
664 self.nodemap[node] = r
807 self.nodemap[node] = r
665 dfh.write(cdelta)
808 dfh.write(cdelta)
666 ifh.write(struct.pack(indexformat, *e))
809 ifh.write(struct.pack(indexformat, *e))
667
810
668 t, r, chain, prev = r, r + 1, node, node
811 t, r, chain, prev = r, r + 1, node, node
669 start = self.start(self.base(t))
812 start = self.start(self.base(t))
670 end = self.end(t)
813 end = self.end(t)
671
814
672 dfh.close()
815 dfh.close()
673 ifh.close()
816 ifh.close()
674 return node
817 return node