##// END OF EJS Templates
fix warnings from pychecker (unused variables and shadowing)
Benoit Boissinot -
r1749:d457fec7 default
parent child Browse files
Show More
@@ -1,2853 +1,2853 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
15
15
class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
20
20
def filterfiles(filters, files):
    """Return the members of files selected by filters.

    A file is selected when it is named exactly by one of the filters,
    or when it lies under a filter treated as a directory prefix.
    Note: a file may appear more than once if several filters match it.
    """
    selected = [f for f in files if f in filters]
    for prefix in filters:
        # Interpret each filter as a directory by ensuring it ends in "/".
        # (An empty filter is left as-is and therefore matches everything,
        # since every string starts with "".)
        if prefix and not prefix.endswith("/"):
            prefix += "/"
        selected.extend(f for f in files if f.startswith(prefix))
    return selected
29
29
def relpath(repo, args):
    """Rebase each path in args onto the repository's working directory.

    When the current working directory is the repository root
    (repo.getcwd() returns an empty string), args are returned
    unchanged; otherwise each path is joined to the cwd and normalized.
    """
    cwd = repo.getcwd()
    if not cwd:
        return args
    return [util.normpath(os.path.join(cwd, arg)) for arg in args]
35
35
def matchpats(repo, pats=None, opts=None, head=''):
    """Build a matcher for the given patterns and options.

    Returns the (files, matchfn, anypats) tuple from util.cmdmatcher.
    When no patterns are given but we are in a subdirectory, the
    include/exclude option lists are re-anchored onto that directory
    and matching is done from the repository root instead.

    Fix: the previous defaults (pats=[], opts={}) were mutable objects
    shared across every call, and opts is mutated in place below --
    a classic shared-default bug flagged by pychecker-era linters.
    None is now the default and is converted to a fresh list/dict;
    callers that pass their own list/dict are unaffected.
    """
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    cwd = repo.getcwd()
    if not pats and cwd:
        # No explicit patterns: anchor include/exclude to the cwd and
        # match from the root.
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)
44
44
def makewalk(repo, pats, opts, node=None, head=''):
    """Prepare a repository walk over the given patterns.

    Returns (files, matchfn, iterator).  The iterator yields
    (src, abspath, relpath, exact) tuples, where exact is true when
    the file was named explicitly rather than matched by a pattern.
    """
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    # Membership set of the explicitly named files (values are unused).
    exact = dict.fromkeys(files)

    def generate():
        for src, fn in repo.walk(node=node, files=files, match=matchfn):
            yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact

    return files, matchfn, generate()
52
52
def walk(repo, pats, opts, node=None, head=''):
    """Walk the repository, yielding (src, abs, rel, exact) tuples.

    Thin generator wrapper around makewalk() for callers that only
    need the iteration, not the file list or match function.
    """
    for item in makewalk(repo, pats, opts, node, head)[2]:
        yield item
57
57
def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in.  Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision.  The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    files, matchfn, anypats = matchpats(repo, pats, opts)

    # Empty repository: nothing to iterate.
    if repo.changelog.count() == 0:
        return [], False, matchfn

    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}          # revision numbers to visit (used as a set)
    slowpath = anypats   # patterns force scanning every changeset
    window = 300
    fncache = {}         # rev -> list of matching file names changed there

    chcache = {}
    def getchange(rev):
        # Memoized changelog read for a numeric revision.
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog):
            # Yield changelog revisions touching filelog, newest first,
            # walking the filelog itself in windows of `window` entries.
            for i in xrange(filelog.count() - 1, -1, -window):
                revs = []
                for j in xrange(max(0, i - window), i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        # NOTE: file_ (not file) to avoid shadowing the builtin.
        for file_ in files:
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        # filerevgen yields newest-first; below the
                        # requested range nothing further can match.
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i in xrange(repo.changelog.count() - 1, -1, -window):
                for j in xrange(max(0, i - window), i + 1):
                    # getchange(j)[3] is the changed-files list.
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        # Walk the requested revs in windows: announce the window,
        # gather data in ascending order ("add"), then replay in the
        # requested order ("iter").
        for i in xrange(0, len(revs), window):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn
158
158
159 revrangesep = ':'
159 revrangesep = ':'
160
160
def revrange(ui, repo, revs, revlog=None):
    """Yield revisions as strings from a list of revision specifications.

    Each spec is a single revision identifier or a 'start:end' pair
    separated by revrangesep; ranges may run backwards.  Each revision
    is yielded at most once.  Raises util.Abort for identifiers that
    resolve in neither the changelog nor the given revlog.
    """
    if revlog is None:
        revlog = repo.changelog
    revcount = revlog.count()

    def tonum(val, defval):
        # Resolve one spec component to a revision number.  Empty
        # components (e.g. ':5' or '5:') fall back to defval.
        if not val:
            return defval
        try:
            num = int(val)
            if str(num) != val:
                # Reject things like '05' or '1.0' that int() accepts.
                raise ValueError
            if num < 0:
                # Negative numbers count back from the tip.
                num += revcount
            if num < 0:
                num = 0
            elif num >= revcount:
                raise ValueError
        except ValueError:
            # Not a plain number: try the changelog first, then the
            # revlog we were given.
            try:
                num = repo.changelog.rev(repo.lookup(val))
            except KeyError:
                try:
                    num = revlog.rev(revlog.lookup(val))
                except KeyError:
                    raise util.Abort(_('invalid revision identifier %s'), val)
        return num

    seen = {}
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = tonum(start, 0)
            end = tonum(end, revcount - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end + step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = tonum(spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)
206
206
def make_filename(repo, r, pat, node=None,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in an output file name pattern.

    Supported escapes (each enabled only when the backing argument was
    supplied): %% literal percent; %b basename of the repo root;
    %H/%h/%R full hex, short hex and revision number of node;
    %r revision number zero-padded to revwidth; %N total; %n seqno,
    zero-padded to the width of total when both are given; %s/%d/%p
    basename, dirname ('.' if empty) and full pathname.

    Raises util.Abort when the pattern uses an escape that is not
    enabled.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }
    try:
        if node:
            expander.update({
                'H': lambda: hex(node),
                'R': lambda: str(r.rev(node)),
                'h': lambda: short(node),
                })
            if revwidth is not None:
                expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # With both available, pad sequence numbers to the width
            # of the total count (overrides the plain %n above).
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        i, patlen = 0, len(pat)
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = expander[pat[i]]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError:
        # Unknown escape character: report which one.
        inst = sys.exc_info()[1]
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])
250
250
def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return an open file object for the given output name pattern.

    An empty pattern or '-' means stdout when writing, stdin when
    reading.  A file-like object compatible with mode is returned
    unchanged.  Anything else is expanded with make_filename() and
    opened with the given mode.
    """
    writing = 'w' in mode
    if not pat or pat == '-':
        return writing and sys.stdout or sys.stdin
    if writing and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    name = make_filename(repo, r, pat, node, total, seqno, revwidth, pathname)
    return open(name, mode)
262
262
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    """Write a unified diff between two repository states to fp.

    node2 of None diffs against the working directory; node1 of None
    falls back to the first dirstate parent.  changes may carry a
    precomputed repo.changes() result; files restricts the diff to the
    listed files/directories.
    """
    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        modified, added, removed = [filterfiles(files, x)
                                    for x in (modified, added, removed)]

    if not modified and not added and not removed:
        return

    # Reader for the "new" side of the diff: file contents from node2,
    # or from the working directory when node2 is not given.
    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        date2 = util.datestr()
        if not node1:
            node1 = repo.dirstate.parents()[0]
        def read(f):
            return repo.wread(f)

    # Revision identifiers for the diff header; suppressed when quiet.
    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']

    def emit(to, tn, f):
        # One hunk: old contents `to`, new contents `tn` (None marks a
        # missing side).
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))

    for f in modified:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        emit(to, read(f), f)
    for f in added:
        emit(None, read(f), f)
    for f in removed:
        emit(repo.file(f).read(mmap[f]), None, f)
318
318
def trimuser(ui, name, rev, revcache):
    """Trim the name of the user who committed a change.

    The shortened form is memoized per revision in revcache so repeated
    lookups for the same rev skip ui.shortuser entirely.
    """
    cached = revcache.get(rev)
    if cached is not None:
        return cached
    shortened = ui.shortuser(name)
    revcache[rev] = shortened
    return shortened
325
325
def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
    """show a single changeset or file revision"""
    log = repo.changelog
    # rev and changenode are interchangeable; fill in whichever is
    # missing from the other.
    if changenode is None:
        changenode = log.node(rev)
    elif not rev:
        rev = log.rev(changenode)

    if ui.quiet:
        ui.write("%d:%s\n" % (rev, short(changenode)))
        return

    changes = log.read(changenode)
    date = util.datestr(changes[2])

    # Parents worth showing; null parents only appear when debugging.
    parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
               for p in log.parents(changenode)
               if ui.debugflag or p != nullid]
    # A single parent that is just the previous revision is implied,
    # so do not print it (unless debugging).
    if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
        parents = []

    if ui.verbose:
        ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
    else:
        ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

    for tag in repo.nodetags(changenode):
        ui.status(_("tag: %s\n") % tag)
    for parent in parents:
        ui.write(_("parent: %d:%s\n") % parent)

    if brinfo and changenode in brinfo:
        br = brinfo[changenode]
        ui.write(_("branch: %s\n") % " ".join(br))

    ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
                                       hex(changes[0])))
    ui.status(_("user: %s\n") % changes[1])
    ui.status(_("date: %s\n") % date)

    if ui.debugflag:
        # Debug mode: break the file list into modified/added/removed.
        files = repo.changes(log.parents(changenode)[0], changenode)
        for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
            if value:
                ui.note("%-12s %s\n" % (key, " ".join(value)))
    else:
        # changes[3] is the flat list of files touched by this changeset.
        ui.note(_("files: %s\n") % " ".join(changes[3]))

    description = changes[4].strip()
    if description:
        if ui.verbose:
            ui.status(_("description:\n"))
            ui.status(description)
            ui.status("\n\n")
        else:
            ui.status(_("summary: %s\n") % description.splitlines()[0])
    ui.status("\n")
383
383
def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # Copyright notice goes through ui.status so --quiet suppresses it.
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
394
394
def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands"""
    option_lists = []
    if cmd and cmd != 'shortlist':
        # Help for one specific command.
        if with_version:
            show_version(ui)
            ui.write('\n')
        # find() resolves aliases and may raise for unknown/ambiguous
        # commands; i is the command table entry (func, options, synopsis).
        aliases, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # Quiet mode: first line of the docstring only.
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # Command overview (full list or the "shortlist" subset).
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        # h maps command name -> one-line summary; cmds maps command
        # name -> full "name|alias" spec for verbose output.
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|")[0]
            # "^" marks the basic commands shown in the shortlist.
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        # Column width for the non-verbose aligned listing.
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        # A (text, None) pair is a section header; (left, right) pairs
        # are option rows aligned below.
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
490
489
491 # Commands start here, listed alphabetically
490 # Commands start here, listed alphabetically
492
491
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """
    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            # Explicitly named on the command line: add unconditionally,
            # but only announce it in verbose mode.
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            # Pattern match: add (and announce) only untracked files.
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    repo.add(names)
513
512
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.
    """
    # Delegate to the lock-aware implementation without holding a lock.
    return addremove_lock(ui, repo, pats, opts)
523
522
def addremove_lock(ui, repo, pats, opts, wlock=None):
    """Shared worker for addremove: add unknown files, forget missing ones.

    Walks the working directory for `pats`.  A plain file that the
    dirstate does not know about ('?') is scheduled for add; a tracked
    file (not already marked removed, 'r') that has vanished from the
    filesystem is scheduled for removal.  `wlock`, if given, is an
    already-held working-directory lock passed through to the repo.
    """
    # Renamed from add/remove: those names shadowed the add() and
    # remove() commands defined at module level (pychecker warning).
    adds, removes = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            adds.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            removes.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    repo.add(adds, wlock=wlock)
    repo.remove(removes, wlock=wlock)
537
536
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # Per-column formatters: each maps a revision number to the string
    # shown for the corresponding annotation option.
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}  # per-run cache for trimmed user names
    def getname(rev):
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    dcache = {}  # per-run cache of formatted dates, keyed by revision
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            cl = repo.changelog.read(repo.changelog.node(rev))
            datestr = dcache[rev] = util.datestr(cl[2])
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    # default to showing revision numbers when no column was requested
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts):
        if abs not in mmap:
            ui.warn(_("warning: %s is not in the repository!\n") %
                    ((pats and rel) or abs))
            continue

        f = repo.file(abs)
        if not opts['text'] and util.binary(f.read(mmap[abs])):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = f.annotate(mmap[abs])
        pieces = []

        # use a distinct loop variable: 'f' is the filelog above and
        # must not be shadowed here (pychecker warning)
        for o, getfunc in opmap:
            if opts[o]:
                l = [getfunc(n) for n, dummy in lines]
                if l:
                    # right-align each column to its widest entry
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
605
604
def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    f = open(fname, "wb")
    dest = ui.expandpath(dest, repo.root)
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other)
    cg = repo.changegroup(o, 'bundle')

    try:
        f.write("HG10")
        # bz2-compress the changegroup stream in 4k chunks
        z = bz2.BZ2Compressor(9)
        while 1:
            chunk = cg.read(4096)
            if not chunk:
                break
            f.write(z.compress(chunk))
        f.write(z.flush())
        # close on success too: previously the descriptor leaked and the
        # final data relied on interpreter shutdown to be flushed
        f.close()
    except:
        # close before unlinking the partial bundle so removal also
        # works on platforms that refuse to delete open files
        f.close()
        os.unlink(fname)
        raise
639
638
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
    """
    # (removed the dead 'mf = {}' initialization that was immediately
    # overwritten below -- pychecker unused-value warning)
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = repo.changelog.tip()
    change = repo.changelog.read(node)
    mf = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
        r = repo.file(abs)
        n = mf[abs]
        # make_file resolves the --output format string (or stdout)
        fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
        fp.write(r.read(n))
667
666
668 def clone(ui, source, dest=None, **opts):
667 def clone(ui, source, dest=None, **opts):
669 """make a copy of an existing repository
668 """make a copy of an existing repository
670
669
671 Create a copy of an existing repository in a new directory.
670 Create a copy of an existing repository in a new directory.
672
671
673 If no destination directory name is specified, it defaults to the
672 If no destination directory name is specified, it defaults to the
674 basename of the source.
673 basename of the source.
675
674
676 The location of the source is added to the new repository's
675 The location of the source is added to the new repository's
677 .hg/hgrc file, as the default to be used for future pulls.
676 .hg/hgrc file, as the default to be used for future pulls.
678
677
679 For efficiency, hardlinks are used for cloning whenever the source
678 For efficiency, hardlinks are used for cloning whenever the source
680 and destination are on the same filesystem. Some filesystems,
679 and destination are on the same filesystem. Some filesystems,
681 such as AFS, implement hardlinking incorrectly, but do not report
680 such as AFS, implement hardlinking incorrectly, but do not report
682 errors. In these cases, use the --pull option to avoid
681 errors. In these cases, use the --pull option to avoid
683 hardlinking.
682 hardlinking.
684 """
683 """
685 if dest is None:
684 if dest is None:
686 dest = os.path.basename(os.path.normpath(source))
685 dest = os.path.basename(os.path.normpath(source))
687
686
688 if os.path.exists(dest):
687 if os.path.exists(dest):
689 raise util.Abort(_("destination '%s' already exists"), dest)
688 raise util.Abort(_("destination '%s' already exists"), dest)
690
689
691 dest = os.path.realpath(dest)
690 dest = os.path.realpath(dest)
692
691
693 class Dircleanup(object):
692 class Dircleanup(object):
694 def __init__(self, dir_):
693 def __init__(self, dir_):
695 self.rmtree = shutil.rmtree
694 self.rmtree = shutil.rmtree
696 self.dir_ = dir_
695 self.dir_ = dir_
697 os.mkdir(dir_)
696 os.mkdir(dir_)
698 def close(self):
697 def close(self):
699 self.dir_ = None
698 self.dir_ = None
700 def __del__(self):
699 def __del__(self):
701 if self.dir_:
700 if self.dir_:
702 self.rmtree(self.dir_, True)
701 self.rmtree(self.dir_, True)
703
702
704 if opts['ssh']:
703 if opts['ssh']:
705 ui.setconfig("ui", "ssh", opts['ssh'])
704 ui.setconfig("ui", "ssh", opts['ssh'])
706 if opts['remotecmd']:
705 if opts['remotecmd']:
707 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
706 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
708
707
709 if not os.path.exists(source):
708 if not os.path.exists(source):
710 source = ui.expandpath(source)
709 source = ui.expandpath(source)
711
710
712 d = Dircleanup(dest)
711 d = Dircleanup(dest)
713 abspath = source
712 abspath = source
714 other = hg.repository(ui, source)
713 other = hg.repository(ui, source)
715
714
716 copy = False
715 copy = False
717 if other.dev() != -1:
716 if other.dev() != -1:
718 abspath = os.path.abspath(source)
717 abspath = os.path.abspath(source)
719 if not opts['pull'] and not opts['rev']:
718 if not opts['pull'] and not opts['rev']:
720 copy = True
719 copy = True
721
720
722 if copy:
721 if copy:
723 try:
722 try:
724 # we use a lock here because if we race with commit, we
723 # we use a lock here because if we race with commit, we
725 # can end up with extra data in the cloned revlogs that's
724 # can end up with extra data in the cloned revlogs that's
726 # not pointed to by changesets, thus causing verify to
725 # not pointed to by changesets, thus causing verify to
727 # fail
726 # fail
728 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
727 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
729 except OSError:
728 except OSError:
730 copy = False
729 copy = False
731
730
732 if copy:
731 if copy:
733 # we lock here to avoid premature writing to the target
732 # we lock here to avoid premature writing to the target
734 os.mkdir(os.path.join(dest, ".hg"))
733 os.mkdir(os.path.join(dest, ".hg"))
735 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
734 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
736
735
737 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
736 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
738 for f in files.split():
737 for f in files.split():
739 src = os.path.join(source, ".hg", f)
738 src = os.path.join(source, ".hg", f)
740 dst = os.path.join(dest, ".hg", f)
739 dst = os.path.join(dest, ".hg", f)
741 try:
740 try:
742 util.copyfiles(src, dst)
741 util.copyfiles(src, dst)
743 except OSError, inst:
742 except OSError, inst:
744 if inst.errno != errno.ENOENT:
743 if inst.errno != errno.ENOENT:
745 raise
744 raise
746
745
747 repo = hg.repository(ui, dest)
746 repo = hg.repository(ui, dest)
748
747
749 else:
748 else:
750 revs = None
749 revs = None
751 if opts['rev']:
750 if opts['rev']:
752 if not other.local():
751 if not other.local():
753 error = _("clone -r not supported yet for remote repositories.")
752 error = _("clone -r not supported yet for remote repositories.")
754 raise util.Abort(error)
753 raise util.Abort(error)
755 else:
754 else:
756 revs = [other.lookup(rev) for rev in opts['rev']]
755 revs = [other.lookup(rev) for rev in opts['rev']]
757 repo = hg.repository(ui, dest, create=1)
756 repo = hg.repository(ui, dest, create=1)
758 repo.pull(other, heads = revs)
757 repo.pull(other, heads = revs)
759
758
760 f = repo.opener("hgrc", "w", text=True)
759 f = repo.opener("hgrc", "w", text=True)
761 f.write("[paths]\n")
760 f.write("[paths]\n")
762 f.write("default = %s\n" % abspath)
761 f.write("default = %s\n" % abspath)
763 f.close()
762 f.close()
764
763
765 if not opts['noupdate']:
764 if not opts['noupdate']:
766 update(ui, repo)
765 update(ui, repo)
767
766
768 d.close()
767 d.close()
769
768
770 def commit(ui, repo, *pats, **opts):
769 def commit(ui, repo, *pats, **opts):
771 """commit the specified files or all outstanding changes
770 """commit the specified files or all outstanding changes
772
771
773 Commit changes to the given files into the repository.
772 Commit changes to the given files into the repository.
774
773
775 If a list of files is omitted, all changes reported by "hg status"
774 If a list of files is omitted, all changes reported by "hg status"
776 will be commited.
775 will be commited.
777
776
778 The HGEDITOR or EDITOR environment variables are used to start an
777 The HGEDITOR or EDITOR environment variables are used to start an
779 editor to add a commit comment.
778 editor to add a commit comment.
780 """
779 """
781 message = opts['message']
780 message = opts['message']
782 logfile = opts['logfile']
781 logfile = opts['logfile']
783
782
784 if message and logfile:
783 if message and logfile:
785 raise util.Abort(_('options --message and --logfile are mutually '
784 raise util.Abort(_('options --message and --logfile are mutually '
786 'exclusive'))
785 'exclusive'))
787 if not message and logfile:
786 if not message and logfile:
788 try:
787 try:
789 if logfile == '-':
788 if logfile == '-':
790 message = sys.stdin.read()
789 message = sys.stdin.read()
791 else:
790 else:
792 message = open(logfile).read()
791 message = open(logfile).read()
793 except IOError, inst:
792 except IOError, inst:
794 raise util.Abort(_("can't read commit message '%s': %s") %
793 raise util.Abort(_("can't read commit message '%s': %s") %
795 (logfile, inst.strerror))
794 (logfile, inst.strerror))
796
795
797 if opts['addremove']:
796 if opts['addremove']:
798 addremove(ui, repo, *pats, **opts)
797 addremove(ui, repo, *pats, **opts)
799 fns, match, anypats = matchpats(repo, pats, opts)
798 fns, match, anypats = matchpats(repo, pats, opts)
800 if pats:
799 if pats:
801 modified, added, removed, deleted, unknown = (
800 modified, added, removed, deleted, unknown = (
802 repo.changes(files=fns, match=match))
801 repo.changes(files=fns, match=match))
803 files = modified + added + removed
802 files = modified + added + removed
804 else:
803 else:
805 files = []
804 files = []
806 try:
805 try:
807 repo.commit(files, message, opts['user'], opts['date'], match)
806 repo.commit(files, message, opts['user'], opts['date'], match)
808 except ValueError, inst:
807 except ValueError, inst:
809 raise util.Abort(str(inst))
808 raise util.Abort(str(inst))
810
809
811 def docopy(ui, repo, pats, opts):
810 def docopy(ui, repo, pats, opts):
812 cwd = repo.getcwd()
811 cwd = repo.getcwd()
813 errors = 0
812 errors = 0
814 copied = []
813 copied = []
815 targets = {}
814 targets = {}
816
815
817 def okaytocopy(abs, rel, exact):
816 def okaytocopy(abs, rel, exact):
818 reasons = {'?': _('is not managed'),
817 reasons = {'?': _('is not managed'),
819 'a': _('has been marked for add'),
818 'a': _('has been marked for add'),
820 'r': _('has been marked for remove')}
819 'r': _('has been marked for remove')}
821 state = repo.dirstate.state(abs)
820 state = repo.dirstate.state(abs)
822 reason = reasons.get(state)
821 reason = reasons.get(state)
823 if reason:
822 if reason:
824 if state == 'a':
823 if state == 'a':
825 origsrc = repo.dirstate.copied(abs)
824 origsrc = repo.dirstate.copied(abs)
826 if origsrc is not None:
825 if origsrc is not None:
827 return origsrc
826 return origsrc
828 if exact:
827 if exact:
829 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
828 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
830 else:
829 else:
831 return abs
830 return abs
832
831
833 def copy(origsrc, abssrc, relsrc, target, exact):
832 def copy(origsrc, abssrc, relsrc, target, exact):
834 abstarget = util.canonpath(repo.root, cwd, target)
833 abstarget = util.canonpath(repo.root, cwd, target)
835 reltarget = util.pathto(cwd, abstarget)
834 reltarget = util.pathto(cwd, abstarget)
836 prevsrc = targets.get(abstarget)
835 prevsrc = targets.get(abstarget)
837 if prevsrc is not None:
836 if prevsrc is not None:
838 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
837 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
839 (reltarget, abssrc, prevsrc))
838 (reltarget, abssrc, prevsrc))
840 return
839 return
841 if (not opts['after'] and os.path.exists(reltarget) or
840 if (not opts['after'] and os.path.exists(reltarget) or
842 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
841 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
843 if not opts['force']:
842 if not opts['force']:
844 ui.warn(_('%s: not overwriting - file exists\n') %
843 ui.warn(_('%s: not overwriting - file exists\n') %
845 reltarget)
844 reltarget)
846 return
845 return
847 if not opts['after']:
846 if not opts['after']:
848 os.unlink(reltarget)
847 os.unlink(reltarget)
849 if opts['after']:
848 if opts['after']:
850 if not os.path.exists(reltarget):
849 if not os.path.exists(reltarget):
851 return
850 return
852 else:
851 else:
853 targetdir = os.path.dirname(reltarget) or '.'
852 targetdir = os.path.dirname(reltarget) or '.'
854 if not os.path.isdir(targetdir):
853 if not os.path.isdir(targetdir):
855 os.makedirs(targetdir)
854 os.makedirs(targetdir)
856 try:
855 try:
857 shutil.copyfile(relsrc, reltarget)
856 shutil.copyfile(relsrc, reltarget)
858 shutil.copymode(relsrc, reltarget)
857 shutil.copymode(relsrc, reltarget)
859 except shutil.Error, inst:
858 except shutil.Error, inst:
860 raise util.Abort(str(inst))
859 raise util.Abort(str(inst))
861 except IOError, inst:
860 except IOError, inst:
862 if inst.errno == errno.ENOENT:
861 if inst.errno == errno.ENOENT:
863 ui.warn(_('%s: deleted in working copy\n') % relsrc)
862 ui.warn(_('%s: deleted in working copy\n') % relsrc)
864 else:
863 else:
865 ui.warn(_('%s: cannot copy - %s\n') %
864 ui.warn(_('%s: cannot copy - %s\n') %
866 (relsrc, inst.strerror))
865 (relsrc, inst.strerror))
867 errors += 1
866 errors += 1
868 return
867 return
869 if ui.verbose or not exact:
868 if ui.verbose or not exact:
870 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
869 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
871 targets[abstarget] = abssrc
870 targets[abstarget] = abssrc
872 repo.copy(origsrc, abstarget)
871 repo.copy(origsrc, abstarget)
873 copied.append((abssrc, relsrc, exact))
872 copied.append((abssrc, relsrc, exact))
874
873
875 def targetpathfn(pat, dest, srcs):
874 def targetpathfn(pat, dest, srcs):
876 if os.path.isdir(pat):
875 if os.path.isdir(pat):
877 abspfx = util.canonpath(repo.root, cwd, pat)
876 abspfx = util.canonpath(repo.root, cwd, pat)
878 if destdirexists:
877 if destdirexists:
879 striplen = len(os.path.split(abspfx)[0])
878 striplen = len(os.path.split(abspfx)[0])
880 else:
879 else:
881 striplen = len(abspfx)
880 striplen = len(abspfx)
882 if striplen:
881 if striplen:
883 striplen += len(os.sep)
882 striplen += len(os.sep)
884 res = lambda p: os.path.join(dest, p[striplen:])
883 res = lambda p: os.path.join(dest, p[striplen:])
885 elif destdirexists:
884 elif destdirexists:
886 res = lambda p: os.path.join(dest, os.path.basename(p))
885 res = lambda p: os.path.join(dest, os.path.basename(p))
887 else:
886 else:
888 res = lambda p: dest
887 res = lambda p: dest
889 return res
888 return res
890
889
891 def targetpathafterfn(pat, dest, srcs):
890 def targetpathafterfn(pat, dest, srcs):
892 if util.patkind(pat, None)[0]:
891 if util.patkind(pat, None)[0]:
893 # a mercurial pattern
892 # a mercurial pattern
894 res = lambda p: os.path.join(dest, os.path.basename(p))
893 res = lambda p: os.path.join(dest, os.path.basename(p))
895 else:
894 else:
896 abspfx = util.canonpath(repo.root, cwd, pat)
895 abspfx = util.canonpath(repo.root, cwd, pat)
897 if len(abspfx) < len(srcs[0][0]):
896 if len(abspfx) < len(srcs[0][0]):
898 # A directory. Either the target path contains the last
897 # A directory. Either the target path contains the last
899 # component of the source path or it does not.
898 # component of the source path or it does not.
900 def evalpath(striplen):
899 def evalpath(striplen):
901 score = 0
900 score = 0
902 for s in srcs:
901 for s in srcs:
903 t = os.path.join(dest, s[0][striplen:])
902 t = os.path.join(dest, s[0][striplen:])
904 if os.path.exists(t):
903 if os.path.exists(t):
905 score += 1
904 score += 1
906 return score
905 return score
907
906
908 striplen = len(abspfx)
907 striplen = len(abspfx)
909 if striplen:
908 if striplen:
910 striplen += len(os.sep)
909 striplen += len(os.sep)
911 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
910 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
912 score = evalpath(striplen)
911 score = evalpath(striplen)
913 striplen1 = len(os.path.split(abspfx)[0])
912 striplen1 = len(os.path.split(abspfx)[0])
914 if striplen1:
913 if striplen1:
915 striplen1 += len(os.sep)
914 striplen1 += len(os.sep)
916 if evalpath(striplen1) > score:
915 if evalpath(striplen1) > score:
917 striplen = striplen1
916 striplen = striplen1
918 res = lambda p: os.path.join(dest, p[striplen:])
917 res = lambda p: os.path.join(dest, p[striplen:])
919 else:
918 else:
920 # a file
919 # a file
921 if destdirexists:
920 if destdirexists:
922 res = lambda p: os.path.join(dest, os.path.basename(p))
921 res = lambda p: os.path.join(dest, os.path.basename(p))
923 else:
922 else:
924 res = lambda p: dest
923 res = lambda p: dest
925 return res
924 return res
926
925
927
926
928 pats = list(pats)
927 pats = list(pats)
929 if not pats:
928 if not pats:
930 raise util.Abort(_('no source or destination specified'))
929 raise util.Abort(_('no source or destination specified'))
931 if len(pats) == 1:
930 if len(pats) == 1:
932 raise util.Abort(_('no destination specified'))
931 raise util.Abort(_('no destination specified'))
933 dest = pats.pop()
932 dest = pats.pop()
934 destdirexists = os.path.isdir(dest)
933 destdirexists = os.path.isdir(dest)
935 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
934 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
936 raise util.Abort(_('with multiple sources, destination must be an '
935 raise util.Abort(_('with multiple sources, destination must be an '
937 'existing directory'))
936 'existing directory'))
938 if opts['after']:
937 if opts['after']:
939 tfn = targetpathafterfn
938 tfn = targetpathafterfn
940 else:
939 else:
941 tfn = targetpathfn
940 tfn = targetpathfn
942 copylist = []
941 copylist = []
943 for pat in pats:
942 for pat in pats:
944 srcs = []
943 srcs = []
945 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
944 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
946 origsrc = okaytocopy(abssrc, relsrc, exact)
945 origsrc = okaytocopy(abssrc, relsrc, exact)
947 if origsrc:
946 if origsrc:
948 srcs.append((origsrc, abssrc, relsrc, exact))
947 srcs.append((origsrc, abssrc, relsrc, exact))
949 if not srcs:
948 if not srcs:
950 continue
949 continue
951 copylist.append((tfn(pat, dest, srcs), srcs))
950 copylist.append((tfn(pat, dest, srcs), srcs))
952 if not copylist:
951 if not copylist:
953 raise util.Abort(_('no files to copy'))
952 raise util.Abort(_('no files to copy'))
954
953
955 for targetpath, srcs in copylist:
954 for targetpath, srcs in copylist:
956 for origsrc, abssrc, relsrc, exact in srcs:
955 for origsrc, abssrc, relsrc, exact in srcs:
957 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
956 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
958
957
959 if errors:
958 if errors:
960 ui.warn(_('(consider using --after)\n'))
959 ui.warn(_('(consider using --after)\n'))
961 return errors, copied
960 return errors, copied
962
961
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files.  If dest is a
    directory, copies are put in that directory.  If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory.  If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # docopy returns (error count, copied list); the list was formerly
    # bound to an unused local (pychecker warning), so index directly
    return docopy(ui, repo, pats, opts)[0]
982
981
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # open the revlog straight off its index file in the current directory
    r = revlog.revlog(util.opener(os.getcwd()), index, "")
    anc = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(anc), hex(anc)))
988
987
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    # (removed a sorted key list that was computed but never used --
    # pychecker unused-variable warning)
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    # every dirstate entry must be consistent with the parent manifests
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and conversely, every first-parent manifest entry must be tracked
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
1021
1020
def debugconfig(ui):
    """show combined config settings from all hgrc files"""
    try:
        # called for its side effects only -- presumably constructing
        # the repo folds its .hg/hgrc into ui's config (verify against
        # hg.repository); the returned object was an unused variable
        # before (pychecker warning)
        hg.repository(ui)
    except hg.RepoError:
        pass
    for section, name, value in ui.walkconfig():
        ui.write('%s.%s=%s\n' % (section, name, value))
1030
1029
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # a missing second parent defaults to the null revision
    if not rev2:
        rev2 = hex(nullid)
    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1042
1041
1043 def debugstate(ui, repo):
1042 def debugstate(ui, repo):
1044 """show the contents of the current dirstate"""
1043 """show the contents of the current dirstate"""
1045 repo.dirstate.read()
1044 repo.dirstate.read()
1046 dc = repo.dirstate.map
1045 dc = repo.dirstate.map
1047 keys = dc.keys()
1046 keys = dc.keys()
1048 keys.sort()
1047 keys.sort()
1049 for file_ in keys:
1048 for file_ in keys:
1050 ui.write("%c %3o %10d %s %s\n"
1049 ui.write("%c %3o %10d %s %s\n"
1051 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1050 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1052 time.strftime("%x %X",
1051 time.strftime("%x %X",
1053 time.localtime(dc[file_][3])), file_))
1052 time.localtime(dc[file_][3])), file_))
1054 for f in repo.dirstate.copies:
1053 for f in repo.dirstate.copies:
1055 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1054 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1056
1055
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # the ".d" data file shares its basename with the ".i" index file
    r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s'), rev)
1064
1063
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd()), file_, "")
    # column header, then one row per revision
    ui.write(" rev offset length base linkrev"
             " nodeid p1 p2\n")
    for idx in range(r.count()):
        entry = r.index[idx]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            idx, entry[0], entry[1], entry[2], entry[3],
            short(entry[6]), short(entry[4]), short(entry[5])))
1075
1074
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd()), file_, "")
    ui.write("digraph G {\n")
    for idx in range(r.count()):
        entry = r.index[idx]
        # one edge per non-null parent, pointing at this revision
        ui.write("\t%d -> %d\n" % (r.rev(entry[4]), idx))
        if entry[5] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(entry[5]), idx))
    ui.write("}\n")
1086
1085
def debugrename(ui, repo, file, rev=None):
    """dump rename information"""
    # resolve the repository-relative path once
    path = relpath(repo, [file])[0]
    r = repo.file(path)
    if not rev:
        n = r.tip()
    else:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            m = repo.manifest.read(change[0])
            n = m[path]
        except (hg.RepoError, KeyError):
            # fall back to treating rev as a filelog revision
            n = r.lookup(rev)
    meta = r.renamed(n)
    if meta:
        ui.write(_("renamed from %s:%s\n") % (meta[0], hex(meta[1])))
    else:
        ui.write(_("not renamed\n"))
1106
1105
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(walk(repo, pats, opts))
    if not items:
        return
    # size the two middle columns to the longest entries
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        row = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % row.rstrip())
1118
1117
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    revs = [repo.lookup(x) for x in opts['rev']]
    if len(revs) > 2:
        raise util.Abort(_("too many revisions to diff"))

    # zero, one or two revisions; missing ones stay None (working dir)
    node1 = None
    node2 = None
    if revs:
        node1 = revs[0]
    if len(revs) > 1:
        node2 = revs[1]

    fns, matchfn, anypats = matchpats(repo, pats, opts)

    dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
           text=opts['text'], opts=opts)
1150
1149
def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
    """write one changeset (header and diff) to its destination file."""
    node = repo.lookup(changeset)
    parents = [p for p in repo.changelog.parents(node) if p != nullid]
    if opts['switch_parent']:
        # diff against the second parent instead of the first
        parents.reverse()
    prev = (parents and parents[0]) or nullid
    change = repo.changelog.read(node)

    fp = make_file(repo, repo.changelog, opts['output'],
                   node=node, total=total, seqno=seqno,
                   revwidth=revwidth)
    if fp != sys.stdout:
        ui.note("%s\n" % fp.name)

    # assemble the patch header, then the commit message
    header = ["# HG changeset patch\n",
              "# User %s\n" % change[1],
              "# Node ID %s\n" % hex(node),
              "# Parent %s\n" % hex(prev)]
    if len(parents) > 1:
        header.append("# Parent %s\n" % hex(parents[1]))
    fp.writelines(header)
    fp.write(change[4].rstrip())
    fp.write("\n\n")

    dodiff(fp, ui, repo, prev, node, text=opts['text'])
    if fp != sys.stdout:
        fp.close()
1177
1176
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent and commit comment.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = list(revrange(ui, repo, changesets))
    total = len(revs)
    # width of the widest revision string, for zero-padded filenames
    revwidth = max(map(len, revs))
    if total > 1:
        ui.note(_("Exporting patches:\n"))
    else:
        ui.note(_("Exporting patch:\n"))
    for seqno, cset in enumerate(revs):
        doexport(ui, repo, cset, seqno + 1, total, revwidth, opts)
1216
1215
def forget(ui, repo, *pats, **opts):
    """don't add the specified files on the next commit

    Undo an 'hg add' scheduled for the next commit.
    """
    # collect files currently in the 'a' (added) state
    forgotten = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if repo.dirstate.state(abs) != 'a':
            continue
        forgotten.append(abs)
        if ui.verbose or not exact:
            ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
    repo.forget(forgotten)
1229
1228
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep.  It only accepts
    Python/Perl regexps.  It searches repository history, not the
    working directory.  It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match.  To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    regexp = re.compile(pattern, reflags)
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # cache of opened filelogs; a file may be visited many times
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every regexp hit
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # a matched line; equality and hashing use the line text only,
        # so a set symmetric difference between two revisions yields
        # exactly the lines whose match status changed
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend
        def __eq__(self, other):
            return self.line == other.line
        def __hash__(self):
            return hash(self.line)

    matches = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, {})
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m[s] = s

    # prev[fn] is the last revision in which fn was examined; it is the
    # revision a change belongs to when walking backwards with --all
    prev = {}
    ucache = {}
    def display(fn, rev, states, prevstates):
        diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
        diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
        counts = {'-': 0, '+': 0}
        filerevmatches = {}
        for l in diff:
            if incrementing or not opts['all']:
                change = ((l in prevstates) and '-') or '+'
                r = rev
            else:
                change = ((l in states) and '-') or '+'
                r = prev[fn]
            # report the revision the change belongs to (r), not the
            # revision currently being walked -- they differ in the
            # backwards --all pass (previously str(rev) was printed
            # and r went unused)
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(trimuser(ui, getchange(r)[1], r,
                                     ucache))
            if opts['files_with_matches']:
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            counts[change] += 1
        return counts['+'], counts['-']

    fstate = {}
    skip = {}
    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
    count = 0
    incrementing = False
    for st, rev, fns in changeiter:
        if st == 'window':
            incrementing = rev
            matches.clear()
        elif st == 'add':
            change = repo.changelog.read(repo.lookup(str(rev)))
            mf = repo.manifest.read(change[0])
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                fstate.setdefault(fn, {})
                try:
                    grepbody(fn, rev, getfile(fn).read(mf[fn]))
                except KeyError:
                    # file not in this manifest
                    pass
        elif st == 'iter':
            states = matches[rev].items()
            states.sort()
            for fn, m in states:
                if fn in skip:
                    continue
                if incrementing or not opts['all'] or fstate[fn]:
                    pos, neg = display(fn, rev, m, fstate[fn])
                    count += pos + neg
                    if pos and not opts['all']:
                        skip[fn] = True
                fstate[fn] = m
                prev[fn] = rev

    if not incrementing:
        # backwards walk: flush the remaining per-file state
        fstate = fstate.items()
        fstate.sort()
        for fn, state in fstate:
            if fn in skip:
                continue
            display(fn, rev, {}, state)
    return (count == 0 and 1) or 0
1369
1369
def heads(ui, repo, **opts):
    """show current repository heads

    Show all repository head changesets.

    Repository "heads" are changesets that don't have children
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.
    """
    rev = opts['rev']
    if rev:
        heads = repo.heads(repo.lookup(rev))
    else:
        heads = repo.heads()
    # branch names are looked up only when requested
    br = None
    if opts['branches']:
        br = repo.branchlookup(heads)
    for n in heads:
        show_changeset(ui, repo, changenode=n, brinfo=br)
1388
1388
def identify(ui, repo):
    """print information about the working copy

    Print a short summary of the current state of the repo.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, followed by a list of tags for this revision.
    """
    parents = [p for p in repo.dirstate.parents() if p != nullid]
    if not parents:
        ui.write(_("unknown\n"))
        return

    hexfunc = ui.verbose and hex or short
    modified, added, removed, deleted, unknown = repo.changes()
    # "+" marks a dirty working directory
    dirty = (modified or added or removed or deleted) and "+" or ""
    ids = '+'.join([hexfunc(parent) for parent in parents])
    output = ["%s%s" % (ids, dirty)]

    if not ui.quiet:
        # multiple tags for a single parent separated by '/'
        parenttags = ['/'.join(tags)
                      for tags in map(repo.nodetags, parents) if tags]
        # tags for multiple parents separated by ' + '
        if parenttags:
            output.append(' + '.join(parenttags))

    ui.write("%s\n" % ' '.join(output))
1418
1418
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    If a patch looks like a mail message (its first line starts with
    "From " or looks like an RFC822 header), it will not be applied
    unless the -f option is used.  The importer neither parses nor
    discards mail headers, so use -f only to override the "mailness"
    safety check, not to import a real mail message.
    """
    patches = (patch1,) + patches

    if not opts['force']:
        modified, added, removed, deleted, unknown = repo.changes()
        if modified or added or removed or deleted:
            raise util.Abort(_("outstanding uncommitted changes"))

    d = opts["base"]
    strip = opts["strip"]

    mailre = re.compile(r'(?:From |[\w-]+:)')

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    # raw strings so the backslashes reach the regexp engine intact
    diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |'
                        r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        r'(---|\*\*\*)[ \t])')

    for patch in patches:
        ui.status(_("applying %s\n") % patch)
        pf = os.path.join(d, patch)

        message = []
        user = None
        hgpatch = False
        for line in file(pf):
            line = line.rstrip()
            if (not message and not hgpatch and
                   mailre.match(line) and not opts['force']):
                if len(line) > 35:
                    line = line[:32] + '...'
                raise util.Abort(_('first line looks like a '
                                   'mail header: ') + line)
            if diffre.match(line):
                # the diff itself starts here; stop scanning the header
                break
            elif hgpatch:
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                    ui.debug(_('User: %s\n') % user)
                elif not line.startswith("# ") and line:
                    message.append(line)
                    hgpatch = False
            elif line == '# HG changeset patch':
                hgpatch = True
                message = []        # We may have collected garbage
            else:
                message.append(line)

        # make sure message isn't empty
        if not message:
            message = _("imported patch %s\n") % patch
        else:
            message = "%s\n" % '\n'.join(message)
        ui.debug(_('message:\n%s\n') % message)

        files = util.patch(strip, pf, ui)

        if len(files) > 0:
            addremove(ui, repo, *files)
        repo.commit(files, message, user)
1494
1494
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified repo or the default
    pull repo. These are the changesets that would be pulled if a pull
    was requested.

    Currently only local repositories are supported.
    """
    source = ui.expandpath(source, repo.root)
    other = hg.repository(ui, source)
    if not other.local():
        raise util.Abort(_("incoming doesn't work for remote repositories yet"))
    incoming_nodes = repo.findincoming(other)
    if not incoming_nodes:
        return
    incoming_nodes = other.changelog.nodesbetween(incoming_nodes)[0]
    if opts['newest_first']:
        incoming_nodes.reverse()
    for n in incoming_nodes:
        parents = [p for p in other.changelog.parents(n) if p != nullid]
        # optionally hide merge changesets
        if opts['no_merges'] and len(parents) == 2:
            continue
        show_changeset(ui, other, changenode=n)
        if opts['patch']:
            prev = (parents and parents[0]) or nullid
            dodiff(ui, ui, other, prev, n)
            ui.write("\n")
1523
1523
def init(ui, dest="."):
    """create a new repository in the given directory

    Initialize a new repository in the given directory, creating the
    directory first if it does not already exist.

    If no directory is given, the current directory is used.
    """
    if not os.path.exists(dest):
        os.mkdir(dest)
    # create=1 writes the .hg metadata inside dest
    hg.repository(ui, dest, create=1)
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns; with no patterns, print every controlled file.

    The search covers the current directory and its subdirectories;
    run from the repository root to search the whole project.

    When piping the result into "xargs", pass "-0" to both this
    command and "xargs" so that file names containing whitespace are
    not split into multiple names.
    """
    # --print0 terminates entries with NUL instead of newline
    sep = opts['print0'] and '\0' or '\n'
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        node = None

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     head='(?:.*/|)'):
        # when walking the working dir, skip files Mercurial does not track
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), sep)
        else:
            # relative names only make sense when patterns were given
            ui.write((pats and rel) or abs, sep)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.
    """
    class dui(object):
        # Implement and delegate some ui protocol.  Save hunks of
        # output for later display in the desired order.
        # walkchangerevs yields revisions in walk order, which is not
        # necessarily display order; output is buffered per revision in
        # self.hunk and flushed when the iterator signals 'iter'.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}
        def bump(self, rev):
            # switch the buffer target to a new revision
            self.rev = rev
            self.hunk[rev] = []
        def note(self, *args):
            # verbose-level output (verbose/quiet/debugflag come from the
            # wrapped ui via __getattr__)
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            # buffer instead of emitting; flushed in the 'iter' phase below
            self.hunk[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            # delegate everything else to the real ui
            return getattr(self.ui, key)
    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
    # changeiter emits ('window', ...), then ('add', rev, fns) per revision,
    # then ('iter', rev, ...) in display order
    for st, rev, fns in changeiter:
        if st == 'window':
            # new window: start a fresh buffering ui
            du = dui(ui)
        elif st == 'add':
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            # ignore the null parent; a "real" merge has two parents
            parents = [p for p in repo.changelog.parents(changenode)
                       if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            br = None
            if opts['keyword']:
                # changes tuple: [1]=user, [3]=files, [4]=description;
                # every keyword must match somewhere or the rev is skipped
                changes = getchange(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            if opts['branch']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            show_changeset(du, repo, rev, brinfo=br)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(du, du, repo, prev, changenode, match=matchfn)
                du.write("\n\n")
        elif st == 'iter':
            # flush this revision's buffered hunks in display order
            for args in du.hunk[rev]:
                ui.write(*args)
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print the list of version controlled files for the given revision,
    one per line with node hash and permission bits.

    With no revision argument, the tip manifest is shown.
    """
    if not rev:
        mnode = repo.manifest.tip()
    else:
        try:
            # treat the argument as a changeset id first ...
            cnode = repo.lookup(rev)
            mnode = repo.changelog.read(cnode)[0]
        except hg.RepoError:
            # ... falling back to a raw manifest revision
            mnode = repo.manifest.lookup(rev)

    contents = repo.manifest.read(mnode)
    flags = repo.manifest.readflags(mnode)
    names = contents.keys()
    names.sort()

    for name in names:
        mode = flags[name] and "755" or "644"
        ui.write("%40s %3s %s\n" % (hex(contents[name]), mode, name))
def outgoing(ui, repo, dest="default-push", **opts):
    """show changesets not found in destination

    Show the changesets that a push to the given repository (or the
    default push repository) would transmit.
    """
    dest = ui.expandpath(dest, repo.root)
    remote = hg.repository(ui, dest)
    missing = repo.findoutgoing(remote)
    nodes = repo.changelog.nodesbetween(missing)[0]
    if opts['newest_first']:
        nodes.reverse()
    for node in nodes:
        real_parents = [p for p in repo.changelog.parents(node)
                        if p != nullid]
        if opts['no_merges'] and len(real_parents) == 2:
            continue
        show_changeset(ui, repo, changenode=node)
        if opts['patch']:
            # diff against the first real parent (or null for a root)
            base = (real_parents and real_parents[0]) or nullid
            dodiff(ui, ui, repo, base, node)
            ui.write("\n")
def parents(ui, repo, rev=None, branch=None):
    """show the parents of the working dir or revision

    Print the parent revisions of the working directory, or of the
    given revision when one is specified.
    """
    if rev:
        nodes = repo.changelog.parents(repo.lookup(rev))
    else:
        nodes = repo.dirstate.parents()

    brinfo = None
    if branch is not None:
        brinfo = repo.branchlookup(nodes)
    for node in nodes:
        # the null parent of a root/non-merge is not shown
        if node == nullid:
            continue
        show_changeset(ui, repo, changenode=node, brinfo=brinfo)
def paths(ui, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.

    Returns 1 (error) when a searched-for name is not defined.
    """
    # Open the repository purely for its side effect: it loads the
    # repo-local .hg/hgrc configuration so [paths] entries defined there
    # are visible below (presumably via the shared ui — TODO confirm).
    # The returned object itself is unused, so it is deliberately not
    # bound to a name (fixes a pychecker unused-variable warning).
    try:
        hg.repository(ui=ui)
    except hg.RepoError:
        # not inside a repository: global/user config only
        pass

    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    All changesets present in the repository at the given path or URL
    are added to the local repository. By default the working
    directory copy of the project is left untouched.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port][/path]
      https://[user@]host[:port][/path]
      ssh://[user@]host[:port][/path]

    SSH requires an accessible shell account on the destination machine
    and a copy of hg in the remote path. With SSH, paths are relative
    to the remote user's home directory by default; use two slashes at
    the start of a path to specify it as relative to the filesystem root.
    """
    source = ui.expandpath(source, repo.root)
    ui.status(_('pulling from %s\n') % (source))

    # command-line overrides for how the remote is contacted
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    heads = None
    if opts['rev']:
        if not other.local():
            raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
        heads = [other.lookup(r) for r in opts['rev']]
    result = repo.pull(other, heads=heads)
    if not result:
        # something was pulled; either update now or remind the user
        if opts['update']:
            return update(ui, repo)
        else:
            ui.status(_("(run 'hg update' to get a working copy)\n"))

    return result
def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull: it moves changes from
    the current repository to a different one. A push to a local
    destination is identical to a pull performed in that directory
    from the current one.

    By default, push refuses to run if it detects the result would
    increase the number of remote heads, which generally means the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port][/path]

    SSH requires an accessible shell account on the destination
    machine and a copy of hg in the remote path.
    """
    dest = ui.expandpath(dest, repo.root)
    ui.status('pushing to %s\n' % (dest))

    # command-line overrides for how the remote is contacted
    if ssh:
        ui.setconfig("ui", "ssh", ssh)
    if remotecmd:
        ui.setconfig("ui", "remotecmd", remotecmd)

    remote = hg.repository(ui, dest)
    return repo.push(remote, force)
1810 def rawcommit(ui, repo, *flist, **rc):
1810 def rawcommit(ui, repo, *flist, **rc):
1811 """raw commit interface (DEPRECATED)
1811 """raw commit interface (DEPRECATED)
1812
1812
1813 Lowlevel commit, for use in helper scripts.
1813 Lowlevel commit, for use in helper scripts.
1814
1814
1815 This command is not intended to be used by normal users, as it is
1815 This command is not intended to be used by normal users, as it is
1816 primarily useful for importing from other SCMs.
1816 primarily useful for importing from other SCMs.
1817
1817
1818 This command is now deprecated and will be removed in a future
1818 This command is now deprecated and will be removed in a future
1819 release, please use debugsetparents and commit instead.
1819 release, please use debugsetparents and commit instead.
1820 """
1820 """
1821
1821
1822 ui.warn(_("(the rawcommit command is deprecated)\n"))
1822 ui.warn(_("(the rawcommit command is deprecated)\n"))
1823
1823
1824 message = rc['message']
1824 message = rc['message']
1825 if not message and rc['logfile']:
1825 if not message and rc['logfile']:
1826 try:
1826 try:
1827 message = open(rc['logfile']).read()
1827 message = open(rc['logfile']).read()
1828 except IOError:
1828 except IOError:
1829 pass
1829 pass
1830 if not message and not rc['logfile']:
1830 if not message and not rc['logfile']:
1831 raise util.Abort(_("missing commit message"))
1831 raise util.Abort(_("missing commit message"))
1832
1832
1833 files = relpath(repo, list(flist))
1833 files = relpath(repo, list(flist))
1834 if rc['files']:
1834 if rc['files']:
1835 files += open(rc['files']).read().splitlines()
1835 files += open(rc['files']).read().splitlines()
1836
1836
1837 rc['parent'] = map(repo.lookup, rc['parent'])
1837 rc['parent'] = map(repo.lookup, rc['parent'])
1838
1838
1839 try:
1839 try:
1840 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1840 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1841 except ValueError, inst:
1841 except ValueError, inst:
1842 raise util.Abort(str(inst))
1842 raise util.Abort(str(inst))
1843
1843
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # only verify when something was actually recovered
    if not repo.recover():
        return False
    return repo.verify()
def remove(ui, repo, pat, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    The files are removed at the next commit. Only the current branch
    is affected, not the project history; files still present in the
    working directory are deleted from it.
    """
    names = []
    def removable(abs, rel, exact):
        # a file may only be removed when it is tracked and unmodified
        modified, added, removed, deleted, unknown = repo.changes(files=[abs])
        reason = None
        if modified:
            reason = _('is modified')
        elif added:
            reason = _('has been marked for add')
        elif unknown:
            reason = _('is not managed')
        if not reason:
            return True
        if exact:
            # only complain about files the user named explicitly;
            # pattern matches are skipped silently
            ui.warn(_('not removing %s: file %s\n') % (rel, reason))
    for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
        if removable(abs, rel, exact):
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            names.append(abs)
    repo.remove(names, unlink=True)
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record rename files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # a rename is a copy followed by removal of the sources
    errs, copied = docopy(ui, repo, pats, opts)
    to_remove = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        to_remove.append(abs)
    repo.remove(to_remove, unlink=True)
    return errs
def revert(ui, repo, *pats, **opts):
    """revert modified files or dirs back to their unmodified states

    Revert any uncommitted modifications made to the named files or
    directories. This restores the contents of the affected files to
    an unmodified state.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    # revert to --rev when given, else to the first dirstate parent
    node = opts['rev'] and repo.lookup(opts['rev']) or \
           repo.dirstate.parents()[0]

    # only the match function from matchpats is needed here; the file
    # list and anypats flag were unused locals (pychecker warnings)
    choose = matchpats(repo, pats, opts)[1]
    # likewise only added/removed/deleted from repo.changes are used
    added, removed, deleted = repo.changes(match=choose)[1:4]
    repo.forget(added)
    repo.undelete(removed + deleted)

    return repo.update(node, False, True, choose, False)
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
1945 def serve(ui, repo, **opts):
1945 def serve(ui, repo, **opts):
1946 """export the repository via HTTP
1946 """export the repository via HTTP
1947
1947
1948 Start a local HTTP repository browser and pull server.
1948 Start a local HTTP repository browser and pull server.
1949
1949
1950 By default, the server logs accesses to stdout and errors to
1950 By default, the server logs accesses to stdout and errors to
1951 stderr. Use the "-A" and "-E" options to log to files.
1951 stderr. Use the "-A" and "-E" options to log to files.
1952 """
1952 """
1953
1953
1954 if opts["stdio"]:
1954 if opts["stdio"]:
1955 fin, fout = sys.stdin, sys.stdout
1955 fin, fout = sys.stdin, sys.stdout
1956 sys.stdout = sys.stderr
1956 sys.stdout = sys.stderr
1957
1957
1958 # Prevent insertion/deletion of CRs
1958 # Prevent insertion/deletion of CRs
1959 util.set_binary(fin)
1959 util.set_binary(fin)
1960 util.set_binary(fout)
1960 util.set_binary(fout)
1961
1961
1962 def getarg():
1962 def getarg():
1963 argline = fin.readline()[:-1]
1963 argline = fin.readline()[:-1]
1964 arg, l = argline.split()
1964 arg, l = argline.split()
1965 val = fin.read(int(l))
1965 val = fin.read(int(l))
1966 return arg, val
1966 return arg, val
1967 def respond(v):
1967 def respond(v):
1968 fout.write("%d\n" % len(v))
1968 fout.write("%d\n" % len(v))
1969 fout.write(v)
1969 fout.write(v)
1970 fout.flush()
1970 fout.flush()
1971
1971
1972 lock = None
1972 lock = None
1973
1973
1974 while 1:
1974 while 1:
1975 cmd = fin.readline()[:-1]
1975 cmd = fin.readline()[:-1]
1976 if cmd == '':
1976 if cmd == '':
1977 return
1977 return
1978 if cmd == "heads":
1978 if cmd == "heads":
1979 h = repo.heads()
1979 h = repo.heads()
1980 respond(" ".join(map(hex, h)) + "\n")
1980 respond(" ".join(map(hex, h)) + "\n")
1981 if cmd == "lock":
1981 if cmd == "lock":
1982 lock = repo.lock()
1982 lock = repo.lock()
1983 respond("")
1983 respond("")
1984 if cmd == "unlock":
1984 if cmd == "unlock":
1985 if lock:
1985 if lock:
1986 lock.release()
1986 lock.release()
1987 lock = None
1987 lock = None
1988 respond("")
1988 respond("")
1989 elif cmd == "branches":
1989 elif cmd == "branches":
1990 arg, nodes = getarg()
1990 arg, nodes = getarg()
1991 nodes = map(bin, nodes.split(" "))
1991 nodes = map(bin, nodes.split(" "))
1992 r = []
1992 r = []
1993 for b in repo.branches(nodes):
1993 for b in repo.branches(nodes):
1994 r.append(" ".join(map(hex, b)) + "\n")
1994 r.append(" ".join(map(hex, b)) + "\n")
1995 respond("".join(r))
1995 respond("".join(r))
1996 elif cmd == "between":
1996 elif cmd == "between":
1997 arg, pairs = getarg()
1997 arg, pairs = getarg()
1998 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1998 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1999 r = []
1999 r = []
2000 for b in repo.between(pairs):
2000 for b in repo.between(pairs):
2001 r.append(" ".join(map(hex, b)) + "\n")
2001 r.append(" ".join(map(hex, b)) + "\n")
2002 respond("".join(r))
2002 respond("".join(r))
2003 elif cmd == "changegroup":
2003 elif cmd == "changegroup":
2004 nodes = []
2004 nodes = []
2005 arg, roots = getarg()
2005 arg, roots = getarg()
2006 nodes = map(bin, roots.split(" "))
2006 nodes = map(bin, roots.split(" "))
2007
2007
2008 cg = repo.changegroup(nodes, 'serve')
2008 cg = repo.changegroup(nodes, 'serve')
2009 while 1:
2009 while 1:
2010 d = cg.read(4096)
2010 d = cg.read(4096)
2011 if not d:
2011 if not d:
2012 break
2012 break
2013 fout.write(d)
2013 fout.write(d)
2014
2014
2015 fout.flush()
2015 fout.flush()
2016
2016
2017 elif cmd == "addchangegroup":
2017 elif cmd == "addchangegroup":
2018 if not lock:
2018 if not lock:
2019 respond("not locked")
2019 respond("not locked")
2020 continue
2020 continue
2021 respond("")
2021 respond("")
2022
2022
2023 r = repo.addchangegroup(fin)
2023 r = repo.addchangegroup(fin)
2024 respond("")
2024 respond("")
2025
2025
2026 optlist = "name templates style address port ipv6 accesslog errorlog"
2026 optlist = "name templates style address port ipv6 accesslog errorlog"
2027 for o in optlist.split():
2027 for o in optlist.split():
2028 if opts[o]:
2028 if opts[o]:
2029 ui.setconfig("web", o, opts[o])
2029 ui.setconfig("web", o, opts[o])
2030
2030
2031 if opts['daemon'] and not opts['daemon_pipefds']:
2031 if opts['daemon'] and not opts['daemon_pipefds']:
2032 rfd, wfd = os.pipe()
2032 rfd, wfd = os.pipe()
2033 args = sys.argv[:]
2033 args = sys.argv[:]
2034 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2034 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2035 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2035 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2036 args[0], args)
2036 args[0], args)
2037 os.close(wfd)
2037 os.close(wfd)
2038 os.read(rfd, 1)
2038 os.read(rfd, 1)
2039 os._exit(0)
2039 os._exit(0)
2040
2040
2041 try:
2041 try:
2042 httpd = hgweb.create_server(repo)
2042 httpd = hgweb.create_server(repo)
2043 except socket.error, inst:
2043 except socket.error, inst:
2044 raise util.Abort(_('cannot start server: ') + inst.args[1])
2044 raise util.Abort(_('cannot start server: ') + inst.args[1])
2045
2045
2046 if ui.verbose:
2046 if ui.verbose:
2047 addr, port = httpd.socket.getsockname()
2047 addr, port = httpd.socket.getsockname()
2048 if addr == '0.0.0.0':
2048 if addr == '0.0.0.0':
2049 addr = socket.gethostname()
2049 addr = socket.gethostname()
2050 else:
2050 else:
2051 try:
2051 try:
2052 addr = socket.gethostbyaddr(addr)[0]
2052 addr = socket.gethostbyaddr(addr)[0]
2053 except socket.error:
2053 except socket.error:
2054 pass
2054 pass
2055 if port != 80:
2055 if port != 80:
2056 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2056 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2057 else:
2057 else:
2058 ui.status(_('listening at http://%s/\n') % addr)
2058 ui.status(_('listening at http://%s/\n') % addr)
2059
2059
2060 if opts['pid_file']:
2060 if opts['pid_file']:
2061 fp = open(opts['pid_file'], 'w')
2061 fp = open(opts['pid_file'], 'w')
2062 fp.write(str(os.getpid()))
2062 fp.write(str(os.getpid()))
2063 fp.close()
2063 fp.close()
2064
2064
2065 if opts['daemon_pipefds']:
2065 if opts['daemon_pipefds']:
2066 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2066 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2067 os.close(rfd)
2067 os.close(rfd)
2068 os.write(wfd, 'y')
2068 os.write(wfd, 'y')
2069 os.close(wfd)
2069 os.close(wfd)
2070 sys.stdout.flush()
2070 sys.stdout.flush()
2071 sys.stderr.flush()
2071 sys.stderr.flush()
2072 fd = os.open(util.nulldev, os.O_RDWR)
2072 fd = os.open(util.nulldev, os.O_RDWR)
2073 if fd != 0: os.dup2(fd, 0)
2073 if fd != 0: os.dup2(fd, 0)
2074 if fd != 1: os.dup2(fd, 1)
2074 if fd != 1: os.dup2(fd, 1)
2075 if fd != 2: os.dup2(fd, 2)
2075 if fd != 2: os.dup2(fd, 2)
2076 if fd not in (0, 1, 2): os.close(fd)
2076 if fd not in (0, 1, 2): os.close(fd)
2077
2077
2078 httpd.serve_forever()
2078 httpd.serve_forever()
2079
2079
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show changed files in the repository. If names are
    given, only files that match are shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    ! = deleted, but still tracked
    ? = not tracked
    """
    files, matchfn, anypats = matchpats(repo, pats, opts)
    # paths are shown relative to the cwd only when patterns were given
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown = [
        [util.pathto(cwd, name) for name in names]
        for names in repo.changes(files=files, match=matchfn)]

    changetypes = [(_('modified'), 'M', modified),
                   (_('added'), 'A', added),
                   (_('removed'), 'R', removed),
                   (_('deleted'), '!', deleted),
                   (_('unknown'), '?', unknown)]

    end = opts['print0'] and '\0' or '\n'

    # limit the output to the explicitly requested types, or show all
    requested = [ct for ct in changetypes if opts[ct[0]]] or changetypes
    for opt, char, changes in requested:
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)
        for f in changes:
            ui.write(format % f)
2117
2117
2118 def tag(ui, repo, name, rev_=None, **opts):
2118 def tag(ui, repo, name, rev_=None, **opts):
2119 """add a tag for the current tip or a given revision
2119 """add a tag for the current tip or a given revision
2120
2120
2121 Name a particular revision using <name>.
2121 Name a particular revision using <name>.
2122
2122
2123 Tags are used to name particular revisions of the repository and are
2123 Tags are used to name particular revisions of the repository and are
2124 very useful to compare different revision, to go back to significant
2124 very useful to compare different revision, to go back to significant
2125 earlier versions or to mark branch points as releases, etc.
2125 earlier versions or to mark branch points as releases, etc.
2126
2126
2127 If no revision is given, the tip is used.
2127 If no revision is given, the tip is used.
2128
2128
2129 To facilitate version control, distribution, and merging of tags,
2129 To facilitate version control, distribution, and merging of tags,
2130 they are stored as a file named ".hgtags" which is managed
2130 they are stored as a file named ".hgtags" which is managed
2131 similarly to other project files and can be hand-edited if
2131 similarly to other project files and can be hand-edited if
2132 necessary. The file '.hg/localtags' is used for local tags (not
2132 necessary. The file '.hg/localtags' is used for local tags (not
2133 shared among repositories).
2133 shared among repositories).
2134 """
2134 """
2135 if name == "tip":
2135 if name == "tip":
2136 raise util.Abort(_("the name 'tip' is reserved"))
2136 raise util.Abort(_("the name 'tip' is reserved"))
2137 if rev_ is not None:
2137 if rev_ is not None:
2138 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2138 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2139 "please use 'hg tag [-r REV] NAME' instead\n"))
2139 "please use 'hg tag [-r REV] NAME' instead\n"))
2140 if opts['rev']:
2140 if opts['rev']:
2141 raise util.Abort(_("use only one form to specify the revision"))
2141 raise util.Abort(_("use only one form to specify the revision"))
2142 if opts['rev']:
2142 if opts['rev']:
2143 rev_ = opts['rev']
2143 rev_ = opts['rev']
2144 if rev_:
2144 if rev_:
2145 r = hex(repo.lookup(rev_))
2145 r = hex(repo.lookup(rev_))
2146 else:
2146 else:
2147 r = hex(repo.changelog.tip())
2147 r = hex(repo.changelog.tip())
2148
2148
2149 disallowed = (revrangesep, '\r', '\n')
2149 disallowed = (revrangesep, '\r', '\n')
2150 for c in disallowed:
2150 for c in disallowed:
2151 if name.find(c) >= 0:
2151 if name.find(c) >= 0:
2152 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2152 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2153
2153
2154 repo.hook('pretag', throw=True, node=r, tag=name,
2154 repo.hook('pretag', throw=True, node=r, tag=name,
2155 local=int(not not opts['local']))
2155 local=int(not not opts['local']))
2156
2156
2157 if opts['local']:
2157 if opts['local']:
2158 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2158 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2159 repo.hook('tag', node=r, tag=name, local=1)
2159 repo.hook('tag', node=r, tag=name, local=1)
2160 return
2160 return
2161
2161
2162 for x in repo.changes():
2162 for x in repo.changes():
2163 if ".hgtags" in x:
2163 if ".hgtags" in x:
2164 raise util.Abort(_("working copy of .hgtags is changed "
2164 raise util.Abort(_("working copy of .hgtags is changed "
2165 "(please commit .hgtags manually)"))
2165 "(please commit .hgtags manually)"))
2166
2166
2167 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2167 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2168 if repo.dirstate.state(".hgtags") == '?':
2168 if repo.dirstate.state(".hgtags") == '?':
2169 repo.add([".hgtags"])
2169 repo.add([".hgtags"])
2170
2170
2171 message = (opts['message'] or
2171 message = (opts['message'] or
2172 _("Added tag %s for changeset %s") % (name, r))
2172 _("Added tag %s for changeset %s") % (name, r))
2173 try:
2173 try:
2174 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2174 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2175 repo.hook('tag', node=r, tag=name, local=0)
2175 repo.hook('tag', node=r, tag=name, local=0)
2176 except ValueError, inst:
2176 except ValueError, inst:
2177 raise util.Abort(str(inst))
2177 raise util.Abort(str(inst))
2178
2178
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """
    taglist = repo.tagslist()
    taglist.reverse()
    for tagname, node in taglist:
        try:
            info = "%5d:%s" % (repo.changelog.rev(node), hex(node))
        except KeyError:
            # the tagged node is not present in this repository
            info = "    ?:?"
        ui.write("%-30s %s\n" % (tagname, info))
2195
2195
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    tipnode = repo.changelog.tip()
    show_changeset(ui, repo, changenode=tipnode)
    if opts['patch']:
        # show the diff against the tip's first parent
        dodiff(ui, ui, repo, repo.changelog.parents(tipnode)[0], tipnode)
2205
2205
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    # bundle files start with a 4-byte magic identifying the format
    if f.read(4) != "HG10":
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)

    def bunzip(chunks):
        # lazily decompress the bz2-compressed changegroup stream
        zd = bz2.BZ2Decompressor()
        for chunk in chunks:
            yield zd.decompress(chunk)

    gen = bunzip(util.filechunkiter(f, 4096))
    if repo.addchangegroup(util.chunkbuffer(gen)):
        return 1

    if opts['update']:
        return update(ui, repo)
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2230
2230
def undo(ui, repo):
    """undo the last commit or pull

    Roll back the last pull or commit transaction on the
    repository, restoring the project to its earlier state.

    This command should be used with care. There is only one level of
    undo and there is no redo.

    This command is not intended for use on public repositories. Once
    a change is visible for pull by other users, undoing it locally is
    ineffective.
    """
    # delegate entirely to the repository's transaction rollback
    repo.undo()
2245
2245
def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    Otherwise the result is a merge between the contents of the
    current working directory and the requested version. Files that
    changed between either parent are marked as changed for the next
    commit and a commit must be performed before any further updates
    are allowed.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if branch:
        # map heads to branch names and keep the heads on this branch
        br = repo.branchlookup(branch=branch)
        found = [head for head in br if branch in br[head]]
        if len(found) > 1:
            # ambiguous: let the user pick a head explicitly
            ui.warn(_("Found multiple heads for %s\n") % branch)
            for head in found:
                show_changeset(ui, repo, changenode=head, brinfo=br)
            return 1
        if len(found) == 1:
            node = found[0]
            ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
        else:
            ui.warn(_("branch %s not found\n") % (branch))
            return 1
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean, forcemerge=force)
2285
2285
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # the repository object implements the actual checks
    return repo.verify()
2297
2297
2298 # Command options and aliases are listed here, alphabetically
2298 # Command options and aliases are listed here, alphabetically
2299
2299
2300 table = {
2300 table = {
2301 "^add":
2301 "^add":
2302 (add,
2302 (add,
2303 [('I', 'include', [], _('include names matching the given patterns')),
2303 [('I', 'include', [], _('include names matching the given patterns')),
2304 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2304 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2305 _('hg add [OPTION]... [FILE]...')),
2305 _('hg add [OPTION]... [FILE]...')),
2306 "addremove":
2306 "addremove":
2307 (addremove,
2307 (addremove,
2308 [('I', 'include', [], _('include names matching the given patterns')),
2308 [('I', 'include', [], _('include names matching the given patterns')),
2309 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2309 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2310 _('hg addremove [OPTION]... [FILE]...')),
2310 _('hg addremove [OPTION]... [FILE]...')),
2311 "^annotate":
2311 "^annotate":
2312 (annotate,
2312 (annotate,
2313 [('r', 'rev', '', _('annotate the specified revision')),
2313 [('r', 'rev', '', _('annotate the specified revision')),
2314 ('a', 'text', None, _('treat all files as text')),
2314 ('a', 'text', None, _('treat all files as text')),
2315 ('u', 'user', None, _('list the author')),
2315 ('u', 'user', None, _('list the author')),
2316 ('d', 'date', None, _('list the date')),
2316 ('d', 'date', None, _('list the date')),
2317 ('n', 'number', None, _('list the revision number (default)')),
2317 ('n', 'number', None, _('list the revision number (default)')),
2318 ('c', 'changeset', None, _('list the changeset')),
2318 ('c', 'changeset', None, _('list the changeset')),
2319 ('I', 'include', [], _('include names matching the given patterns')),
2319 ('I', 'include', [], _('include names matching the given patterns')),
2320 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2320 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2321 _('hg annotate [OPTION]... FILE...')),
2321 _('hg annotate [OPTION]... FILE...')),
2322 "bundle":
2322 "bundle":
2323 (bundle,
2323 (bundle,
2324 [],
2324 [],
2325 _('hg bundle FILE DEST')),
2325 _('hg bundle FILE DEST')),
2326 "cat":
2326 "cat":
2327 (cat,
2327 (cat,
2328 [('I', 'include', [], _('include names matching the given patterns')),
2328 [('I', 'include', [], _('include names matching the given patterns')),
2329 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2329 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2330 ('o', 'output', '', _('print output to file with formatted name')),
2330 ('o', 'output', '', _('print output to file with formatted name')),
2331 ('r', 'rev', '', _('print the given revision'))],
2331 ('r', 'rev', '', _('print the given revision'))],
2332 _('hg cat [OPTION]... FILE...')),
2332 _('hg cat [OPTION]... FILE...')),
2333 "^clone":
2333 "^clone":
2334 (clone,
2334 (clone,
2335 [('U', 'noupdate', None, _('do not update the new working directory')),
2335 [('U', 'noupdate', None, _('do not update the new working directory')),
2336 ('e', 'ssh', '', _('specify ssh command to use')),
2336 ('e', 'ssh', '', _('specify ssh command to use')),
2337 ('', 'pull', None, _('use pull protocol to copy metadata')),
2337 ('', 'pull', None, _('use pull protocol to copy metadata')),
2338 ('r', 'rev', [],
2338 ('r', 'rev', [],
2339 _('a changeset you would like to have after cloning')),
2339 _('a changeset you would like to have after cloning')),
2340 ('', 'remotecmd', '',
2340 ('', 'remotecmd', '',
2341 _('specify hg command to run on the remote side'))],
2341 _('specify hg command to run on the remote side'))],
2342 _('hg clone [OPTION]... SOURCE [DEST]')),
2342 _('hg clone [OPTION]... SOURCE [DEST]')),
2343 "^commit|ci":
2343 "^commit|ci":
2344 (commit,
2344 (commit,
2345 [('A', 'addremove', None, _('run addremove during commit')),
2345 [('A', 'addremove', None, _('run addremove during commit')),
2346 ('I', 'include', [], _('include names matching the given patterns')),
2346 ('I', 'include', [], _('include names matching the given patterns')),
2347 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2347 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2348 ('m', 'message', '', _('use <text> as commit message')),
2348 ('m', 'message', '', _('use <text> as commit message')),
2349 ('l', 'logfile', '', _('read the commit message from <file>')),
2349 ('l', 'logfile', '', _('read the commit message from <file>')),
2350 ('d', 'date', '', _('record datecode as commit date')),
2350 ('d', 'date', '', _('record datecode as commit date')),
2351 ('u', 'user', '', _('record user as commiter'))],
2351 ('u', 'user', '', _('record user as commiter'))],
2352 _('hg commit [OPTION]... [FILE]...')),
2352 _('hg commit [OPTION]... [FILE]...')),
2353 "copy|cp":
2353 "copy|cp":
2354 (copy,
2354 (copy,
2355 [('I', 'include', [], _('include names matching the given patterns')),
2355 [('I', 'include', [], _('include names matching the given patterns')),
2356 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2356 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2357 ('A', 'after', None, _('record a copy that has already occurred')),
2357 ('A', 'after', None, _('record a copy that has already occurred')),
2358 ('f', 'force', None,
2358 ('f', 'force', None,
2359 _('forcibly copy over an existing managed file'))],
2359 _('forcibly copy over an existing managed file'))],
2360 _('hg copy [OPTION]... [SOURCE]... DEST')),
2360 _('hg copy [OPTION]... [SOURCE]... DEST')),
2361 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2361 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2362 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2362 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2363 "debugconfig": (debugconfig, [], _('debugconfig')),
2363 "debugconfig": (debugconfig, [], _('debugconfig')),
2364 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2364 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2365 "debugstate": (debugstate, [], _('debugstate')),
2365 "debugstate": (debugstate, [], _('debugstate')),
2366 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2366 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2367 "debugindex": (debugindex, [], _('debugindex FILE')),
2367 "debugindex": (debugindex, [], _('debugindex FILE')),
2368 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2368 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2369 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2369 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2370 "debugwalk":
2370 "debugwalk":
2371 (debugwalk,
2371 (debugwalk,
2372 [('I', 'include', [], _('include names matching the given patterns')),
2372 [('I', 'include', [], _('include names matching the given patterns')),
2373 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2373 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2374 _('debugwalk [OPTION]... [FILE]...')),
2374 _('debugwalk [OPTION]... [FILE]...')),
2375 "^diff":
2375 "^diff":
2376 (diff,
2376 (diff,
2377 [('r', 'rev', [], _('revision')),
2377 [('r', 'rev', [], _('revision')),
2378 ('a', 'text', None, _('treat all files as text')),
2378 ('a', 'text', None, _('treat all files as text')),
2379 ('I', 'include', [], _('include names matching the given patterns')),
2379 ('I', 'include', [], _('include names matching the given patterns')),
2380 ('p', 'show-function', None,
2380 ('p', 'show-function', None,
2381 _('show which function each change is in')),
2381 _('show which function each change is in')),
2382 ('w', 'ignore-all-space', None,
2382 ('w', 'ignore-all-space', None,
2383 _('ignore white space when comparing lines')),
2383 _('ignore white space when comparing lines')),
2384 ('X', 'exclude', [],
2384 ('X', 'exclude', [],
2385 _('exclude names matching the given patterns'))],
2385 _('exclude names matching the given patterns'))],
2386 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2386 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2387 "^export":
2387 "^export":
2388 (export,
2388 (export,
2389 [('o', 'output', '', _('print output to file with formatted name')),
2389 [('o', 'output', '', _('print output to file with formatted name')),
2390 ('a', 'text', None, _('treat all files as text')),
2390 ('a', 'text', None, _('treat all files as text')),
2391 ('', 'switch-parent', None, _('diff against the second parent'))],
2391 ('', 'switch-parent', None, _('diff against the second parent'))],
2392 _('hg export [-a] [-o OUTFILE] REV...')),
2392 _('hg export [-a] [-o OUTFILE] REV...')),
2393 "forget":
2393 "forget":
2394 (forget,
2394 (forget,
2395 [('I', 'include', [], _('include names matching the given patterns')),
2395 [('I', 'include', [], _('include names matching the given patterns')),
2396 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2396 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2397 _('hg forget [OPTION]... FILE...')),
2397 _('hg forget [OPTION]... FILE...')),
2398 "grep":
2398 "grep":
2399 (grep,
2399 (grep,
2400 [('0', 'print0', None, _('end fields with NUL')),
2400 [('0', 'print0', None, _('end fields with NUL')),
2401 ('I', 'include', [], _('include names matching the given patterns')),
2401 ('I', 'include', [], _('include names matching the given patterns')),
2402 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2402 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2403 ('', 'all', None, _('print all revisions that match')),
2403 ('', 'all', None, _('print all revisions that match')),
2404 ('i', 'ignore-case', None, _('ignore case when matching')),
2404 ('i', 'ignore-case', None, _('ignore case when matching')),
2405 ('l', 'files-with-matches', None,
2405 ('l', 'files-with-matches', None,
2406 _('print only filenames and revs that match')),
2406 _('print only filenames and revs that match')),
2407 ('n', 'line-number', None, _('print matching line numbers')),
2407 ('n', 'line-number', None, _('print matching line numbers')),
2408 ('r', 'rev', [], _('search in given revision range')),
2408 ('r', 'rev', [], _('search in given revision range')),
2409 ('u', 'user', None, _('print user who committed change'))],
2409 ('u', 'user', None, _('print user who committed change'))],
2410 _('hg grep [OPTION]... PATTERN [FILE]...')),
2410 _('hg grep [OPTION]... PATTERN [FILE]...')),
2411 "heads":
2411 "heads":
2412 (heads,
2412 (heads,
2413 [('b', 'branches', None, _('find branch info')),
2413 [('b', 'branches', None, _('find branch info')),
2414 ('r', 'rev', '', _('show only heads which are descendants of rev'))],
2414 ('r', 'rev', '', _('show only heads which are descendants of rev'))],
2415 _('hg heads [-b] [-r <rev>]')),
2415 _('hg heads [-b] [-r <rev>]')),
2416 "help": (help_, [], _('hg help [COMMAND]')),
2416 "help": (help_, [], _('hg help [COMMAND]')),
2417 "identify|id": (identify, [], _('hg identify')),
2417 "identify|id": (identify, [], _('hg identify')),
2418 "import|patch":
2418 "import|patch":
2419 (import_,
2419 (import_,
2420 [('p', 'strip', 1,
2420 [('p', 'strip', 1,
2421 _('directory strip option for patch. This has the same\n') +
2421 _('directory strip option for patch. This has the same\n') +
2422 _('meaning as the corresponding patch option')),
2422 _('meaning as the corresponding patch option')),
2423 ('f', 'force', None,
2423 ('f', 'force', None,
2424 _('skip check for outstanding uncommitted changes')),
2424 _('skip check for outstanding uncommitted changes')),
2425 ('b', 'base', '', _('base path'))],
2425 ('b', 'base', '', _('base path'))],
2426 _('hg import [-f] [-p NUM] [-b BASE] PATCH...')),
2426 _('hg import [-f] [-p NUM] [-b BASE] PATCH...')),
2427 "incoming|in": (incoming,
2427 "incoming|in": (incoming,
2428 [('M', 'no-merges', None, _('do not show merges')),
2428 [('M', 'no-merges', None, _('do not show merges')),
2429 ('p', 'patch', None, _('show patch')),
2429 ('p', 'patch', None, _('show patch')),
2430 ('n', 'newest-first', None, _('show newest record first'))],
2430 ('n', 'newest-first', None, _('show newest record first'))],
2431 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2431 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2432 "^init": (init, [], _('hg init [DEST]')),
2432 "^init": (init, [], _('hg init [DEST]')),
2433 "locate":
2433 "locate":
2434 (locate,
2434 (locate,
2435 [('r', 'rev', '', _('search the repository as it stood at rev')),
2435 [('r', 'rev', '', _('search the repository as it stood at rev')),
2436 ('0', 'print0', None,
2436 ('0', 'print0', None,
2437 _('end filenames with NUL, for use with xargs')),
2437 _('end filenames with NUL, for use with xargs')),
2438 ('f', 'fullpath', None,
2438 ('f', 'fullpath', None,
2439 _('print complete paths from the filesystem root')),
2439 _('print complete paths from the filesystem root')),
2440 ('I', 'include', [], _('include names matching the given patterns')),
2440 ('I', 'include', [], _('include names matching the given patterns')),
2441 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2441 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2442 _('hg locate [OPTION]... [PATTERN]...')),
2442 _('hg locate [OPTION]... [PATTERN]...')),
2443 "^log|history":
2443 "^log|history":
2444 (log,
2444 (log,
2445 [('I', 'include', [], _('include names matching the given patterns')),
2445 [('I', 'include', [], _('include names matching the given patterns')),
2446 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2446 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2447 ('b', 'branch', None, _('show branches')),
2447 ('b', 'branch', None, _('show branches')),
2448 ('k', 'keyword', [], _('search for a keyword')),
2448 ('k', 'keyword', [], _('search for a keyword')),
2449 ('r', 'rev', [], _('show the specified revision or range')),
2449 ('r', 'rev', [], _('show the specified revision or range')),
2450 ('M', 'no-merges', None, _('do not show merges')),
2450 ('M', 'no-merges', None, _('do not show merges')),
2451 ('m', 'only-merges', None, _('show only merges')),
2451 ('m', 'only-merges', None, _('show only merges')),
2452 ('p', 'patch', None, _('show patch'))],
2452 ('p', 'patch', None, _('show patch'))],
2453 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2453 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2454 "manifest": (manifest, [], _('hg manifest [REV]')),
2454 "manifest": (manifest, [], _('hg manifest [REV]')),
2455 "outgoing|out": (outgoing,
2455 "outgoing|out": (outgoing,
2456 [('M', 'no-merges', None, _('do not show merges')),
2456 [('M', 'no-merges', None, _('do not show merges')),
2457 ('p', 'patch', None, _('show patch')),
2457 ('p', 'patch', None, _('show patch')),
2458 ('n', 'newest-first', None, _('show newest record first'))],
2458 ('n', 'newest-first', None, _('show newest record first'))],
2459 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2459 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2460 "^parents":
2460 "^parents":
2461 (parents,
2461 (parents,
2462 [('b', 'branch', None, _('show branches'))],
2462 [('b', 'branch', None, _('show branches'))],
2463 _('hg parents [-b] [REV]')),
2463 _('hg parents [-b] [REV]')),
2464 "paths": (paths, [], _('hg paths [NAME]')),
2464 "paths": (paths, [], _('hg paths [NAME]')),
2465 "^pull":
2465 "^pull":
2466 (pull,
2466 (pull,
2467 [('u', 'update', None,
2467 [('u', 'update', None,
2468 _('update the working directory to tip after pull')),
2468 _('update the working directory to tip after pull')),
2469 ('e', 'ssh', '', _('specify ssh command to use')),
2469 ('e', 'ssh', '', _('specify ssh command to use')),
2470 ('r', 'rev', [], _('a specific revision you would like to pull')),
2470 ('r', 'rev', [], _('a specific revision you would like to pull')),
2471 ('', 'remotecmd', '',
2471 ('', 'remotecmd', '',
2472 _('specify hg command to run on the remote side'))],
2472 _('specify hg command to run on the remote side'))],
2473 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2473 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2474 "^push":
2474 "^push":
2475 (push,
2475 (push,
2476 [('f', 'force', None, _('force push')),
2476 [('f', 'force', None, _('force push')),
2477 ('e', 'ssh', '', _('specify ssh command to use')),
2477 ('e', 'ssh', '', _('specify ssh command to use')),
2478 ('', 'remotecmd', '',
2478 ('', 'remotecmd', '',
2479 _('specify hg command to run on the remote side'))],
2479 _('specify hg command to run on the remote side'))],
2480 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2480 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2481 "rawcommit":
2481 "rawcommit":
2482 (rawcommit,
2482 (rawcommit,
2483 [('p', 'parent', [], _('parent')),
2483 [('p', 'parent', [], _('parent')),
2484 ('d', 'date', '', _('date code')),
2484 ('d', 'date', '', _('date code')),
2485 ('u', 'user', '', _('user')),
2485 ('u', 'user', '', _('user')),
2486 ('F', 'files', '', _('file list')),
2486 ('F', 'files', '', _('file list')),
2487 ('m', 'message', '', _('commit message')),
2487 ('m', 'message', '', _('commit message')),
2488 ('l', 'logfile', '', _('commit message file'))],
2488 ('l', 'logfile', '', _('commit message file'))],
2489 _('hg rawcommit [OPTION]... [FILE]...')),
2489 _('hg rawcommit [OPTION]... [FILE]...')),
2490 "recover": (recover, [], _('hg recover')),
2490 "recover": (recover, [], _('hg recover')),
2491 "^remove|rm":
2491 "^remove|rm":
2492 (remove,
2492 (remove,
2493 [('I', 'include', [], _('include names matching the given patterns')),
2493 [('I', 'include', [], _('include names matching the given patterns')),
2494 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2494 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2495 _('hg remove [OPTION]... FILE...')),
2495 _('hg remove [OPTION]... FILE...')),
2496 "rename|mv":
2496 "rename|mv":
2497 (rename,
2497 (rename,
2498 [('I', 'include', [], _('include names matching the given patterns')),
2498 [('I', 'include', [], _('include names matching the given patterns')),
2499 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2499 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2500 ('A', 'after', None, _('record a rename that has already occurred')),
2500 ('A', 'after', None, _('record a rename that has already occurred')),
2501 ('f', 'force', None,
2501 ('f', 'force', None,
2502 _('forcibly copy over an existing managed file'))],
2502 _('forcibly copy over an existing managed file'))],
2503 _('hg rename [OPTION]... [SOURCE]... DEST')),
2503 _('hg rename [OPTION]... [SOURCE]... DEST')),
2504 "^revert":
2504 "^revert":
2505 (revert,
2505 (revert,
2506 [('I', 'include', [], _('include names matching the given patterns')),
2506 [('I', 'include', [], _('include names matching the given patterns')),
2507 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2507 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2508 ('r', 'rev', '', _('revision to revert to'))],
2508 ('r', 'rev', '', _('revision to revert to'))],
2509 _('hg revert [-n] [-r REV] [NAME]...')),
2509 _('hg revert [-n] [-r REV] [NAME]...')),
2510 "root": (root, [], _('hg root')),
2510 "root": (root, [], _('hg root')),
2511 "^serve":
2511 "^serve":
2512 (serve,
2512 (serve,
2513 [('A', 'accesslog', '', _('name of access log file to write to')),
2513 [('A', 'accesslog', '', _('name of access log file to write to')),
2514 ('d', 'daemon', None, _('run server in background')),
2514 ('d', 'daemon', None, _('run server in background')),
2515 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2515 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2516 ('E', 'errorlog', '', _('name of error log file to write to')),
2516 ('E', 'errorlog', '', _('name of error log file to write to')),
2517 ('p', 'port', 0, _('port to use (default: 8000)')),
2517 ('p', 'port', 0, _('port to use (default: 8000)')),
2518 ('a', 'address', '', _('address to use')),
2518 ('a', 'address', '', _('address to use')),
2519 ('n', 'name', '',
2519 ('n', 'name', '',
2520 _('name to show in web pages (default: working dir)')),
2520 _('name to show in web pages (default: working dir)')),
2521 ('', 'pid-file', '', _('name of file to write process ID to')),
2521 ('', 'pid-file', '', _('name of file to write process ID to')),
2522 ('', 'stdio', None, _('for remote clients')),
2522 ('', 'stdio', None, _('for remote clients')),
2523 ('t', 'templates', '', _('web templates to use')),
2523 ('t', 'templates', '', _('web templates to use')),
2524 ('', 'style', '', _('template style to use')),
2524 ('', 'style', '', _('template style to use')),
2525 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2525 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2526 _('hg serve [OPTION]...')),
2526 _('hg serve [OPTION]...')),
2527 "^status|st":
2527 "^status|st":
2528 (status,
2528 (status,
2529 [('m', 'modified', None, _('show only modified files')),
2529 [('m', 'modified', None, _('show only modified files')),
2530 ('a', 'added', None, _('show only added files')),
2530 ('a', 'added', None, _('show only added files')),
2531 ('r', 'removed', None, _('show only removed files')),
2531 ('r', 'removed', None, _('show only removed files')),
2532 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2532 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2533 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2533 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2534 ('n', 'no-status', None, _('hide status prefix')),
2534 ('n', 'no-status', None, _('hide status prefix')),
2535 ('0', 'print0', None,
2535 ('0', 'print0', None,
2536 _('end filenames with NUL, for use with xargs')),
2536 _('end filenames with NUL, for use with xargs')),
2537 ('I', 'include', [], _('include names matching the given patterns')),
2537 ('I', 'include', [], _('include names matching the given patterns')),
2538 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2538 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2539 _('hg status [OPTION]... [FILE]...')),
2539 _('hg status [OPTION]... [FILE]...')),
2540 "tag":
2540 "tag":
2541 (tag,
2541 (tag,
2542 [('l', 'local', None, _('make the tag local')),
2542 [('l', 'local', None, _('make the tag local')),
2543 ('m', 'message', '', _('message for tag commit log entry')),
2543 ('m', 'message', '', _('message for tag commit log entry')),
2544 ('d', 'date', '', _('record datecode as commit date')),
2544 ('d', 'date', '', _('record datecode as commit date')),
2545 ('u', 'user', '', _('record user as commiter')),
2545 ('u', 'user', '', _('record user as commiter')),
2546 ('r', 'rev', '', _('revision to tag'))],
2546 ('r', 'rev', '', _('revision to tag'))],
2547 _('hg tag [-r REV] [OPTION]... NAME')),
2547 _('hg tag [-r REV] [OPTION]... NAME')),
2548 "tags": (tags, [], _('hg tags')),
2548 "tags": (tags, [], _('hg tags')),
2549 "tip": (tip, [('p', 'patch', None, _('show patch'))], _('hg tip')),
2549 "tip": (tip, [('p', 'patch', None, _('show patch'))], _('hg tip')),
2550 "unbundle":
2550 "unbundle":
2551 (unbundle,
2551 (unbundle,
2552 [('u', 'update', None,
2552 [('u', 'update', None,
2553 _('update the working directory to tip after unbundle'))],
2553 _('update the working directory to tip after unbundle'))],
2554 _('hg unbundle [-u] FILE')),
2554 _('hg unbundle [-u] FILE')),
2555 "undo": (undo, [], _('hg undo')),
2555 "undo": (undo, [], _('hg undo')),
2556 "^update|up|checkout|co":
2556 "^update|up|checkout|co":
2557 (update,
2557 (update,
2558 [('b', 'branch', '', _('checkout the head of a specific branch')),
2558 [('b', 'branch', '', _('checkout the head of a specific branch')),
2559 ('m', 'merge', None, _('allow merging of branches')),
2559 ('m', 'merge', None, _('allow merging of branches')),
2560 ('C', 'clean', None, _('overwrite locally modified files')),
2560 ('C', 'clean', None, _('overwrite locally modified files')),
2561 ('f', 'force', None, _('force a merge with outstanding changes'))],
2561 ('f', 'force', None, _('force a merge with outstanding changes'))],
2562 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2562 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2563 "verify": (verify, [], _('hg verify')),
2563 "verify": (verify, [], _('hg verify')),
2564 "version": (show_version, [], _('hg version')),
2564 "version": (show_version, [], _('hg version')),
2565 }
2565 }
2566
2566
2567 globalopts = [
2567 globalopts = [
2568 ('R', 'repository', '', _('repository root directory')),
2568 ('R', 'repository', '', _('repository root directory')),
2569 ('', 'cwd', '', _('change working directory')),
2569 ('', 'cwd', '', _('change working directory')),
2570 ('y', 'noninteractive', None,
2570 ('y', 'noninteractive', None,
2571 _('do not prompt, assume \'yes\' for any required answers')),
2571 _('do not prompt, assume \'yes\' for any required answers')),
2572 ('q', 'quiet', None, _('suppress output')),
2572 ('q', 'quiet', None, _('suppress output')),
2573 ('v', 'verbose', None, _('enable additional output')),
2573 ('v', 'verbose', None, _('enable additional output')),
2574 ('', 'debug', None, _('enable debugging output')),
2574 ('', 'debug', None, _('enable debugging output')),
2575 ('', 'debugger', None, _('start debugger')),
2575 ('', 'debugger', None, _('start debugger')),
2576 ('', 'traceback', None, _('print traceback on exception')),
2576 ('', 'traceback', None, _('print traceback on exception')),
2577 ('', 'time', None, _('time how long the command takes')),
2577 ('', 'time', None, _('time how long the command takes')),
2578 ('', 'profile', None, _('print command execution profile')),
2578 ('', 'profile', None, _('print command execution profile')),
2579 ('', 'version', None, _('output version information and exit')),
2579 ('', 'version', None, _('output version information and exit')),
2580 ('h', 'help', None, _('display help and exit')),
2580 ('h', 'help', None, _('display help and exit')),
2581 ]
2581 ]
2582
2582
2583 norepo = ("clone init version help debugancestor debugconfig debugdata"
2583 norepo = ("clone init version help debugancestor debugconfig debugdata"
2584 " debugindex debugindexdot paths")
2584 " debugindex debugindexdot paths")
2585
2585
2586 def find(cmd):
2586 def find(cmd):
2587 """Return (aliases, command table entry) for command string."""
2587 """Return (aliases, command table entry) for command string."""
2588 choice = None
2588 choice = None
2589 count = 0
2589 count = 0
2590 for e in table.keys():
2590 for e in table.keys():
2591 aliases = e.lstrip("^").split("|")
2591 aliases = e.lstrip("^").split("|")
2592 if cmd in aliases:
2592 if cmd in aliases:
2593 return aliases, table[e]
2593 return aliases, table[e]
2594 for a in aliases:
2594 for a in aliases:
2595 if a.startswith(cmd):
2595 if a.startswith(cmd):
2596 count += 1
2596 count += 1
2597 choice = aliases, table[e]
2597 choice = aliases, table[e]
2598 break
2598 break
2599
2599
2600 if count > 1:
2600 if count > 1:
2601 raise AmbiguousCommand(cmd)
2601 raise AmbiguousCommand(cmd)
2602
2602
2603 if choice:
2603 if choice:
2604 return choice
2604 return choice
2605
2605
2606 raise UnknownCommand(cmd)
2606 raise UnknownCommand(cmd)
2607
2607
2608 class SignalInterrupt(Exception):
2608 class SignalInterrupt(Exception):
2609 """Exception raised on SIGTERM and SIGHUP."""
2609 """Exception raised on SIGTERM and SIGHUP."""
2610
2610
2611 def catchterm(*args):
2611 def catchterm(*args):
2612 raise SignalInterrupt
2612 raise SignalInterrupt
2613
2613
2614 def run():
2614 def run():
2615 sys.exit(dispatch(sys.argv[1:]))
2615 sys.exit(dispatch(sys.argv[1:]))
2616
2616
2617 class ParseError(Exception):
2617 class ParseError(Exception):
2618 """Exception raised on errors in parsing the command line."""
2618 """Exception raised on errors in parsing the command line."""
2619
2619
2620 def parse(ui, args):
2620 def parse(ui, args):
2621 options = {}
2621 options = {}
2622 cmdoptions = {}
2622 cmdoptions = {}
2623
2623
2624 try:
2624 try:
2625 args = fancyopts.fancyopts(args, globalopts, options)
2625 args = fancyopts.fancyopts(args, globalopts, options)
2626 except fancyopts.getopt.GetoptError, inst:
2626 except fancyopts.getopt.GetoptError, inst:
2627 raise ParseError(None, inst)
2627 raise ParseError(None, inst)
2628
2628
2629 if args:
2629 if args:
2630 cmd, args = args[0], args[1:]
2630 cmd, args = args[0], args[1:]
2631 aliases, i = find(cmd)
2631 aliases, i = find(cmd)
2632 cmd = aliases[0]
2632 cmd = aliases[0]
2633 defaults = ui.config("defaults", cmd)
2633 defaults = ui.config("defaults", cmd)
2634 if defaults:
2634 if defaults:
2635 args = defaults.split() + args
2635 args = defaults.split() + args
2636 c = list(i[1])
2636 c = list(i[1])
2637 else:
2637 else:
2638 cmd = None
2638 cmd = None
2639 c = []
2639 c = []
2640
2640
2641 # combine global options into local
2641 # combine global options into local
2642 for o in globalopts:
2642 for o in globalopts:
2643 c.append((o[0], o[1], options[o[1]], o[3]))
2643 c.append((o[0], o[1], options[o[1]], o[3]))
2644
2644
2645 try:
2645 try:
2646 args = fancyopts.fancyopts(args, c, cmdoptions)
2646 args = fancyopts.fancyopts(args, c, cmdoptions)
2647 except fancyopts.getopt.GetoptError, inst:
2647 except fancyopts.getopt.GetoptError, inst:
2648 raise ParseError(cmd, inst)
2648 raise ParseError(cmd, inst)
2649
2649
2650 # separate global options back out
2650 # separate global options back out
2651 for o in globalopts:
2651 for o in globalopts:
2652 n = o[1]
2652 n = o[1]
2653 options[n] = cmdoptions[n]
2653 options[n] = cmdoptions[n]
2654 del cmdoptions[n]
2654 del cmdoptions[n]
2655
2655
2656 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2656 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2657
2657
2658 def dispatch(args):
2658 def dispatch(args):
2659 signal.signal(signal.SIGTERM, catchterm)
2659 signal.signal(signal.SIGTERM, catchterm)
2660 try:
2660 try:
2661 signal.signal(signal.SIGHUP, catchterm)
2661 signal.signal(signal.SIGHUP, catchterm)
2662 except AttributeError:
2662 except AttributeError:
2663 pass
2663 pass
2664
2664
2665 try:
2665 try:
2666 u = ui.ui()
2666 u = ui.ui()
2667 except util.Abort, inst:
2667 except util.Abort, inst:
2668 sys.stderr.write(_("abort: %s\n") % inst)
2668 sys.stderr.write(_("abort: %s\n") % inst)
2669 sys.exit(1)
2669 sys.exit(1)
2670
2670
2671 external = []
2671 external = []
2672 for x in u.extensions():
2672 for x in u.extensions():
2673 def on_exception(exc, inst):
2673 def on_exception(exc, inst):
2674 u.warn(_("*** failed to import extension %s\n") % x[1])
2674 u.warn(_("*** failed to import extension %s\n") % x[1])
2675 u.warn("%s\n" % inst)
2675 u.warn("%s\n" % inst)
2676 if "--traceback" in sys.argv[1:]:
2676 if "--traceback" in sys.argv[1:]:
2677 traceback.print_exc()
2677 traceback.print_exc()
2678 if x[1]:
2678 if x[1]:
2679 try:
2679 try:
2680 mod = imp.load_source(x[0], x[1])
2680 mod = imp.load_source(x[0], x[1])
2681 except Exception, inst:
2681 except Exception, inst:
2682 on_exception(Exception, inst)
2682 on_exception(Exception, inst)
2683 continue
2683 continue
2684 else:
2684 else:
2685 def importh(name):
2685 def importh(name):
2686 mod = __import__(name)
2686 mod = __import__(name)
2687 components = name.split('.')
2687 components = name.split('.')
2688 for comp in components[1:]:
2688 for comp in components[1:]:
2689 mod = getattr(mod, comp)
2689 mod = getattr(mod, comp)
2690 return mod
2690 return mod
2691 try:
2691 try:
2692 mod = importh(x[0])
2692 mod = importh(x[0])
2693 except Exception, inst:
2693 except Exception, inst:
2694 on_exception(Exception, inst)
2694 on_exception(Exception, inst)
2695 continue
2695 continue
2696
2696
2697 external.append(mod)
2697 external.append(mod)
2698 for x in external:
2698 for x in external:
2699 cmdtable = getattr(x, 'cmdtable', {})
2699 cmdtable = getattr(x, 'cmdtable', {})
2700 for t in cmdtable:
2700 for t in cmdtable:
2701 if t in table:
2701 if t in table:
2702 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2702 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2703 table.update(cmdtable)
2703 table.update(cmdtable)
2704
2704
2705 try:
2705 try:
2706 cmd, func, args, options, cmdoptions = parse(u, args)
2706 cmd, func, args, options, cmdoptions = parse(u, args)
2707 except ParseError, inst:
2707 except ParseError, inst:
2708 if inst.args[0]:
2708 if inst.args[0]:
2709 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2709 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2710 help_(u, inst.args[0])
2710 help_(u, inst.args[0])
2711 else:
2711 else:
2712 u.warn(_("hg: %s\n") % inst.args[1])
2712 u.warn(_("hg: %s\n") % inst.args[1])
2713 help_(u, 'shortlist')
2713 help_(u, 'shortlist')
2714 sys.exit(-1)
2714 sys.exit(-1)
2715 except AmbiguousCommand, inst:
2715 except AmbiguousCommand, inst:
2716 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2716 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2717 sys.exit(1)
2717 sys.exit(1)
2718 except UnknownCommand, inst:
2718 except UnknownCommand, inst:
2719 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2719 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2720 help_(u, 'shortlist')
2720 help_(u, 'shortlist')
2721 sys.exit(1)
2721 sys.exit(1)
2722
2722
2723 if options["time"]:
2723 if options["time"]:
2724 def get_times():
2724 def get_times():
2725 t = os.times()
2725 t = os.times()
2726 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2726 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2727 t = (t[0], t[1], t[2], t[3], time.clock())
2727 t = (t[0], t[1], t[2], t[3], time.clock())
2728 return t
2728 return t
2729 s = get_times()
2729 s = get_times()
2730 def print_time():
2730 def print_time():
2731 t = get_times()
2731 t = get_times()
2732 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2732 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2733 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2733 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2734 atexit.register(print_time)
2734 atexit.register(print_time)
2735
2735
2736 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2736 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2737 not options["noninteractive"])
2737 not options["noninteractive"])
2738
2738
2739 # enter the debugger before command execution
2739 # enter the debugger before command execution
2740 if options['debugger']:
2740 if options['debugger']:
2741 pdb.set_trace()
2741 pdb.set_trace()
2742
2742
2743 try:
2743 try:
2744 try:
2744 try:
2745 if options['help']:
2745 if options['help']:
2746 help_(u, cmd, options['version'])
2746 help_(u, cmd, options['version'])
2747 sys.exit(0)
2747 sys.exit(0)
2748 elif options['version']:
2748 elif options['version']:
2749 show_version(u)
2749 show_version(u)
2750 sys.exit(0)
2750 sys.exit(0)
2751 elif not cmd:
2751 elif not cmd:
2752 help_(u, 'shortlist')
2752 help_(u, 'shortlist')
2753 sys.exit(0)
2753 sys.exit(0)
2754
2754
2755 if options['cwd']:
2755 if options['cwd']:
2756 try:
2756 try:
2757 os.chdir(options['cwd'])
2757 os.chdir(options['cwd'])
2758 except OSError, inst:
2758 except OSError, inst:
2759 raise util.Abort('%s: %s' %
2759 raise util.Abort('%s: %s' %
2760 (options['cwd'], inst.strerror))
2760 (options['cwd'], inst.strerror))
2761
2761
2762 if cmd not in norepo.split():
2762 if cmd not in norepo.split():
2763 path = options["repository"] or ""
2763 path = options["repository"] or ""
2764 repo = hg.repository(ui=u, path=path)
2764 repo = hg.repository(ui=u, path=path)
2765 for x in external:
2765 for x in external:
2766 if hasattr(x, 'reposetup'):
2766 if hasattr(x, 'reposetup'):
2767 x.reposetup(u, repo)
2767 x.reposetup(u, repo)
2768 d = lambda: func(u, repo, *args, **cmdoptions)
2768 d = lambda: func(u, repo, *args, **cmdoptions)
2769 else:
2769 else:
2770 d = lambda: func(u, *args, **cmdoptions)
2770 d = lambda: func(u, *args, **cmdoptions)
2771
2771
2772 if options['profile']:
2772 if options['profile']:
2773 import hotshot, hotshot.stats
2773 import hotshot, hotshot.stats
2774 prof = hotshot.Profile("hg.prof")
2774 prof = hotshot.Profile("hg.prof")
2775 r = prof.runcall(d)
2775 r = prof.runcall(d)
2776 prof.close()
2776 prof.close()
2777 stats = hotshot.stats.load("hg.prof")
2777 stats = hotshot.stats.load("hg.prof")
2778 stats.strip_dirs()
2778 stats.strip_dirs()
2779 stats.sort_stats('time', 'calls')
2779 stats.sort_stats('time', 'calls')
2780 stats.print_stats(40)
2780 stats.print_stats(40)
2781 return r
2781 return r
2782 else:
2782 else:
2783 return d()
2783 return d()
2784 except:
2784 except:
2785 # enter the debugger when we hit an exception
2785 # enter the debugger when we hit an exception
2786 if options['debugger']:
2786 if options['debugger']:
2787 pdb.post_mortem(sys.exc_info()[2])
2787 pdb.post_mortem(sys.exc_info()[2])
2788 if options['traceback']:
2788 if options['traceback']:
2789 traceback.print_exc()
2789 traceback.print_exc()
2790 raise
2790 raise
2791 except hg.RepoError, inst:
2791 except hg.RepoError, inst:
2792 u.warn(_("abort: "), inst, "!\n")
2792 u.warn(_("abort: "), inst, "!\n")
2793 except revlog.RevlogError, inst:
2793 except revlog.RevlogError, inst:
2794 u.warn(_("abort: "), inst, "!\n")
2794 u.warn(_("abort: "), inst, "!\n")
2795 except SignalInterrupt:
2795 except SignalInterrupt:
2796 u.warn(_("killed!\n"))
2796 u.warn(_("killed!\n"))
2797 except KeyboardInterrupt:
2797 except KeyboardInterrupt:
2798 try:
2798 try:
2799 u.warn(_("interrupted!\n"))
2799 u.warn(_("interrupted!\n"))
2800 except IOError, inst:
2800 except IOError, inst:
2801 if inst.errno == errno.EPIPE:
2801 if inst.errno == errno.EPIPE:
2802 if u.debugflag:
2802 if u.debugflag:
2803 u.warn(_("\nbroken pipe\n"))
2803 u.warn(_("\nbroken pipe\n"))
2804 else:
2804 else:
2805 raise
2805 raise
2806 except IOError, inst:
2806 except IOError, inst:
2807 if hasattr(inst, "code"):
2807 if hasattr(inst, "code"):
2808 u.warn(_("abort: %s\n") % inst)
2808 u.warn(_("abort: %s\n") % inst)
2809 elif hasattr(inst, "reason"):
2809 elif hasattr(inst, "reason"):
2810 u.warn(_("abort: error: %s\n") % inst.reason[1])
2810 u.warn(_("abort: error: %s\n") % inst.reason[1])
2811 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2811 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2812 if u.debugflag:
2812 if u.debugflag:
2813 u.warn(_("broken pipe\n"))
2813 u.warn(_("broken pipe\n"))
2814 elif getattr(inst, "strerror", None):
2814 elif getattr(inst, "strerror", None):
2815 if getattr(inst, "filename", None):
2815 if getattr(inst, "filename", None):
2816 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2816 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2817 else:
2817 else:
2818 u.warn(_("abort: %s\n") % inst.strerror)
2818 u.warn(_("abort: %s\n") % inst.strerror)
2819 else:
2819 else:
2820 raise
2820 raise
2821 except OSError, inst:
2821 except OSError, inst:
2822 if hasattr(inst, "filename"):
2822 if hasattr(inst, "filename"):
2823 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2823 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2824 else:
2824 else:
2825 u.warn(_("abort: %s\n") % inst.strerror)
2825 u.warn(_("abort: %s\n") % inst.strerror)
2826 except util.Abort, inst:
2826 except util.Abort, inst:
2827 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2827 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2828 sys.exit(1)
2828 sys.exit(1)
2829 except TypeError, inst:
2829 except TypeError, inst:
2830 # was this an argument error?
2830 # was this an argument error?
2831 tb = traceback.extract_tb(sys.exc_info()[2])
2831 tb = traceback.extract_tb(sys.exc_info()[2])
2832 if len(tb) > 2: # no
2832 if len(tb) > 2: # no
2833 raise
2833 raise
2834 u.debug(inst, "\n")
2834 u.debug(inst, "\n")
2835 u.warn(_("%s: invalid arguments\n") % cmd)
2835 u.warn(_("%s: invalid arguments\n") % cmd)
2836 help_(u, cmd)
2836 help_(u, cmd)
2837 except AmbiguousCommand, inst:
2837 except AmbiguousCommand, inst:
2838 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2838 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2839 help_(u, 'shortlist')
2839 help_(u, 'shortlist')
2840 except UnknownCommand, inst:
2840 except UnknownCommand, inst:
2841 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2841 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2842 help_(u, 'shortlist')
2842 help_(u, 'shortlist')
2843 except SystemExit:
2843 except SystemExit:
2844 # don't catch this in the catch-all below
2844 # don't catch this in the catch-all below
2845 raise
2845 raise
2846 except:
2846 except:
2847 u.warn(_("** unknown exception encountered, details follow\n"))
2847 u.warn(_("** unknown exception encountered, details follow\n"))
2848 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2848 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2849 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2849 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2850 % version.get_version())
2850 % version.get_version())
2851 raise
2851 raise
2852
2852
2853 sys.exit(-1)
2853 sys.exit(-1)
@@ -1,420 +1,420 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
4 Copyright 2005 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 import struct, os
10 import struct, os
11 from node import *
11 from node import *
12 from i18n import gettext as _
12 from i18n import gettext as _
13 from demandload import *
13 from demandload import *
14 demandload(globals(), "time bisect stat util re errno")
14 demandload(globals(), "time bisect stat util re errno")
15
15
16 class dirstate(object):
16 class dirstate(object):
17 def __init__(self, opener, ui, root):
17 def __init__(self, opener, ui, root):
18 self.opener = opener
18 self.opener = opener
19 self.root = root
19 self.root = root
20 self.dirty = 0
20 self.dirty = 0
21 self.ui = ui
21 self.ui = ui
22 self.map = None
22 self.map = None
23 self.pl = None
23 self.pl = None
24 self.copies = {}
24 self.copies = {}
25 self.ignorefunc = None
25 self.ignorefunc = None
26 self.blockignore = False
26 self.blockignore = False
27
27
28 def wjoin(self, f):
28 def wjoin(self, f):
29 return os.path.join(self.root, f)
29 return os.path.join(self.root, f)
30
30
31 def getcwd(self):
31 def getcwd(self):
32 cwd = os.getcwd()
32 cwd = os.getcwd()
33 if cwd == self.root: return ''
33 if cwd == self.root: return ''
34 return cwd[len(self.root) + 1:]
34 return cwd[len(self.root) + 1:]
35
35
36 def hgignore(self):
36 def hgignore(self):
37 '''return the contents of .hgignore as a list of patterns.
37 '''return the contents of .hgignore as a list of patterns.
38
38
39 trailing white space is dropped.
39 trailing white space is dropped.
40 the escape character is backslash.
40 the escape character is backslash.
41 comments start with #.
41 comments start with #.
42 empty lines are skipped.
42 empty lines are skipped.
43
43
44 lines can be of the following formats:
44 lines can be of the following formats:
45
45
46 syntax: regexp # defaults following lines to non-rooted regexps
46 syntax: regexp # defaults following lines to non-rooted regexps
47 syntax: glob # defaults following lines to non-rooted globs
47 syntax: glob # defaults following lines to non-rooted globs
48 re:pattern # non-rooted regular expression
48 re:pattern # non-rooted regular expression
49 glob:pattern # non-rooted glob
49 glob:pattern # non-rooted glob
50 pattern # pattern of the current default type'''
50 pattern # pattern of the current default type'''
51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
51 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
52 def parselines(fp):
52 def parselines(fp):
53 for line in fp:
53 for line in fp:
54 escape = False
54 escape = False
55 for i in xrange(len(line)):
55 for i in xrange(len(line)):
56 if escape: escape = False
56 if escape: escape = False
57 elif line[i] == '\\': escape = True
57 elif line[i] == '\\': escape = True
58 elif line[i] == '#': break
58 elif line[i] == '#': break
59 line = line[:i].rstrip()
59 line = line[:i].rstrip()
60 if line: yield line
60 if line: yield line
61 pats = []
61 pats = []
62 try:
62 try:
63 fp = open(self.wjoin('.hgignore'))
63 fp = open(self.wjoin('.hgignore'))
64 syntax = 'relre:'
64 syntax = 'relre:'
65 for line in parselines(fp):
65 for line in parselines(fp):
66 if line.startswith('syntax:'):
66 if line.startswith('syntax:'):
67 s = line[7:].strip()
67 s = line[7:].strip()
68 try:
68 try:
69 syntax = syntaxes[s]
69 syntax = syntaxes[s]
70 except KeyError:
70 except KeyError:
71 self.ui.warn(_(".hgignore: ignoring invalid "
71 self.ui.warn(_(".hgignore: ignoring invalid "
72 "syntax '%s'\n") % s)
72 "syntax '%s'\n") % s)
73 continue
73 continue
74 pat = syntax + line
74 pat = syntax + line
75 for s in syntaxes.values():
75 for s in syntaxes.values():
76 if line.startswith(s):
76 if line.startswith(s):
77 pat = line
77 pat = line
78 break
78 break
79 pats.append(pat)
79 pats.append(pat)
80 except IOError: pass
80 except IOError: pass
81 return pats
81 return pats
82
82
83 def ignore(self, fn):
83 def ignore(self, fn):
84 '''default match function used by dirstate and localrepository.
84 '''default match function used by dirstate and localrepository.
85 this honours the .hgignore file, and nothing more.'''
85 this honours the .hgignore file, and nothing more.'''
86 if self.blockignore:
86 if self.blockignore:
87 return False
87 return False
88 if not self.ignorefunc:
88 if not self.ignorefunc:
89 ignore = self.hgignore()
89 ignore = self.hgignore()
90 if ignore:
90 if ignore:
91 files, self.ignorefunc, anypats = util.matcher(self.root,
91 files, self.ignorefunc, anypats = util.matcher(self.root,
92 inc=ignore,
92 inc=ignore,
93 src='.hgignore')
93 src='.hgignore')
94 else:
94 else:
95 self.ignorefunc = util.never
95 self.ignorefunc = util.never
96 return self.ignorefunc(fn)
96 return self.ignorefunc(fn)
97
97
98 def __del__(self):
98 def __del__(self):
99 if self.dirty:
99 if self.dirty:
100 self.write()
100 self.write()
101
101
102 def __getitem__(self, key):
102 def __getitem__(self, key):
103 try:
103 try:
104 return self.map[key]
104 return self.map[key]
105 except TypeError:
105 except TypeError:
106 self.lazyread()
106 self.lazyread()
107 return self[key]
107 return self[key]
108
108
109 def __contains__(self, key):
109 def __contains__(self, key):
110 self.lazyread()
110 self.lazyread()
111 return key in self.map
111 return key in self.map
112
112
113 def parents(self):
113 def parents(self):
114 self.lazyread()
114 self.lazyread()
115 return self.pl
115 return self.pl
116
116
117 def markdirty(self):
117 def markdirty(self):
118 if not self.dirty:
118 if not self.dirty:
119 self.dirty = 1
119 self.dirty = 1
120
120
121 def setparents(self, p1, p2=nullid):
121 def setparents(self, p1, p2=nullid):
122 self.lazyread()
122 self.lazyread()
123 self.markdirty()
123 self.markdirty()
124 self.pl = p1, p2
124 self.pl = p1, p2
125
125
126 def state(self, key):
126 def state(self, key):
127 try:
127 try:
128 return self[key][0]
128 return self[key][0]
129 except KeyError:
129 except KeyError:
130 return "?"
130 return "?"
131
131
132 def lazyread(self):
132 def lazyread(self):
133 if self.map is None:
133 if self.map is None:
134 self.read()
134 self.read()
135
135
136 def read(self):
136 def read(self):
137 self.map = {}
137 self.map = {}
138 self.pl = [nullid, nullid]
138 self.pl = [nullid, nullid]
139 try:
139 try:
140 st = self.opener("dirstate").read()
140 st = self.opener("dirstate").read()
141 if not st: return
141 if not st: return
142 except: return
142 except: return
143
143
144 self.pl = [st[:20], st[20: 40]]
144 self.pl = [st[:20], st[20: 40]]
145
145
146 pos = 40
146 pos = 40
147 while pos < len(st):
147 while pos < len(st):
148 e = struct.unpack(">cllll", st[pos:pos+17])
148 e = struct.unpack(">cllll", st[pos:pos+17])
149 l = e[4]
149 l = e[4]
150 pos += 17
150 pos += 17
151 f = st[pos:pos + l]
151 f = st[pos:pos + l]
152 if '\0' in f:
152 if '\0' in f:
153 f, c = f.split('\0')
153 f, c = f.split('\0')
154 self.copies[f] = c
154 self.copies[f] = c
155 self.map[f] = e[:4]
155 self.map[f] = e[:4]
156 pos += l
156 pos += l
157
157
158 def copy(self, source, dest):
158 def copy(self, source, dest):
159 self.lazyread()
159 self.lazyread()
160 self.markdirty()
160 self.markdirty()
161 self.copies[dest] = source
161 self.copies[dest] = source
162
162
163 def copied(self, file):
163 def copied(self, file):
164 return self.copies.get(file, None)
164 return self.copies.get(file, None)
165
165
166 def update(self, files, state, **kw):
166 def update(self, files, state, **kw):
167 ''' current states:
167 ''' current states:
168 n normal
168 n normal
169 m needs merging
169 m needs merging
170 r marked for removal
170 r marked for removal
171 a marked for addition'''
171 a marked for addition'''
172
172
173 if not files: return
173 if not files: return
174 self.lazyread()
174 self.lazyread()
175 self.markdirty()
175 self.markdirty()
176 for f in files:
176 for f in files:
177 if state == "r":
177 if state == "r":
178 self.map[f] = ('r', 0, 0, 0)
178 self.map[f] = ('r', 0, 0, 0)
179 else:
179 else:
180 s = os.lstat(self.wjoin(f))
180 s = os.lstat(self.wjoin(f))
181 st_size = kw.get('st_size', s.st_size)
181 st_size = kw.get('st_size', s.st_size)
182 st_mtime = kw.get('st_mtime', s.st_mtime)
182 st_mtime = kw.get('st_mtime', s.st_mtime)
183 self.map[f] = (state, s.st_mode, st_size, st_mtime)
183 self.map[f] = (state, s.st_mode, st_size, st_mtime)
184 if self.copies.has_key(f):
184 if self.copies.has_key(f):
185 del self.copies[f]
185 del self.copies[f]
186
186
187 def forget(self, files):
187 def forget(self, files):
188 if not files: return
188 if not files: return
189 self.lazyread()
189 self.lazyread()
190 self.markdirty()
190 self.markdirty()
191 for f in files:
191 for f in files:
192 try:
192 try:
193 del self.map[f]
193 del self.map[f]
194 except KeyError:
194 except KeyError:
195 self.ui.warn(_("not in dirstate: %s!\n") % f)
195 self.ui.warn(_("not in dirstate: %s!\n") % f)
196 pass
196 pass
197
197
198 def clear(self):
198 def clear(self):
199 self.map = {}
199 self.map = {}
200 self.markdirty()
200 self.markdirty()
201
201
202 def write(self):
202 def write(self):
203 st = self.opener("dirstate", "w", atomic=True)
203 st = self.opener("dirstate", "w", atomic=True)
204 st.write("".join(self.pl))
204 st.write("".join(self.pl))
205 for f, e in self.map.items():
205 for f, e in self.map.items():
206 c = self.copied(f)
206 c = self.copied(f)
207 if c:
207 if c:
208 f = f + "\0" + c
208 f = f + "\0" + c
209 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
209 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
210 st.write(e + f)
210 st.write(e + f)
211 self.dirty = 0
211 self.dirty = 0
212
212
213 def filterfiles(self, files):
213 def filterfiles(self, files):
214 ret = {}
214 ret = {}
215 unknown = []
215 unknown = []
216
216
217 for x in files:
217 for x in files:
218 if x == '.':
218 if x == '.':
219 return self.map.copy()
219 return self.map.copy()
220 if x not in self.map:
220 if x not in self.map:
221 unknown.append(x)
221 unknown.append(x)
222 else:
222 else:
223 ret[x] = self.map[x]
223 ret[x] = self.map[x]
224
224
225 if not unknown:
225 if not unknown:
226 return ret
226 return ret
227
227
228 b = self.map.keys()
228 b = self.map.keys()
229 b.sort()
229 b.sort()
230 blen = len(b)
230 blen = len(b)
231
231
232 for x in unknown:
232 for x in unknown:
233 bs = bisect.bisect(b, x)
233 bs = bisect.bisect(b, x)
234 if bs != 0 and b[bs-1] == x:
234 if bs != 0 and b[bs-1] == x:
235 ret[x] = self.map[x]
235 ret[x] = self.map[x]
236 continue
236 continue
237 while bs < blen:
237 while bs < blen:
238 s = b[bs]
238 s = b[bs]
239 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
239 if len(s) > len(x) and s.startswith(x) and s[len(x)] == '/':
240 ret[s] = self.map[s]
240 ret[s] = self.map[s]
241 else:
241 else:
242 break
242 break
243 bs += 1
243 bs += 1
244 return ret
244 return ret
245
245
246 def supported_type(self, f, st, verbose=False):
246 def supported_type(self, f, st, verbose=False):
247 if stat.S_ISREG(st.st_mode):
247 if stat.S_ISREG(st.st_mode):
248 return True
248 return True
249 if verbose:
249 if verbose:
250 kind = 'unknown'
250 kind = 'unknown'
251 if stat.S_ISCHR(st.st_mode): kind = _('character device')
251 if stat.S_ISCHR(st.st_mode): kind = _('character device')
252 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
252 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
253 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
253 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
254 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
254 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
255 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
255 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
256 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
256 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
257 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
257 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
258 util.pathto(self.getcwd(), f),
258 util.pathto(self.getcwd(), f),
259 kind))
259 kind))
260 return False
260 return False
261
261
262 def statwalk(self, files=None, match=util.always, dc=None):
262 def statwalk(self, files=None, match=util.always, dc=None):
263 self.lazyread()
263 self.lazyread()
264
264
265 # walk all files by default
265 # walk all files by default
266 if not files:
266 if not files:
267 files = [self.root]
267 files = [self.root]
268 if not dc:
268 if not dc:
269 dc = self.map.copy()
269 dc = self.map.copy()
270 elif not dc:
270 elif not dc:
271 dc = self.filterfiles(files)
271 dc = self.filterfiles(files)
272
272
273 def statmatch(file, stat):
273 def statmatch(file_, stat):
274 file = util.pconvert(file)
274 file_ = util.pconvert(file_)
275 if file not in dc and self.ignore(file):
275 if file_ not in dc and self.ignore(file_):
276 return False
276 return False
277 return match(file)
277 return match(file_)
278
278
279 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
279 return self.walkhelper(files=files, statmatch=statmatch, dc=dc)
280
280
281 def walk(self, files=None, match=util.always, dc=None):
281 def walk(self, files=None, match=util.always, dc=None):
282 # filter out the stat
282 # filter out the stat
283 for src, f, st in self.statwalk(files, match, dc):
283 for src, f, st in self.statwalk(files, match, dc):
284 yield src, f
284 yield src, f
285
285
286 # walk recursively through the directory tree, finding all files
286 # walk recursively through the directory tree, finding all files
287 # matched by the statmatch function
287 # matched by the statmatch function
288 #
288 #
289 # results are yielded in a tuple (src, filename, st), where src
289 # results are yielded in a tuple (src, filename, st), where src
290 # is one of:
290 # is one of:
291 # 'f' the file was found in the directory tree
291 # 'f' the file was found in the directory tree
292 # 'm' the file was only in the dirstate and not in the tree
292 # 'm' the file was only in the dirstate and not in the tree
293 # and st is the stat result if the file was found in the directory.
293 # and st is the stat result if the file was found in the directory.
294 #
294 #
295 # dc is an optional arg for the current dirstate. dc is not modified
295 # dc is an optional arg for the current dirstate. dc is not modified
296 # directly by this function, but might be modified by your statmatch call.
296 # directly by this function, but might be modified by your statmatch call.
297 #
297 #
298 def walkhelper(self, files, statmatch, dc):
298 def walkhelper(self, files, statmatch, dc):
299 # recursion free walker, faster than os.walk.
299 # recursion free walker, faster than os.walk.
300 def findfiles(s):
300 def findfiles(s):
301 work = [s]
301 work = [s]
302 while work:
302 while work:
303 top = work.pop()
303 top = work.pop()
304 names = os.listdir(top)
304 names = os.listdir(top)
305 names.sort()
305 names.sort()
306 # nd is the top of the repository dir tree
306 # nd is the top of the repository dir tree
307 nd = util.normpath(top[len(self.root) + 1:])
307 nd = util.normpath(top[len(self.root) + 1:])
308 if nd == '.': nd = ''
308 if nd == '.': nd = ''
309 for f in names:
309 for f in names:
310 np = util.pconvert(os.path.join(nd, f))
310 np = util.pconvert(os.path.join(nd, f))
311 if seen(np):
311 if seen(np):
312 continue
312 continue
313 p = os.path.join(top, f)
313 p = os.path.join(top, f)
314 # don't trip over symlinks
314 # don't trip over symlinks
315 st = os.lstat(p)
315 st = os.lstat(p)
316 if stat.S_ISDIR(st.st_mode):
316 if stat.S_ISDIR(st.st_mode):
317 ds = os.path.join(nd, f +'/')
317 ds = os.path.join(nd, f +'/')
318 if statmatch(ds, st):
318 if statmatch(ds, st):
319 work.append(p)
319 work.append(p)
320 if statmatch(np, st) and np in dc:
320 if statmatch(np, st) and np in dc:
321 yield 'm', np, st
321 yield 'm', np, st
322 elif statmatch(np, st):
322 elif statmatch(np, st):
323 if self.supported_type(np, st):
323 if self.supported_type(np, st):
324 yield 'f', np, st
324 yield 'f', np, st
325 elif np in dc:
325 elif np in dc:
326 yield 'm', np, st
326 yield 'm', np, st
327
327
328 known = {'.hg': 1}
328 known = {'.hg': 1}
329 def seen(fn):
329 def seen(fn):
330 if fn in known: return True
330 if fn in known: return True
331 known[fn] = 1
331 known[fn] = 1
332
332
333 # step one, find all files that match our criteria
333 # step one, find all files that match our criteria
334 files.sort()
334 files.sort()
335 for ff in util.unique(files):
335 for ff in util.unique(files):
336 f = self.wjoin(ff)
336 f = self.wjoin(ff)
337 try:
337 try:
338 st = os.lstat(f)
338 st = os.lstat(f)
339 except OSError, inst:
339 except OSError, inst:
340 nf = util.normpath(ff)
340 nf = util.normpath(ff)
341 found = False
341 found = False
342 for fn in dc:
342 for fn in dc:
343 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
343 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
344 found = True
344 found = True
345 break
345 break
346 if not found:
346 if not found:
347 self.ui.warn('%s: %s\n' % (
347 self.ui.warn('%s: %s\n' % (
348 util.pathto(self.getcwd(), ff),
348 util.pathto(self.getcwd(), ff),
349 inst.strerror))
349 inst.strerror))
350 continue
350 continue
351 if stat.S_ISDIR(st.st_mode):
351 if stat.S_ISDIR(st.st_mode):
352 cmp1 = (lambda x, y: cmp(x[1], y[1]))
352 cmp1 = (lambda x, y: cmp(x[1], y[1]))
353 sorted = [ x for x in findfiles(f) ]
353 sorted_ = [ x for x in findfiles(f) ]
354 sorted.sort(cmp1)
354 sorted_.sort(cmp1)
355 for e in sorted:
355 for e in sorted_:
356 yield e
356 yield e
357 else:
357 else:
358 ff = util.normpath(ff)
358 ff = util.normpath(ff)
359 if seen(ff):
359 if seen(ff):
360 continue
360 continue
361 self.blockignore = True
361 self.blockignore = True
362 if statmatch(ff, st):
362 if statmatch(ff, st):
363 if self.supported_type(ff, st, verbose=True):
363 if self.supported_type(ff, st, verbose=True):
364 yield 'f', ff, st
364 yield 'f', ff, st
365 elif ff in dc:
365 elif ff in dc:
366 yield 'm', ff, st
366 yield 'm', ff, st
367 self.blockignore = False
367 self.blockignore = False
368
368
369 # step two run through anything left in the dc hash and yield
369 # step two run through anything left in the dc hash and yield
370 # if we haven't already seen it
370 # if we haven't already seen it
371 ks = dc.keys()
371 ks = dc.keys()
372 ks.sort()
372 ks.sort()
373 for k in ks:
373 for k in ks:
374 if not seen(k) and (statmatch(k, None)):
374 if not seen(k) and (statmatch(k, None)):
375 yield 'm', k, None
375 yield 'm', k, None
376
376
377 def changes(self, files=None, match=util.always):
377 def changes(self, files=None, match=util.always):
378 lookup, modified, added, unknown = [], [], [], []
378 lookup, modified, added, unknown = [], [], [], []
379 removed, deleted = [], []
379 removed, deleted = [], []
380
380
381 for src, fn, st in self.statwalk(files, match):
381 for src, fn, st in self.statwalk(files, match):
382 try:
382 try:
383 type, mode, size, time = self[fn]
383 type_, mode, size, time = self[fn]
384 except KeyError:
384 except KeyError:
385 unknown.append(fn)
385 unknown.append(fn)
386 continue
386 continue
387 if src == 'm':
387 if src == 'm':
388 nonexistent = True
388 nonexistent = True
389 if not st:
389 if not st:
390 try:
390 try:
391 f = self.wjoin(fn)
391 f = self.wjoin(fn)
392 st = os.lstat(f)
392 st = os.lstat(f)
393 except OSError, inst:
393 except OSError, inst:
394 if inst.errno != errno.ENOENT:
394 if inst.errno != errno.ENOENT:
395 raise
395 raise
396 st = None
396 st = None
397 # We need to re-check that it is a valid file
397 # We need to re-check that it is a valid file
398 if st and self.supported_type(fn, st):
398 if st and self.supported_type(fn, st):
399 nonexistent = False
399 nonexistent = False
400 # XXX: what to do with file no longer present in the fs
400 # XXX: what to do with file no longer present in the fs
401 # who are not removed in the dirstate ?
401 # who are not removed in the dirstate ?
402 if nonexistent and type in "nm":
402 if nonexistent and type_ in "nm":
403 deleted.append(fn)
403 deleted.append(fn)
404 continue
404 continue
405 # check the common case first
405 # check the common case first
406 if type == 'n':
406 if type_ == 'n':
407 if not st:
407 if not st:
408 st = os.stat(fn)
408 st = os.stat(fn)
409 if size != st.st_size or (mode ^ st.st_mode) & 0100:
409 if size != st.st_size or (mode ^ st.st_mode) & 0100:
410 modified.append(fn)
410 modified.append(fn)
411 elif time != st.st_mtime:
411 elif time != st.st_mtime:
412 lookup.append(fn)
412 lookup.append(fn)
413 elif type == 'm':
413 elif type_ == 'm':
414 modified.append(fn)
414 modified.append(fn)
415 elif type == 'a':
415 elif type_ == 'a':
416 added.append(fn)
416 added.append(fn)
417 elif type == 'r':
417 elif type_ == 'r':
418 removed.append(fn)
418 removed.append(fn)
419
419
420 return (lookup, modified, added, removed, deleted, unknown)
420 return (lookup, modified, added, removed, deleted, unknown)
@@ -1,1853 +1,1853 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository(object):
15 class localrepository(object):
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp:
22 if p == oldp:
23 raise repo.RepoError(_("no repo found"))
23 raise repo.RepoError(_("no repo found"))
24 path = p
24 path = p
25 self.path = os.path.join(path, ".hg")
25 self.path = os.path.join(path, ".hg")
26
26
27 if not create and not os.path.isdir(self.path):
27 if not create and not os.path.isdir(self.path):
28 raise repo.RepoError(_("repository %s not found") % path)
28 raise repo.RepoError(_("repository %s not found") % path)
29
29
30 self.root = os.path.abspath(path)
30 self.root = os.path.abspath(path)
31 self.ui = ui
31 self.ui = ui
32 self.opener = util.opener(self.path)
32 self.opener = util.opener(self.path)
33 self.wopener = util.opener(self.root)
33 self.wopener = util.opener(self.root)
34 self.manifest = manifest.manifest(self.opener)
34 self.manifest = manifest.manifest(self.opener)
35 self.changelog = changelog.changelog(self.opener)
35 self.changelog = changelog.changelog(self.opener)
36 self.tagscache = None
36 self.tagscache = None
37 self.nodetagscache = None
37 self.nodetagscache = None
38 self.encodepats = None
38 self.encodepats = None
39 self.decodepats = None
39 self.decodepats = None
40
40
41 if create:
41 if create:
42 os.mkdir(self.path)
42 os.mkdir(self.path)
43 os.mkdir(self.join("data"))
43 os.mkdir(self.join("data"))
44
44
45 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
46 try:
46 try:
47 self.ui.readconfig(self.join("hgrc"))
47 self.ui.readconfig(self.join("hgrc"))
48 except IOError:
48 except IOError:
49 pass
49 pass
50
50
51 def hook(self, name, throw=False, **args):
51 def hook(self, name, throw=False, **args):
52 def runhook(name, cmd):
52 def runhook(name, cmd):
53 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
53 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
54 old = {}
54 old = {}
55 for k, v in args.items():
55 for k, v in args.items():
56 k = k.upper()
56 k = k.upper()
57 old['HG_' + k] = os.environ.get(k, None)
57 old['HG_' + k] = os.environ.get(k, None)
58 old[k] = os.environ.get(k, None)
58 old[k] = os.environ.get(k, None)
59 os.environ['HG_' + k] = str(v)
59 os.environ['HG_' + k] = str(v)
60 os.environ[k] = str(v)
60 os.environ[k] = str(v)
61
61
62 try:
62 try:
63 # Hooks run in the repository root
63 # Hooks run in the repository root
64 olddir = os.getcwd()
64 olddir = os.getcwd()
65 os.chdir(self.root)
65 os.chdir(self.root)
66 r = os.system(cmd)
66 r = os.system(cmd)
67 finally:
67 finally:
68 for k, v in old.items():
68 for k, v in old.items():
69 if v is not None:
69 if v is not None:
70 os.environ[k] = v
70 os.environ[k] = v
71 else:
71 else:
72 del os.environ[k]
72 del os.environ[k]
73
73
74 os.chdir(olddir)
74 os.chdir(olddir)
75
75
76 if r:
76 if r:
77 desc, r = util.explain_exit(r)
77 desc, r = util.explain_exit(r)
78 if throw:
78 if throw:
79 raise util.Abort(_('%s hook %s') % (name, desc))
79 raise util.Abort(_('%s hook %s') % (name, desc))
80 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
80 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
81 return False
81 return False
82 return True
82 return True
83
83
84 r = True
84 r = True
85 for hname, cmd in self.ui.configitems("hooks"):
85 for hname, cmd in self.ui.configitems("hooks"):
86 s = hname.split(".")
86 s = hname.split(".")
87 if s[0] == name and cmd:
87 if s[0] == name and cmd:
88 r = runhook(hname, cmd) and r
88 r = runhook(hname, cmd) and r
89 return r
89 return r
90
90
91 def tags(self):
91 def tags(self):
92 '''return a mapping of tag to node'''
92 '''return a mapping of tag to node'''
93 if not self.tagscache:
93 if not self.tagscache:
94 self.tagscache = {}
94 self.tagscache = {}
95 def addtag(self, k, n):
95 def addtag(self, k, n):
96 try:
96 try:
97 bin_n = bin(n)
97 bin_n = bin(n)
98 except TypeError:
98 except TypeError:
99 bin_n = ''
99 bin_n = ''
100 self.tagscache[k.strip()] = bin_n
100 self.tagscache[k.strip()] = bin_n
101
101
102 try:
102 try:
103 # read each head of the tags file, ending with the tip
103 # read each head of the tags file, ending with the tip
104 # and add each tag found to the map, with "newer" ones
104 # and add each tag found to the map, with "newer" ones
105 # taking precedence
105 # taking precedence
106 fl = self.file(".hgtags")
106 fl = self.file(".hgtags")
107 h = fl.heads()
107 h = fl.heads()
108 h.reverse()
108 h.reverse()
109 for r in h:
109 for r in h:
110 for l in fl.read(r).splitlines():
110 for l in fl.read(r).splitlines():
111 if l:
111 if l:
112 n, k = l.split(" ", 1)
112 n, k = l.split(" ", 1)
113 addtag(self, k, n)
113 addtag(self, k, n)
114 except KeyError:
114 except KeyError:
115 pass
115 pass
116
116
117 try:
117 try:
118 f = self.opener("localtags")
118 f = self.opener("localtags")
119 for l in f:
119 for l in f:
120 n, k = l.split(" ", 1)
120 n, k = l.split(" ", 1)
121 addtag(self, k, n)
121 addtag(self, k, n)
122 except IOError:
122 except IOError:
123 pass
123 pass
124
124
125 self.tagscache['tip'] = self.changelog.tip()
125 self.tagscache['tip'] = self.changelog.tip()
126
126
127 return self.tagscache
127 return self.tagscache
128
128
129 def tagslist(self):
129 def tagslist(self):
130 '''return a list of tags ordered by revision'''
130 '''return a list of tags ordered by revision'''
131 l = []
131 l = []
132 for t, n in self.tags().items():
132 for t, n in self.tags().items():
133 try:
133 try:
134 r = self.changelog.rev(n)
134 r = self.changelog.rev(n)
135 except:
135 except:
136 r = -2 # sort to the beginning of the list if unknown
136 r = -2 # sort to the beginning of the list if unknown
137 l.append((r, t, n))
137 l.append((r, t, n))
138 l.sort()
138 l.sort()
139 return [(t, n) for r, t, n in l]
139 return [(t, n) for r, t, n in l]
140
140
141 def nodetags(self, node):
141 def nodetags(self, node):
142 '''return the tags associated with a node'''
142 '''return the tags associated with a node'''
143 if not self.nodetagscache:
143 if not self.nodetagscache:
144 self.nodetagscache = {}
144 self.nodetagscache = {}
145 for t, n in self.tags().items():
145 for t, n in self.tags().items():
146 self.nodetagscache.setdefault(n, []).append(t)
146 self.nodetagscache.setdefault(n, []).append(t)
147 return self.nodetagscache.get(node, [])
147 return self.nodetagscache.get(node, [])
148
148
149 def lookup(self, key):
149 def lookup(self, key):
150 try:
150 try:
151 return self.tags()[key]
151 return self.tags()[key]
152 except KeyError:
152 except KeyError:
153 try:
153 try:
154 return self.changelog.lookup(key)
154 return self.changelog.lookup(key)
155 except:
155 except:
156 raise repo.RepoError(_("unknown revision '%s'") % key)
156 raise repo.RepoError(_("unknown revision '%s'") % key)
157
157
158 def dev(self):
158 def dev(self):
159 return os.stat(self.path).st_dev
159 return os.stat(self.path).st_dev
160
160
161 def local(self):
161 def local(self):
162 return True
162 return True
163
163
164 def join(self, f):
164 def join(self, f):
165 return os.path.join(self.path, f)
165 return os.path.join(self.path, f)
166
166
167 def wjoin(self, f):
167 def wjoin(self, f):
168 return os.path.join(self.root, f)
168 return os.path.join(self.root, f)
169
169
170 def file(self, f):
170 def file(self, f):
171 if f[0] == '/':
171 if f[0] == '/':
172 f = f[1:]
172 f = f[1:]
173 return filelog.filelog(self.opener, f)
173 return filelog.filelog(self.opener, f)
174
174
175 def getcwd(self):
175 def getcwd(self):
176 return self.dirstate.getcwd()
176 return self.dirstate.getcwd()
177
177
178 def wfile(self, f, mode='r'):
178 def wfile(self, f, mode='r'):
179 return self.wopener(f, mode)
179 return self.wopener(f, mode)
180
180
181 def wread(self, filename):
181 def wread(self, filename):
182 if self.encodepats == None:
182 if self.encodepats == None:
183 l = []
183 l = []
184 for pat, cmd in self.ui.configitems("encode"):
184 for pat, cmd in self.ui.configitems("encode"):
185 mf = util.matcher("", "/", [pat], [], [])[1]
185 mf = util.matcher("", "/", [pat], [], [])[1]
186 l.append((mf, cmd))
186 l.append((mf, cmd))
187 self.encodepats = l
187 self.encodepats = l
188
188
189 data = self.wopener(filename, 'r').read()
189 data = self.wopener(filename, 'r').read()
190
190
191 for mf, cmd in self.encodepats:
191 for mf, cmd in self.encodepats:
192 if mf(filename):
192 if mf(filename):
193 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
193 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
194 data = util.filter(data, cmd)
194 data = util.filter(data, cmd)
195 break
195 break
196
196
197 return data
197 return data
198
198
199 def wwrite(self, filename, data, fd=None):
199 def wwrite(self, filename, data, fd=None):
200 if self.decodepats == None:
200 if self.decodepats == None:
201 l = []
201 l = []
202 for pat, cmd in self.ui.configitems("decode"):
202 for pat, cmd in self.ui.configitems("decode"):
203 mf = util.matcher("", "/", [pat], [], [])[1]
203 mf = util.matcher("", "/", [pat], [], [])[1]
204 l.append((mf, cmd))
204 l.append((mf, cmd))
205 self.decodepats = l
205 self.decodepats = l
206
206
207 for mf, cmd in self.decodepats:
207 for mf, cmd in self.decodepats:
208 if mf(filename):
208 if mf(filename):
209 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
209 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
210 data = util.filter(data, cmd)
210 data = util.filter(data, cmd)
211 break
211 break
212
212
213 if fd:
213 if fd:
214 return fd.write(data)
214 return fd.write(data)
215 return self.wopener(filename, 'w').write(data)
215 return self.wopener(filename, 'w').write(data)
216
216
217 def transaction(self):
217 def transaction(self):
218 # save dirstate for undo
218 # save dirstate for undo
219 try:
219 try:
220 ds = self.opener("dirstate").read()
220 ds = self.opener("dirstate").read()
221 except IOError:
221 except IOError:
222 ds = ""
222 ds = ""
223 self.opener("journal.dirstate", "w").write(ds)
223 self.opener("journal.dirstate", "w").write(ds)
224
224
225 def after():
225 def after():
226 util.rename(self.join("journal"), self.join("undo"))
226 util.rename(self.join("journal"), self.join("undo"))
227 util.rename(self.join("journal.dirstate"),
227 util.rename(self.join("journal.dirstate"),
228 self.join("undo.dirstate"))
228 self.join("undo.dirstate"))
229
229
230 return transaction.transaction(self.ui.warn, self.opener,
230 return transaction.transaction(self.ui.warn, self.opener,
231 self.join("journal"), after)
231 self.join("journal"), after)
232
232
233 def recover(self):
233 def recover(self):
234 lock = self.lock()
234 l = self.lock()
235 if os.path.exists(self.join("journal")):
235 if os.path.exists(self.join("journal")):
236 self.ui.status(_("rolling back interrupted transaction\n"))
236 self.ui.status(_("rolling back interrupted transaction\n"))
237 transaction.rollback(self.opener, self.join("journal"))
237 transaction.rollback(self.opener, self.join("journal"))
238 self.manifest = manifest.manifest(self.opener)
238 self.manifest = manifest.manifest(self.opener)
239 self.changelog = changelog.changelog(self.opener)
239 self.changelog = changelog.changelog(self.opener)
240 return True
240 return True
241 else:
241 else:
242 self.ui.warn(_("no interrupted transaction available\n"))
242 self.ui.warn(_("no interrupted transaction available\n"))
243 return False
243 return False
244
244
245 def undo(self, wlock=None):
245 def undo(self, wlock=None):
246 if not wlock:
246 if not wlock:
247 wlock = self.wlock()
247 wlock = self.wlock()
248 lock = self.lock()
248 l = self.lock()
249 if os.path.exists(self.join("undo")):
249 if os.path.exists(self.join("undo")):
250 self.ui.status(_("rolling back last transaction\n"))
250 self.ui.status(_("rolling back last transaction\n"))
251 transaction.rollback(self.opener, self.join("undo"))
251 transaction.rollback(self.opener, self.join("undo"))
252 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
252 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
253 self.dirstate.read()
253 self.dirstate.read()
254 else:
254 else:
255 self.ui.warn(_("no undo information available\n"))
255 self.ui.warn(_("no undo information available\n"))
256
256
257 def lock(self, wait=1):
257 def lock(self, wait=1):
258 try:
258 try:
259 return lock.lock(self.join("lock"), 0)
259 return lock.lock(self.join("lock"), 0)
260 except lock.LockHeld, inst:
260 except lock.LockHeld, inst:
261 if wait:
261 if wait:
262 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
262 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
263 return lock.lock(self.join("lock"), wait)
263 return lock.lock(self.join("lock"), wait)
264 raise inst
264 raise inst
265
265
266 def wlock(self, wait=1):
266 def wlock(self, wait=1):
267 try:
267 try:
268 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
268 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
269 except lock.LockHeld, inst:
269 except lock.LockHeld, inst:
270 if not wait:
270 if not wait:
271 raise inst
271 raise inst
272 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
272 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
273 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
273 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
274 self.dirstate.read()
274 self.dirstate.read()
275 return wlock
275 return wlock
276
276
277 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
277 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
278 "determine whether a new filenode is needed"
278 "determine whether a new filenode is needed"
279 fp1 = manifest1.get(filename, nullid)
279 fp1 = manifest1.get(filename, nullid)
280 fp2 = manifest2.get(filename, nullid)
280 fp2 = manifest2.get(filename, nullid)
281
281
282 if fp2 != nullid:
282 if fp2 != nullid:
283 # is one parent an ancestor of the other?
283 # is one parent an ancestor of the other?
284 fpa = filelog.ancestor(fp1, fp2)
284 fpa = filelog.ancestor(fp1, fp2)
285 if fpa == fp1:
285 if fpa == fp1:
286 fp1, fp2 = fp2, nullid
286 fp1, fp2 = fp2, nullid
287 elif fpa == fp2:
287 elif fpa == fp2:
288 fp2 = nullid
288 fp2 = nullid
289
289
290 # is the file unmodified from the parent? report existing entry
290 # is the file unmodified from the parent? report existing entry
291 if fp2 == nullid and text == filelog.read(fp1):
291 if fp2 == nullid and text == filelog.read(fp1):
292 return (fp1, None, None)
292 return (fp1, None, None)
293
293
294 return (None, fp1, fp2)
294 return (None, fp1, fp2)
295
295
296 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
296 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
297 orig_parent = self.dirstate.parents()[0] or nullid
297 orig_parent = self.dirstate.parents()[0] or nullid
298 p1 = p1 or self.dirstate.parents()[0] or nullid
298 p1 = p1 or self.dirstate.parents()[0] or nullid
299 p2 = p2 or self.dirstate.parents()[1] or nullid
299 p2 = p2 or self.dirstate.parents()[1] or nullid
300 c1 = self.changelog.read(p1)
300 c1 = self.changelog.read(p1)
301 c2 = self.changelog.read(p2)
301 c2 = self.changelog.read(p2)
302 m1 = self.manifest.read(c1[0])
302 m1 = self.manifest.read(c1[0])
303 mf1 = self.manifest.readflags(c1[0])
303 mf1 = self.manifest.readflags(c1[0])
304 m2 = self.manifest.read(c2[0])
304 m2 = self.manifest.read(c2[0])
305 changed = []
305 changed = []
306
306
307 if orig_parent == p1:
307 if orig_parent == p1:
308 update_dirstate = 1
308 update_dirstate = 1
309 else:
309 else:
310 update_dirstate = 0
310 update_dirstate = 0
311
311
312 if not wlock:
312 if not wlock:
313 wlock = self.wlock()
313 wlock = self.wlock()
314 lock = self.lock()
314 l = self.lock()
315 tr = self.transaction()
315 tr = self.transaction()
316 mm = m1.copy()
316 mm = m1.copy()
317 mfm = mf1.copy()
317 mfm = mf1.copy()
318 linkrev = self.changelog.count()
318 linkrev = self.changelog.count()
319 for f in files:
319 for f in files:
320 try:
320 try:
321 t = self.wread(f)
321 t = self.wread(f)
322 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
322 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
323 r = self.file(f)
323 r = self.file(f)
324 mfm[f] = tm
324 mfm[f] = tm
325
325
326 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
326 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
327 if entry:
327 if entry:
328 mm[f] = entry
328 mm[f] = entry
329 continue
329 continue
330
330
331 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
331 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
332 changed.append(f)
332 changed.append(f)
333 if update_dirstate:
333 if update_dirstate:
334 self.dirstate.update([f], "n")
334 self.dirstate.update([f], "n")
335 except IOError:
335 except IOError:
336 try:
336 try:
337 del mm[f]
337 del mm[f]
338 del mfm[f]
338 del mfm[f]
339 if update_dirstate:
339 if update_dirstate:
340 self.dirstate.forget([f])
340 self.dirstate.forget([f])
341 except:
341 except:
342 # deleted from p2?
342 # deleted from p2?
343 pass
343 pass
344
344
345 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
345 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
346 user = user or self.ui.username()
346 user = user or self.ui.username()
347 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
347 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
348 tr.close()
348 tr.close()
349 if update_dirstate:
349 if update_dirstate:
350 self.dirstate.setparents(n, nullid)
350 self.dirstate.setparents(n, nullid)
351
351
352 def commit(self, files=None, text="", user=None, date=None,
352 def commit(self, files=None, text="", user=None, date=None,
353 match=util.always, force=False, wlock=None):
353 match=util.always, force=False, wlock=None):
354 commit = []
354 commit = []
355 remove = []
355 remove = []
356 changed = []
356 changed = []
357
357
358 if files:
358 if files:
359 for f in files:
359 for f in files:
360 s = self.dirstate.state(f)
360 s = self.dirstate.state(f)
361 if s in 'nmai':
361 if s in 'nmai':
362 commit.append(f)
362 commit.append(f)
363 elif s == 'r':
363 elif s == 'r':
364 remove.append(f)
364 remove.append(f)
365 else:
365 else:
366 self.ui.warn(_("%s not tracked!\n") % f)
366 self.ui.warn(_("%s not tracked!\n") % f)
367 else:
367 else:
368 modified, added, removed, deleted, unknown = self.changes(match=match)
368 modified, added, removed, deleted, unknown = self.changes(match=match)
369 commit = modified + added
369 commit = modified + added
370 remove = removed
370 remove = removed
371
371
372 p1, p2 = self.dirstate.parents()
372 p1, p2 = self.dirstate.parents()
373 c1 = self.changelog.read(p1)
373 c1 = self.changelog.read(p1)
374 c2 = self.changelog.read(p2)
374 c2 = self.changelog.read(p2)
375 m1 = self.manifest.read(c1[0])
375 m1 = self.manifest.read(c1[0])
376 mf1 = self.manifest.readflags(c1[0])
376 mf1 = self.manifest.readflags(c1[0])
377 m2 = self.manifest.read(c2[0])
377 m2 = self.manifest.read(c2[0])
378
378
379 if not commit and not remove and not force and p2 == nullid:
379 if not commit and not remove and not force and p2 == nullid:
380 self.ui.status(_("nothing changed\n"))
380 self.ui.status(_("nothing changed\n"))
381 return None
381 return None
382
382
383 xp1 = hex(p1)
383 xp1 = hex(p1)
384 if p2 == nullid: xp2 = ''
384 if p2 == nullid: xp2 = ''
385 else: xp2 = hex(p2)
385 else: xp2 = hex(p2)
386
386
387 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
387 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
388
388
389 if not wlock:
389 if not wlock:
390 wlock = self.wlock()
390 wlock = self.wlock()
391 lock = self.lock()
391 l = self.lock()
392 tr = self.transaction()
392 tr = self.transaction()
393
393
394 # check in files
394 # check in files
395 new = {}
395 new = {}
396 linkrev = self.changelog.count()
396 linkrev = self.changelog.count()
397 commit.sort()
397 commit.sort()
398 for f in commit:
398 for f in commit:
399 self.ui.note(f + "\n")
399 self.ui.note(f + "\n")
400 try:
400 try:
401 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
401 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
402 t = self.wread(f)
402 t = self.wread(f)
403 except IOError:
403 except IOError:
404 self.ui.warn(_("trouble committing %s!\n") % f)
404 self.ui.warn(_("trouble committing %s!\n") % f)
405 raise
405 raise
406
406
407 r = self.file(f)
407 r = self.file(f)
408
408
409 meta = {}
409 meta = {}
410 cp = self.dirstate.copied(f)
410 cp = self.dirstate.copied(f)
411 if cp:
411 if cp:
412 meta["copy"] = cp
412 meta["copy"] = cp
413 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
413 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
414 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
414 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
415 fp1, fp2 = nullid, nullid
415 fp1, fp2 = nullid, nullid
416 else:
416 else:
417 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
417 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
418 if entry:
418 if entry:
419 new[f] = entry
419 new[f] = entry
420 continue
420 continue
421
421
422 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
422 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
423 # remember what we've added so that we can later calculate
423 # remember what we've added so that we can later calculate
424 # the files to pull from a set of changesets
424 # the files to pull from a set of changesets
425 changed.append(f)
425 changed.append(f)
426
426
427 # update manifest
427 # update manifest
428 m1 = m1.copy()
428 m1 = m1.copy()
429 m1.update(new)
429 m1.update(new)
430 for f in remove:
430 for f in remove:
431 if f in m1:
431 if f in m1:
432 del m1[f]
432 del m1[f]
433 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
433 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
434 (new, remove))
434 (new, remove))
435
435
436 # add changeset
436 # add changeset
437 new = new.keys()
437 new = new.keys()
438 new.sort()
438 new.sort()
439
439
440 if not text:
440 if not text:
441 edittext = [""]
441 edittext = [""]
442 if p2 != nullid:
442 if p2 != nullid:
443 edittext.append("HG: branch merge")
443 edittext.append("HG: branch merge")
444 edittext.extend(["HG: changed %s" % f for f in changed])
444 edittext.extend(["HG: changed %s" % f for f in changed])
445 edittext.extend(["HG: removed %s" % f for f in remove])
445 edittext.extend(["HG: removed %s" % f for f in remove])
446 if not changed and not remove:
446 if not changed and not remove:
447 edittext.append("HG: no files changed")
447 edittext.append("HG: no files changed")
448 edittext.append("")
448 edittext.append("")
449 # run editor in the repository root
449 # run editor in the repository root
450 olddir = os.getcwd()
450 olddir = os.getcwd()
451 os.chdir(self.root)
451 os.chdir(self.root)
452 edittext = self.ui.edit("\n".join(edittext))
452 edittext = self.ui.edit("\n".join(edittext))
453 os.chdir(olddir)
453 os.chdir(olddir)
454 if not edittext.rstrip():
454 if not edittext.rstrip():
455 return None
455 return None
456 text = edittext
456 text = edittext
457
457
458 user = user or self.ui.username()
458 user = user or self.ui.username()
459 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
459 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
460 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
460 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
461 parent2=xp2)
461 parent2=xp2)
462 tr.close()
462 tr.close()
463
463
464 self.dirstate.setparents(n)
464 self.dirstate.setparents(n)
465 self.dirstate.update(new, "n")
465 self.dirstate.update(new, "n")
466 self.dirstate.forget(remove)
466 self.dirstate.forget(remove)
467
467
468 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
468 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
469 return n
469 return n
470
470
471 def walk(self, node=None, files=[], match=util.always):
471 def walk(self, node=None, files=[], match=util.always):
472 if node:
472 if node:
473 fdict = dict.fromkeys(files)
473 fdict = dict.fromkeys(files)
474 for fn in self.manifest.read(self.changelog.read(node)[0]):
474 for fn in self.manifest.read(self.changelog.read(node)[0]):
475 fdict.pop(fn, None)
475 fdict.pop(fn, None)
476 if match(fn):
476 if match(fn):
477 yield 'm', fn
477 yield 'm', fn
478 for fn in fdict:
478 for fn in fdict:
479 self.ui.warn(_('%s: No such file in rev %s\n') % (
479 self.ui.warn(_('%s: No such file in rev %s\n') % (
480 util.pathto(self.getcwd(), fn), short(node)))
480 util.pathto(self.getcwd(), fn), short(node)))
481 else:
481 else:
482 for src, fn in self.dirstate.walk(files, match):
482 for src, fn in self.dirstate.walk(files, match):
483 yield src, fn
483 yield src, fn
484
484
485 def changes(self, node1=None, node2=None, files=[], match=util.always,
485 def changes(self, node1=None, node2=None, files=[], match=util.always,
486 wlock=None):
486 wlock=None):
487 """return changes between two nodes or node and working directory
487 """return changes between two nodes or node and working directory
488
488
489 If node1 is None, use the first dirstate parent instead.
489 If node1 is None, use the first dirstate parent instead.
490 If node2 is None, compare node1 with working directory.
490 If node2 is None, compare node1 with working directory.
491 """
491 """
492
492
493 def fcmp(fn, mf):
493 def fcmp(fn, mf):
494 t1 = self.wread(fn)
494 t1 = self.wread(fn)
495 t2 = self.file(fn).read(mf.get(fn, nullid))
495 t2 = self.file(fn).read(mf.get(fn, nullid))
496 return cmp(t1, t2)
496 return cmp(t1, t2)
497
497
498 def mfmatches(node):
498 def mfmatches(node):
499 change = self.changelog.read(node)
499 change = self.changelog.read(node)
500 mf = dict(self.manifest.read(change[0]))
500 mf = dict(self.manifest.read(change[0]))
501 for fn in mf.keys():
501 for fn in mf.keys():
502 if not match(fn):
502 if not match(fn):
503 del mf[fn]
503 del mf[fn]
504 return mf
504 return mf
505
505
506 # are we comparing the working directory?
506 # are we comparing the working directory?
507 if not node2:
507 if not node2:
508 if not wlock:
508 if not wlock:
509 try:
509 try:
510 wlock = self.wlock(wait=0)
510 wlock = self.wlock(wait=0)
511 except lock.LockHeld:
511 except lock.LockHeld:
512 wlock = None
512 wlock = None
513 lookup, modified, added, removed, deleted, unknown = (
513 lookup, modified, added, removed, deleted, unknown = (
514 self.dirstate.changes(files, match))
514 self.dirstate.changes(files, match))
515
515
516 # are we comparing working dir against its parent?
516 # are we comparing working dir against its parent?
517 if not node1:
517 if not node1:
518 if lookup:
518 if lookup:
519 # do a full compare of any files that might have changed
519 # do a full compare of any files that might have changed
520 mf2 = mfmatches(self.dirstate.parents()[0])
520 mf2 = mfmatches(self.dirstate.parents()[0])
521 for f in lookup:
521 for f in lookup:
522 if fcmp(f, mf2):
522 if fcmp(f, mf2):
523 modified.append(f)
523 modified.append(f)
524 elif wlock is not None:
524 elif wlock is not None:
525 self.dirstate.update([f], "n")
525 self.dirstate.update([f], "n")
526 else:
526 else:
527 # we are comparing working dir against non-parent
527 # we are comparing working dir against non-parent
528 # generate a pseudo-manifest for the working dir
528 # generate a pseudo-manifest for the working dir
529 mf2 = mfmatches(self.dirstate.parents()[0])
529 mf2 = mfmatches(self.dirstate.parents()[0])
530 for f in lookup + modified + added:
530 for f in lookup + modified + added:
531 mf2[f] = ""
531 mf2[f] = ""
532 for f in removed:
532 for f in removed:
533 if f in mf2:
533 if f in mf2:
534 del mf2[f]
534 del mf2[f]
535 else:
535 else:
536 # we are comparing two revisions
536 # we are comparing two revisions
537 deleted, unknown = [], []
537 deleted, unknown = [], []
538 mf2 = mfmatches(node2)
538 mf2 = mfmatches(node2)
539
539
540 if node1:
540 if node1:
541 # flush lists from dirstate before comparing manifests
541 # flush lists from dirstate before comparing manifests
542 modified, added = [], []
542 modified, added = [], []
543
543
544 mf1 = mfmatches(node1)
544 mf1 = mfmatches(node1)
545
545
546 for fn in mf2:
546 for fn in mf2:
547 if mf1.has_key(fn):
547 if mf1.has_key(fn):
548 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
548 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
549 modified.append(fn)
549 modified.append(fn)
550 del mf1[fn]
550 del mf1[fn]
551 else:
551 else:
552 added.append(fn)
552 added.append(fn)
553
553
554 removed = mf1.keys()
554 removed = mf1.keys()
555
555
556 # sort and return results:
556 # sort and return results:
557 for l in modified, added, removed, deleted, unknown:
557 for l in modified, added, removed, deleted, unknown:
558 l.sort()
558 l.sort()
559 return (modified, added, removed, deleted, unknown)
559 return (modified, added, removed, deleted, unknown)
560
560
561 def add(self, list, wlock=None):
561 def add(self, list, wlock=None):
562 if not wlock:
562 if not wlock:
563 wlock = self.wlock()
563 wlock = self.wlock()
564 for f in list:
564 for f in list:
565 p = self.wjoin(f)
565 p = self.wjoin(f)
566 if not os.path.exists(p):
566 if not os.path.exists(p):
567 self.ui.warn(_("%s does not exist!\n") % f)
567 self.ui.warn(_("%s does not exist!\n") % f)
568 elif not os.path.isfile(p):
568 elif not os.path.isfile(p):
569 self.ui.warn(_("%s not added: only files supported currently\n")
569 self.ui.warn(_("%s not added: only files supported currently\n")
570 % f)
570 % f)
571 elif self.dirstate.state(f) in 'an':
571 elif self.dirstate.state(f) in 'an':
572 self.ui.warn(_("%s already tracked!\n") % f)
572 self.ui.warn(_("%s already tracked!\n") % f)
573 else:
573 else:
574 self.dirstate.update([f], "a")
574 self.dirstate.update([f], "a")
575
575
576 def forget(self, list, wlock=None):
576 def forget(self, list, wlock=None):
577 if not wlock:
577 if not wlock:
578 wlock = self.wlock()
578 wlock = self.wlock()
579 for f in list:
579 for f in list:
580 if self.dirstate.state(f) not in 'ai':
580 if self.dirstate.state(f) not in 'ai':
581 self.ui.warn(_("%s not added!\n") % f)
581 self.ui.warn(_("%s not added!\n") % f)
582 else:
582 else:
583 self.dirstate.forget([f])
583 self.dirstate.forget([f])
584
584
585 def remove(self, list, unlink=False, wlock=None):
585 def remove(self, list, unlink=False, wlock=None):
586 if unlink:
586 if unlink:
587 for f in list:
587 for f in list:
588 try:
588 try:
589 util.unlink(self.wjoin(f))
589 util.unlink(self.wjoin(f))
590 except OSError, inst:
590 except OSError, inst:
591 if inst.errno != errno.ENOENT:
591 if inst.errno != errno.ENOENT:
592 raise
592 raise
593 if not wlock:
593 if not wlock:
594 wlock = self.wlock()
594 wlock = self.wlock()
595 for f in list:
595 for f in list:
596 p = self.wjoin(f)
596 p = self.wjoin(f)
597 if os.path.exists(p):
597 if os.path.exists(p):
598 self.ui.warn(_("%s still exists!\n") % f)
598 self.ui.warn(_("%s still exists!\n") % f)
599 elif self.dirstate.state(f) == 'a':
599 elif self.dirstate.state(f) == 'a':
600 self.dirstate.forget([f])
600 self.dirstate.forget([f])
601 elif f not in self.dirstate:
601 elif f not in self.dirstate:
602 self.ui.warn(_("%s not tracked!\n") % f)
602 self.ui.warn(_("%s not tracked!\n") % f)
603 else:
603 else:
604 self.dirstate.update([f], "r")
604 self.dirstate.update([f], "r")
605
605
606 def undelete(self, list, wlock=None):
606 def undelete(self, list, wlock=None):
607 p = self.dirstate.parents()[0]
607 p = self.dirstate.parents()[0]
608 mn = self.changelog.read(p)[0]
608 mn = self.changelog.read(p)[0]
609 mf = self.manifest.readflags(mn)
609 mf = self.manifest.readflags(mn)
610 m = self.manifest.read(mn)
610 m = self.manifest.read(mn)
611 if not wlock:
611 if not wlock:
612 wlock = self.wlock()
612 wlock = self.wlock()
613 for f in list:
613 for f in list:
614 if self.dirstate.state(f) not in "r":
614 if self.dirstate.state(f) not in "r":
615 self.ui.warn("%s not removed!\n" % f)
615 self.ui.warn("%s not removed!\n" % f)
616 else:
616 else:
617 t = self.file(f).read(m[f])
617 t = self.file(f).read(m[f])
618 self.wwrite(f, t)
618 self.wwrite(f, t)
619 util.set_exec(self.wjoin(f), mf[f])
619 util.set_exec(self.wjoin(f), mf[f])
620 self.dirstate.update([f], "n")
620 self.dirstate.update([f], "n")
621
621
622 def copy(self, source, dest, wlock=None):
622 def copy(self, source, dest, wlock=None):
623 p = self.wjoin(dest)
623 p = self.wjoin(dest)
624 if not os.path.exists(p):
624 if not os.path.exists(p):
625 self.ui.warn(_("%s does not exist!\n") % dest)
625 self.ui.warn(_("%s does not exist!\n") % dest)
626 elif not os.path.isfile(p):
626 elif not os.path.isfile(p):
627 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
627 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
628 else:
628 else:
629 if not wlock:
629 if not wlock:
630 wlock = self.wlock()
630 wlock = self.wlock()
631 if self.dirstate.state(dest) == '?':
631 if self.dirstate.state(dest) == '?':
632 self.dirstate.update([dest], "a")
632 self.dirstate.update([dest], "a")
633 self.dirstate.copy(source, dest)
633 self.dirstate.copy(source, dest)
634
634
635 def heads(self, start=None):
635 def heads(self, start=None):
636 heads = self.changelog.heads(start)
636 heads = self.changelog.heads(start)
637 # sort the output in rev descending order
637 # sort the output in rev descending order
638 heads = [(-self.changelog.rev(h), h) for h in heads]
638 heads = [(-self.changelog.rev(h), h) for h in heads]
639 heads.sort()
639 heads.sort()
640 return [n for (r, n) in heads]
640 return [n for (r, n) in heads]
641
641
642 # branchlookup returns a dict giving a list of branches for
642 # branchlookup returns a dict giving a list of branches for
643 # each head. A branch is defined as the tag of a node or
643 # each head. A branch is defined as the tag of a node or
644 # the branch of the node's parents. If a node has multiple
644 # the branch of the node's parents. If a node has multiple
645 # branch tags, tags are eliminated if they are visible from other
645 # branch tags, tags are eliminated if they are visible from other
646 # branch tags.
646 # branch tags.
647 #
647 #
648 # So, for this graph: a->b->c->d->e
648 # So, for this graph: a->b->c->d->e
649 # \ /
649 # \ /
650 # aa -----/
650 # aa -----/
651 # a has tag 2.6.12
651 # a has tag 2.6.12
652 # d has tag 2.6.13
652 # d has tag 2.6.13
653 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
653 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
654 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
654 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
655 # from the list.
655 # from the list.
656 #
656 #
657 # It is possible that more than one head will have the same branch tag.
657 # It is possible that more than one head will have the same branch tag.
658 # callers need to check the result for multiple heads under the same
658 # callers need to check the result for multiple heads under the same
659 # branch tag if that is a problem for them (ie checkout of a specific
659 # branch tag if that is a problem for them (ie checkout of a specific
660 # branch).
660 # branch).
661 #
661 #
662 # passing in a specific branch will limit the depth of the search
662 # passing in a specific branch will limit the depth of the search
663 # through the parents. It won't limit the branches returned in the
663 # through the parents. It won't limit the branches returned in the
664 # result though.
664 # result though.
665 def branchlookup(self, heads=None, branch=None):
665 def branchlookup(self, heads=None, branch=None):
666 if not heads:
666 if not heads:
667 heads = self.heads()
667 heads = self.heads()
668 headt = [ h for h in heads ]
668 headt = [ h for h in heads ]
669 chlog = self.changelog
669 chlog = self.changelog
670 branches = {}
670 branches = {}
671 merges = []
671 merges = []
672 seenmerge = {}
672 seenmerge = {}
673
673
674 # traverse the tree once for each head, recording in the branches
674 # traverse the tree once for each head, recording in the branches
675 # dict which tags are visible from this head. The branches
675 # dict which tags are visible from this head. The branches
676 # dict also records which tags are visible from each tag
676 # dict also records which tags are visible from each tag
677 # while we traverse.
677 # while we traverse.
678 while headt or merges:
678 while headt or merges:
679 if merges:
679 if merges:
680 n, found = merges.pop()
680 n, found = merges.pop()
681 visit = [n]
681 visit = [n]
682 else:
682 else:
683 h = headt.pop()
683 h = headt.pop()
684 visit = [h]
684 visit = [h]
685 found = [h]
685 found = [h]
686 seen = {}
686 seen = {}
687 while visit:
687 while visit:
688 n = visit.pop()
688 n = visit.pop()
689 if n in seen:
689 if n in seen:
690 continue
690 continue
691 pp = chlog.parents(n)
691 pp = chlog.parents(n)
692 tags = self.nodetags(n)
692 tags = self.nodetags(n)
693 if tags:
693 if tags:
694 for x in tags:
694 for x in tags:
695 if x == 'tip':
695 if x == 'tip':
696 continue
696 continue
697 for f in found:
697 for f in found:
698 branches.setdefault(f, {})[n] = 1
698 branches.setdefault(f, {})[n] = 1
699 branches.setdefault(n, {})[n] = 1
699 branches.setdefault(n, {})[n] = 1
700 break
700 break
701 if n not in found:
701 if n not in found:
702 found.append(n)
702 found.append(n)
703 if branch in tags:
703 if branch in tags:
704 continue
704 continue
705 seen[n] = 1
705 seen[n] = 1
706 if pp[1] != nullid and n not in seenmerge:
706 if pp[1] != nullid and n not in seenmerge:
707 merges.append((pp[1], [x for x in found]))
707 merges.append((pp[1], [x for x in found]))
708 seenmerge[n] = 1
708 seenmerge[n] = 1
709 if pp[0] != nullid:
709 if pp[0] != nullid:
710 visit.append(pp[0])
710 visit.append(pp[0])
711 # traverse the branches dict, eliminating branch tags from each
711 # traverse the branches dict, eliminating branch tags from each
712 # head that are visible from another branch tag for that head.
712 # head that are visible from another branch tag for that head.
713 out = {}
713 out = {}
714 viscache = {}
714 viscache = {}
715 for h in heads:
715 for h in heads:
716 def visible(node):
716 def visible(node):
717 if node in viscache:
717 if node in viscache:
718 return viscache[node]
718 return viscache[node]
719 ret = {}
719 ret = {}
720 visit = [node]
720 visit = [node]
721 while visit:
721 while visit:
722 x = visit.pop()
722 x = visit.pop()
723 if x in viscache:
723 if x in viscache:
724 ret.update(viscache[x])
724 ret.update(viscache[x])
725 elif x not in ret:
725 elif x not in ret:
726 ret[x] = 1
726 ret[x] = 1
727 if x in branches:
727 if x in branches:
728 visit[len(visit):] = branches[x].keys()
728 visit[len(visit):] = branches[x].keys()
729 viscache[node] = ret
729 viscache[node] = ret
730 return ret
730 return ret
731 if h not in branches:
731 if h not in branches:
732 continue
732 continue
733 # O(n^2), but somewhat limited. This only searches the
733 # O(n^2), but somewhat limited. This only searches the
734 # tags visible from a specific head, not all the tags in the
734 # tags visible from a specific head, not all the tags in the
735 # whole repo.
735 # whole repo.
736 for b in branches[h]:
736 for b in branches[h]:
737 vis = False
737 vis = False
738 for bb in branches[h].keys():
738 for bb in branches[h].keys():
739 if b != bb:
739 if b != bb:
740 if b in visible(bb):
740 if b in visible(bb):
741 vis = True
741 vis = True
742 break
742 break
743 if not vis:
743 if not vis:
744 l = out.setdefault(h, [])
744 l = out.setdefault(h, [])
745 l[len(l):] = self.nodetags(b)
745 l[len(l):] = self.nodetags(b)
746 return out
746 return out
747
747
748 def branches(self, nodes):
748 def branches(self, nodes):
749 if not nodes:
749 if not nodes:
750 nodes = [self.changelog.tip()]
750 nodes = [self.changelog.tip()]
751 b = []
751 b = []
752 for n in nodes:
752 for n in nodes:
753 t = n
753 t = n
754 while n:
754 while n:
755 p = self.changelog.parents(n)
755 p = self.changelog.parents(n)
756 if p[1] != nullid or p[0] == nullid:
756 if p[1] != nullid or p[0] == nullid:
757 b.append((t, n, p[0], p[1]))
757 b.append((t, n, p[0], p[1]))
758 break
758 break
759 n = p[0]
759 n = p[0]
760 return b
760 return b
761
761
762 def between(self, pairs):
762 def between(self, pairs):
763 r = []
763 r = []
764
764
765 for top, bottom in pairs:
765 for top, bottom in pairs:
766 n, l, i = top, [], 0
766 n, l, i = top, [], 0
767 f = 1
767 f = 1
768
768
769 while n != bottom:
769 while n != bottom:
770 p = self.changelog.parents(n)[0]
770 p = self.changelog.parents(n)[0]
771 if i == f:
771 if i == f:
772 l.append(n)
772 l.append(n)
773 f = f * 2
773 f = f * 2
774 n = p
774 n = p
775 i += 1
775 i += 1
776
776
777 r.append(l)
777 r.append(l)
778
778
779 return r
779 return r
780
780
def findincoming(self, remote, base=None, heads=None):
    """Return the earliest changesets we lack that 'remote' has.

    base: optional dict; filled in place with nodes known to both
          sides, so it doubles as an output parameter.
    heads: remote heads to consider; defaults to remote.heads().

    Returns None when every remote head is already known locally,
    otherwise the list of earliest-unknown changeset nodes to fetch.
    """
    # nodemap membership tells us whether a node is known locally
    m = self.changelog.nodemap
    search = []          # branch ranges scheduled for binary search
    fetch = {}           # earliest-unknown nodes (the result set)
    seen = {}            # branch heads already examined
    seenbranch = {}      # whole branch tuples already examined
    if base == None:
        base = {}

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    if not heads:
        heads = remote.heads()

    # split remote heads into known (-> base) and unknown
    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        return None

    rep = {}             # parents already requested from the remote
    reqcnt = 0           # number of remote round-trips (for debug output)

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []           # parents to ask the remote about next round
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n")
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid:
                break
            if n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            if n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                        base[n[2]] = 1 # latest known
                        continue

                # branch root unknown: queue its parents for the next
                # remote.branches() request, once each
                for a in n[2:4]:
                    if a not in rep:
                        r.append(a)
                        rep[a] = 1

            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # batch the requests ten parents at a time
            for p in range(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    if b[0] in m:
                        self.ui.debug(_("found base node %s\n")
                                      % short(b[0]))
                        base[b[0]] = 1
                    elif b[0] not in seen:
                        unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        # remote.between() returns power-of-two-spaced samples of the
        # branch; append the known base to close the interval
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    # interval of width <= 2: p is the earliest unknown
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    # keep narrowing the (unknown, known) interval
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            raise repo.RepoError(_("already have changeset ") + short(f[:4]))

    if base.keys() == [nullid]:
        self.ui.warn(_("warning: pulling from an unrelated repository!\n"))

    self.ui.note(_("found new changesets starting at ") +
                 " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
902
902
def findoutgoing(self, remote, base=None, heads=None):
    """Return the roots of the changesets missing from 'remote'.

    base: optional dict of nodes known to both sides; when omitted,
    incoming discovery is run first to compute it in place.
    """
    if base is None:
        base = {}
        self.findincoming(remote, base, heads)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    # start from every node we have ...
    remain = dict.fromkeys(self.changelog.nodemap)
    del remain[nullid]

    # ... and prune everything the remote already knows, including all
    # ancestors of the common nodes
    pending = list(base.keys())
    while pending:
        node = pending.pop(0)
        if node in remain:
            del remain[node]
            pending.extend(self.changelog.parents(node))

    # the nodes left over whose parents were both pruned are the roots
    # of everything we have to push
    roots = []
    for node in remain:
        first, second = self.changelog.parents(node)
        if first not in remain and second not in remain:
            roots.append(node)

    return roots
932
932
def pull(self, remote, heads=None):
    """Pull new changesets from the given remote repository.

    heads: optional list of remote heads to limit the pull to.
    Returns 1 when there is nothing to pull, otherwise the result of
    addchangegroup().
    """
    # keep a reference so the repository stays locked until we return
    plock = self.lock()

    if self.changelog.tip() == nullid:
        # empty local repository: ask for everything the remote has
        self.ui.status(_("requesting all changes\n"))
        fetch = [nullid]
    else:
        fetch = self.findincoming(remote)

    if not fetch:
        self.ui.status(_("no changes found\n"))
        return 1

    if heads is None:
        group = remote.changegroup(fetch, 'pull')
    else:
        group = remote.changegroupsubset(fetch, heads, 'pull')
    return self.addchangegroup(group)
952
952
def push(self, remote, force=False):
    """Push outgoing changesets to the given remote repository.

    Unless force is set, abort (returning 1) when the remote has
    unsynced changes or the push would create new remote branches.
    Returns 1 when there is nothing to push, otherwise the result of
    the remote's addchangegroup().
    """
    # keep a reference so the remote stays locked until we return
    remotelock = remote.lock()

    base = {}
    remoteheads = remote.heads()
    incoming = self.findincoming(remote, base, remoteheads)
    if incoming and not force:
        self.ui.warn(_("abort: unsynced remote changes!\n"))
        self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
        return 1

    outgoing = self.findoutgoing(remote, base)
    if not outgoing:
        self.ui.status(_("no changes found\n"))
        return 1
    if not force and len(remoteheads) < len(self.changelog.heads()):
        # pushing would add a head the remote does not have yet
        self.ui.warn(_("abort: push creates new remote branches!\n"))
        self.ui.status(_("(did you forget to merge?"
                         " use push -f to force)\n"))
        return 1

    return remote.addchangegroup(self.changegroup(outgoing, 'push'))
977
977
def changegroupsubset(self, bases, heads, source):
    """This function generates a changegroup consisting of all the nodes
    that are descendents of any of the bases, and ancestors of any of
    the heads.

    It is fairly complex as determining which filenodes and which
    manifest nodes need to be included for the changeset to be complete
    is non-trivial.

    Another wrinkle is doing the reverse, figuring out which changeset in
    the changegroup a particular filenode or manifestnode belongs to.

    bases: changenodes assumed to already exist on the recipient side
    heads: changenodes the generated group must reach
    source: tag passed through to the 'preoutgoing'/'outgoing' hooks

    Returns a util.chunkbuffer wrapping the group generator."""

    # with throw=True a failing hook raises, aborting the operation
    self.hook('preoutgoing', throw=True, source=source)

    # Set up some initial variables
    # Make it easy to refer to self.changelog
    cl = self.changelog
    # msng is short for missing - compute the list of changesets in this
    # changegroup.
    msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
    # Some bases may turn out to be superfluous, and some heads may be
    # too.  nodesbetween will return the minimal set of bases and heads
    # necessary to re-create the changegroup.

    # Known heads are the list of heads that it is assumed the recipient
    # of this changegroup will know about.
    knownheads = {}
    # We assume that all parents of bases are known heads.
    for n in bases:
        for p in cl.parents(n):
            if p != nullid:
                knownheads[p] = 1
    knownheads = knownheads.keys()
    if knownheads:
        # Now that we know what heads are known, we can compute which
        # changesets are known.  The recipient must know about all
        # changesets required to reach the known heads from the null
        # changeset.
        has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
        junk = None
        # Transform the list into an ersatz set.
        has_cl_set = dict.fromkeys(has_cl_set)
    else:
        # If there were no known heads, the recipient cannot be assumed to
        # know about any changesets.
        has_cl_set = {}

    # Make it easy to refer to self.manifest
    mnfst = self.manifest
    # We don't know which manifests are missing yet
    msng_mnfst_set = {}
    # Nor do we know which filenodes are missing.
    msng_filenode_set = {}

    junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
    junk = None

    # A changeset always belongs to itself, so the changenode lookup
    # function for a changenode is identity.
    def identity(x):
        return x

    # A function generating function.  Sets up an environment for the
    # inner function.
    def cmp_by_rev_func(revlog):
        # Compare two nodes by their revision number in the environment's
        # revision history.  Since the revision number both represents the
        # most efficient order to read the nodes in, and represents a
        # topological sorting of the nodes, this function is often useful.
        def cmp_by_rev(a, b):
            return cmp(revlog.rev(a), revlog.rev(b))
        return cmp_by_rev

    # If we determine that a particular file or manifest node must be a
    # node that the recipient of the changegroup will already have, we can
    # also assume the recipient will have all the parents.  This function
    # prunes them from the set of missing nodes.
    def prune_parents(revlog, hasset, msngset):
        haslst = hasset.keys()
        haslst.sort(cmp_by_rev_func(revlog))
        for node in haslst:
            parentlst = [p for p in revlog.parents(node) if p != nullid]
            while parentlst:
                n = parentlst.pop()
                if n not in hasset:
                    hasset[n] = 1
                    p = [p for p in revlog.parents(n) if p != nullid]
                    parentlst.extend(p)
        # everything marked as 'has' is by definition not missing
        for n in hasset:
            msngset.pop(n, None)

    # This is a function generating function used to set up an environment
    # for the inner function to execute in.
    def manifest_and_file_collector(changedfileset):
        # This is an information gathering function that gathers
        # information from each changeset node that goes out as part of
        # the changegroup.  The information gathered is a list of which
        # manifest nodes are potentially required (the recipient may
        # already have them) and total list of all files which were
        # changed in any changeset in the changegroup.
        #
        # We also remember the first changenode we saw any manifest
        # referenced by so we can later determine which changenode 'owns'
        # the manifest.
        def collect_manifests_and_files(clnode):
            c = cl.read(clnode)
            for f in c[3]:
                # This is to make sure we only have one instance of each
                # filename string for each filename.
                changedfileset.setdefault(f, f)
            msng_mnfst_set.setdefault(c[0], clnode)
        return collect_manifests_and_files

    # Figure out which manifest nodes (of the ones we think might be part
    # of the changegroup) the recipient must know about and remove them
    # from the changegroup.
    def prune_manifests():
        has_mnfst_set = {}
        for n in msng_mnfst_set:
            # If a 'missing' manifest thinks it belongs to a changenode
            # the recipient is assumed to have, obviously the recipient
            # must have that manifest.
            linknode = cl.node(mnfst.linkrev(n))
            if linknode in has_cl_set:
                has_mnfst_set[n] = 1
        prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

    # Use the information collected in collect_manifests_and_files to say
    # which changenode any manifestnode belongs to.
    def lookup_manifest_link(mnfstnode):
        return msng_mnfst_set[mnfstnode]

    # A function generating function that sets up the initial environment
    # the inner function.
    def filenode_collector(changedfiles):
        # mutable cell so the closure can update it (py2 has no nonlocal)
        next_rev = [0]
        # This gathers information from each manifestnode included in the
        # changegroup about which filenodes the manifest node references
        # so we can include those in the changegroup too.
        #
        # It also remembers which changenode each filenode belongs to.  It
        # does this by assuming the a filenode belongs to the changenode
        # the first manifest that references it belongs to.
        def collect_msng_filenodes(mnfstnode):
            r = mnfst.rev(mnfstnode)
            if r == next_rev[0]:
                # If the last rev we looked at was the one just previous,
                # we only need to see a diff.
                delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                # For each line in the delta
                for dline in delta.splitlines():
                    # get the filename and filenode for that line
                    f, fnode = dline.split('\0')
                    fnode = bin(fnode[:40])
                    f = changedfiles.get(f, None)
                    # And if the file is in the list of files we care
                    # about.
                    if f is not None:
                        # Get the changenode this manifest belongs to
                        clnode = msng_mnfst_set[mnfstnode]
                        # Create the set of filenodes for the file if
                        # there isn't one already.
                        ndset = msng_filenode_set.setdefault(f, {})
                        # And set the filenode's changelog node to the
                        # manifest's if it hasn't been set already.
                        ndset.setdefault(fnode, clnode)
            else:
                # Otherwise we need a full manifest.
                m = mnfst.read(mnfstnode)
                # For every file in we care about.
                for f in changedfiles:
                    fnode = m.get(f, None)
                    # If it's in the manifest
                    if fnode is not None:
                        # See comments above.
                        clnode = msng_mnfst_set[mnfstnode]
                        ndset = msng_filenode_set.setdefault(f, {})
                        ndset.setdefault(fnode, clnode)
            # Remember the revision we hope to see next.
            next_rev[0] = r + 1
        return collect_msng_filenodes

    # We have a list of filenodes we think we need for a file, lets remove
    # all those we now the recipient must have.
    def prune_filenodes(f, filerevlog):
        msngset = msng_filenode_set[f]
        hasset = {}
        # If a 'missing' filenode thinks it belongs to a changenode we
        # assume the recipient must have, then the recipient must have
        # that filenode.
        for n in msngset:
            clnode = cl.node(filerevlog.linkrev(n))
            if clnode in has_cl_set:
                hasset[n] = 1
        prune_parents(filerevlog, hasset, msngset)

    # A function generator function that sets up the a context for the
    # inner function.
    def lookup_filenode_link_func(fname):
        msngset = msng_filenode_set[fname]
        # Lookup the changenode the filenode belongs to.
        def lookup_filenode_link(fnode):
            return msngset[fnode]
        return lookup_filenode_link

    # Now that we have all theses utility functions to help out and
    # logically divide up the task, generate the group.
    def gengroup():
        # The set of changed files starts empty.
        changedfiles = {}
        # Create a changenode group generator that will call our functions
        # back to lookup the owning changenode and collect information.
        group = cl.group(msng_cl_lst, identity,
                         manifest_and_file_collector(changedfiles))
        for chnk in group:
            yield chnk

        # The list of manifests has been collected by the generator
        # calling our functions back.
        prune_manifests()
        msng_mnfst_lst = msng_mnfst_set.keys()
        # Sort the manifestnodes by revision number.
        msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
        # Create a generator for the manifestnodes that calls our lookup
        # and data collection functions back.
        group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                            filenode_collector(changedfiles))
        for chnk in group:
            yield chnk

        # These are no longer needed, dereference and toss the memory for
        # them.
        msng_mnfst_lst = None
        msng_mnfst_set.clear()

        changedfiles = changedfiles.keys()
        changedfiles.sort()
        # Go through all our files in order sorted by name.
        for fname in changedfiles:
            filerevlog = self.file(fname)
            # Toss out the filenodes that the recipient isn't really
            # missing.
            if msng_filenode_set.has_key(fname):
                prune_filenodes(fname, filerevlog)
                msng_filenode_lst = msng_filenode_set[fname].keys()
            else:
                msng_filenode_lst = []
            # If any filenodes are left, generate the group for them,
            # otherwise don't bother.
            if len(msng_filenode_lst) > 0:
                yield struct.pack(">l", len(fname) + 4) + fname
                # Sort the filenodes by their revision #
                msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                # Create a group generator and only pass in a changenode
                # lookup function as we need to collect no information
                # from filenodes.
                group = filerevlog.group(msng_filenode_lst,
                                         lookup_filenode_link_func(fname))
                for chnk in group:
                    yield chnk
            if msng_filenode_set.has_key(fname):
                # Don't need this anymore, toss it to free memory.
                del msng_filenode_set[fname]
        # Signal that no more groups are left.
        yield struct.pack(">l", 0)

    self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

    return util.chunkbuffer(gengroup())
1247
1247
1248 def changegroup(self, basenodes, source):
1248 def changegroup(self, basenodes, source):
1249 """Generate a changegroup of all nodes that we have that a recipient
1249 """Generate a changegroup of all nodes that we have that a recipient
1250 doesn't.
1250 doesn't.
1251
1251
1252 This is much easier than the previous function as we can assume that
1252 This is much easier than the previous function as we can assume that
1253 the recipient has any changenode we aren't sending them."""
1253 the recipient has any changenode we aren't sending them."""
1254
1254
1255 self.hook('preoutgoing', throw=True, source=source)
1255 self.hook('preoutgoing', throw=True, source=source)
1256
1256
1257 cl = self.changelog
1257 cl = self.changelog
1258 nodes = cl.nodesbetween(basenodes, None)[0]
1258 nodes = cl.nodesbetween(basenodes, None)[0]
1259 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1259 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1260
1260
1261 def identity(x):
1261 def identity(x):
1262 return x
1262 return x
1263
1263
1264 def gennodelst(revlog):
1264 def gennodelst(revlog):
1265 for r in xrange(0, revlog.count()):
1265 for r in xrange(0, revlog.count()):
1266 n = revlog.node(r)
1266 n = revlog.node(r)
1267 if revlog.linkrev(n) in revset:
1267 if revlog.linkrev(n) in revset:
1268 yield n
1268 yield n
1269
1269
1270 def changed_file_collector(changedfileset):
1270 def changed_file_collector(changedfileset):
1271 def collect_changed_files(clnode):
1271 def collect_changed_files(clnode):
1272 c = cl.read(clnode)
1272 c = cl.read(clnode)
1273 for fname in c[3]:
1273 for fname in c[3]:
1274 changedfileset[fname] = 1
1274 changedfileset[fname] = 1
1275 return collect_changed_files
1275 return collect_changed_files
1276
1276
1277 def lookuprevlink_func(revlog):
1277 def lookuprevlink_func(revlog):
1278 def lookuprevlink(n):
1278 def lookuprevlink(n):
1279 return cl.node(revlog.linkrev(n))
1279 return cl.node(revlog.linkrev(n))
1280 return lookuprevlink
1280 return lookuprevlink
1281
1281
1282 def gengroup():
1282 def gengroup():
1283 # construct a list of all changed files
1283 # construct a list of all changed files
1284 changedfiles = {}
1284 changedfiles = {}
1285
1285
1286 for chnk in cl.group(nodes, identity,
1286 for chnk in cl.group(nodes, identity,
1287 changed_file_collector(changedfiles)):
1287 changed_file_collector(changedfiles)):
1288 yield chnk
1288 yield chnk
1289 changedfiles = changedfiles.keys()
1289 changedfiles = changedfiles.keys()
1290 changedfiles.sort()
1290 changedfiles.sort()
1291
1291
1292 mnfst = self.manifest
1292 mnfst = self.manifest
1293 nodeiter = gennodelst(mnfst)
1293 nodeiter = gennodelst(mnfst)
1294 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1294 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1295 yield chnk
1295 yield chnk
1296
1296
1297 for fname in changedfiles:
1297 for fname in changedfiles:
1298 filerevlog = self.file(fname)
1298 filerevlog = self.file(fname)
1299 nodeiter = gennodelst(filerevlog)
1299 nodeiter = gennodelst(filerevlog)
1300 nodeiter = list(nodeiter)
1300 nodeiter = list(nodeiter)
1301 if nodeiter:
1301 if nodeiter:
1302 yield struct.pack(">l", len(fname) + 4) + fname
1302 yield struct.pack(">l", len(fname) + 4) + fname
1303 lookup = lookuprevlink_func(filerevlog)
1303 lookup = lookuprevlink_func(filerevlog)
1304 for chnk in filerevlog.group(nodeiter, lookup):
1304 for chnk in filerevlog.group(nodeiter, lookup):
1305 yield chnk
1305 yield chnk
1306
1306
1307 yield struct.pack(">l", 0)
1307 yield struct.pack(">l", 0)
1308 self.hook('outgoing', node=hex(nodes[0]), source=source)
1308 self.hook('outgoing', node=hex(nodes[0]), source=source)
1309
1309
1310 return util.chunkbuffer(gengroup())
1310 return util.chunkbuffer(gengroup())
1311
1311
1312 def addchangegroup(self, source):
1312 def addchangegroup(self, source):
1313
1313
1314 def getchunk():
1314 def getchunk():
1315 d = source.read(4)
1315 d = source.read(4)
1316 if not d:
1316 if not d:
1317 return ""
1317 return ""
1318 l = struct.unpack(">l", d)[0]
1318 l = struct.unpack(">l", d)[0]
1319 if l <= 4:
1319 if l <= 4:
1320 return ""
1320 return ""
1321 d = source.read(l - 4)
1321 d = source.read(l - 4)
1322 if len(d) < l - 4:
1322 if len(d) < l - 4:
1323 raise repo.RepoError(_("premature EOF reading chunk"
1323 raise repo.RepoError(_("premature EOF reading chunk"
1324 " (got %d bytes, expected %d)")
1324 " (got %d bytes, expected %d)")
1325 % (len(d), l - 4))
1325 % (len(d), l - 4))
1326 return d
1326 return d
1327
1327
1328 def getgroup():
1328 def getgroup():
1329 while 1:
1329 while 1:
1330 c = getchunk()
1330 c = getchunk()
1331 if not c:
1331 if not c:
1332 break
1332 break
1333 yield c
1333 yield c
1334
1334
1335 def csmap(x):
1335 def csmap(x):
1336 self.ui.debug(_("add changeset %s\n") % short(x))
1336 self.ui.debug(_("add changeset %s\n") % short(x))
1337 return self.changelog.count()
1337 return self.changelog.count()
1338
1338
1339 def revmap(x):
1339 def revmap(x):
1340 return self.changelog.rev(x)
1340 return self.changelog.rev(x)
1341
1341
1342 if not source:
1342 if not source:
1343 return
1343 return
1344
1344
1345 self.hook('prechangegroup', throw=True)
1345 self.hook('prechangegroup', throw=True)
1346
1346
1347 changesets = files = revisions = 0
1347 changesets = files = revisions = 0
1348
1348
1349 tr = self.transaction()
1349 tr = self.transaction()
1350
1350
1351 oldheads = len(self.changelog.heads())
1351 oldheads = len(self.changelog.heads())
1352
1352
1353 # pull off the changeset group
1353 # pull off the changeset group
1354 self.ui.status(_("adding changesets\n"))
1354 self.ui.status(_("adding changesets\n"))
1355 co = self.changelog.tip()
1355 co = self.changelog.tip()
1356 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1356 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1357 cnr, cor = map(self.changelog.rev, (cn, co))
1357 cnr, cor = map(self.changelog.rev, (cn, co))
1358 if cn == nullid:
1358 if cn == nullid:
1359 cnr = cor
1359 cnr = cor
1360 changesets = cnr - cor
1360 changesets = cnr - cor
1361
1361
1362 # pull off the manifest group
1362 # pull off the manifest group
1363 self.ui.status(_("adding manifests\n"))
1363 self.ui.status(_("adding manifests\n"))
1364 mm = self.manifest.tip()
1364 mm = self.manifest.tip()
1365 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1365 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1366
1366
1367 # process the files
1367 # process the files
1368 self.ui.status(_("adding file changes\n"))
1368 self.ui.status(_("adding file changes\n"))
1369 while 1:
1369 while 1:
1370 f = getchunk()
1370 f = getchunk()
1371 if not f:
1371 if not f:
1372 break
1372 break
1373 self.ui.debug(_("adding %s revisions\n") % f)
1373 self.ui.debug(_("adding %s revisions\n") % f)
1374 fl = self.file(f)
1374 fl = self.file(f)
1375 o = fl.count()
1375 o = fl.count()
1376 n = fl.addgroup(getgroup(), revmap, tr)
1376 n = fl.addgroup(getgroup(), revmap, tr)
1377 revisions += fl.count() - o
1377 revisions += fl.count() - o
1378 files += 1
1378 files += 1
1379
1379
1380 newheads = len(self.changelog.heads())
1380 newheads = len(self.changelog.heads())
1381 heads = ""
1381 heads = ""
1382 if oldheads and newheads > oldheads:
1382 if oldheads and newheads > oldheads:
1383 heads = _(" (+%d heads)") % (newheads - oldheads)
1383 heads = _(" (+%d heads)") % (newheads - oldheads)
1384
1384
1385 self.ui.status(_("added %d changesets"
1385 self.ui.status(_("added %d changesets"
1386 " with %d changes to %d files%s\n")
1386 " with %d changes to %d files%s\n")
1387 % (changesets, revisions, files, heads))
1387 % (changesets, revisions, files, heads))
1388
1388
1389 self.hook('pretxnchangegroup', throw=True,
1389 self.hook('pretxnchangegroup', throw=True,
1390 node=hex(self.changelog.node(cor+1)))
1390 node=hex(self.changelog.node(cor+1)))
1391
1391
1392 tr.close()
1392 tr.close()
1393
1393
1394 if changesets > 0:
1394 if changesets > 0:
1395 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1395 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1396
1396
1397 for i in range(cor + 1, cnr + 1):
1397 for i in range(cor + 1, cnr + 1):
1398 self.hook("incoming", node=hex(self.changelog.node(i)))
1398 self.hook("incoming", node=hex(self.changelog.node(i)))
1399
1399
    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True, forcemerge=False, wlock=None):
        """update the working directory to changeset 'node'

        allow:       permit a branch merge (otherwise an update that spans
                     branches aborts)
        force:       clobber local changes / outstanding merges
        choose:      optional predicate limiting the update to files for
                     which choose(f) is true
        moddirstate: when False, compute and apply file changes but leave
                     the dirstate untouched
        forcemerge:  merge even with outstanding local changes
        wlock:       an already-held working-dir lock to reuse; if None and
                     moddirstate, one is acquired here

        Returns 1 when the update aborts, otherwise True if any 3-way
        merge failed, else False.
        """
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
            return 1

        err = False

        # p1 = working dir parent, p2 = update target, pa = their ancestor
        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n).copy()
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        modified, added, removed, deleted, unknown = self.changes()

        # is this a jump, or a merge?  i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        if allow and linear_path:
            raise util.Abort(_("there is nothing to merge, "
                               "just use 'hg update'"))
        if allow and not forcemerge:
            if modified or added or removed:
                raise util.Abort(_("outstanding uncommited changes"))
        if not forcemerge and not force:
            # refuse to overwrite untracked files that differ from the
            # version in the target revision
            for f in unknown:
                if f in m2:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) != 0:
                        raise util.Abort(_("'%s' already exists in the working"
                                           " dir and differs from remote") % f)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note(_("resolving manifests\n"))
        self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(_(" ancestor %s local %s remote %s\n") %
                      (short(man), short(m1n), short(m2n)))

        # merge: files needing a 3-way merge -> (local node, remote node, mode)
        # get:   files to fetch from the target revision -> node
        # remove: files to delete from the working dir
        merge = {}
        get = {}
        remove = []

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(unknown)

        for f in added + modified + unknown:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        if moddirstate and not wlock:
            wlock = self.wlock()

        for f in deleted + removed:
            if f in mw:
                del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f):
                continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(_(" %s versions differ, resolve\n") % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(_(" remote %s is newer, get\n") % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(_(" updating permissions for %s\n") % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        # same exec-bit merge rule as above
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(_(" updating permissions for %s\n")
                                          % f)
                            util.set_exec(self.wjoin(f), mode)
                # handled; anything left in m2 afterwards is remote-only
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    # locally changed, remotely deleted: ask (or keep)
                    r = _("d")
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (_(" local changed %s which remote deleted\n") % f) +
                            _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                    if r == _("d"):
                        remove.append(f)
                else:
                    self.ui.debug(_("other deleted %s\n") % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug(_("remote deleted %s, clobbering\n") % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug(_("remote deleted %s\n") % f)
                        remove.append(f)
                    else:
                        self.ui.debug(_("local modified %s, keeping\n") % f)
                else:
                    self.ui.debug(_("working dir created %s, keeping\n") % f)

        # everything still in m2 exists only in the target revision
        for f, n in m2.iteritems():
            if choose and not choose(f):
                continue
            if f[0] == "/":
                continue
            if f in ma and n != ma[f]:
                # remotely changed, locally deleted: ask (or skip)
                r = _("k")
                if not force and (linear_path or allow):
                    r = self.ui.prompt(
                        (_("remote changed %s which local deleted\n") % f) +
                        _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                if r == _("k"):
                    get[f] = n
            elif f not in ma:
                self.ui.debug(_("remote created %s\n") % f)
                get[f] = n
            else:
                if force or p2 == pa: # going backwards?
                    self.ui.debug(_("local deleted %s, recreating\n") % f)
                    get[f] = n
                else:
                    self.ui.debug(_("local deleted %s\n") % f)

        del mw, m1, m2, ma

        if force:
            # clobber: take the remote side of every would-be merge
            for f in merge:
                get[f] = merge[f][1]
            merge = {}

        if linear_path or force:
            # we don't need to do any magic, just jump to the new rev
            branch_merge = False
            p1, p2 = p2, nullid
        else:
            if not allow:
                self.ui.status(_("this update spans a branch"
                                 " affecting the following files:\n"))
                fl = merge.keys() + get.keys()
                fl.sort()
                for f in fl:
                    cf = ""
                    if f in merge:
                        cf = _(" (resolve)")
                    self.ui.status(" %s%s\n" % (f, cf))
                self.ui.warn(_("aborting update spanning branches!\n"))
                self.ui.status(_("(use update -m to merge across branches"
                                 " or -C to lose changes)\n"))
                return 1
            branch_merge = True

        # get the files we don't need to change
        files = get.keys()
        files.sort()
        for f in files:
            if f[0] == "/":
                continue
            self.ui.note(_("getting %s\n") % f)
            t = self.file(f).read(get[f])
            self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf2[f])
            if moddirstate:
                if branch_merge:
                    # st_mtime=-1 forces a later content comparison
                    self.dirstate.update([f], 'n', st_mtime=-1)
                else:
                    self.dirstate.update([f], 'n')

        # merge the tricky bits
        files = merge.keys()
        files.sort()
        for f in files:
            self.ui.status(_("merging %s\n") % f)
            my, other, flag = merge[f]
            ret = self.merge3(f, my, other)
            if ret:
                err = True
            util.set_exec(self.wjoin(f), flag)
            if moddirstate:
                if branch_merge:
                    # We've done a branch merge, mark this file as merged
                    # so that we properly record the merger later
                    self.dirstate.update([f], 'm')
                else:
                    # We've update-merged a locally modified file, so
                    # we set the dirstate to emulate a normal checkout
                    # of that file some time in the past. Thus our
                    # merge will appear as a normal local file
                    # modification.
                    f_len = len(self.file(f).read(other))
                    self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)

        remove.sort()
        for f in remove:
            self.ui.note(_("removing %s\n") % f)
            try:
                util.unlink(self.wjoin(f))
            except OSError, inst:
                # a file already gone is fine; anything else is worth a warning
                if inst.errno != errno.ENOENT:
                    self.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
        if moddirstate:
            if branch_merge:
                self.dirstate.update(remove, 'r')
            else:
                self.dirstate.forget(remove)

        if moddirstate:
            self.dirstate.setparents(p1, p2)
        return err
1663
1663
1664 def merge3(self, fn, my, other):
1664 def merge3(self, fn, my, other):
1665 """perform a 3-way merge in the working directory"""
1665 """perform a 3-way merge in the working directory"""
1666
1666
1667 def temp(prefix, node):
1667 def temp(prefix, node):
1668 pre = "%s~%s." % (os.path.basename(fn), prefix)
1668 pre = "%s~%s." % (os.path.basename(fn), prefix)
1669 (fd, name) = tempfile.mkstemp("", pre)
1669 (fd, name) = tempfile.mkstemp("", pre)
1670 f = os.fdopen(fd, "wb")
1670 f = os.fdopen(fd, "wb")
1671 self.wwrite(fn, fl.read(node), f)
1671 self.wwrite(fn, fl.read(node), f)
1672 f.close()
1672 f.close()
1673 return name
1673 return name
1674
1674
1675 fl = self.file(fn)
1675 fl = self.file(fn)
1676 base = fl.ancestor(my, other)
1676 base = fl.ancestor(my, other)
1677 a = self.wjoin(fn)
1677 a = self.wjoin(fn)
1678 b = temp("base", base)
1678 b = temp("base", base)
1679 c = temp("other", other)
1679 c = temp("other", other)
1680
1680
1681 self.ui.note(_("resolving %s\n") % fn)
1681 self.ui.note(_("resolving %s\n") % fn)
1682 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1682 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1683 (fn, short(my), short(other), short(base)))
1683 (fn, short(my), short(other), short(base)))
1684
1684
1685 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1685 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1686 or "hgmerge")
1686 or "hgmerge")
1687 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1687 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1688 if r:
1688 if r:
1689 self.ui.warn(_("merging %s failed!\n") % fn)
1689 self.ui.warn(_("merging %s failed!\n") % fn)
1690
1690
1691 os.unlink(b)
1691 os.unlink(b)
1692 os.unlink(c)
1692 os.unlink(c)
1693 return r
1693 return r
1694
1694
1695 def verify(self):
1695 def verify(self):
1696 filelinkrevs = {}
1696 filelinkrevs = {}
1697 filenodes = {}
1697 filenodes = {}
1698 changesets = revisions = files = 0
1698 changesets = revisions = files = 0
1699 errors = [0]
1699 errors = [0]
1700 neededmanifests = {}
1700 neededmanifests = {}
1701
1701
1702 def err(msg):
1702 def err(msg):
1703 self.ui.warn(msg + "\n")
1703 self.ui.warn(msg + "\n")
1704 errors[0] += 1
1704 errors[0] += 1
1705
1705
1706 def checksize(obj, name):
1706 def checksize(obj, name):
1707 d = obj.checksize()
1707 d = obj.checksize()
1708 if d[0]:
1708 if d[0]:
1709 err(_("%s data length off by %d bytes") % (name, d[0]))
1709 err(_("%s data length off by %d bytes") % (name, d[0]))
1710 if d[1]:
1710 if d[1]:
1711 err(_("%s index contains %d extra bytes") % (name, d[1]))
1711 err(_("%s index contains %d extra bytes") % (name, d[1]))
1712
1712
1713 seen = {}
1713 seen = {}
1714 self.ui.status(_("checking changesets\n"))
1714 self.ui.status(_("checking changesets\n"))
1715 checksize(self.changelog, "changelog")
1715 checksize(self.changelog, "changelog")
1716
1716
1717 for i in range(self.changelog.count()):
1717 for i in range(self.changelog.count()):
1718 changesets += 1
1718 changesets += 1
1719 n = self.changelog.node(i)
1719 n = self.changelog.node(i)
1720 l = self.changelog.linkrev(n)
1720 l = self.changelog.linkrev(n)
1721 if l != i:
1721 if l != i:
1722 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1722 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1723 if n in seen:
1723 if n in seen:
1724 err(_("duplicate changeset at revision %d") % i)
1724 err(_("duplicate changeset at revision %d") % i)
1725 seen[n] = 1
1725 seen[n] = 1
1726
1726
1727 for p in self.changelog.parents(n):
1727 for p in self.changelog.parents(n):
1728 if p not in self.changelog.nodemap:
1728 if p not in self.changelog.nodemap:
1729 err(_("changeset %s has unknown parent %s") %
1729 err(_("changeset %s has unknown parent %s") %
1730 (short(n), short(p)))
1730 (short(n), short(p)))
1731 try:
1731 try:
1732 changes = self.changelog.read(n)
1732 changes = self.changelog.read(n)
1733 except KeyboardInterrupt:
1733 except KeyboardInterrupt:
1734 self.ui.warn(_("interrupted"))
1734 self.ui.warn(_("interrupted"))
1735 raise
1735 raise
1736 except Exception, inst:
1736 except Exception, inst:
1737 err(_("unpacking changeset %s: %s") % (short(n), inst))
1737 err(_("unpacking changeset %s: %s") % (short(n), inst))
1738
1738
1739 neededmanifests[changes[0]] = n
1739 neededmanifests[changes[0]] = n
1740
1740
1741 for f in changes[3]:
1741 for f in changes[3]:
1742 filelinkrevs.setdefault(f, []).append(i)
1742 filelinkrevs.setdefault(f, []).append(i)
1743
1743
1744 seen = {}
1744 seen = {}
1745 self.ui.status(_("checking manifests\n"))
1745 self.ui.status(_("checking manifests\n"))
1746 checksize(self.manifest, "manifest")
1746 checksize(self.manifest, "manifest")
1747
1747
1748 for i in range(self.manifest.count()):
1748 for i in range(self.manifest.count()):
1749 n = self.manifest.node(i)
1749 n = self.manifest.node(i)
1750 l = self.manifest.linkrev(n)
1750 l = self.manifest.linkrev(n)
1751
1751
1752 if l < 0 or l >= self.changelog.count():
1752 if l < 0 or l >= self.changelog.count():
1753 err(_("bad manifest link (%d) at revision %d") % (l, i))
1753 err(_("bad manifest link (%d) at revision %d") % (l, i))
1754
1754
1755 if n in neededmanifests:
1755 if n in neededmanifests:
1756 del neededmanifests[n]
1756 del neededmanifests[n]
1757
1757
1758 if n in seen:
1758 if n in seen:
1759 err(_("duplicate manifest at revision %d") % i)
1759 err(_("duplicate manifest at revision %d") % i)
1760
1760
1761 seen[n] = 1
1761 seen[n] = 1
1762
1762
1763 for p in self.manifest.parents(n):
1763 for p in self.manifest.parents(n):
1764 if p not in self.manifest.nodemap:
1764 if p not in self.manifest.nodemap:
1765 err(_("manifest %s has unknown parent %s") %
1765 err(_("manifest %s has unknown parent %s") %
1766 (short(n), short(p)))
1766 (short(n), short(p)))
1767
1767
1768 try:
1768 try:
1769 delta = mdiff.patchtext(self.manifest.delta(n))
1769 delta = mdiff.patchtext(self.manifest.delta(n))
1770 except KeyboardInterrupt:
1770 except KeyboardInterrupt:
1771 self.ui.warn(_("interrupted"))
1771 self.ui.warn(_("interrupted"))
1772 raise
1772 raise
1773 except Exception, inst:
1773 except Exception, inst:
1774 err(_("unpacking manifest %s: %s") % (short(n), inst))
1774 err(_("unpacking manifest %s: %s") % (short(n), inst))
1775
1775
1776 ff = [ l.split('\0') for l in delta.splitlines() ]
1776 ff = [ l.split('\0') for l in delta.splitlines() ]
1777 for f, fn in ff:
1777 for f, fn in ff:
1778 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1778 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1779
1779
1780 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1780 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1781
1781
1782 for m, c in neededmanifests.items():
1782 for m, c in neededmanifests.items():
1783 err(_("Changeset %s refers to unknown manifest %s") %
1783 err(_("Changeset %s refers to unknown manifest %s") %
1784 (short(m), short(c)))
1784 (short(m), short(c)))
1785 del neededmanifests
1785 del neededmanifests
1786
1786
1787 for f in filenodes:
1787 for f in filenodes:
1788 if f not in filelinkrevs:
1788 if f not in filelinkrevs:
1789 err(_("file %s in manifest but not in changesets") % f)
1789 err(_("file %s in manifest but not in changesets") % f)
1790
1790
1791 for f in filelinkrevs:
1791 for f in filelinkrevs:
1792 if f not in filenodes:
1792 if f not in filenodes:
1793 err(_("file %s in changeset but not in manifest") % f)
1793 err(_("file %s in changeset but not in manifest") % f)
1794
1794
1795 self.ui.status(_("checking files\n"))
1795 self.ui.status(_("checking files\n"))
1796 ff = filenodes.keys()
1796 ff = filenodes.keys()
1797 ff.sort()
1797 ff.sort()
1798 for f in ff:
1798 for f in ff:
1799 if f == "/dev/null":
1799 if f == "/dev/null":
1800 continue
1800 continue
1801 files += 1
1801 files += 1
1802 fl = self.file(f)
1802 fl = self.file(f)
1803 checksize(fl, f)
1803 checksize(fl, f)
1804
1804
1805 nodes = {nullid: 1}
1805 nodes = {nullid: 1}
1806 seen = {}
1806 seen = {}
1807 for i in range(fl.count()):
1807 for i in range(fl.count()):
1808 revisions += 1
1808 revisions += 1
1809 n = fl.node(i)
1809 n = fl.node(i)
1810
1810
1811 if n in seen:
1811 if n in seen:
1812 err(_("%s: duplicate revision %d") % (f, i))
1812 err(_("%s: duplicate revision %d") % (f, i))
1813 if n not in filenodes[f]:
1813 if n not in filenodes[f]:
1814 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1814 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1815 else:
1815 else:
1816 del filenodes[f][n]
1816 del filenodes[f][n]
1817
1817
1818 flr = fl.linkrev(n)
1818 flr = fl.linkrev(n)
1819 if flr not in filelinkrevs[f]:
1819 if flr not in filelinkrevs[f]:
1820 err(_("%s:%s points to unexpected changeset %d")
1820 err(_("%s:%s points to unexpected changeset %d")
1821 % (f, short(n), flr))
1821 % (f, short(n), flr))
1822 else:
1822 else:
1823 filelinkrevs[f].remove(flr)
1823 filelinkrevs[f].remove(flr)
1824
1824
1825 # verify contents
1825 # verify contents
1826 try:
1826 try:
1827 t = fl.read(n)
1827 t = fl.read(n)
1828 except KeyboardInterrupt:
1828 except KeyboardInterrupt:
1829 self.ui.warn(_("interrupted"))
1829 self.ui.warn(_("interrupted"))
1830 raise
1830 raise
1831 except Exception, inst:
1831 except Exception, inst:
1832 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1832 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1833
1833
1834 # verify parents
1834 # verify parents
1835 (p1, p2) = fl.parents(n)
1835 (p1, p2) = fl.parents(n)
1836 if p1 not in nodes:
1836 if p1 not in nodes:
1837 err(_("file %s:%s unknown parent 1 %s") %
1837 err(_("file %s:%s unknown parent 1 %s") %
1838 (f, short(n), short(p1)))
1838 (f, short(n), short(p1)))
1839 if p2 not in nodes:
1839 if p2 not in nodes:
1840 err(_("file %s:%s unknown parent 2 %s") %
1840 err(_("file %s:%s unknown parent 2 %s") %
1841 (f, short(n), short(p1)))
1841 (f, short(n), short(p1)))
1842 nodes[n] = 1
1842 nodes[n] = 1
1843
1843
1844 # cross-check
1844 # cross-check
1845 for node in filenodes[f]:
1845 for node in filenodes[f]:
1846 err(_("node %s in manifests not in %s") % (hex(node), f))
1846 err(_("node %s in manifests not in %s") % (hex(node), f))
1847
1847
1848 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1848 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1849 (files, changesets, revisions))
1849 (files, changesets, revisions))
1850
1850
1851 if errors[0]:
1851 if errors[0]:
1852 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1852 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1853 return 1
1853 return 1
@@ -1,870 +1,864 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
17
17
def hash(text, p1, p2):
    """Generate a hash from the given text and its parent hashes.

    The digest covers both the file contents and the parent hashes, so
    two nodes with identical content but different ancestry still get
    distinct hashes in the revision graph.
    """
    # Sort the parents so the result is independent of their order.
    parents = [p1, p2]
    parents.sort()
    digester = sha.new(parents[0])
    digester.update(parents[1])
    digester.update(text)
    return digester.digest()
31
31
def compress(text):
    """Return a (header, data) pair for a possibly-compressed text.

    An empty header means the data is stored as-is (it starts with NUL,
    which cannot be mistaken for a marker byte) or is zlib-compressed;
    a 'u' header flags literal data that needs the marker byte to
    disambiguate it from a zlib stream.
    """
    if not text:
        return ("", text)
    # Tiny revisions never gain from zlib; store them literally.
    if len(text) < 44:
        if text[0] == '\0':
            return ("", text)
        return ('u', text)
    compressed = zlib.compress(text)
    if len(compressed) > len(text):
        # Compression grew the data; fall back to a literal copy.
        if text[0] == '\0':
            return ("", text)
        return ('u', text)
    return ("", compressed)
43
43
def decompress(bin):
    """Reverse compress(): expand data according to its marker byte."""
    if not bin:
        return bin
    marker = bin[0]
    if marker == '\0':
        # Literal data that happened to start with NUL; no marker stored.
        return bin
    if marker == 'x':
        # 'x' is the leading byte of a zlib stream.
        return zlib.decompress(bin)
    if marker == 'u':
        return bin[1:]
    raise RevlogError(_("unknown compression type %s") % marker)
52
52
# Layout of one index entry: four big-endian int32s (offset, size, base,
# linkrev) followed by three 20-byte binary hashes (p1, p2, nodeid).
indexformat = ">4l20s20s20s"
54
54
class lazyparser(object):
    """Parse a large revlog index incrementally.

    Entries are decoded in batches of 1000 on demand.  When load() is
    called with no position, the whole index is parsed and the plain
    index/nodemap objects are installed on the owning revlog so later
    accesses bypass the lazy wrappers entirely.
    """
    def __init__(self, data, revlog):
        self.data = data
        self.s = struct.calcsize(indexformat)
        self.l = len(data)/self.s
        self.index = [None] * self.l
        self.map = {nullid: -1}
        self.all = 0
        self.revlog = revlog

    def trunc(self, pos):
        # Forget any entries at or beyond byte offset pos.
        self.l = pos/self.s

    def load(self, pos=None):
        if self.all:
            return
        if pos is not None:
            # Decode only the 1000-entry batch containing pos.
            first = (pos / 1000) * 1000
            last = min(self.l, first + 1000)
        else:
            # Decode everything and swap the real objects into the revlog.
            self.all = 1
            first = 0
            last = self.l
            self.revlog.index = self.index
            self.revlog.nodemap = self.map

        for i in xrange(first, last):
            raw = self.data[i * self.s: (i + 1) * self.s]
            entry = struct.unpack(indexformat, raw)
            self.index[i] = entry
            self.map[entry[6]] = i
96
96
class lazyindex(object):
    """A list-like view over the index that decodes entries on demand."""

    def __init__(self, parser):
        self.p = parser

    def __len__(self):
        return len(self.p.index)

    def load(self, pos):
        """Force entry pos to be parsed and return it."""
        if pos < 0:
            # Translate a negative position into an absolute one.
            pos += len(self.p.index)
        self.p.load(pos)
        return self.p.index[pos]

    def __getitem__(self, pos):
        # Unparsed slots hold None, so fall back to an explicit load.
        return self.p.index[pos] or self.load(pos)

    def __delitem__(self, pos):
        del self.p.index[pos]

    def append(self, e):
        self.p.index.append(e)

    def trunc(self, pos):
        self.p.trunc(pos)
116
116
class lazymap(object):
    """A dict-like node-to-rev map that decodes entries on demand."""

    def __init__(self, parser):
        self.p = parser

    def load(self, key):
        """Locate key in the raw index data and parse its batch."""
        if self.p.all:
            return
        offset = self.p.data.find(key)
        if offset < 0:
            raise KeyError(key)
        self.p.load(offset / self.p.s)

    def __contains__(self, key):
        # Membership needs the full map, so parse everything first.
        self.p.load()
        return key in self.p.map

    def __iter__(self):
        yield nullid
        for i in xrange(self.p.l):
            try:
                yield self.p.index[i][6]
            except:
                # Entry not decoded yet; parse its batch and retry.
                self.p.load(i)
                yield self.p.index[i][6]

    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))

    def __setitem__(self, key, val):
        self.p.map[key] = val

    def __delitem__(self, key):
        del self.p.map[key]
152
152
class RevlogError(Exception):
    """Raised when revlog data is corrupt or a lookup fails."""
    pass
154
154
155 class revlog(object):
155 class revlog(object):
156 """
156 """
157 the underlying revision storage object
157 the underlying revision storage object
158
158
159 A revlog consists of two parts, an index and the revision data.
159 A revlog consists of two parts, an index and the revision data.
160
160
161 The index is a file with a fixed record size containing
161 The index is a file with a fixed record size containing
162 information on each revision, includings its nodeid (hash), the
162 information on each revision, includings its nodeid (hash), the
163 nodeids of its parents, the position and offset of its data within
163 nodeids of its parents, the position and offset of its data within
164 the data file, and the revision it's based on. Finally, each entry
164 the data file, and the revision it's based on. Finally, each entry
165 contains a linkrev entry that can serve as a pointer to external
165 contains a linkrev entry that can serve as a pointer to external
166 data.
166 data.
167
167
168 The revision data itself is a linear collection of data chunks.
168 The revision data itself is a linear collection of data chunks.
169 Each chunk represents a revision and is usually represented as a
169 Each chunk represents a revision and is usually represented as a
170 delta against the previous chunk. To bound lookup time, runs of
170 delta against the previous chunk. To bound lookup time, runs of
171 deltas are limited to about 2 times the length of the original
171 deltas are limited to about 2 times the length of the original
172 version data. This makes retrieval of a version proportional to
172 version data. This makes retrieval of a version proportional to
173 its size, or O(1) relative to the number of revisions.
173 its size, or O(1) relative to the number of revisions.
174
174
175 Both pieces of the revlog are written to in an append-only
175 Both pieces of the revlog are written to in an append-only
176 fashion, which means we never need to rewrite a file to insert or
176 fashion, which means we never need to rewrite a file to insert or
177 remove data, and can use some simple techniques to avoid the need
177 remove data, and can use some simple techniques to avoid the need
178 for locking while reading.
178 for locking while reading.
179 """
179 """
180 def __init__(self, opener, indexfile, datafile):
180 def __init__(self, opener, indexfile, datafile):
181 """
181 """
182 create a revlog object
182 create a revlog object
183
183
184 opener is a function that abstracts the file opening operation
184 opener is a function that abstracts the file opening operation
185 and can be used to implement COW semantics or the like.
185 and can be used to implement COW semantics or the like.
186 """
186 """
187 self.indexfile = indexfile
187 self.indexfile = indexfile
188 self.datafile = datafile
188 self.datafile = datafile
189 self.opener = opener
189 self.opener = opener
190 self.cache = None
190 self.cache = None
191 self.chunkcache = None
191 self.chunkcache = None
192
192
193 try:
193 try:
194 i = self.opener(self.indexfile).read()
194 i = self.opener(self.indexfile).read()
195 except IOError, inst:
195 except IOError, inst:
196 if inst.errno != errno.ENOENT:
196 if inst.errno != errno.ENOENT:
197 raise
197 raise
198 i = ""
198 i = ""
199
199
200 if i and i[:4] != "\0\0\0\0":
200 if i and i[:4] != "\0\0\0\0":
201 raise RevlogError(_("incompatible revlog signature on %s") %
201 raise RevlogError(_("incompatible revlog signature on %s") %
202 self.indexfile)
202 self.indexfile)
203
203
204 if len(i) > 10000:
204 if len(i) > 10000:
205 # big index, let's parse it on demand
205 # big index, let's parse it on demand
206 parser = lazyparser(i, self)
206 parser = lazyparser(i, self)
207 self.index = lazyindex(parser)
207 self.index = lazyindex(parser)
208 self.nodemap = lazymap(parser)
208 self.nodemap = lazymap(parser)
209 else:
209 else:
210 s = struct.calcsize(indexformat)
210 s = struct.calcsize(indexformat)
211 l = len(i) / s
211 l = len(i) / s
212 self.index = [None] * l
212 self.index = [None] * l
213 m = [None] * l
213 m = [None] * l
214
214
215 n = 0
215 n = 0
216 for f in xrange(0, l * s, s):
216 for f in xrange(0, l * s, s):
217 # offset, size, base, linkrev, p1, p2, nodeid
217 # offset, size, base, linkrev, p1, p2, nodeid
218 e = struct.unpack(indexformat, i[f:f + s])
218 e = struct.unpack(indexformat, i[f:f + s])
219 m[n] = (e[6], n)
219 m[n] = (e[6], n)
220 self.index[n] = e
220 self.index[n] = e
221 n += 1
221 n += 1
222
222
223 self.nodemap = dict(m)
223 self.nodemap = dict(m)
224 self.nodemap[nullid] = -1
224 self.nodemap[nullid] = -1
225
225
226 def tip(self): return self.node(len(self.index) - 1)
226 def tip(self): return self.node(len(self.index) - 1)
227 def count(self): return len(self.index)
227 def count(self): return len(self.index)
228 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
228 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
229 def rev(self, node):
229 def rev(self, node):
230 try:
230 try:
231 return self.nodemap[node]
231 return self.nodemap[node]
232 except KeyError:
232 except KeyError:
233 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
233 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
234 def linkrev(self, node): return self.index[self.rev(node)][3]
234 def linkrev(self, node): return self.index[self.rev(node)][3]
235 def parents(self, node):
235 def parents(self, node):
236 if node == nullid: return (nullid, nullid)
236 if node == nullid: return (nullid, nullid)
237 return self.index[self.rev(node)][4:6]
237 return self.index[self.rev(node)][4:6]
238
238
239 def start(self, rev): return self.index[rev][0]
239 def start(self, rev): return self.index[rev][0]
240 def length(self, rev): return self.index[rev][1]
240 def length(self, rev): return self.index[rev][1]
241 def end(self, rev): return self.start(rev) + self.length(rev)
241 def end(self, rev): return self.start(rev) + self.length(rev)
242 def base(self, rev): return self.index[rev][2]
242 def base(self, rev): return self.index[rev][2]
243
243
244 def reachable(self, rev, stop=None):
244 def reachable(self, rev, stop=None):
245 reachable = {}
245 reachable = {}
246 visit = [rev]
246 visit = [rev]
247 reachable[rev] = 1
247 reachable[rev] = 1
248 if stop:
248 if stop:
249 stopn = self.rev(stop)
249 stopn = self.rev(stop)
250 else:
250 else:
251 stopn = 0
251 stopn = 0
252 while visit:
252 while visit:
253 n = visit.pop(0)
253 n = visit.pop(0)
254 if n == stop:
254 if n == stop:
255 continue
255 continue
256 if n == nullid:
256 if n == nullid:
257 continue
257 continue
258 for p in self.parents(n):
258 for p in self.parents(n):
259 if self.rev(p) < stopn:
259 if self.rev(p) < stopn:
260 continue
260 continue
261 if p not in reachable:
261 if p not in reachable:
262 reachable[p] = 1
262 reachable[p] = 1
263 visit.append(p)
263 visit.append(p)
264 return reachable
264 return reachable
265
265
266 def nodesbetween(self, roots=None, heads=None):
266 def nodesbetween(self, roots=None, heads=None):
267 """Return a tuple containing three elements. Elements 1 and 2 contain
267 """Return a tuple containing three elements. Elements 1 and 2 contain
268 a final list bases and heads after all the unreachable ones have been
268 a final list bases and heads after all the unreachable ones have been
269 pruned. Element 0 contains a topologically sorted list of all
269 pruned. Element 0 contains a topologically sorted list of all
270
270
271 nodes that satisfy these constraints:
271 nodes that satisfy these constraints:
272 1. All nodes must be descended from a node in roots (the nodes on
272 1. All nodes must be descended from a node in roots (the nodes on
273 roots are considered descended from themselves).
273 roots are considered descended from themselves).
274 2. All nodes must also be ancestors of a node in heads (the nodes in
274 2. All nodes must also be ancestors of a node in heads (the nodes in
275 heads are considered to be their own ancestors).
275 heads are considered to be their own ancestors).
276
276
277 If roots is unspecified, nullid is assumed as the only root.
277 If roots is unspecified, nullid is assumed as the only root.
278 If heads is unspecified, it is taken to be the output of the
278 If heads is unspecified, it is taken to be the output of the
279 heads method (i.e. a list of all nodes in the repository that
279 heads method (i.e. a list of all nodes in the repository that
280 have no children)."""
280 have no children)."""
281 nonodes = ([], [], [])
281 nonodes = ([], [], [])
282 if roots is not None:
282 if roots is not None:
283 roots = list(roots)
283 roots = list(roots)
284 if not roots:
284 if not roots:
285 return nonodes
285 return nonodes
286 lowestrev = min([self.rev(n) for n in roots])
286 lowestrev = min([self.rev(n) for n in roots])
287 else:
287 else:
288 roots = [nullid] # Everybody's a descendent of nullid
288 roots = [nullid] # Everybody's a descendent of nullid
289 lowestrev = -1
289 lowestrev = -1
290 if (lowestrev == -1) and (heads is None):
290 if (lowestrev == -1) and (heads is None):
291 # We want _all_ the nodes!
291 # We want _all_ the nodes!
292 return ([self.node(r) for r in xrange(0, self.count())],
292 return ([self.node(r) for r in xrange(0, self.count())],
293 [nullid], list(self.heads()))
293 [nullid], list(self.heads()))
294 if heads is None:
294 if heads is None:
295 # All nodes are ancestors, so the latest ancestor is the last
295 # All nodes are ancestors, so the latest ancestor is the last
296 # node.
296 # node.
297 highestrev = self.count() - 1
297 highestrev = self.count() - 1
298 # Set ancestors to None to signal that every node is an ancestor.
298 # Set ancestors to None to signal that every node is an ancestor.
299 ancestors = None
299 ancestors = None
300 # Set heads to an empty dictionary for later discovery of heads
300 # Set heads to an empty dictionary for later discovery of heads
301 heads = {}
301 heads = {}
302 else:
302 else:
303 heads = list(heads)
303 heads = list(heads)
304 if not heads:
304 if not heads:
305 return nonodes
305 return nonodes
306 ancestors = {}
306 ancestors = {}
307 # Start at the top and keep marking parents until we're done.
307 # Start at the top and keep marking parents until we're done.
308 nodestotag = heads[:]
308 nodestotag = heads[:]
309 # Turn heads into a dictionary so we can remove 'fake' heads.
309 # Turn heads into a dictionary so we can remove 'fake' heads.
310 # Also, later we will be using it to filter out the heads we can't
310 # Also, later we will be using it to filter out the heads we can't
311 # find from roots.
311 # find from roots.
312 heads = dict.fromkeys(heads, 0)
312 heads = dict.fromkeys(heads, 0)
313 # Remember where the top was so we can use it as a limit later.
313 # Remember where the top was so we can use it as a limit later.
314 highestrev = max([self.rev(n) for n in nodestotag])
314 highestrev = max([self.rev(n) for n in nodestotag])
315 while nodestotag:
315 while nodestotag:
316 # grab a node to tag
316 # grab a node to tag
317 n = nodestotag.pop()
317 n = nodestotag.pop()
318 # Never tag nullid
318 # Never tag nullid
319 if n == nullid:
319 if n == nullid:
320 continue
320 continue
321 # A node's revision number represents its place in a
321 # A node's revision number represents its place in a
322 # topologically sorted list of nodes.
322 # topologically sorted list of nodes.
323 r = self.rev(n)
323 r = self.rev(n)
324 if r >= lowestrev:
324 if r >= lowestrev:
325 if n not in ancestors:
325 if n not in ancestors:
326 # If we are possibly a descendent of one of the roots
326 # If we are possibly a descendent of one of the roots
327 # and we haven't already been marked as an ancestor
327 # and we haven't already been marked as an ancestor
328 ancestors[n] = 1 # Mark as ancestor
328 ancestors[n] = 1 # Mark as ancestor
329 # Add non-nullid parents to list of nodes to tag.
329 # Add non-nullid parents to list of nodes to tag.
330 nodestotag.extend([p for p in self.parents(n) if
330 nodestotag.extend([p for p in self.parents(n) if
331 p != nullid])
331 p != nullid])
332 elif n in heads: # We've seen it before, is it a fake head?
332 elif n in heads: # We've seen it before, is it a fake head?
333 # So it is, real heads should not be the ancestors of
333 # So it is, real heads should not be the ancestors of
334 # any other heads.
334 # any other heads.
335 heads.pop(n)
335 heads.pop(n)
336 if not ancestors:
336 if not ancestors:
337 return nonodes
337 return nonodes
338 # Now that we have our set of ancestors, we want to remove any
338 # Now that we have our set of ancestors, we want to remove any
339 # roots that are not ancestors.
339 # roots that are not ancestors.
340
340
341 # If one of the roots was nullid, everything is included anyway.
341 # If one of the roots was nullid, everything is included anyway.
342 if lowestrev > -1:
342 if lowestrev > -1:
343 # But, since we weren't, let's recompute the lowest rev to not
343 # But, since we weren't, let's recompute the lowest rev to not
344 # include roots that aren't ancestors.
344 # include roots that aren't ancestors.
345
345
346 # Filter out roots that aren't ancestors of heads
346 # Filter out roots that aren't ancestors of heads
347 roots = [n for n in roots if n in ancestors]
347 roots = [n for n in roots if n in ancestors]
348 # Recompute the lowest revision
348 # Recompute the lowest revision
349 if roots:
349 if roots:
350 lowestrev = min([self.rev(n) for n in roots])
350 lowestrev = min([self.rev(n) for n in roots])
351 else:
351 else:
352 # No more roots? Return empty list
352 # No more roots? Return empty list
353 return nonodes
353 return nonodes
354 else:
354 else:
355 # We are descending from nullid, and don't need to care about
355 # We are descending from nullid, and don't need to care about
356 # any other roots.
356 # any other roots.
357 lowestrev = -1
357 lowestrev = -1
358 roots = [nullid]
358 roots = [nullid]
359 # Transform our roots list into a 'set' (i.e. a dictionary where the
359 # Transform our roots list into a 'set' (i.e. a dictionary where the
360 # values don't matter.
360 # values don't matter.
361 descendents = dict.fromkeys(roots, 1)
361 descendents = dict.fromkeys(roots, 1)
362 # Also, keep the original roots so we can filter out roots that aren't
362 # Also, keep the original roots so we can filter out roots that aren't
363 # 'real' roots (i.e. are descended from other roots).
363 # 'real' roots (i.e. are descended from other roots).
364 roots = descendents.copy()
364 roots = descendents.copy()
365 # Our topologically sorted list of output nodes.
365 # Our topologically sorted list of output nodes.
366 orderedout = []
366 orderedout = []
367 # Don't start at nullid since we don't want nullid in our output list,
367 # Don't start at nullid since we don't want nullid in our output list,
368 # and if nullid shows up in descedents, empty parents will look like
368 # and if nullid shows up in descedents, empty parents will look like
369 # they're descendents.
369 # they're descendents.
370 for r in xrange(max(lowestrev, 0), highestrev + 1):
370 for r in xrange(max(lowestrev, 0), highestrev + 1):
371 n = self.node(r)
371 n = self.node(r)
372 isdescendent = False
372 isdescendent = False
373 if lowestrev == -1: # Everybody is a descendent of nullid
373 if lowestrev == -1: # Everybody is a descendent of nullid
374 isdescendent = True
374 isdescendent = True
375 elif n in descendents:
375 elif n in descendents:
376 # n is already a descendent
376 # n is already a descendent
377 isdescendent = True
377 isdescendent = True
378 # This check only needs to be done here because all the roots
378 # This check only needs to be done here because all the roots
379 # will start being marked is descendents before the loop.
379 # will start being marked is descendents before the loop.
380 if n in roots:
380 if n in roots:
381 # If n was a root, check if it's a 'real' root.
381 # If n was a root, check if it's a 'real' root.
382 p = tuple(self.parents(n))
382 p = tuple(self.parents(n))
383 # If any of its parents are descendents, it's not a root.
383 # If any of its parents are descendents, it's not a root.
384 if (p[0] in descendents) or (p[1] in descendents):
384 if (p[0] in descendents) or (p[1] in descendents):
385 roots.pop(n)
385 roots.pop(n)
386 else:
386 else:
387 p = tuple(self.parents(n))
387 p = tuple(self.parents(n))
388 # A node is a descendent if either of its parents are
388 # A node is a descendent if either of its parents are
389 # descendents. (We seeded the dependents list with the roots
389 # descendents. (We seeded the dependents list with the roots
390 # up there, remember?)
390 # up there, remember?)
391 if (p[0] in descendents) or (p[1] in descendents):
391 if (p[0] in descendents) or (p[1] in descendents):
392 descendents[n] = 1
392 descendents[n] = 1
393 isdescendent = True
393 isdescendent = True
394 if isdescendent and ((ancestors is None) or (n in ancestors)):
394 if isdescendent and ((ancestors is None) or (n in ancestors)):
395 # Only include nodes that are both descendents and ancestors.
395 # Only include nodes that are both descendents and ancestors.
396 orderedout.append(n)
396 orderedout.append(n)
397 if (ancestors is not None) and (n in heads):
397 if (ancestors is not None) and (n in heads):
398 # We're trying to figure out which heads are reachable
398 # We're trying to figure out which heads are reachable
399 # from roots.
399 # from roots.
400 # Mark this head as having been reached
400 # Mark this head as having been reached
401 heads[n] = 1
401 heads[n] = 1
402 elif ancestors is None:
402 elif ancestors is None:
403 # Otherwise, we're trying to discover the heads.
403 # Otherwise, we're trying to discover the heads.
404 # Assume this is a head because if it isn't, the next step
404 # Assume this is a head because if it isn't, the next step
405 # will eventually remove it.
405 # will eventually remove it.
406 heads[n] = 1
406 heads[n] = 1
407 # But, obviously its parents aren't.
407 # But, obviously its parents aren't.
408 for p in self.parents(n):
408 for p in self.parents(n):
409 heads.pop(p, None)
409 heads.pop(p, None)
410 heads = [n for n in heads.iterkeys() if heads[n] != 0]
410 heads = [n for n in heads.iterkeys() if heads[n] != 0]
411 roots = roots.keys()
411 roots = roots.keys()
412 assert orderedout
412 assert orderedout
413 assert roots
413 assert roots
414 assert heads
414 assert heads
415 return (orderedout, roots, heads)
415 return (orderedout, roots, heads)
416
416
417 def heads(self, start=None):
417 def heads(self, start=None):
418 """return the list of all nodes that have no children
418 """return the list of all nodes that have no children
419
419
420 if start is specified, only heads that are descendants of
420 if start is specified, only heads that are descendants of
421 start will be returned
421 start will be returned
422
422
423 """
423 """
424 if start is None:
424 if start is None:
425 start = nullid
425 start = nullid
426 reachable = {start: 1}
426 reachable = {start: 1}
427 heads = {start: 1}
427 heads = {start: 1}
428 startrev = self.rev(start)
428 startrev = self.rev(start)
429
429
430 for r in xrange(startrev + 1, self.count()):
430 for r in xrange(startrev + 1, self.count()):
431 n = self.node(r)
431 n = self.node(r)
432 for pn in self.parents(n):
432 for pn in self.parents(n):
433 if pn in reachable:
433 if pn in reachable:
434 reachable[n] = 1
434 reachable[n] = 1
435 heads[n] = 1
435 heads[n] = 1
436 if pn in heads:
436 if pn in heads:
437 del heads[pn]
437 del heads[pn]
438 return heads.keys()
438 return heads.keys()
439
439
440 def children(self, node):
440 def children(self, node):
441 """find the children of a given node"""
441 """find the children of a given node"""
442 c = []
442 c = []
443 p = self.rev(node)
443 p = self.rev(node)
444 for r in range(p + 1, self.count()):
444 for r in range(p + 1, self.count()):
445 n = self.node(r)
445 n = self.node(r)
446 for pn in self.parents(n):
446 for pn in self.parents(n):
447 if pn == node:
447 if pn == node:
448 c.append(n)
448 c.append(n)
449 continue
449 continue
450 elif pn == nullid:
450 elif pn == nullid:
451 continue
451 continue
452 return c
452 return c
453
453
454 def lookup(self, id):
454 def lookup(self, id):
455 """locate a node based on revision number or subset of hex nodeid"""
455 """locate a node based on revision number or subset of hex nodeid"""
456 try:
456 try:
457 rev = int(id)
457 rev = int(id)
458 if str(rev) != id: raise ValueError
458 if str(rev) != id: raise ValueError
459 if rev < 0: rev = self.count() + rev
459 if rev < 0: rev = self.count() + rev
460 if rev < 0 or rev >= self.count(): raise ValueError
460 if rev < 0 or rev >= self.count(): raise ValueError
461 return self.node(rev)
461 return self.node(rev)
462 except (ValueError, OverflowError):
462 except (ValueError, OverflowError):
463 c = []
463 c = []
464 for n in self.nodemap:
464 for n in self.nodemap:
465 if hex(n).startswith(id):
465 if hex(n).startswith(id):
466 c.append(n)
466 c.append(n)
467 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
467 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
468 if len(c) < 1: raise RevlogError(_("No match found"))
468 if len(c) < 1: raise RevlogError(_("No match found"))
469 return c[0]
469 return c[0]
470
470
471 return None
471 return None
472
472
473 def diff(self, a, b):
473 def diff(self, a, b):
474 """return a delta between two revisions"""
474 """return a delta between two revisions"""
475 return mdiff.textdiff(a, b)
475 return mdiff.textdiff(a, b)
476
476
477 def patches(self, t, pl):
477 def patches(self, t, pl):
478 """apply a list of patches to a string"""
478 """apply a list of patches to a string"""
479 return mdiff.patches(t, pl)
479 return mdiff.patches(t, pl)
480
480
481 def chunk(self, rev):
481 def chunk(self, rev):
482 start, length = self.start(rev), self.length(rev)
482 start, length = self.start(rev), self.length(rev)
483 end = start + length
483 end = start + length
484
484
485 def loadcache():
485 def loadcache():
486 cache_length = max(4096 * 1024, length) # 4Mo
486 cache_length = max(4096 * 1024, length) # 4Mo
487 df = self.opener(self.datafile)
487 df = self.opener(self.datafile)
488 df.seek(start)
488 df.seek(start)
489 self.chunkcache = (start, df.read(cache_length))
489 self.chunkcache = (start, df.read(cache_length))
490
490
491 if not self.chunkcache:
491 if not self.chunkcache:
492 loadcache()
492 loadcache()
493
493
494 cache_start = self.chunkcache[0]
494 cache_start = self.chunkcache[0]
495 cache_end = cache_start + len(self.chunkcache[1])
495 cache_end = cache_start + len(self.chunkcache[1])
496 if start >= cache_start and end <= cache_end:
496 if start >= cache_start and end <= cache_end:
497 # it is cached
497 # it is cached
498 offset = start - cache_start
498 offset = start - cache_start
499 else:
499 else:
500 loadcache()
500 loadcache()
501 offset = 0
501 offset = 0
502
502
503 #def checkchunk():
503 #def checkchunk():
504 # df = self.opener(self.datafile)
504 # df = self.opener(self.datafile)
505 # df.seek(start)
505 # df.seek(start)
506 # return df.read(length)
506 # return df.read(length)
507 #assert s == checkchunk()
507 #assert s == checkchunk()
508 return decompress(self.chunkcache[1][offset:offset + length])
508 return decompress(self.chunkcache[1][offset:offset + length])
509
509
510 def delta(self, node):
510 def delta(self, node):
511 """return or calculate a delta between a node and its predecessor"""
511 """return or calculate a delta between a node and its predecessor"""
512 r = self.rev(node)
512 r = self.rev(node)
513 b = self.base(r)
513 b = self.base(r)
514 if r == b:
514 if r == b:
515 return self.diff(self.revision(self.node(r - 1)),
515 return self.diff(self.revision(self.node(r - 1)),
516 self.revision(node))
516 self.revision(node))
517 else:
517 else:
518 return self.chunk(r)
518 return self.chunk(r)
519
519
    def revision(self, node):
        """return an uncompressed revision of a given node

        Reconstructs the full text by applying the delta chain on top of
        the chain's base text, verifies it against the node hash, and
        caches the result in self.cache as (node, rev, text).
        """
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        # look up what we need to read
        text = None
        rev = self.rev(node)
        base = self.base(rev)

        # do we have useful data cached?
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            # cached text is an intermediate version on this chain;
            # start patching from there instead of the chain base
            base = self.cache[1]
            text = self.cache[2]
        else:
            text = self.chunk(base)

        # collect the deltas from base+1 up to rev and apply them in order
        bins = []
        for r in xrange(base + 1, rev + 1):
            bins.append(self.chunk(r))

        text = mdiff.patches(text, bins)

        # integrity check: the reconstructed text must hash to the node id
        p1, p2 = self.parents(node)
        if node != hash(text, p1, p2):
            raise RevlogError(_("integrity check failed on %s:%d")
                              % (self.datafile, rev))

        self.cache = (node, rev, text)
        return text
550
550
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        """add a revision to the log

        text - the revision data to add
        transaction - the transaction object used for rollback
        link - the linkrev data to add
        p1, p2 - the parent nodeids of the revision
        d - an optional precomputed delta

        Returns the node id of the (possibly pre-existing) revision.
        """
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        # identical revision already stored: nothing to do
        if node in self.nodemap:
            return node

        n = self.count()
        t = n - 1

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            if not d:
                prev = self.revision(self.tip())
                d = self.diff(prev, str(text))
            data = compress(d)
            l = len(data[1]) + len(data[0])
            # dist = total on-disk size of the delta chain if we append
            # this delta to it
            dist = end - start + l

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            l = len(data[1]) + len(data[0])
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        # index entry: (offset, length, base, linkrev, p1, p2, node)
        e = (offset, l, base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        # register current file sizes with the transaction so a rollback
        # can truncate back to them, then append the new data
        transaction.add(self.datafile, e[0])
        f = self.opener(self.datafile, "a")
        if data[0]:
            f.write(data[0])
        f.write(data[1])
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node
612
612
    def ancestor(self, a, b):
        """calculate the least common ancestor of nodes a and b"""
        # calculate the distance of every node from root
        dist = {nullid: 0}
        for i in xrange(self.count()):
            n = self.node(i)
            p1, p2 = self.parents(n)
            dist[n] = max(dist[p1], dist[p2]) + 1

        # traverse ancestors in order of decreasing distance from root
        def ancestors(node):
            # we store negative distances because heap returns smallest member
            h = [(-dist[node], node)]
            seen = {}
            while h:
                d, n = heapq.heappop(h)
                if n not in seen:
                    seen[n] = 1
                    yield (-d, n)
                    for p in self.parents(n):
                        heapq.heappush(h, (-dist[p], p))

        def generations(node):
            # group the ancestors stream into (distance, {nodes}) batches
            sg, s = None, {}
            for g,n in ancestors(node):
                if g != sg:
                    if sg:
                        yield sg, s
                    sg, s = g, {n:1}
                else:
                    s[n] = 1
            yield sg, s

        x = generations(a)
        y = generations(b)
        gx = x.next()
        gy = y.next()

        # increment each ancestor list until it is closer to root than
        # the other, or they match
        while 1:
            #print "ancestor gen %s %s" % (gx[0], gy[0])
            if gx[0] == gy[0]:
                # find the intersection
                i = [ n for n in gx[1] if n in gy[1] ]
                if i:
                    # deepest common ancestor found
                    return i[0]
                else:
                    #print "next"
                    gy = y.next()
                    gx = x.next()
            elif gx[0] < gy[0]:
                #print "next y"
                gy = y.next()
            else:
                #print "next x"
                gx = x.next()
672
670
    def group(self, nodelist, lookup, infocollect=None):
        """calculate a delta group

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. the first delta is
        parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        have this parent as it has all history before these
        changesets. parent is parent[0]

        lookup(node) supplies the trailing metadata (linkrev node) for
        each entry; infocollect, if given, is called with each node as
        it is processed.
        """
        revs = [self.rev(n) for n in nodelist]

        # if we don't have any revisions touched by these changesets, bail
        if not revs:
            yield struct.pack(">l", 0)
            return

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # build deltas
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            na = self.node(a)
            nb = self.node(b)

            if infocollect is not None:
                infocollect(nb)

            # do we need to construct a new delta?
            # the stored chunk for b is only reusable when it is a delta
            # against a (i.e. b == a + 1 and b is not a chain base)
            if a + 1 != b or self.base(b) == b:
                ta = self.revision(na)
                tb = self.revision(nb)
                d = self.diff(ta, tb)
            else:
                d = self.chunk(b)

            # frame: 4-byte length, 80 bytes of node/parent/cs metadata,
            # then the delta itself
            p = self.parents(nb)
            meta = nb + p[0] + p[1] + lookup(nb)
            l = struct.pack(">l", len(meta) + len(d) + 4)
            yield l
            yield meta
            yield d

        # zero-length frame terminates the group
        yield struct.pack(">l", 0)
723
716
    def addgroup(self, revs, linkmapper, transaction, unique=0):
        """
        add a delta group

        given a set of deltas, add them to the revision log. the
        first delta is against its parent, which should be in our
        log, the rest are against the previous delta.

        Returns the node of the last revision added.
        """

        #track the base of the current delta log
        r = self.count()
        t = r - 1
        node = nullid

        base = prev = -1
        start = end = measure = 0
        if r:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            measure = self.length(base)
            prev = self.tip()

        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        chain = None
        for chunk in revs:
            # each chunk: 80 bytes of node/p1/p2/cs ids, then the delta
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                # if unique:
                #    raise RevlogError(_("already have %s") % hex(node[:4]))
                chain = node
                continue
            delta = chunk[80:]

            for p in (p1, p2):
                if not p in self.nodemap:
                    raise RevlogError(_("unknown parent %s") % short(p1))

            if not chain:
                # retrieve the parent revision of the delta chain
                chain = p1
                if not chain in self.nodemap:
                    raise RevlogError(_("unknown base %s") % short(chain[:4]))

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                tempd = compress(delta)
                cdelta = tempd[0] + tempd[1]

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                # rebuild the full text and store it via addrevision,
                # which decides delta-vs-full and writes index + data
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise RevlogError(_("consistency error adding group"))
                measure = len(text)
            else:
                # append the compressed delta directly to the chain
                e = (end, len(cdelta), base, link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            # advance bookkeeping for the next iteration
            t, r, chain, prev = r, r + 1, node, node
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)

        dfh.close()
        ifh.close()
        return node
809
803
810 def strip(self, rev, minlink):
804 def strip(self, rev, minlink):
811 if self.count() == 0 or rev >= self.count():
805 if self.count() == 0 or rev >= self.count():
812 return
806 return
813
807
814 # When stripping away a revision, we need to make sure it
808 # When stripping away a revision, we need to make sure it
815 # does not actually belong to an older changeset.
809 # does not actually belong to an older changeset.
816 # The minlink parameter defines the oldest revision
810 # The minlink parameter defines the oldest revision
817 # we're allowed to strip away.
811 # we're allowed to strip away.
818 while minlink > self.index[rev][3]:
812 while minlink > self.index[rev][3]:
819 rev += 1
813 rev += 1
820 if rev >= self.count():
814 if rev >= self.count():
821 return
815 return
822
816
823 # first truncate the files on disk
817 # first truncate the files on disk
824 end = self.start(rev)
818 end = self.start(rev)
825 self.opener(self.datafile, "a").truncate(end)
819 self.opener(self.datafile, "a").truncate(end)
826 end = rev * struct.calcsize(indexformat)
820 end = rev * struct.calcsize(indexformat)
827 self.opener(self.indexfile, "a").truncate(end)
821 self.opener(self.indexfile, "a").truncate(end)
828
822
829 # then reset internal state in memory to forget those revisions
823 # then reset internal state in memory to forget those revisions
830 self.cache = None
824 self.cache = None
831 self.chunkcache = None
825 self.chunkcache = None
832 for p in self.index[rev:]:
826 for p in self.index[rev:]:
833 del self.nodemap[p[6]]
827 del self.nodemap[p[6]]
834 del self.index[rev:]
828 del self.index[rev:]
835
829
836 # truncating the lazyindex also truncates the lazymap.
830 # truncating the lazyindex also truncates the lazymap.
837 if isinstance(self.index, lazyindex):
831 if isinstance(self.index, lazyindex):
838 self.index.trunc(end)
832 self.index.trunc(end)
839
833
840
834
841 def checksize(self):
835 def checksize(self):
842 expected = 0
836 expected = 0
843 if self.count():
837 if self.count():
844 expected = self.end(self.count() - 1)
838 expected = self.end(self.count() - 1)
845
839
846 try:
840 try:
847 f = self.opener(self.datafile)
841 f = self.opener(self.datafile)
848 f.seek(0, 2)
842 f.seek(0, 2)
849 actual = f.tell()
843 actual = f.tell()
850 dd = actual - expected
844 dd = actual - expected
851 except IOError, inst:
845 except IOError, inst:
852 if inst.errno != errno.ENOENT:
846 if inst.errno != errno.ENOENT:
853 raise
847 raise
854 dd = 0
848 dd = 0
855
849
856 try:
850 try:
857 f = self.opener(self.indexfile)
851 f = self.opener(self.indexfile)
858 f.seek(0, 2)
852 f.seek(0, 2)
859 actual = f.tell()
853 actual = f.tell()
860 s = struct.calcsize(indexformat)
854 s = struct.calcsize(indexformat)
861 i = actual / s
855 i = actual / s
862 di = actual - (i * s)
856 di = actual - (i * s)
863 except IOError, inst:
857 except IOError, inst:
864 if inst.errno != errno.ENOENT:
858 if inst.errno != errno.ENOENT:
865 raise
859 raise
866 di = 0
860 di = 0
867
861
868 return (dd, di)
862 return (dd, di)
869
863
870
864
General Comments 0
You need to be logged in to leave comments. Login now