Validate paths before reading or writing files in repository or working dir....
Thomas Arendsen Hein
r1835:bdfb524d default
@@ -1,2952 +1,2953 @@
# commands.py - command processing for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb")
demandload(globals(), "fancyopts ui hg util lock revlog")
demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
demandload(globals(), "errno socket version struct atexit sets bz2")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def filterfiles(filters, files):
    l = [x for x in files if x in filters]

    for t in filters:
        if t and t[-1] != "/":
            t += "/"
        l += [x for x in files if x.startswith(t)]
    return l

def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def matchpats(repo, pats=[], opts={}, head=''):
    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)

def makewalk(repo, pats, opts, node=None, head=''):
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    exact = dict(zip(files, files))
    def walk():
        for src, fn in repo.walk(node=node, files=files, match=matchfn):
            yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
    return files, matchfn, walk()

def walk(repo, pats, opts, node=None, head=''):
    files, matchfn, results = makewalk(repo, pats, opts, node, head)
    for r in results:
        yield r

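# Illustrative usage sketch (not part of this changeset): callers of walk()
# receive (src, abs, rel, exact) tuples, where abs is the repo-relative path,
# rel is the path relative to the current working directory, and exact is
# true when the file was named literally rather than matched by a pattern.
#
#   for src, abs, rel, exact in walk(repo, ['foo.c'], {}):
#       print src, abs, rel, exact    # e.g. 'f', 'src/foo.c', 'foo.c', True
#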
def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    they are interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


    files, matchfn, anypats = matchpats(repo, pats, opts)

    if repo.changelog.count() == 0:
        return [], False, matchfn

    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}
    slowpath = anypats
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog):
            for i, window in increasing_windows(filelog.count()-1, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file_ in files:
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn

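# Illustrative sketch (not part of this changeset) of how a caller consumes
# the (iterator, getchange, matchfn) triple returned above, following the
# protocol described in the docstring:
#
#   changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
#   for st, rev, fns in changeiter:
#       if st == 'window':
#           pass                      # reset per-window display state
#       elif st == 'add':
#           changes = getchange(rev)  # gather data for rev (out of order)
#       elif st == 'iter':
#           pass                      # emit output for rev (requested order)
#
# Note that increasing_windows(0, 20) yields (0, 8) then (8, 12): the window
# size doubles (up to sizelimit) so deep history is scanned in ever larger
# steps.
#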
revrangesep = ':'

def revrange(ui, repo, revs, revlog=None):
    """Yield revisions as strings from a list of revision specifications."""
    if revlog is None:
        revlog = repo.changelog
    revcount = revlog.count()
    def fix(val, defval):
        if not val:
            return defval
        try:
            num = int(val)
            if str(num) != val:
                raise ValueError
            if num < 0:
                num += revcount
            if num < 0:
                num = 0
            elif num >= revcount:
                raise ValueError
        except ValueError:
            try:
                num = repo.changelog.rev(repo.lookup(val))
            except KeyError:
                try:
                    num = revlog.rev(revlog.lookup(val))
                except KeyError:
                    raise util.Abort(_('invalid revision identifier %s'), val)
        return num
    seen = {}
    for spec in revs:
        if spec.find(revrangesep) >= 0:
            start, end = spec.split(revrangesep, 1)
            start = fix(start, 0)
            end = fix(end, revcount - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = fix(spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)

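# Examples of the revision specifications accepted above (illustrative; the
# revision numbers are made up): plain integers, negative offsets from the
# end, names resolved through repo.lookup(), and ranges joined by ':'.
#
#   list(revrange(ui, repo, ['3']))       # ['3']
#   list(revrange(ui, repo, ['-1']))      # the last revision
#   list(revrange(ui, repo, ['2:5']))     # ['2', '3', '4', '5']
#   list(revrange(ui, repo, ['5:2']))     # ['5', '4', '3', '2'] (descending)
#   list(revrange(ui, repo, ['tip']))     # symbolic name via repo.lookup()
#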
def make_filename(repo, r, pat, node=None,
                  total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(r.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])

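# Illustrative expansion of the format specs handled above (not part of this
# changeset; the node and revision values are invented for the example):
#
#   make_filename(repo, repo.changelog, 'out-%R-%h.patch', node=n)
#   # -> e.g. 'out-12-0a1b2c3d4e5f.patch'  (%R rev number, %h short hash)
#
# '%b' expands to the basename of repo.root and '%%' to a literal '%'; with
# both total and seqno given, '%n' is zero-padded to len(str(total)); an
# unknown spec such as '%q' raises util.Abort.
#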
def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
                              pathname),
                mode)

def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    if not node1:
        node1 = repo.dirstate.parents()[0]
    # reading the data for node1 early allows it to play nicely
    # with repo.changes and the revlog cache.
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        date2 = util.datestr()
        def read(f):
            return repo.wread(f)

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    for f in modified:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
    for f in added:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
    for f in removed:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))

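# Minimal sketch of driving dodiff() (illustrative; assumes a configured
# ui/repo pair): with node2=None the working directory is compared against
# the first dirstate parent, and the unified hunks are written to fp.
#
#   dodiff(sys.stdout, ui, repo, None, None)              # working dir diff
#   dodiff(fp, ui, repo, repo.lookup('4'), repo.lookup('7'),
#          files=['commands.py'])                         # two revisions
#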
def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
    """show a single changeset or file revision"""
    log = repo.changelog
    if changenode is None:
        changenode = log.node(rev)
    elif not rev:
        rev = log.rev(changenode)

    if ui.quiet:
        ui.write("%d:%s\n" % (rev, short(changenode)))
        return

    changes = log.read(changenode)
    date = util.datestr(changes[2])

    parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
               for p in log.parents(changenode)
               if ui.debugflag or p != nullid]
    if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
        parents = []

    if ui.verbose:
        ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
    else:
        ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

    for tag in repo.nodetags(changenode):
        ui.status(_("tag: %s\n") % tag)
    for parent in parents:
        ui.write(_("parent: %d:%s\n") % parent)

    if brinfo and changenode in brinfo:
        br = brinfo[changenode]
        ui.write(_("branch: %s\n") % " ".join(br))

    ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
                                       hex(changes[0])))
    ui.status(_("user: %s\n") % changes[1])
    ui.status(_("date: %s\n") % date)

    if ui.debugflag:
        files = repo.changes(log.parents(changenode)[0], changenode)
        for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
            if value:
                ui.note("%-12s %s\n" % (key, " ".join(value)))
    else:
        ui.note(_("files: %s\n") % " ".join(changes[3]))

    description = changes[4].strip()
    if description:
        if ui.verbose:
            ui.status(_("description:\n"))
            ui.status(description)
            ui.status("\n\n")
        else:
            ui.status(_("summary: %s\n") % description.splitlines()[0])
    ui.status("\n")

def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands"""
    option_lists = []
    if cmd and cmd != 'shortlist':
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|")[0]
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.
    """
    return addremove_lock(ui, repo, pats, opts)

def addremove_lock(ui, repo, pats, opts, wlock=None):
    add, remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            add.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            remove.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    repo.add(add, wlock=wlock)
    repo.remove(remove, wlock=wlock)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line.

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            cl = repo.changelog.read(repo.changelog.node(rev))
            datestr = dcache[rev] = util.datestr(cl[2])
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts):
        if abs not in mmap:
            ui.warn(_("warning: %s is not in the repository!\n") %
                    ((pats and rel) or abs))
            continue

        f = repo.file(abs)
        if not opts['text'] and util.binary(f.read(mmap[abs])):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = f.annotate(mmap[abs])
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    f = open(fname, "wb")
    dest = ui.expandpath(dest, repo.root)
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other)
    cg = repo.changegroup(o, 'bundle')

    try:
        f.write("HG10")
        z = bz2.BZ2Compressor(9)
        while 1:
            chunk = cg.read(4096)
            if not chunk:
                break
            f.write(z.compress(chunk))
        f.write(z.flush())
    except:
        os.unlink(fname)
        raise

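# Sketch of the bundle layout produced above (illustrative): a 4-byte "HG10"
# magic string followed by the bz2-compressed changegroup stream.  Reading it
# back needs only the standard library:
#
#   import bz2
#   f = open('changes.hg', 'rb')
#   assert f.read(4) == 'HG10'
#   data = bz2.decompress(f.read())   # raw changegroup chunks
#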
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    mf = {}
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = repo.changelog.tip()
    change = repo.changelog.read(node)
    mf = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
        r = repo.file(abs)
        n = mf[abs]
        fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
        fp.write(r.read(n))

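# Example invocations (illustrative; the file names and flag spellings are
# assumptions), using the format specs documented in the docstring above:
#
#   hg cat -r 2 src/foo.c                   # print foo.c as of revision 2
#   hg cat -o '%d/%s.orig' -r 2 src/foo.c   # write it to src/foo.c.orig
#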
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem. Some filesystems,
    such as AFS, implement hardlinking incorrectly, but do not report
    errors. In these cases, use the --pull option to avoid
    hardlinking.

    See pull for valid source format details.
    """
    if dest is None:
        dest = os.path.basename(os.path.normpath(source))

    if os.path.exists(dest):
        raise util.Abort(_("destination '%s' already exists"), dest)

    dest = os.path.realpath(dest)

    class Dircleanup(object):
        def __init__(self, dir_):
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
            os.mkdir(dir_)
        def close(self):
            self.dir_ = None
        def __del__(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    if not os.path.exists(source):
        source = ui.expandpath(source)

    d = Dircleanup(dest)
    abspath = source
    other = hg.repository(ui, source)

    copy = False
    if other.dev() != -1:
        abspath = os.path.abspath(source)
        if not opts['pull'] and not opts['rev']:
            copy = True

    if copy:
        try:
            # we use a lock here because if we race with commit, we
            # can end up with extra data in the cloned revlogs that's
            # not pointed to by changesets, thus causing verify to
            # fail
            l1 = other.lock()
        except lock.LockException:
            copy = False

    if copy:
        # we lock here to avoid premature writing to the target
        os.mkdir(os.path.join(dest, ".hg"))
        l2 = lock.lock(os.path.join(dest, ".hg", "lock"))

        files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
        for f in files.split():
            src = os.path.join(source, ".hg", f)
            dst = os.path.join(dest, ".hg", f)
            try:
                util.copyfiles(src, dst)
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    raise

        repo = hg.repository(ui, dest)

    else:
        revs = None
        if opts['rev']:
            if not other.local():
                error = _("clone -r not supported yet for remote repositories.")
                raise util.Abort(error)
            else:
                revs = [other.lookup(rev) for rev in opts['rev']]
        repo = hg.repository(ui, dest, create=1)
        repo.pull(other, heads = revs)

    f = repo.opener("hgrc", "w", text=True)
    f.write("[paths]\n")
    f.write("default = %s\n" % abspath)
    f.close()

    if not opts['noupdate']:
        update(ui, repo)

    d.close()

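# After a clone, the new repository's .hg/hgrc written above records the
# source location so later pulls default to it (the path is an example):
#
#   [paths]
#   default = /path/to/original/repo
#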
787 def commit(ui, repo, *pats, **opts):
787 def commit(ui, repo, *pats, **opts):
788 """commit the specified files or all outstanding changes
788 """commit the specified files or all outstanding changes
789
789
790 Commit changes to the given files into the repository.
790 Commit changes to the given files into the repository.
791
791
792 If a list of files is omitted, all changes reported by "hg status"
792 If a list of files is omitted, all changes reported by "hg status"
793 will be commited.
793 will be commited.
794
794
795 The HGEDITOR or EDITOR environment variables are used to start an
795 The HGEDITOR or EDITOR environment variables are used to start an
796 editor to add a commit comment.
796 editor to add a commit comment.
797 """
797 """
798 message = opts['message']
798 message = opts['message']
799 logfile = opts['logfile']
799 logfile = opts['logfile']
800
800
801 if message and logfile:
801 if message and logfile:
802 raise util.Abort(_('options --message and --logfile are mutually '
802 raise util.Abort(_('options --message and --logfile are mutually '
803 'exclusive'))
803 'exclusive'))
804 if not message and logfile:
804 if not message and logfile:
805 try:
805 try:
806 if logfile == '-':
806 if logfile == '-':
807 message = sys.stdin.read()
807 message = sys.stdin.read()
808 else:
808 else:
809 message = open(logfile).read()
809 message = open(logfile).read()
810 except IOError, inst:
810 except IOError, inst:
811 raise util.Abort(_("can't read commit message '%s': %s") %
811 raise util.Abort(_("can't read commit message '%s': %s") %
812 (logfile, inst.strerror))
812 (logfile, inst.strerror))
813
813
814 if opts['addremove']:
814 if opts['addremove']:
815 addremove(ui, repo, *pats, **opts)
815 addremove(ui, repo, *pats, **opts)
816 fns, match, anypats = matchpats(repo, pats, opts)
816 fns, match, anypats = matchpats(repo, pats, opts)
817 if pats:
817 if pats:
818 modified, added, removed, deleted, unknown = (
818 modified, added, removed, deleted, unknown = (
819 repo.changes(files=fns, match=match))
819 repo.changes(files=fns, match=match))
820 files = modified + added + removed
820 files = modified + added + removed
821 else:
821 else:
822 files = []
822 files = []
823 try:
823 try:
824 repo.commit(files, message, opts['user'], opts['date'], match)
824 repo.commit(files, message, opts['user'], opts['date'], match)
825 except ValueError, inst:
825 except ValueError, inst:
826 raise util.Abort(str(inst))
826 raise util.Abort(str(inst))
827
827
828 def docopy(ui, repo, pats, opts, wlock):
828 def docopy(ui, repo, pats, opts, wlock):
829 # called with the repo lock held
829 # called with the repo lock held
830 cwd = repo.getcwd()
830 cwd = repo.getcwd()
831 errors = 0
831 errors = 0
832 copied = []
832 copied = []
833 targets = {}
833 targets = {}
834
834
835 def okaytocopy(abs, rel, exact):
835 def okaytocopy(abs, rel, exact):
836 reasons = {'?': _('is not managed'),
836 reasons = {'?': _('is not managed'),
837 'a': _('has been marked for add'),
837 'a': _('has been marked for add'),
838 'r': _('has been marked for remove')}
838 'r': _('has been marked for remove')}
839 state = repo.dirstate.state(abs)
839 state = repo.dirstate.state(abs)
840 reason = reasons.get(state)
840 reason = reasons.get(state)
841 if reason:
841 if reason:
842 if state == 'a':
842 if state == 'a':
843 origsrc = repo.dirstate.copied(abs)
843 origsrc = repo.dirstate.copied(abs)
844 if origsrc is not None:
844 if origsrc is not None:
845 return origsrc
845 return origsrc
846 if exact:
846 if exact:
847 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
847 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
848 else:
848 else:
849 return abs
849 return abs
850
850
851 def copy(origsrc, abssrc, relsrc, target, exact):
851 def copy(origsrc, abssrc, relsrc, target, exact):
852 abstarget = util.canonpath(repo.root, cwd, target)
852 abstarget = util.canonpath(repo.root, cwd, target)
853 reltarget = util.pathto(cwd, abstarget)
853 reltarget = util.pathto(cwd, abstarget)
854 prevsrc = targets.get(abstarget)
854 prevsrc = targets.get(abstarget)
855 if prevsrc is not None:
855 if prevsrc is not None:
856 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
856 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
857 (reltarget, abssrc, prevsrc))
857 (reltarget, abssrc, prevsrc))
858 return
858 return
859 if (not opts['after'] and os.path.exists(reltarget) or
859 if (not opts['after'] and os.path.exists(reltarget) or
860 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
860 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
861 if not opts['force']:
861 if not opts['force']:
862 ui.warn(_('%s: not overwriting - file exists\n') %
862 ui.warn(_('%s: not overwriting - file exists\n') %
863 reltarget)
863 reltarget)
864 return
864 return
865 if not opts['after']:
865 if not opts['after']:
866 os.unlink(reltarget)
866 os.unlink(reltarget)
867 if opts['after']:
867 if opts['after']:
868 if not os.path.exists(reltarget):
868 if not os.path.exists(reltarget):
869 return
869 return
870 else:
870 else:
871 targetdir = os.path.dirname(reltarget) or '.'
871 targetdir = os.path.dirname(reltarget) or '.'
872 if not os.path.isdir(targetdir):
872 if not os.path.isdir(targetdir):
873 os.makedirs(targetdir)
873 os.makedirs(targetdir)
874 try:
874 try:
875 restore = repo.dirstate.state(abstarget) == 'r'
875 restore = repo.dirstate.state(abstarget) == 'r'
876 if restore:
876 if restore:
877 repo.undelete([abstarget], wlock)
877 repo.undelete([abstarget], wlock)
878 try:
878 try:
879 shutil.copyfile(relsrc, reltarget)
879 shutil.copyfile(relsrc, reltarget)
880 shutil.copymode(relsrc, reltarget)
880 shutil.copymode(relsrc, reltarget)
881 restore = False
881 restore = False
882 finally:
882 finally:
883 if restore:
883 if restore:
884 repo.remove([abstarget], wlock)
884 repo.remove([abstarget], wlock)
885 except shutil.Error, inst:
885 except shutil.Error, inst:
886 raise util.Abort(str(inst))
886 raise util.Abort(str(inst))
887 except IOError, inst:
887 except IOError, inst:
888 if inst.errno == errno.ENOENT:
888 if inst.errno == errno.ENOENT:
889 ui.warn(_('%s: deleted in working copy\n') % relsrc)
889 ui.warn(_('%s: deleted in working copy\n') % relsrc)
890 else:
890 else:
891 ui.warn(_('%s: cannot copy - %s\n') %
891 ui.warn(_('%s: cannot copy - %s\n') %
892 (relsrc, inst.strerror))
892 (relsrc, inst.strerror))
893 errors += 1
893 errors += 1
894 return
894 return
895 if ui.verbose or not exact:
895 if ui.verbose or not exact:
896 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
896 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
897 targets[abstarget] = abssrc
897 targets[abstarget] = abssrc
898 if abstarget != origsrc:
898 if abstarget != origsrc:
899 repo.copy(origsrc, abstarget, wlock)
899 repo.copy(origsrc, abstarget, wlock)
900 copied.append((abssrc, relsrc, exact))
900 copied.append((abssrc, relsrc, exact))
901
901
902 def targetpathfn(pat, dest, srcs):
902 def targetpathfn(pat, dest, srcs):
903 if os.path.isdir(pat):
903 if os.path.isdir(pat):
904 abspfx = util.canonpath(repo.root, cwd, pat)
904 abspfx = util.canonpath(repo.root, cwd, pat)
905 if destdirexists:
905 if destdirexists:
906 striplen = len(os.path.split(abspfx)[0])
906 striplen = len(os.path.split(abspfx)[0])
907 else:
907 else:
908 striplen = len(abspfx)
908 striplen = len(abspfx)
909 if striplen:
909 if striplen:
910 striplen += len(os.sep)
910 striplen += len(os.sep)
911 res = lambda p: os.path.join(dest, p[striplen:])
911 res = lambda p: os.path.join(dest, p[striplen:])
912 elif destdirexists:
912 elif destdirexists:
913 res = lambda p: os.path.join(dest, os.path.basename(p))
913 res = lambda p: os.path.join(dest, os.path.basename(p))
914 else:
914 else:
915 res = lambda p: dest
915 res = lambda p: dest
916 return res
916 return res
917
917
918 def targetpathafterfn(pat, dest, srcs):
918 def targetpathafterfn(pat, dest, srcs):
919 if util.patkind(pat, None)[0]:
919 if util.patkind(pat, None)[0]:
920 # a mercurial pattern
920 # a mercurial pattern
921 res = lambda p: os.path.join(dest, os.path.basename(p))
921 res = lambda p: os.path.join(dest, os.path.basename(p))
922 else:
922 else:
923 abspfx = util.canonpath(repo.root, cwd, pat)
923 abspfx = util.canonpath(repo.root, cwd, pat)
924 if len(abspfx) < len(srcs[0][0]):
924 if len(abspfx) < len(srcs[0][0]):
925 # A directory. Either the target path contains the last
925 # A directory. Either the target path contains the last
926 # component of the source path or it does not.
926 # component of the source path or it does not.
927 def evalpath(striplen):
927 def evalpath(striplen):
928 score = 0
928 score = 0
929 for s in srcs:
929 for s in srcs:
930 t = os.path.join(dest, s[0][striplen:])
930 t = os.path.join(dest, s[0][striplen:])
931 if os.path.exists(t):
931 if os.path.exists(t):
932 score += 1
932 score += 1
933 return score
933 return score
934
934
935 striplen = len(abspfx)
935 striplen = len(abspfx)
936 if striplen:
936 if striplen:
937 striplen += len(os.sep)
937 striplen += len(os.sep)
938 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
938 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
939 score = evalpath(striplen)
939 score = evalpath(striplen)
940 striplen1 = len(os.path.split(abspfx)[0])
940 striplen1 = len(os.path.split(abspfx)[0])
941 if striplen1:
941 if striplen1:
942 striplen1 += len(os.sep)
942 striplen1 += len(os.sep)
943 if evalpath(striplen1) > score:
943 if evalpath(striplen1) > score:
944 striplen = striplen1
944 striplen = striplen1
945 res = lambda p: os.path.join(dest, p[striplen:])
945 res = lambda p: os.path.join(dest, p[striplen:])
946 else:
946 else:
947 # a file
947 # a file
948 if destdirexists:
948 if destdirexists:
949 res = lambda p: os.path.join(dest, os.path.basename(p))
949 res = lambda p: os.path.join(dest, os.path.basename(p))
950 else:
950 else:
951 res = lambda p: dest
951 res = lambda p: dest
952 return res
952 return res
953
953
954
954
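# Editorial sketch of the scoring idea used by evalpath() above: with --after
# the files have already been moved, so targetpathafterfn guesses the strip
# depth by counting how many candidate targets already exist and keeping the
# depth with the higher count.  Here 'exists' stands in for os.path.exists so
# the helper can be exercised without touching the filesystem.
import os.path
def scorestrip(dest, srcpaths, striplen, exists):
    return len([s for s in srcpaths
                if exists(os.path.join(dest, s[striplen:]))])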
955 pats = list(pats)
955 pats = list(pats)
956 if not pats:
956 if not pats:
957 raise util.Abort(_('no source or destination specified'))
957 raise util.Abort(_('no source or destination specified'))
958 if len(pats) == 1:
958 if len(pats) == 1:
959 raise util.Abort(_('no destination specified'))
959 raise util.Abort(_('no destination specified'))
960 dest = pats.pop()
960 dest = pats.pop()
961 destdirexists = os.path.isdir(dest)
961 destdirexists = os.path.isdir(dest)
962 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
962 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
963 raise util.Abort(_('with multiple sources, destination must be an '
963 raise util.Abort(_('with multiple sources, destination must be an '
964 'existing directory'))
964 'existing directory'))
965 if opts['after']:
965 if opts['after']:
966 tfn = targetpathafterfn
966 tfn = targetpathafterfn
967 else:
967 else:
968 tfn = targetpathfn
968 tfn = targetpathfn
969 copylist = []
969 copylist = []
970 for pat in pats:
970 for pat in pats:
971 srcs = []
971 srcs = []
972 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
972 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
973 origsrc = okaytocopy(abssrc, relsrc, exact)
973 origsrc = okaytocopy(abssrc, relsrc, exact)
974 if origsrc:
974 if origsrc:
975 srcs.append((origsrc, abssrc, relsrc, exact))
975 srcs.append((origsrc, abssrc, relsrc, exact))
976 if not srcs:
976 if not srcs:
977 continue
977 continue
978 copylist.append((tfn(pat, dest, srcs), srcs))
978 copylist.append((tfn(pat, dest, srcs), srcs))
979 if not copylist:
979 if not copylist:
980 raise util.Abort(_('no files to copy'))
980 raise util.Abort(_('no files to copy'))
981
981
982 for targetpath, srcs in copylist:
982 for targetpath, srcs in copylist:
983 for origsrc, abssrc, relsrc, exact in srcs:
983 for origsrc, abssrc, relsrc, exact in srcs:
984 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
984 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
985
985
986 if errors:
986 if errors:
987 ui.warn(_('(consider using --after)\n'))
987 ui.warn(_('(consider using --after)\n'))
988 return errors, copied
988 return errors, copied
989
989
990 def copy(ui, repo, *pats, **opts):
990 def copy(ui, repo, *pats, **opts):
991 """mark files as copied for the next commit
991 """mark files as copied for the next commit
992
992
993 Mark dest as having copies of source files. If dest is a
993 Mark dest as having copies of source files. If dest is a
994 directory, copies are put in that directory. If dest is a file,
994 directory, copies are put in that directory. If dest is a file,
995 there can only be one source.
995 there can only be one source.
996
996
997 By default, this command copies the contents of files as they
997 By default, this command copies the contents of files as they
998 stand in the working directory. If invoked with --after, the
998 stand in the working directory. If invoked with --after, the
999 operation is recorded, but no copying is performed.
999 operation is recorded, but no copying is performed.
1000
1000
1001 This command takes effect in the next commit.
1001 This command takes effect in the next commit.
1002
1002
1003 NOTE: This command should be treated as experimental. While it
1003 NOTE: This command should be treated as experimental. While it
1004 should properly record copied files, this information is not yet
1004 should properly record copied files, this information is not yet
1005 fully used by merge, nor fully reported by log.
1005 fully used by merge, nor fully reported by log.
1006 """
1006 """
1007 try:
1007 try:
1008 wlock = repo.wlock(0)
1008 wlock = repo.wlock(0)
1009 errs, copied = docopy(ui, repo, pats, opts, wlock)
1009 errs, copied = docopy(ui, repo, pats, opts, wlock)
1010 except lock.LockHeld, inst:
1010 except lock.LockHeld, inst:
1011 ui.warn(_("repository lock held by %s\n") % inst.args[0])
1011 ui.warn(_("repository lock held by %s\n") % inst.args[0])
1012 errs = 1
1012 errs = 1
1013 return errs
1013 return errs
1014
1014
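# Editorial sketch of the non-blocking lock pattern used by copy() above:
# repo.wlock(0) either returns the working-directory lock immediately or
# raises lock.LockHeld naming the holder, so a command can report the holder
# and exit instead of hanging.  (ui, repo, lock and _ are the same objects
# used elsewhere in this file; 'work' is a placeholder callable.)
def withwlock(ui, repo, work):
    try:
        wlock = repo.wlock(0)
    except lock.LockHeld, inst:
        ui.warn(_("repository lock held by %s\n") % inst.args[0])
        return 1
    return work(wlock)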
1015 def debugancestor(ui, index, rev1, rev2):
1015 def debugancestor(ui, index, rev1, rev2):
1016 """find the ancestor revision of two revisions in a given index"""
1016 """find the ancestor revision of two revisions in a given index"""
1017 r = revlog.revlog(util.opener(os.getcwd()), index, "")
1017 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "")
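# note: debugancestor and the other debug* commands below open an index file
# named on the command line, relative to the current directory rather than to
# the repository store, which is presumably why their openers pass audit=False
# and skip the usual path validation.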
1018 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1018 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1019 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1019 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1020
1020
1021 def debugrebuildstate(ui, repo, rev=None):
1021 def debugrebuildstate(ui, repo, rev=None):
1022 """rebuild the dirstate as it would look like for the given revision"""
1022 """rebuild the dirstate as it would look like for the given revision"""
1023 if not rev:
1023 if not rev:
1024 rev = repo.changelog.tip()
1024 rev = repo.changelog.tip()
1025 else:
1025 else:
1026 rev = repo.lookup(rev)
1026 rev = repo.lookup(rev)
1027 change = repo.changelog.read(rev)
1027 change = repo.changelog.read(rev)
1028 n = change[0]
1028 n = change[0]
1029 files = repo.manifest.readflags(n)
1029 files = repo.manifest.readflags(n)
1030 wlock = repo.wlock()
1030 wlock = repo.wlock()
1031 repo.dirstate.rebuild(rev, files.iteritems())
1031 repo.dirstate.rebuild(rev, files.iteritems())
1032
1032
1033 def debugcheckstate(ui, repo):
1033 def debugcheckstate(ui, repo):
1034 """validate the correctness of the current dirstate"""
1034 """validate the correctness of the current dirstate"""
1035 parent1, parent2 = repo.dirstate.parents()
1035 parent1, parent2 = repo.dirstate.parents()
1036 repo.dirstate.read()
1036 repo.dirstate.read()
1037 dc = repo.dirstate.map
1037 dc = repo.dirstate.map
1038 keys = dc.keys()
1038 keys = dc.keys()
1039 keys.sort()
1039 keys.sort()
1040 m1n = repo.changelog.read(parent1)[0]
1040 m1n = repo.changelog.read(parent1)[0]
1041 m2n = repo.changelog.read(parent2)[0]
1041 m2n = repo.changelog.read(parent2)[0]
1042 m1 = repo.manifest.read(m1n)
1042 m1 = repo.manifest.read(m1n)
1043 m2 = repo.manifest.read(m2n)
1043 m2 = repo.manifest.read(m2n)
1044 errors = 0
1044 errors = 0
1045 for f in dc:
1045 for f in dc:
1046 state = repo.dirstate.state(f)
1046 state = repo.dirstate.state(f)
1047 if state in "nr" and f not in m1:
1047 if state in "nr" and f not in m1:
1048 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1048 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1049 errors += 1
1049 errors += 1
1050 if state in "a" and f in m1:
1050 if state in "a" and f in m1:
1051 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1051 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1052 errors += 1
1052 errors += 1
1053 if state in "m" and f not in m1 and f not in m2:
1053 if state in "m" and f not in m1 and f not in m2:
1054 ui.warn(_("%s in state %s, but not in either manifest\n") %
1054 ui.warn(_("%s in state %s, but not in either manifest\n") %
1055 (f, state))
1055 (f, state))
1056 errors += 1
1056 errors += 1
1057 for f in m1:
1057 for f in m1:
1058 state = repo.dirstate.state(f)
1058 state = repo.dirstate.state(f)
1059 if state not in "nrm":
1059 if state not in "nrm":
1060 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1060 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1061 errors += 1
1061 errors += 1
1062 if errors:
1062 if errors:
1063 error = _(".hg/dirstate inconsistent with current parent's manifest")
1063 error = _(".hg/dirstate inconsistent with current parent's manifest")
1064 raise util.Abort(error)
1064 raise util.Abort(error)
1065
1065
1066 def debugconfig(ui):
1066 def debugconfig(ui):
1067 """show combined config settings from all hgrc files"""
1067 """show combined config settings from all hgrc files"""
1068 try:
1068 try:
1069 repo = hg.repository(ui)
1069 repo = hg.repository(ui)
1070 except hg.RepoError:
1070 except hg.RepoError:
1071 pass
1071 pass
1072 for section, name, value in ui.walkconfig():
1072 for section, name, value in ui.walkconfig():
1073 ui.write('%s.%s=%s\n' % (section, name, value))
1073 ui.write('%s.%s=%s\n' % (section, name, value))
1074
1074
1075 def debugsetparents(ui, repo, rev1, rev2=None):
1075 def debugsetparents(ui, repo, rev1, rev2=None):
1076 """manually set the parents of the current working directory
1076 """manually set the parents of the current working directory
1077
1077
1078 This is useful for writing repository conversion tools, but should
1078 This is useful for writing repository conversion tools, but should
1079 be used with care.
1079 be used with care.
1080 """
1080 """
1081
1081
1082 if not rev2:
1082 if not rev2:
1083 rev2 = hex(nullid)
1083 rev2 = hex(nullid)
1084
1084
1085 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1085 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1086
1086
1087 def debugstate(ui, repo):
1087 def debugstate(ui, repo):
1088 """show the contents of the current dirstate"""
1088 """show the contents of the current dirstate"""
1089 repo.dirstate.read()
1089 repo.dirstate.read()
1090 dc = repo.dirstate.map
1090 dc = repo.dirstate.map
1091 keys = dc.keys()
1091 keys = dc.keys()
1092 keys.sort()
1092 keys.sort()
1093 for file_ in keys:
1093 for file_ in keys:
1094 ui.write("%c %3o %10d %s %s\n"
1094 ui.write("%c %3o %10d %s %s\n"
1095 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1095 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1096 time.strftime("%x %X",
1096 time.strftime("%x %X",
1097 time.localtime(dc[file_][3])), file_))
1097 time.localtime(dc[file_][3])), file_))
1098 for f in repo.dirstate.copies:
1098 for f in repo.dirstate.copies:
1099 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1099 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1100
1100
1101 def debugdata(ui, file_, rev):
1101 def debugdata(ui, file_, rev):
1102 """dump the contents of an data file revision"""
1102 """dump the contents of an data file revision"""
1103 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
1103 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1104 file_[:-2] + ".i", file_)
1104 try:
1105 try:
1105 ui.write(r.revision(r.lookup(rev)))
1106 ui.write(r.revision(r.lookup(rev)))
1106 except KeyError:
1107 except KeyError:
1107 raise util.Abort(_('invalid revision identifier %s'), rev)
1108 raise util.Abort(_('invalid revision identifier %s'), rev)
1108
1109
1109 def debugindex(ui, file_):
1110 def debugindex(ui, file_):
1110 """dump the contents of an index file"""
1111 """dump the contents of an index file"""
1111 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1112 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1112 ui.write(" rev offset length base linkrev" +
1113 ui.write(" rev offset length base linkrev" +
1113 " nodeid p1 p2\n")
1114 " nodeid p1 p2\n")
1114 for i in range(r.count()):
1115 for i in range(r.count()):
1115 e = r.index[i]
1116 e = r.index[i]
1116 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1117 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1117 i, e[0], e[1], e[2], e[3],
1118 i, e[0], e[1], e[2], e[3],
1118 short(e[6]), short(e[4]), short(e[5])))
1119 short(e[6]), short(e[4]), short(e[5])))
1119
1120
1120 def debugindexdot(ui, file_):
1121 def debugindexdot(ui, file_):
1121 """dump an index DAG as a .dot file"""
1122 """dump an index DAG as a .dot file"""
1122 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1123 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1123 ui.write("digraph G {\n")
1124 ui.write("digraph G {\n")
1124 for i in range(r.count()):
1125 for i in range(r.count()):
1125 e = r.index[i]
1126 e = r.index[i]
1126 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1127 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1127 if e[5] != nullid:
1128 if e[5] != nullid:
1128 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1129 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1129 ui.write("}\n")
1130 ui.write("}\n")
1130
1131
1131 def debugrename(ui, repo, file, rev=None):
1132 def debugrename(ui, repo, file, rev=None):
1132 """dump rename information"""
1133 """dump rename information"""
1133 r = repo.file(relpath(repo, [file])[0])
1134 r = repo.file(relpath(repo, [file])[0])
1134 if rev:
1135 if rev:
1135 try:
1136 try:
1136 # assume all revision numbers are for changesets
1137 # assume all revision numbers are for changesets
1137 n = repo.lookup(rev)
1138 n = repo.lookup(rev)
1138 change = repo.changelog.read(n)
1139 change = repo.changelog.read(n)
1139 m = repo.manifest.read(change[0])
1140 m = repo.manifest.read(change[0])
1140 n = m[relpath(repo, [file])[0]]
1141 n = m[relpath(repo, [file])[0]]
1141 except (hg.RepoError, KeyError):
1142 except (hg.RepoError, KeyError):
1142 n = r.lookup(rev)
1143 n = r.lookup(rev)
1143 else:
1144 else:
1144 n = r.tip()
1145 n = r.tip()
1145 m = r.renamed(n)
1146 m = r.renamed(n)
1146 if m:
1147 if m:
1147 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1148 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1148 else:
1149 else:
1149 ui.write(_("not renamed\n"))
1150 ui.write(_("not renamed\n"))
1150
1151
1151 def debugwalk(ui, repo, *pats, **opts):
1152 def debugwalk(ui, repo, *pats, **opts):
1152 """show how files match on given patterns"""
1153 """show how files match on given patterns"""
1153 items = list(walk(repo, pats, opts))
1154 items = list(walk(repo, pats, opts))
1154 if not items:
1155 if not items:
1155 return
1156 return
1156 fmt = '%%s %%-%ds %%-%ds %%s' % (
1157 fmt = '%%s %%-%ds %%-%ds %%s' % (
1157 max([len(abs) for (src, abs, rel, exact) in items]),
1158 max([len(abs) for (src, abs, rel, exact) in items]),
1158 max([len(rel) for (src, abs, rel, exact) in items]))
1159 max([len(rel) for (src, abs, rel, exact) in items]))
1159 for src, abs, rel, exact in items:
1160 for src, abs, rel, exact in items:
1160 line = fmt % (src, abs, rel, exact and 'exact' or '')
1161 line = fmt % (src, abs, rel, exact and 'exact' or '')
1161 ui.write("%s\n" % line.rstrip())
1162 ui.write("%s\n" % line.rstrip())
1162
1163
1163 def diff(ui, repo, *pats, **opts):
1164 def diff(ui, repo, *pats, **opts):
1164 """diff repository (or selected files)
1165 """diff repository (or selected files)
1165
1166
1166 Show differences between revisions for the specified files.
1167 Show differences between revisions for the specified files.
1167
1168
1168 Differences between files are shown using the unified diff format.
1169 Differences between files are shown using the unified diff format.
1169
1170
1170 When two revision arguments are given, then changes are shown
1171 When two revision arguments are given, then changes are shown
1171 between those revisions. If only one revision is specified then
1172 between those revisions. If only one revision is specified then
1172 that revision is compared to the working directory, and, when no
1173 that revision is compared to the working directory, and, when no
1173 revisions are specified, the working directory files are compared
1174 revisions are specified, the working directory files are compared
1174 to its parent.
1175 to its parent.
1175
1176
1176 Without the -a option, diff will avoid generating diffs of files
1177 Without the -a option, diff will avoid generating diffs of files
1177 it detects as binary. With -a, diff will generate a diff anyway,
1178 it detects as binary. With -a, diff will generate a diff anyway,
1178 probably with undesirable results.
1179 probably with undesirable results.
1179 """
1180 """
1180 node1, node2 = None, None
1181 node1, node2 = None, None
1181 revs = [repo.lookup(x) for x in opts['rev']]
1182 revs = [repo.lookup(x) for x in opts['rev']]
1182
1183
1183 if len(revs) > 0:
1184 if len(revs) > 0:
1184 node1 = revs[0]
1185 node1 = revs[0]
1185 if len(revs) > 1:
1186 if len(revs) > 1:
1186 node2 = revs[1]
1187 node2 = revs[1]
1187 if len(revs) > 2:
1188 if len(revs) > 2:
1188 raise util.Abort(_("too many revisions to diff"))
1189 raise util.Abort(_("too many revisions to diff"))
1189
1190
1190 fns, matchfn, anypats = matchpats(repo, pats, opts)
1191 fns, matchfn, anypats = matchpats(repo, pats, opts)
1191
1192
1192 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1193 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1193 text=opts['text'], opts=opts)
1194 text=opts['text'], opts=opts)
1194
1195
1195 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1196 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1196 node = repo.lookup(changeset)
1197 node = repo.lookup(changeset)
1197 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1198 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1198 if opts['switch_parent']:
1199 if opts['switch_parent']:
1199 parents.reverse()
1200 parents.reverse()
1200 prev = (parents and parents[0]) or nullid
1201 prev = (parents and parents[0]) or nullid
1201 change = repo.changelog.read(node)
1202 change = repo.changelog.read(node)
1202
1203
1203 fp = make_file(repo, repo.changelog, opts['output'],
1204 fp = make_file(repo, repo.changelog, opts['output'],
1204 node=node, total=total, seqno=seqno,
1205 node=node, total=total, seqno=seqno,
1205 revwidth=revwidth)
1206 revwidth=revwidth)
1206 if fp != sys.stdout:
1207 if fp != sys.stdout:
1207 ui.note("%s\n" % fp.name)
1208 ui.note("%s\n" % fp.name)
1208
1209
1209 fp.write("# HG changeset patch\n")
1210 fp.write("# HG changeset patch\n")
1210 fp.write("# User %s\n" % change[1])
1211 fp.write("# User %s\n" % change[1])
1211 fp.write("# Node ID %s\n" % hex(node))
1212 fp.write("# Node ID %s\n" % hex(node))
1212 fp.write("# Parent %s\n" % hex(prev))
1213 fp.write("# Parent %s\n" % hex(prev))
1213 if len(parents) > 1:
1214 if len(parents) > 1:
1214 fp.write("# Parent %s\n" % hex(parents[1]))
1215 fp.write("# Parent %s\n" % hex(parents[1]))
1215 fp.write(change[4].rstrip())
1216 fp.write(change[4].rstrip())
1216 fp.write("\n\n")
1217 fp.write("\n\n")
1217
1218
1218 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1219 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1219 if fp != sys.stdout:
1220 if fp != sys.stdout:
1220 fp.close()
1221 fp.close()
1221
1222
1222 def export(ui, repo, *changesets, **opts):
1223 def export(ui, repo, *changesets, **opts):
1223 """dump the header and diffs for one or more changesets
1224 """dump the header and diffs for one or more changesets
1224
1225
1225 Print the changeset header and diffs for one or more revisions.
1226 Print the changeset header and diffs for one or more revisions.
1226
1227
1227 The information shown in the changeset header is: author,
1228 The information shown in the changeset header is: author,
1228 changeset hash, parent and commit comment.
1229 changeset hash, parent and commit comment.
1229
1230
1230 Output may be to a file, in which case the name of the file is
1231 Output may be to a file, in which case the name of the file is
1231 given using a format string. The formatting rules are as follows:
1232 given using a format string. The formatting rules are as follows:
1232
1233
1233 %% literal "%" character
1234 %% literal "%" character
1234 %H changeset hash (40 bytes of hexadecimal)
1235 %H changeset hash (40 bytes of hexadecimal)
1235 %N number of patches being generated
1236 %N number of patches being generated
1236 %R changeset revision number
1237 %R changeset revision number
1237 %b basename of the exporting repository
1238 %b basename of the exporting repository
1238 %h short-form changeset hash (12 bytes of hexadecimal)
1239 %h short-form changeset hash (12 bytes of hexadecimal)
1239 %n zero-padded sequence number, starting at 1
1240 %n zero-padded sequence number, starting at 1
1240 %r zero-padded changeset revision number
1241 %r zero-padded changeset revision number
1241
1242
1242 Without the -a option, export will avoid generating diffs of files
1243 Without the -a option, export will avoid generating diffs of files
1243 it detects as binary. With -a, export will generate a diff anyway,
1244 it detects as binary. With -a, export will generate a diff anyway,
1244 probably with undesirable results.
1245 probably with undesirable results.
1245
1246
1246 With the --switch-parent option, the diff will be against the second
1247 With the --switch-parent option, the diff will be against the second
1247 parent. This can be useful for reviewing a merge.
1248 parent. This can be useful for reviewing a merge.
1248 """
1249 """
1249 if not changesets:
1250 if not changesets:
1250 raise util.Abort(_("export requires at least one changeset"))
1251 raise util.Abort(_("export requires at least one changeset"))
1251 seqno = 0
1252 seqno = 0
1252 revs = list(revrange(ui, repo, changesets))
1253 revs = list(revrange(ui, repo, changesets))
1253 total = len(revs)
1254 total = len(revs)
1254 revwidth = max(map(len, revs))
1255 revwidth = max(map(len, revs))
1255 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1256 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1256 ui.note(msg)
1257 ui.note(msg)
1257 for cset in revs:
1258 for cset in revs:
1258 seqno += 1
1259 seqno += 1
1259 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1260 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1260
1261
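# Editorial sketch (a hypothetical helper, not the module's make_file()): one
# way the %-codes documented above could be expanded once their values are
# known.  The 'codes' mapping passed in is a placeholder.
import re
def expandformat(fmt, codes):
    # codes maps a single format letter to its replacement; '%%' yields '%'
    return re.sub(r'%(.)',
                  lambda m: m.group(1) == '%' and '%' or codes[m.group(1)],
                  fmt)
# expandformat('%b-%r.patch', {'b': 'hg', 'r': '0012'}) -> 'hg-0012.patch'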
1261 def forget(ui, repo, *pats, **opts):
1262 def forget(ui, repo, *pats, **opts):
1262 """don't add the specified files on the next commit
1263 """don't add the specified files on the next commit
1263
1264
1264 Undo an 'hg add' scheduled for the next commit.
1265 Undo an 'hg add' scheduled for the next commit.
1265 """
1266 """
1266 forget = []
1267 forget = []
1267 for src, abs, rel, exact in walk(repo, pats, opts):
1268 for src, abs, rel, exact in walk(repo, pats, opts):
1268 if repo.dirstate.state(abs) == 'a':
1269 if repo.dirstate.state(abs) == 'a':
1269 forget.append(abs)
1270 forget.append(abs)
1270 if ui.verbose or not exact:
1271 if ui.verbose or not exact:
1271 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1272 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1272 repo.forget(forget)
1273 repo.forget(forget)
1273
1274
1274 def grep(ui, repo, pattern, *pats, **opts):
1275 def grep(ui, repo, pattern, *pats, **opts):
1275 """search for a pattern in specified files and revisions
1276 """search for a pattern in specified files and revisions
1276
1277
1277 Search revisions of files for a regular expression.
1278 Search revisions of files for a regular expression.
1278
1279
1279 This command behaves differently than Unix grep. It only accepts
1280 This command behaves differently than Unix grep. It only accepts
1280 Python/Perl regexps. It searches repository history, not the
1281 Python/Perl regexps. It searches repository history, not the
1281 working directory. It always prints the revision number in which
1282 working directory. It always prints the revision number in which
1282 a match appears.
1283 a match appears.
1283
1284
1284 By default, grep only prints output for the first revision of a
1285 By default, grep only prints output for the first revision of a
1285 file in which it finds a match. To get it to print every revision
1286 file in which it finds a match. To get it to print every revision
1286 that contains a change in match status ("-" for a match that
1287 that contains a change in match status ("-" for a match that
1287 becomes a non-match, or "+" for a non-match that becomes a match),
1288 becomes a non-match, or "+" for a non-match that becomes a match),
1288 use the --all flag.
1289 use the --all flag.
1289 """
1290 """
1290 reflags = 0
1291 reflags = 0
1291 if opts['ignore_case']:
1292 if opts['ignore_case']:
1292 reflags |= re.I
1293 reflags |= re.I
1293 regexp = re.compile(pattern, reflags)
1294 regexp = re.compile(pattern, reflags)
1294 sep, eol = ':', '\n'
1295 sep, eol = ':', '\n'
1295 if opts['print0']:
1296 if opts['print0']:
1296 sep = eol = '\0'
1297 sep = eol = '\0'
1297
1298
1298 fcache = {}
1299 fcache = {}
1299 def getfile(fn):
1300 def getfile(fn):
1300 if fn not in fcache:
1301 if fn not in fcache:
1301 fcache[fn] = repo.file(fn)
1302 fcache[fn] = repo.file(fn)
1302 return fcache[fn]
1303 return fcache[fn]
1303
1304
1304 def matchlines(body):
1305 def matchlines(body):
1305 begin = 0
1306 begin = 0
1306 linenum = 0
1307 linenum = 0
1307 while True:
1308 while True:
1308 match = regexp.search(body, begin)
1309 match = regexp.search(body, begin)
1309 if not match:
1310 if not match:
1310 break
1311 break
1311 mstart, mend = match.span()
1312 mstart, mend = match.span()
1312 linenum += body.count('\n', begin, mstart) + 1
1313 linenum += body.count('\n', begin, mstart) + 1
1313 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1314 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1314 lend = body.find('\n', mend)
1315 lend = body.find('\n', mend)
1315 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1316 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1316 begin = lend + 1
1317 begin = lend + 1
1317
1318
1318 class linestate(object):
1319 class linestate(object):
1319 def __init__(self, line, linenum, colstart, colend):
1320 def __init__(self, line, linenum, colstart, colend):
1320 self.line = line
1321 self.line = line
1321 self.linenum = linenum
1322 self.linenum = linenum
1322 self.colstart = colstart
1323 self.colstart = colstart
1323 self.colend = colend
1324 self.colend = colend
1324 def __eq__(self, other):
1325 def __eq__(self, other):
1325 return self.line == other.line
1326 return self.line == other.line
1326 def __hash__(self):
1327 def __hash__(self):
1327 return hash(self.line)
1328 return hash(self.line)
1328
1329
1329 matches = {}
1330 matches = {}
1330 def grepbody(fn, rev, body):
1331 def grepbody(fn, rev, body):
1331 matches[rev].setdefault(fn, {})
1332 matches[rev].setdefault(fn, {})
1332 m = matches[rev][fn]
1333 m = matches[rev][fn]
1333 for lnum, cstart, cend, line in matchlines(body):
1334 for lnum, cstart, cend, line in matchlines(body):
1334 s = linestate(line, lnum, cstart, cend)
1335 s = linestate(line, lnum, cstart, cend)
1335 m[s] = s
1336 m[s] = s
1336
1337
1337 # FIXME: prev isn't used, why ?
1338 # FIXME: prev isn't used, why ?
1338 prev = {}
1339 prev = {}
1339 ucache = {}
1340 ucache = {}
1340 def display(fn, rev, states, prevstates):
1341 def display(fn, rev, states, prevstates):
1341 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1342 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1342 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1343 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1343 counts = {'-': 0, '+': 0}
1344 counts = {'-': 0, '+': 0}
1344 filerevmatches = {}
1345 filerevmatches = {}
1345 for l in diff:
1346 for l in diff:
1346 if incrementing or not opts['all']:
1347 if incrementing or not opts['all']:
1347 change = ((l in prevstates) and '-') or '+'
1348 change = ((l in prevstates) and '-') or '+'
1348 r = rev
1349 r = rev
1349 else:
1350 else:
1350 change = ((l in states) and '-') or '+'
1351 change = ((l in states) and '-') or '+'
1351 r = prev[fn]
1352 r = prev[fn]
1352 cols = [fn, str(rev)]
1353 cols = [fn, str(rev)]
1353 if opts['line_number']:
1354 if opts['line_number']:
1354 cols.append(str(l.linenum))
1355 cols.append(str(l.linenum))
1355 if opts['all']:
1356 if opts['all']:
1356 cols.append(change)
1357 cols.append(change)
1357 if opts['user']:
1358 if opts['user']:
1358 cols.append(trimuser(ui, getchange(rev)[1], rev,
1359 cols.append(trimuser(ui, getchange(rev)[1], rev,
1359 ucache))
1360 ucache))
1360 if opts['files_with_matches']:
1361 if opts['files_with_matches']:
1361 c = (fn, rev)
1362 c = (fn, rev)
1362 if c in filerevmatches:
1363 if c in filerevmatches:
1363 continue
1364 continue
1364 filerevmatches[c] = 1
1365 filerevmatches[c] = 1
1365 else:
1366 else:
1366 cols.append(l.line)
1367 cols.append(l.line)
1367 ui.write(sep.join(cols), eol)
1368 ui.write(sep.join(cols), eol)
1368 counts[change] += 1
1369 counts[change] += 1
1369 return counts['+'], counts['-']
1370 return counts['+'], counts['-']
1370
1371
1371 fstate = {}
1372 fstate = {}
1372 skip = {}
1373 skip = {}
1373 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1374 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1374 count = 0
1375 count = 0
1375 incrementing = False
1376 incrementing = False
1376 for st, rev, fns in changeiter:
1377 for st, rev, fns in changeiter:
1377 if st == 'window':
1378 if st == 'window':
1378 incrementing = rev
1379 incrementing = rev
1379 matches.clear()
1380 matches.clear()
1380 elif st == 'add':
1381 elif st == 'add':
1381 change = repo.changelog.read(repo.lookup(str(rev)))
1382 change = repo.changelog.read(repo.lookup(str(rev)))
1382 mf = repo.manifest.read(change[0])
1383 mf = repo.manifest.read(change[0])
1383 matches[rev] = {}
1384 matches[rev] = {}
1384 for fn in fns:
1385 for fn in fns:
1385 if fn in skip:
1386 if fn in skip:
1386 continue
1387 continue
1387 fstate.setdefault(fn, {})
1388 fstate.setdefault(fn, {})
1388 try:
1389 try:
1389 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1390 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1390 except KeyError:
1391 except KeyError:
1391 pass
1392 pass
1392 elif st == 'iter':
1393 elif st == 'iter':
1393 states = matches[rev].items()
1394 states = matches[rev].items()
1394 states.sort()
1395 states.sort()
1395 for fn, m in states:
1396 for fn, m in states:
1396 if fn in skip:
1397 if fn in skip:
1397 continue
1398 continue
1398 if incrementing or not opts['all'] or fstate[fn]:
1399 if incrementing or not opts['all'] or fstate[fn]:
1399 pos, neg = display(fn, rev, m, fstate[fn])
1400 pos, neg = display(fn, rev, m, fstate[fn])
1400 count += pos + neg
1401 count += pos + neg
1401 if pos and not opts['all']:
1402 if pos and not opts['all']:
1402 skip[fn] = True
1403 skip[fn] = True
1403 fstate[fn] = m
1404 fstate[fn] = m
1404 prev[fn] = rev
1405 prev[fn] = rev
1405
1406
1406 if not incrementing:
1407 if not incrementing:
1407 fstate = fstate.items()
1408 fstate = fstate.items()
1408 fstate.sort()
1409 fstate.sort()
1409 for fn, state in fstate:
1410 for fn, state in fstate:
1410 if fn in skip:
1411 if fn in skip:
1411 continue
1412 continue
1412 display(fn, rev, {}, state)
1413 display(fn, rev, {}, state)
1413 return (count == 0 and 1) or 0
1414 return (count == 0 and 1) or 0
1414
1415
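# Editorial, standalone rendering of the matchlines() generator above so the
# scanning logic can be tried outside grep(): every regexp hit is reported
# with its 1-based line number and the column span inside that line.
import re
def matchlines(regexp, body):
    begin = 0
    linenum = 0
    while True:
        match = regexp.search(body, begin)
        if not match:
            break
        mstart, mend = match.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1 or begin
        lend = body.find('\n', mend)
        yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
        begin = lend + 1
# list(matchlines(re.compile('b'), 'a\nabc\n')) -> [(2, 1, 2, 'abc')]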
1415 def heads(ui, repo, **opts):
1416 def heads(ui, repo, **opts):
1416 """show current repository heads
1417 """show current repository heads
1417
1418
1418 Show all repository head changesets.
1419 Show all repository head changesets.
1419
1420
1420 Repository "heads" are changesets that don't have children
1421 Repository "heads" are changesets that don't have children
1421 changesets. They are where development generally takes place and
1422 changesets. They are where development generally takes place and
1422 are the usual targets for update and merge operations.
1423 are the usual targets for update and merge operations.
1423 """
1424 """
1424 if opts['rev']:
1425 if opts['rev']:
1425 heads = repo.heads(repo.lookup(opts['rev']))
1426 heads = repo.heads(repo.lookup(opts['rev']))
1426 else:
1427 else:
1427 heads = repo.heads()
1428 heads = repo.heads()
1428 br = None
1429 br = None
1429 if opts['branches']:
1430 if opts['branches']:
1430 br = repo.branchlookup(heads)
1431 br = repo.branchlookup(heads)
1431 for n in heads:
1432 for n in heads:
1432 show_changeset(ui, repo, changenode=n, brinfo=br)
1433 show_changeset(ui, repo, changenode=n, brinfo=br)
1433
1434
1434 def identify(ui, repo):
1435 def identify(ui, repo):
1435 """print information about the working copy
1436 """print information about the working copy
1436
1437
1437 Print a short summary of the current state of the repo.
1438 Print a short summary of the current state of the repo.
1438
1439
1439 This summary identifies the repository state using one or two parent
1440 This summary identifies the repository state using one or two parent
1440 hash identifiers, followed by a "+" if there are uncommitted changes
1441 hash identifiers, followed by a "+" if there are uncommitted changes
1441 in the working directory, followed by a list of tags for this revision.
1442 in the working directory, followed by a list of tags for this revision.
1442 """
1443 """
1443 parents = [p for p in repo.dirstate.parents() if p != nullid]
1444 parents = [p for p in repo.dirstate.parents() if p != nullid]
1444 if not parents:
1445 if not parents:
1445 ui.write(_("unknown\n"))
1446 ui.write(_("unknown\n"))
1446 return
1447 return
1447
1448
1448 hexfunc = ui.verbose and hex or short
1449 hexfunc = ui.verbose and hex or short
1449 modified, added, removed, deleted, unknown = repo.changes()
1450 modified, added, removed, deleted, unknown = repo.changes()
1450 output = ["%s%s" %
1451 output = ["%s%s" %
1451 ('+'.join([hexfunc(parent) for parent in parents]),
1452 ('+'.join([hexfunc(parent) for parent in parents]),
1452 (modified or added or removed or deleted) and "+" or "")]
1453 (modified or added or removed or deleted) and "+" or "")]
1453
1454
1454 if not ui.quiet:
1455 if not ui.quiet:
1455 # multiple tags for a single parent separated by '/'
1456 # multiple tags for a single parent separated by '/'
1456 parenttags = ['/'.join(tags)
1457 parenttags = ['/'.join(tags)
1457 for tags in map(repo.nodetags, parents) if tags]
1458 for tags in map(repo.nodetags, parents) if tags]
1458 # tags for multiple parents separated by ' + '
1459 # tags for multiple parents separated by ' + '
1459 if parenttags:
1460 if parenttags:
1460 output.append(' + '.join(parenttags))
1461 output.append(' + '.join(parenttags))
1461
1462
1462 ui.write("%s\n" % ' '.join(output))
1463 ui.write("%s\n" % ' '.join(output))
1463
1464
1464 def import_(ui, repo, patch1, *patches, **opts):
1465 def import_(ui, repo, patch1, *patches, **opts):
1465 """import an ordered set of patches
1466 """import an ordered set of patches
1466
1467
1467 Import a list of patches and commit them individually.
1468 Import a list of patches and commit them individually.
1468
1469
1469 If there are outstanding changes in the working directory, import
1470 If there are outstanding changes in the working directory, import
1470 will abort unless given the -f flag.
1471 will abort unless given the -f flag.
1471
1472
1472 If a patch looks like a mail message (its first line starts with
1473 If a patch looks like a mail message (its first line starts with
1473 "From " or looks like an RFC822 header), it will not be applied
1474 "From " or looks like an RFC822 header), it will not be applied
1474 unless the -f option is used. The importer neither parses nor
1475 unless the -f option is used. The importer neither parses nor
1475 discards mail headers, so use -f only to override the "mailness"
1476 discards mail headers, so use -f only to override the "mailness"
1476 safety check, not to import a real mail message.
1477 safety check, not to import a real mail message.
1477 """
1478 """
1478 patches = (patch1,) + patches
1479 patches = (patch1,) + patches
1479
1480
1480 if not opts['force']:
1481 if not opts['force']:
1481 modified, added, removed, deleted, unknown = repo.changes()
1482 modified, added, removed, deleted, unknown = repo.changes()
1482 if modified or added or removed or deleted:
1483 if modified or added or removed or deleted:
1483 raise util.Abort(_("outstanding uncommitted changes"))
1484 raise util.Abort(_("outstanding uncommitted changes"))
1484
1485
1485 d = opts["base"]
1486 d = opts["base"]
1486 strip = opts["strip"]
1487 strip = opts["strip"]
1487
1488
1488 mailre = re.compile(r'(?:From |[\w-]+:)')
1489 mailre = re.compile(r'(?:From |[\w-]+:)')
1489
1490
1490 # attempt to detect the start of a patch
1491 # attempt to detect the start of a patch
1491 # (this heuristic is borrowed from quilt)
1492 # (this heuristic is borrowed from quilt)
1492 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1493 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1493 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1494 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1494 '(---|\*\*\*)[ \t])')
1495 '(---|\*\*\*)[ \t])')
1495
1496
1496 for patch in patches:
1497 for patch in patches:
1497 ui.status(_("applying %s\n") % patch)
1498 ui.status(_("applying %s\n") % patch)
1498 pf = os.path.join(d, patch)
1499 pf = os.path.join(d, patch)
1499
1500
1500 message = []
1501 message = []
1501 user = None
1502 user = None
1502 hgpatch = False
1503 hgpatch = False
1503 for line in file(pf):
1504 for line in file(pf):
1504 line = line.rstrip()
1505 line = line.rstrip()
1505 if (not message and not hgpatch and
1506 if (not message and not hgpatch and
1506 mailre.match(line) and not opts['force']):
1507 mailre.match(line) and not opts['force']):
1507 if len(line) > 35:
1508 if len(line) > 35:
1508 line = line[:32] + '...'
1509 line = line[:32] + '...'
1509 raise util.Abort(_('first line looks like a '
1510 raise util.Abort(_('first line looks like a '
1510 'mail header: ') + line)
1511 'mail header: ') + line)
1511 if diffre.match(line):
1512 if diffre.match(line):
1512 break
1513 break
1513 elif hgpatch:
1514 elif hgpatch:
1514 # parse values when importing the result of an hg export
1515 # parse values when importing the result of an hg export
1515 if line.startswith("# User "):
1516 if line.startswith("# User "):
1516 user = line[7:]
1517 user = line[7:]
1517 ui.debug(_('User: %s\n') % user)
1518 ui.debug(_('User: %s\n') % user)
1518 elif not line.startswith("# ") and line:
1519 elif not line.startswith("# ") and line:
1519 message.append(line)
1520 message.append(line)
1520 hgpatch = False
1521 hgpatch = False
1521 elif line == '# HG changeset patch':
1522 elif line == '# HG changeset patch':
1522 hgpatch = True
1523 hgpatch = True
1523 message = [] # We may have collected garbage
1524 message = [] # We may have collected garbage
1524 else:
1525 else:
1525 message.append(line)
1526 message.append(line)
1526
1527
1527 # make sure message isn't empty
1528 # make sure message isn't empty
1528 if not message:
1529 if not message:
1529 message = _("imported patch %s\n") % patch
1530 message = _("imported patch %s\n") % patch
1530 else:
1531 else:
1531 message = "%s\n" % '\n'.join(message)
1532 message = "%s\n" % '\n'.join(message)
1532 ui.debug(_('message:\n%s\n') % message)
1533 ui.debug(_('message:\n%s\n') % message)
1533
1534
1534 files = util.patch(strip, pf, ui)
1535 files = util.patch(strip, pf, ui)
1535
1536
1536 if len(files) > 0:
1537 if len(files) > 0:
1537 addremove(ui, repo, *files)
1538 addremove(ui, repo, *files)
1538 repo.commit(files, message, user)
1539 repo.commit(files, message, user)
1539
1540
1540 def incoming(ui, repo, source="default", **opts):
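# Editorial illustration of the two heuristics above: mailre flags lines that
# look like a mail header, diffre flags lines that start the patch body.
import re
mailre = re.compile(r'(?:From |[\w-]+:)')
diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |'
                    r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                    r'(---|\*\*\*)[ \t])')
# mailre.match('Subject: [PATCH] fix thing')  -> a match (mail header)
# diffre.match('diff -r 0123456789ab foo.c')  -> a match (start of diff)
# diffre.match('--- a/foo.c')                 -> a match (start of diff)
# mailre.match('just a description line')     -> None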
1541 def incoming(ui, repo, source="default", **opts):
1541 """show new changesets found in source
1542 """show new changesets found in source
1542
1543
1543 Show new changesets found in the specified repo or the default
1544 Show new changesets found in the specified repo or the default
1544 pull repo. These are the changesets that would be pulled if a pull
1545 pull repo. These are the changesets that would be pulled if a pull
1545 was requested.
1546 was requested.
1546
1547
1547 Currently only local repositories are supported.
1548 Currently only local repositories are supported.
1548 """
1549 """
1549 source = ui.expandpath(source, repo.root)
1550 source = ui.expandpath(source, repo.root)
1550 other = hg.repository(ui, source)
1551 other = hg.repository(ui, source)
1551 if not other.local():
1552 if not other.local():
1552 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1553 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1553 o = repo.findincoming(other)
1554 o = repo.findincoming(other)
1554 if not o:
1555 if not o:
1555 return
1556 return
1556 o = other.changelog.nodesbetween(o)[0]
1557 o = other.changelog.nodesbetween(o)[0]
1557 if opts['newest_first']:
1558 if opts['newest_first']:
1558 o.reverse()
1559 o.reverse()
1559 for n in o:
1560 for n in o:
1560 parents = [p for p in other.changelog.parents(n) if p != nullid]
1561 parents = [p for p in other.changelog.parents(n) if p != nullid]
1561 if opts['no_merges'] and len(parents) == 2:
1562 if opts['no_merges'] and len(parents) == 2:
1562 continue
1563 continue
1563 show_changeset(ui, other, changenode=n)
1564 show_changeset(ui, other, changenode=n)
1564 if opts['patch']:
1565 if opts['patch']:
1565 prev = (parents and parents[0]) or nullid
1566 prev = (parents and parents[0]) or nullid
1566 dodiff(ui, ui, other, prev, n)
1567 dodiff(ui, ui, other, prev, n)
1567 ui.write("\n")
1568 ui.write("\n")
1568
1569
1569 def init(ui, dest="."):
1570 def init(ui, dest="."):
1570 """create a new repository in the given directory
1571 """create a new repository in the given directory
1571
1572
1572 Initialize a new repository in the given directory. If the given
1573 Initialize a new repository in the given directory. If the given
1573 directory does not exist, it is created.
1574 directory does not exist, it is created.
1574
1575
1575 If no directory is given, the current directory is used.
1576 If no directory is given, the current directory is used.
1576 """
1577 """
1577 if not os.path.exists(dest):
1578 if not os.path.exists(dest):
1578 os.mkdir(dest)
1579 os.mkdir(dest)
1579 hg.repository(ui, dest, create=1)
1580 hg.repository(ui, dest, create=1)
1580
1581
1581 def locate(ui, repo, *pats, **opts):
1582 def locate(ui, repo, *pats, **opts):
1582 """locate files matching specific patterns
1583 """locate files matching specific patterns
1583
1584
1584 Print all files under Mercurial control whose names match the
1585 Print all files under Mercurial control whose names match the
1585 given patterns.
1586 given patterns.
1586
1587
1587 This command searches the current directory and its
1588 This command searches the current directory and its
1588 subdirectories. To search an entire repository, move to the root
1589 subdirectories. To search an entire repository, move to the root
1589 of the repository.
1590 of the repository.
1590
1591
1591 If no patterns are given to match, this command prints all file
1592 If no patterns are given to match, this command prints all file
1592 names.
1593 names.
1593
1594
1594 If you want to feed the output of this command into the "xargs"
1595 If you want to feed the output of this command into the "xargs"
1595 command, use the "-0" option to both this command and "xargs".
1596 command, use the "-0" option to both this command and "xargs".
1596 This will avoid the problem of "xargs" treating single filenames
1597 This will avoid the problem of "xargs" treating single filenames
1597 that contain white space as multiple filenames.
1598 that contain white space as multiple filenames.
1598 """
1599 """
1599 end = opts['print0'] and '\0' or '\n'
1600 end = opts['print0'] and '\0' or '\n'
1600 rev = opts['rev']
1601 rev = opts['rev']
1601 if rev:
1602 if rev:
1602 node = repo.lookup(rev)
1603 node = repo.lookup(rev)
1603 else:
1604 else:
1604 node = None
1605 node = None
1605
1606
1606 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1607 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1607 head='(?:.*/|)'):
1608 head='(?:.*/|)'):
1608 if not node and repo.dirstate.state(abs) == '?':
1609 if not node and repo.dirstate.state(abs) == '?':
1609 continue
1610 continue
1610 if opts['fullpath']:
1611 if opts['fullpath']:
1611 ui.write(os.path.join(repo.root, abs), end)
1612 ui.write(os.path.join(repo.root, abs), end)
1612 else:
1613 else:
1613 ui.write(((pats and rel) or abs), end)
1614 ui.write(((pats and rel) or abs), end)
1614
1615
1615 def log(ui, repo, *pats, **opts):
1616 def log(ui, repo, *pats, **opts):
1616 """show revision history of entire repository or files
1617 """show revision history of entire repository or files
1617
1618
1618 Print the revision history of the specified files or the entire project.
1619 Print the revision history of the specified files or the entire project.
1619
1620
1620 By default this command outputs: changeset id and hash, tags,
1621 By default this command outputs: changeset id and hash, tags,
1621 non-trivial parents, user, date and time, and a summary for each
1622 non-trivial parents, user, date and time, and a summary for each
1622 commit. When the -v/--verbose switch is used, the list of changed
1623 commit. When the -v/--verbose switch is used, the list of changed
1623 files and the full commit message are shown.
1624 files and the full commit message are shown.
1624 """
1625 """
1625 class dui(object):
1626 class dui(object):
1626 # Implement and delegate some ui protocol. Save hunks of
1627 # Implement and delegate some ui protocol. Save hunks of
1627 # output for later display in the desired order.
1628 # output for later display in the desired order.
1628 def __init__(self, ui):
1629 def __init__(self, ui):
1629 self.ui = ui
1630 self.ui = ui
1630 self.hunk = {}
1631 self.hunk = {}
1631 def bump(self, rev):
1632 def bump(self, rev):
1632 self.rev = rev
1633 self.rev = rev
1633 self.hunk[rev] = []
1634 self.hunk[rev] = []
1634 def note(self, *args):
1635 def note(self, *args):
1635 if self.verbose:
1636 if self.verbose:
1636 self.write(*args)
1637 self.write(*args)
1637 def status(self, *args):
1638 def status(self, *args):
1638 if not self.quiet:
1639 if not self.quiet:
1639 self.write(*args)
1640 self.write(*args)
1640 def write(self, *args):
1641 def write(self, *args):
1641 self.hunk[self.rev].append(args)
1642 self.hunk[self.rev].append(args)
1642 def debug(self, *args):
1643 def debug(self, *args):
1643 if self.debugflag:
1644 if self.debugflag:
1644 self.write(*args)
1645 self.write(*args)
1645 def __getattr__(self, key):
1646 def __getattr__(self, key):
1646 return getattr(self.ui, key)
1647 return getattr(self.ui, key)
1647
1648
1648 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1649 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1649
1650
1650 if opts['limit']:
1651 if opts['limit']:
1651 try:
1652 try:
1652 limit = int(opts['limit'])
1653 limit = int(opts['limit'])
1653 except ValueError:
1654 except ValueError:
1654 raise util.Abort(_('limit must be a positive integer'))
1655 raise util.Abort(_('limit must be a positive integer'))
1655 if limit <= 0: raise util.Abort(_('limit must be positive'))
1656 if limit <= 0: raise util.Abort(_('limit must be positive'))
1656 else:
1657 else:
1657 limit = sys.maxint
1658 limit = sys.maxint
1658 count = 0
1659 count = 0
1659
1660
1660 for st, rev, fns in changeiter:
1661 for st, rev, fns in changeiter:
1661 if st == 'window':
1662 if st == 'window':
1662 du = dui(ui)
1663 du = dui(ui)
1663 elif st == 'add':
1664 elif st == 'add':
1664 du.bump(rev)
1665 du.bump(rev)
1665 changenode = repo.changelog.node(rev)
1666 changenode = repo.changelog.node(rev)
1666 parents = [p for p in repo.changelog.parents(changenode)
1667 parents = [p for p in repo.changelog.parents(changenode)
1667 if p != nullid]
1668 if p != nullid]
1668 if opts['no_merges'] and len(parents) == 2:
1669 if opts['no_merges'] and len(parents) == 2:
1669 continue
1670 continue
1670 if opts['only_merges'] and len(parents) != 2:
1671 if opts['only_merges'] and len(parents) != 2:
1671 continue
1672 continue
1672
1673
1673 if opts['keyword']:
1674 if opts['keyword']:
1674 changes = getchange(rev)
1675 changes = getchange(rev)
1675 miss = 0
1676 miss = 0
1676 for k in [kw.lower() for kw in opts['keyword']]:
1677 for k in [kw.lower() for kw in opts['keyword']]:
1677 if not (k in changes[1].lower() or
1678 if not (k in changes[1].lower() or
1678 k in changes[4].lower() or
1679 k in changes[4].lower() or
1679 k in " ".join(changes[3][:20]).lower()):
1680 k in " ".join(changes[3][:20]).lower()):
1680 miss = 1
1681 miss = 1
1681 break
1682 break
1682 if miss:
1683 if miss:
1683 continue
1684 continue
1684
1685
1685 br = None
1686 br = None
1686 if opts['branches']:
1687 if opts['branches']:
1687 br = repo.branchlookup([repo.changelog.node(rev)])
1688 br = repo.branchlookup([repo.changelog.node(rev)])
1688
1689
1689 show_changeset(du, repo, rev, brinfo=br)
1690 show_changeset(du, repo, rev, brinfo=br)
1690 if opts['patch']:
1691 if opts['patch']:
1691 prev = (parents and parents[0]) or nullid
1692 prev = (parents and parents[0]) or nullid
1692 dodiff(du, du, repo, prev, changenode, match=matchfn)
1693 dodiff(du, du, repo, prev, changenode, match=matchfn)
1693 du.write("\n\n")
1694 du.write("\n\n")
1694 elif st == 'iter':
1695 elif st == 'iter':
1695 if count == limit: break
1696 if count == limit: break
1696 if du.hunk[rev]:
1697 if du.hunk[rev]:
1697 count += 1
1698 count += 1
1698 for args in du.hunk[rev]:
1699 for args in du.hunk[rev]:
1699 ui.write(*args)
1700 ui.write(*args)
1700
1701
1701 def manifest(ui, repo, rev=None):
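# Editorial sketch of the buffering idea behind the dui class above: hunks of
# output are collected per revision while a window is walked, then replayed to
# the real ui once the desired display order is known.
class bufferedui(object):
    def __init__(self, ui):
        self.ui = ui
        self.hunk = {}
        self.rev = None
    def bump(self, rev):
        self.rev = rev
        self.hunk[rev] = []
    def write(self, *args):
        self.hunk[self.rev].append(args)
    def replay(self, rev):
        for args in self.hunk.pop(rev, []):
            self.ui.write(*args)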
1702 def manifest(ui, repo, rev=None):
1702 """output the latest or given revision of the project manifest
1703 """output the latest or given revision of the project manifest
1703
1704
1704 Print a list of version controlled files for the given revision.
1705 Print a list of version controlled files for the given revision.
1705
1706
1706 The manifest is the list of files being version controlled. If no revision
1707 The manifest is the list of files being version controlled. If no revision
1707 is given then the tip is used.
1708 is given then the tip is used.
1708 """
1709 """
1709 if rev:
1710 if rev:
1710 try:
1711 try:
1711 # assume all revision numbers are for changesets
1712 # assume all revision numbers are for changesets
1712 n = repo.lookup(rev)
1713 n = repo.lookup(rev)
1713 change = repo.changelog.read(n)
1714 change = repo.changelog.read(n)
1714 n = change[0]
1715 n = change[0]
1715 except hg.RepoError:
1716 except hg.RepoError:
1716 n = repo.manifest.lookup(rev)
1717 n = repo.manifest.lookup(rev)
1717 else:
1718 else:
1718 n = repo.manifest.tip()
1719 n = repo.manifest.tip()
1719 m = repo.manifest.read(n)
1720 m = repo.manifest.read(n)
1720 mf = repo.manifest.readflags(n)
1721 mf = repo.manifest.readflags(n)
1721 files = m.keys()
1722 files = m.keys()
1722 files.sort()
1723 files.sort()
1723
1724
1724 for f in files:
1725 for f in files:
1725 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1726 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1726
1727
1727 def outgoing(ui, repo, dest="default-push", **opts):
1728 def outgoing(ui, repo, dest="default-push", **opts):
1728 """show changesets not found in destination
1729 """show changesets not found in destination
1729
1730
1730 Show changesets not found in the specified destination repo or the
1731 Show changesets not found in the specified destination repo or the
1731 default push repo. These are the changesets that would be pushed
1732 default push repo. These are the changesets that would be pushed
1732 if a push was requested.
1733 if a push was requested.
1733
1734
1734 See pull for valid source format details.
1735 See pull for valid source format details.
1735 """
1736 """
1736 dest = ui.expandpath(dest, repo.root)
1737 dest = ui.expandpath(dest, repo.root)
1737 other = hg.repository(ui, dest)
1738 other = hg.repository(ui, dest)
1738 o = repo.findoutgoing(other)
1739 o = repo.findoutgoing(other)
1739 o = repo.changelog.nodesbetween(o)[0]
1740 o = repo.changelog.nodesbetween(o)[0]
1740 if opts['newest_first']:
1741 if opts['newest_first']:
1741 o.reverse()
1742 o.reverse()
1742 for n in o:
1743 for n in o:
1743 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1744 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1744 if opts['no_merges'] and len(parents) == 2:
1745 if opts['no_merges'] and len(parents) == 2:
1745 continue
1746 continue
1746 show_changeset(ui, repo, changenode=n)
1747 show_changeset(ui, repo, changenode=n)
1747 if opts['patch']:
1748 if opts['patch']:
1748 prev = (parents and parents[0]) or nullid
1749 prev = (parents and parents[0]) or nullid
1749 dodiff(ui, ui, repo, prev, n)
1750 dodiff(ui, ui, repo, prev, n)
1750 ui.write("\n")
1751 ui.write("\n")
1751
1752
1752 def parents(ui, repo, rev=None, branches=None):
1753 def parents(ui, repo, rev=None, branches=None):
1753 """show the parents of the working dir or revision
1754 """show the parents of the working dir or revision
1754
1755
1755 Print the working directory's parent revisions.
1756 Print the working directory's parent revisions.
1756 """
1757 """
1757 if rev:
1758 if rev:
1758 p = repo.changelog.parents(repo.lookup(rev))
1759 p = repo.changelog.parents(repo.lookup(rev))
1759 else:
1760 else:
1760 p = repo.dirstate.parents()
1761 p = repo.dirstate.parents()
1761
1762
1762 br = None
1763 br = None
1763 if branches is not None:
1764 if branches is not None:
1764 br = repo.branchlookup(p)
1765 br = repo.branchlookup(p)
1765 for n in p:
1766 for n in p:
1766 if n != nullid:
1767 if n != nullid:
1767 show_changeset(ui, repo, changenode=n, brinfo=br)
1768 show_changeset(ui, repo, changenode=n, brinfo=br)
1768
1769
1769 def paths(ui, search=None):
1770 def paths(ui, search=None):
1770 """show definition of symbolic path names
1771 """show definition of symbolic path names
1771
1772
1772 Show definition of symbolic path name NAME. If no name is given, show
1773 Show definition of symbolic path name NAME. If no name is given, show
1773 definition of available names.
1774 definition of available names.
1774
1775
1775 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1776 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1776 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1777 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1777 """
1778 """
1778 try:
1779 try:
1779 repo = hg.repository(ui=ui)
1780 repo = hg.repository(ui=ui)
1780 except hg.RepoError:
1781 except hg.RepoError:
1781 pass
1782 pass
1782
1783
1783 if search:
1784 if search:
1784 for name, path in ui.configitems("paths"):
1785 for name, path in ui.configitems("paths"):
1785 if name == search:
1786 if name == search:
1786 ui.write("%s\n" % path)
1787 ui.write("%s\n" % path)
1787 return
1788 return
1788 ui.warn(_("not found!\n"))
1789 ui.warn(_("not found!\n"))
1789 return 1
1790 return 1
1790 else:
1791 else:
1791 for name, path in ui.configitems("paths"):
1792 for name, path in ui.configitems("paths"):
1792 ui.write("%s = %s\n" % (name, path))
1793 ui.write("%s = %s\n" % (name, path))
1793
1794
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port][/path]
      https://[user@]host[:port][/path]
      ssh://[user@]host[:port][/path]

    SSH requires an accessible shell account on the destination machine
    and a copy of hg in the remote path. With SSH, paths are relative
    to the remote user's home directory by default; use two slashes at
    the start of a path to specify it as relative to the filesystem root.
    """
    source = ui.expandpath(source, repo.root)
    ui.status(_('pulling from %s\n') % (source))

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    revs = None
    if opts['rev'] and not other.local():
        raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
    elif opts['rev']:
        revs = [other.lookup(rev) for rev in opts['rev']]
    r = repo.pull(other, heads=revs)
    if not r:
        if opts['update']:
            return update(ui, repo)
        else:
            ui.status(_("(run 'hg update' to get a working copy)\n"))

    return r

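# Illustrative pull invocations (URLs and revision numbers are examples only):
#
#   hg pull                                    # pull from the 'default' path
#   hg pull -u http://example.com/hg/project   # pull, then update the working dir
#   hg pull -r 100 /srv/hg/project             # pull only changesets up to revision 100
#
# -e and --remotecmd only matter for ssh:// sources, where they select the
# local ssh command and the hg binary to run on the remote side.
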
def push(ui, repo, dest="default-push", **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port][/path]

    SSH requires an accessible shell account on the destination
    machine and a copy of hg in the remote path.
    """
    dest = ui.expandpath(dest, repo.root)
    ui.status('pushing to %s\n' % (dest))

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, dest)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r

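# Illustrative push invocations (the destination URLs are examples only):
#
#   hg push                                    # push to the configured default destination
#   hg push ssh://user@example.com//srv/hg/project
#   hg push -f                                 # push even if it would add new remote heads
#
# Without -f, the new-remote-heads check described in the docstring above
# makes push abort instead of creating extra heads on the remote side.
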
def rawcommit(ui, repo, *flist, **rc):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release; please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = rc['message']
    if not message and rc['logfile']:
        try:
            message = open(rc['logfile']).read()
        except IOError:
            pass
    if not message and not rc['logfile']:
        raise util.Abort(_("missing commit message"))

    files = relpath(repo, list(flist))
    if rc['files']:
        files += open(rc['files']).read().splitlines()

    rc['parent'] = map(repo.lookup, rc['parent'])

    try:
        repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return repo.verify()
    return False

def remove(ui, repo, pat, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it.
    """
    names = []
    def okaytoremove(abs, rel, exact):
        modified, added, removed, deleted, unknown = repo.changes(files=[abs])
        reason = None
        if modified:
            reason = _('is modified')
        elif added:
            reason = _('has been marked for add')
        elif unknown:
            reason = _('is not managed')
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            return True
    for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
        if okaytoremove(abs, rel, exact):
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            names.append(abs)
    repo.remove(names, unlink=True)

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record renamed files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    try:
        wlock = repo.wlock(0)
        errs, copied = docopy(ui, repo, pats, opts, wlock)
        names = []
        for abs, rel, exact in copied:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            names.append(abs)
        repo.remove(names, True, wlock)
    except lock.LockHeld, inst:
        ui.warn(_("repository lock held by %s\n") % inst.args[0])
        errs = 1
    return errs

def revert(ui, repo, *pats, **opts):
    """revert modified files or dirs back to their unmodified states

    In its default mode, it reverts any uncommitted modifications made
    to the named files or directories. This restores the contents of
    the affected files to an unmodified state.

    Using the -r option, it reverts the given files or directories to
    their state as of an earlier revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the current working directory.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    node = opts['rev'] and repo.lookup(opts['rev']) or \
           repo.dirstate.parents()[0]

    files, choose, anypats = matchpats(repo, pats, opts)
    modified, added, removed, deleted, unknown = repo.changes(match=choose)
    repo.forget(added)
    repo.undelete(removed)

    return repo.update(node, False, True, choose, False)

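# Illustrative revert invocations (file names and revisions are examples only):
#
#   hg revert foo.c             # drop uncommitted changes to foo.c
#   hg revert -r 42 foo.c       # restore foo.c to its state as of revision 42
#   hg revert                   # revert every file in the repository
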
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        fin, fout = sys.stdin, sys.stdout
        sys.stdout = sys.stderr

        # Prevent insertion/deletion of CRs
        util.set_binary(fin)
        util.set_binary(fout)

        def getarg():
            argline = fin.readline()[:-1]
            arg, l = argline.split()
            val = fin.read(int(l))
            return arg, val
        def respond(v):
            fout.write("%d\n" % len(v))
            fout.write(v)
            fout.flush()

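        # Framing used on this stdio channel: the client sends a command
        # name on a line by itself, optionally followed by arguments
        # encoded as "<name> <length>\n<value>" (parsed by getarg above),
        # and each reply goes back as "<length>\n<payload>" (written by
        # respond).  For example, a "heads" request has no arguments and
        # is answered with the space-separated hex nodes of the repository
        # heads; the actual node values depend on the repository served.
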
        lock = None

        while 1:
            cmd = fin.readline()[:-1]
            if cmd == '':
                return
            if cmd == "heads":
                h = repo.heads()
                respond(" ".join(map(hex, h)) + "\n")
            if cmd == "lock":
                lock = repo.lock()
                respond("")
            if cmd == "unlock":
                if lock:
                    lock.release()
                lock = None
                respond("")
            elif cmd == "branches":
                arg, nodes = getarg()
                nodes = map(bin, nodes.split(" "))
                r = []
                for b in repo.branches(nodes):
                    r.append(" ".join(map(hex, b)) + "\n")
                respond("".join(r))
            elif cmd == "between":
                arg, pairs = getarg()
                pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
                r = []
                for b in repo.between(pairs):
                    r.append(" ".join(map(hex, b)) + "\n")
                respond("".join(r))
            elif cmd == "changegroup":
                nodes = []
                arg, roots = getarg()
                nodes = map(bin, roots.split(" "))

                cg = repo.changegroup(nodes, 'serve')
                while 1:
                    d = cg.read(4096)
                    if not d:
                        break
                    fout.write(d)

                fout.flush()

            elif cmd == "addchangegroup":
                if not lock:
                    respond("not locked")
                    continue
                respond("")

                r = repo.addchangegroup(fin)
                respond("")

    optlist = "name templates style address port ipv6 accesslog errorlog"
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = hgweb.create_server(repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()))
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

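# Illustrative ways to start the server (port, names and log files are
# examples only):
#
#   hg serve                                   # browse the current repo on port 8000
#   hg serve -p 8080 -n myproject
#   hg serve -d -A access.log -E error.log --pid-file hg.pid
#
# "hg serve --stdio" is not meant to be typed by hand; it is the mode a
# remote client drives over an ssh connection.
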
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show changed files in the repository. If names are
    given, only files that match are shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    ! = deleted, but still tracked
    ? = not tracked
    """

    files, matchfn, anypats = matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.changes(files=files, match=matchfn)]

    changetypes = [(_('modified'), 'M', modified),
                   (_('added'), 'A', added),
                   (_('removed'), 'R', removed),
                   (_('deleted'), '!', deleted),
                   (_('unknown'), '?', unknown)]

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end);

        for f in changes:
            ui.write(format % f)

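# Illustrative "hg status" output (the file names are made up):
#
#   M src/main.c
#   A doc/notes.txt
#   R old/legacy.c
#   ? scratch.py
#
# With -n the status letter column is hidden; with -0 entries are separated
# by NUL bytes instead of newlines, which pairs with "xargs -0".
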
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the tip is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name == "tip":
        raise util.Abort(_("the name 'tip' is reserved"))
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if rev_:
        r = hex(repo.lookup(rev_))
    else:
        r = hex(repo.changelog.tip())

    disallowed = (revrangesep, '\r', '\n')
    for c in disallowed:
        if name.find(c) >= 0:
            raise util.Abort(_("%s cannot be used in a tag name") % repr(c))

    repo.hook('pretag', throw=True, node=r, tag=name,
              local=int(not not opts['local']))

    if opts['local']:
        repo.opener("localtags", "a").write("%s %s\n" % (r, name))
        repo.hook('tag', node=r, tag=name, local=1)
        return

    for x in repo.changes():
        if ".hgtags" in x:
            raise util.Abort(_("working copy of .hgtags is changed "
                               "(please commit .hgtags manually)"))

    repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
    if repo.dirstate.state(".hgtags") == '?':
        repo.add([".hgtags"])

    message = (opts['message'] or
               _("Added tag %s for changeset %s") % (name, r))
    try:
        repo.commit([".hgtags"], message, opts['user'], opts['date'])
        repo.hook('tag', node=r, tag=name, local=0)
    except ValueError, inst:
        raise util.Abort(str(inst))

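# Each line that tag() appends to .hgtags (or to .hg/localtags with -l) has
# the form "<hex changeset id> <tag name>".  An illustrative entry, with a
# made-up changeset id:
#
#   0123456789abcdef0123456789abcdef01234567 v0.8
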
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    for t, n in l:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            r = "    ?:?"
        ui.write("%-30s %s\n" % (t, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    n = repo.changelog.tip()
    br = None
    if opts['branches']:
        br = repo.branchlookup([n])
    show_changeset(ui, repo, changenode=n, brinfo=br)
    if opts['patch']:
        dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    if f.read(4) != "HG10":
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)

    def bzgenerator(f):
        zd = bz2.BZ2Decompressor()
        for chunk in f:
            yield zd.decompress(chunk)

    bzgen = bzgenerator(util.filechunkiter(f, 4096))
    if repo.addchangegroup(util.chunkbuffer(bzgen)):
        return 1

    if opts['update']:
        return update(ui, repo)
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

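# A bundle file, as checked by unbundle() above, starts with the four-byte
# magic "HG10" followed by a bz2-compressed changegroup stream.  An
# illustrative round trip (file and repository paths are examples only):
#
#   hg bundle ../project-changes.hg /srv/hg/other-clone   # changesets missing from other-clone
#   cd /srv/hg/other-clone && hg unbundle ../project-changes.hg
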
def undo(ui, repo):
    """undo the last commit or pull

    Roll back the last pull or commit transaction on the
    repository, restoring the project to its earlier state.

    This command should be used with care. There is only one level of
    undo and there is no redo.

    This command is not intended for use on public repositories. Once
    a change is visible for pull by other users, undoing it locally is
    ineffective.
    """
    repo.undo()

def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    Otherwise the result is a merge between the contents of the
    current working directory and the requested version. Files that
    changed between either parent are marked as changed for the next
    commit and a commit must be performed before any further updates
    are allowed.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if branch:
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                show_changeset(ui, repo, changenode=x, brinfo=br)
            return 1
        if len(found) == 1:
            node = found[0]
            ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
        else:
            ui.warn(_("branch %s not found\n") % (branch))
            return 1
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean, forcemerge=force)

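# Illustrative update invocations (revision numbers are examples only):
#
#   hg update 42          # move the working dir to revision 42
#   hg update -m          # allow a merge with the requested revision
#   hg update -C          # clean update: discard local changes
#
# The merge/clean/force keyword arguments above correspond to command line
# flags with the same long names.
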
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return repo.verify()

# Command options and aliases are listed here, alphabetically

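# Each entry in the table below maps a command name to a tuple of
# (function, option list, synopsis).  A leading "^" marks a command shown in
# the short help listing, and "|" separates aliases ("^commit|ci").  Every
# option is a (short flag, long name, default, help text) tuple in the form
# fancyopts expects.  A sketch of the shape, with a made-up command:
#
#   "^frob|fr":
#       (frob,
#        [('n', 'dry-run', None, _('do not actually frob'))],
#        _('hg frob [OPTION]...')),
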
table = {
    "^add":
        (add,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "bundle":
        (bundle,
         [],
         _('hg bundle FILE DEST')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None, _('run addremove during commit')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read the commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugconfig": (debugconfig, [], _('debugconfig')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk":
        (debugwalk,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [-a] [-o OUTFILESPEC] REV...')),
    "forget":
        (forget,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg forget [OPTION]... FILE...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('b', 'branches', None, _('show branches')),
          ('r', 'rev', '', _('show only heads which are descendants of rev'))],
         _('hg heads [-b] [-r <rev>]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n') +
           _('meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))],
         _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('p', 'patch', None, _('show patch')),
          ('n', 'newest-first', None, _('show newest record first'))],
         _('hg incoming [-p] [-n] [-M] [SOURCE]')),
    "^init": (init, [], _('hg init [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('b', 'branches', None, _('show branches')),
          ('k', 'keyword', [], _('search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg log [OPTION]... [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "outgoing|out": (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('p', 'patch', None, _('show patch')),
          ('n', 'newest-first', None, _('show newest record first'))],
         _('hg outgoing [-M] [-p] [-n] [DEST]')),
    "^parents":
        (parents,
         [('b', 'branches', None, _('show branches'))],
         _('hg parents [-b] [REV]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update the working directory to tip after pull')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('r', 'rev', [], _('a specific revision you would like to pull')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('d', 'date', '', _('date code')),
          ('u', 'user', '', _('user')),
          ('F', 'files', '', _('file list')),
          ('m', 'message', '', _('commit message')),
          ('l', 'logfile', '', _('commit message file'))],
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg rename [OPTION]... [SOURCE]... DEST')),
    "^revert":
        (revert,
         [('r', 'rev', '', _('revision to revert to')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg revert [-r REV] [NAME]...')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
         _('hg serve [OPTION]...')),
    "^status|st":
        (status,
         [('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('l', 'local', None, _('make the tag local')),
          ('m', 'message', '', _('message for tag commit log entry')),
          ('d', 'date', '', _('record datecode as commit date')),
2631 ('u', 'user', '', _('record user as committer')),
2632 ('u', 'user', '', _('record user as committer')),
2632 ('r', 'rev', '', _('revision to tag'))],
2633 ('r', 'rev', '', _('revision to tag'))],
2633 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2634 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2634 "tags": (tags, [], _('hg tags')),
2635 "tags": (tags, [], _('hg tags')),
2635 "tip":
2636 "tip":
2636 (tip,
2637 (tip,
2637 [('b', 'branches', None, _('show branches')),
2638 [('b', 'branches', None, _('show branches')),
2638 ('p', 'patch', None, _('show patch'))],
2639 ('p', 'patch', None, _('show patch'))],
2639 _('hg tip [-b] [-p]')),
2640 _('hg tip [-b] [-p]')),
2640 "unbundle":
2641 "unbundle":
2641 (unbundle,
2642 (unbundle,
2642 [('u', 'update', None,
2643 [('u', 'update', None,
2643 _('update the working directory to tip after unbundle'))],
2644 _('update the working directory to tip after unbundle'))],
2644 _('hg unbundle [-u] FILE')),
2645 _('hg unbundle [-u] FILE')),
2645 "undo": (undo, [], _('hg undo')),
2646 "undo": (undo, [], _('hg undo')),
2646 "^update|up|checkout|co":
2647 "^update|up|checkout|co":
2647 (update,
2648 (update,
2648 [('b', 'branch', '', _('checkout the head of a specific branch')),
2649 [('b', 'branch', '', _('checkout the head of a specific branch')),
2649 ('m', 'merge', None, _('allow merging of branches')),
2650 ('m', 'merge', None, _('allow merging of branches')),
2650 ('C', 'clean', None, _('overwrite locally modified files')),
2651 ('C', 'clean', None, _('overwrite locally modified files')),
2651 ('f', 'force', None, _('force a merge with outstanding changes'))],
2652 ('f', 'force', None, _('force a merge with outstanding changes'))],
2652 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2653 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2653 "verify": (verify, [], _('hg verify')),
2654 "verify": (verify, [], _('hg verify')),
2654 "version": (show_version, [], _('hg version')),
2655 "version": (show_version, [], _('hg version')),
2655 }
2656 }
2656
2657
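For orientation, each entry in the command table above pairs a command name (a leading "^" puts it on the short help list, "|" separates aliases) with a tuple of (handler function, option list, usage string), where every option is a (short flag, long flag, default, help text) 4-tuple. Below is a minimal sketch of that shape; the "hello" command, its handler, and its -g option are invented for illustration and are not part of Mercurial.

    # hypothetical command handler -- same calling convention as the real ones
    def hello(ui, repo, *pats, **opts):
        """print a greeting (illustration only)"""
        ui.write("%s from %s\n" % (opts.get('greeting', 'hello'), repo.root))

    demo_table = {
        # "^" would list the command in the short help, "|" separates aliases
        "^hello|hi":
        (hello,
         [('g', 'greeting', 'hello', 'greeting text to use'),
          ('I', 'include', [], 'include names matching the given patterns')],
         'hg hello [-g TEXT] [FILE]...'),
        }

The dispatcher looks an entry up via find(), merges globalopts into its option list, and finally calls the handler with the parsed options as keyword arguments.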
2657 globalopts = [
2658 globalopts = [
2658 ('R', 'repository', '', _('repository root directory')),
2659 ('R', 'repository', '', _('repository root directory')),
2659 ('', 'cwd', '', _('change working directory')),
2660 ('', 'cwd', '', _('change working directory')),
2660 ('y', 'noninteractive', None,
2661 ('y', 'noninteractive', None,
2661 _('do not prompt, assume \'yes\' for any required answers')),
2662 _('do not prompt, assume \'yes\' for any required answers')),
2662 ('q', 'quiet', None, _('suppress output')),
2663 ('q', 'quiet', None, _('suppress output')),
2663 ('v', 'verbose', None, _('enable additional output')),
2664 ('v', 'verbose', None, _('enable additional output')),
2664 ('', 'debug', None, _('enable debugging output')),
2665 ('', 'debug', None, _('enable debugging output')),
2665 ('', 'debugger', None, _('start debugger')),
2666 ('', 'debugger', None, _('start debugger')),
2666 ('', 'traceback', None, _('print traceback on exception')),
2667 ('', 'traceback', None, _('print traceback on exception')),
2667 ('', 'time', None, _('time how long the command takes')),
2668 ('', 'time', None, _('time how long the command takes')),
2668 ('', 'profile', None, _('print command execution profile')),
2669 ('', 'profile', None, _('print command execution profile')),
2669 ('', 'version', None, _('output version information and exit')),
2670 ('', 'version', None, _('output version information and exit')),
2670 ('h', 'help', None, _('display help and exit')),
2671 ('h', 'help', None, _('display help and exit')),
2671 ]
2672 ]
2672
2673
2673 norepo = ("clone init version help debugancestor debugconfig debugdata"
2674 norepo = ("clone init version help debugancestor debugconfig debugdata"
2674 " debugindex debugindexdot paths")
2675 " debugindex debugindexdot paths")
2675
2676
2676 def find(cmd):
2677 def find(cmd):
2677 """Return (aliases, command table entry) for command string."""
2678 """Return (aliases, command table entry) for command string."""
2678 choice = None
2679 choice = None
2679 count = 0
2680 count = 0
2680 for e in table.keys():
2681 for e in table.keys():
2681 aliases = e.lstrip("^").split("|")
2682 aliases = e.lstrip("^").split("|")
2682 if cmd in aliases:
2683 if cmd in aliases:
2683 return aliases, table[e]
2684 return aliases, table[e]
2684 for a in aliases:
2685 for a in aliases:
2685 if a.startswith(cmd):
2686 if a.startswith(cmd):
2686 count += 1
2687 count += 1
2687 choice = aliases, table[e]
2688 choice = aliases, table[e]
2688 break
2689 break
2689
2690
2690 if count > 1:
2691 if count > 1:
2691 raise AmbiguousCommand(cmd)
2692 raise AmbiguousCommand(cmd)
2692
2693
2693 if choice:
2694 if choice:
2694 return choice
2695 return choice
2695
2696
2696 raise UnknownCommand(cmd)
2697 raise UnknownCommand(cmd)
2697
2698
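find() resolves a command string by exact alias match first, then by unique prefix: more than one prefix hit raises AmbiguousCommand, none raises UnknownCommand. The following self-contained sketch mirrors that rule; the resolve() helper and the demo table are illustrative only, not Mercurial API.

    def resolve(cmd, table):
        # exact alias wins; otherwise the prefix must match exactly one entry
        prefix_hits = []
        for entry in table:
            aliases = entry.lstrip("^").split("|")
            if cmd in aliases:
                return aliases[0]
            if [a for a in aliases if a.startswith(cmd)]:
                prefix_hits.append(aliases[0])
        if len(prefix_hits) > 1:
            raise ValueError("ambiguous command: %s" % cmd)
        if not prefix_hits:
            raise ValueError("unknown command: %s" % cmd)
        return prefix_hits[0]

    demo = {"^update|up|checkout|co": None, "unbundle": None, "undo": None}
    # resolve("co", demo)  -> "update"   (exact alias)
    # resolve("unb", demo) -> "unbundle" (unique prefix)
    # resolve("un", demo)  -> raises "ambiguous" (matches unbundle and undo)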
2698 class SignalInterrupt(Exception):
2699 class SignalInterrupt(Exception):
2699 """Exception raised on SIGTERM and SIGHUP."""
2700 """Exception raised on SIGTERM and SIGHUP."""
2700
2701
2701 def catchterm(*args):
2702 def catchterm(*args):
2702 raise SignalInterrupt
2703 raise SignalInterrupt
2703
2704
2704 def run():
2705 def run():
2705 sys.exit(dispatch(sys.argv[1:]))
2706 sys.exit(dispatch(sys.argv[1:]))
2706
2707
2707 class ParseError(Exception):
2708 class ParseError(Exception):
2708 """Exception raised on errors in parsing the command line."""
2709 """Exception raised on errors in parsing the command line."""
2709
2710
2710 def parse(ui, args):
2711 def parse(ui, args):
2711 options = {}
2712 options = {}
2712 cmdoptions = {}
2713 cmdoptions = {}
2713
2714
2714 try:
2715 try:
2715 args = fancyopts.fancyopts(args, globalopts, options)
2716 args = fancyopts.fancyopts(args, globalopts, options)
2716 except fancyopts.getopt.GetoptError, inst:
2717 except fancyopts.getopt.GetoptError, inst:
2717 raise ParseError(None, inst)
2718 raise ParseError(None, inst)
2718
2719
2719 if args:
2720 if args:
2720 cmd, args = args[0], args[1:]
2721 cmd, args = args[0], args[1:]
2721 aliases, i = find(cmd)
2722 aliases, i = find(cmd)
2722 cmd = aliases[0]
2723 cmd = aliases[0]
2723 defaults = ui.config("defaults", cmd)
2724 defaults = ui.config("defaults", cmd)
2724 if defaults:
2725 if defaults:
2725 args = defaults.split() + args
2726 args = defaults.split() + args
2726 c = list(i[1])
2727 c = list(i[1])
2727 else:
2728 else:
2728 cmd = None
2729 cmd = None
2729 c = []
2730 c = []
2730
2731
2731 # combine global options into local
2732 # combine global options into local
2732 for o in globalopts:
2733 for o in globalopts:
2733 c.append((o[0], o[1], options[o[1]], o[3]))
2734 c.append((o[0], o[1], options[o[1]], o[3]))
2734
2735
2735 try:
2736 try:
2736 args = fancyopts.fancyopts(args, c, cmdoptions)
2737 args = fancyopts.fancyopts(args, c, cmdoptions)
2737 except fancyopts.getopt.GetoptError, inst:
2738 except fancyopts.getopt.GetoptError, inst:
2738 raise ParseError(cmd, inst)
2739 raise ParseError(cmd, inst)
2739
2740
2740 # separate global options back out
2741 # separate global options back out
2741 for o in globalopts:
2742 for o in globalopts:
2742 n = o[1]
2743 n = o[1]
2743 options[n] = cmdoptions[n]
2744 options[n] = cmdoptions[n]
2744 del cmdoptions[n]
2745 del cmdoptions[n]
2745
2746
2746 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2747 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2747
2748
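A small but easy-to-miss detail of parse() above: after the command name is resolved, any string configured for it under the [defaults] section is split on whitespace and prepended to the remaining arguments, so configured default flags behave exactly as if they had been typed first on the command line. A rough illustration of that ordering (the configured value and arguments are hypothetical):

    # hypothetical hgrc:   [defaults]
    #                      log = -q --no-merges
    defaults = "-q --no-merges"
    args = ["-r", "tip"]
    args = defaults.split() + args
    # -> ['-q', '--no-merges', '-r', 'tip'], parsed as if the user had typed
    #    "hg log -q --no-merges -r tip"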
2748 def dispatch(args):
2749 def dispatch(args):
2749 signal.signal(signal.SIGTERM, catchterm)
2750 signal.signal(signal.SIGTERM, catchterm)
2750 try:
2751 try:
2751 signal.signal(signal.SIGHUP, catchterm)
2752 signal.signal(signal.SIGHUP, catchterm)
2752 except AttributeError:
2753 except AttributeError:
2753 pass
2754 pass
2754
2755
2755 try:
2756 try:
2756 u = ui.ui()
2757 u = ui.ui()
2757 except util.Abort, inst:
2758 except util.Abort, inst:
2758 sys.stderr.write(_("abort: %s\n") % inst)
2759 sys.stderr.write(_("abort: %s\n") % inst)
2759 sys.exit(1)
2760 sys.exit(1)
2760
2761
2761 external = []
2762 external = []
2762 for x in u.extensions():
2763 for x in u.extensions():
2763 def on_exception(exc, inst):
2764 def on_exception(exc, inst):
2764 u.warn(_("*** failed to import extension %s\n") % x[1])
2765 u.warn(_("*** failed to import extension %s\n") % x[1])
2765 u.warn("%s\n" % inst)
2766 u.warn("%s\n" % inst)
2766 if "--traceback" in sys.argv[1:]:
2767 if "--traceback" in sys.argv[1:]:
2767 traceback.print_exc()
2768 traceback.print_exc()
2768 if x[1]:
2769 if x[1]:
2769 try:
2770 try:
2770 mod = imp.load_source(x[0], x[1])
2771 mod = imp.load_source(x[0], x[1])
2771 except Exception, inst:
2772 except Exception, inst:
2772 on_exception(Exception, inst)
2773 on_exception(Exception, inst)
2773 continue
2774 continue
2774 else:
2775 else:
2775 def importh(name):
2776 def importh(name):
2776 mod = __import__(name)
2777 mod = __import__(name)
2777 components = name.split('.')
2778 components = name.split('.')
2778 for comp in components[1:]:
2779 for comp in components[1:]:
2779 mod = getattr(mod, comp)
2780 mod = getattr(mod, comp)
2780 return mod
2781 return mod
2781 try:
2782 try:
2782 mod = importh(x[0])
2783 mod = importh(x[0])
2783 except Exception, inst:
2784 except Exception, inst:
2784 on_exception(Exception, inst)
2785 on_exception(Exception, inst)
2785 continue
2786 continue
2786
2787
2787 external.append(mod)
2788 external.append(mod)
2788 for x in external:
2789 for x in external:
2789 cmdtable = getattr(x, 'cmdtable', {})
2790 cmdtable = getattr(x, 'cmdtable', {})
2790 for t in cmdtable:
2791 for t in cmdtable:
2791 if t in table:
2792 if t in table:
2792 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2793 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2793 table.update(cmdtable)
2794 table.update(cmdtable)
2794
2795
2795 try:
2796 try:
2796 cmd, func, args, options, cmdoptions = parse(u, args)
2797 cmd, func, args, options, cmdoptions = parse(u, args)
2797 except ParseError, inst:
2798 except ParseError, inst:
2798 if inst.args[0]:
2799 if inst.args[0]:
2799 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2800 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2800 help_(u, inst.args[0])
2801 help_(u, inst.args[0])
2801 else:
2802 else:
2802 u.warn(_("hg: %s\n") % inst.args[1])
2803 u.warn(_("hg: %s\n") % inst.args[1])
2803 help_(u, 'shortlist')
2804 help_(u, 'shortlist')
2804 sys.exit(-1)
2805 sys.exit(-1)
2805 except AmbiguousCommand, inst:
2806 except AmbiguousCommand, inst:
2806 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2807 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2807 sys.exit(1)
2808 sys.exit(1)
2808 except UnknownCommand, inst:
2809 except UnknownCommand, inst:
2809 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2810 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2810 help_(u, 'shortlist')
2811 help_(u, 'shortlist')
2811 sys.exit(1)
2812 sys.exit(1)
2812
2813
2813 if options["time"]:
2814 if options["time"]:
2814 def get_times():
2815 def get_times():
2815 t = os.times()
2816 t = os.times()
2816 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2817 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2817 t = (t[0], t[1], t[2], t[3], time.clock())
2818 t = (t[0], t[1], t[2], t[3], time.clock())
2818 return t
2819 return t
2819 s = get_times()
2820 s = get_times()
2820 def print_time():
2821 def print_time():
2821 t = get_times()
2822 t = get_times()
2822 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2823 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2823 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2824 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2824 atexit.register(print_time)
2825 atexit.register(print_time)
2825
2826
2826 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2827 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2827 not options["noninteractive"])
2828 not options["noninteractive"])
2828
2829
2829 # enter the debugger before command execution
2830 # enter the debugger before command execution
2830 if options['debugger']:
2831 if options['debugger']:
2831 pdb.set_trace()
2832 pdb.set_trace()
2832
2833
2833 try:
2834 try:
2834 try:
2835 try:
2835 if options['help']:
2836 if options['help']:
2836 help_(u, cmd, options['version'])
2837 help_(u, cmd, options['version'])
2837 sys.exit(0)
2838 sys.exit(0)
2838 elif options['version']:
2839 elif options['version']:
2839 show_version(u)
2840 show_version(u)
2840 sys.exit(0)
2841 sys.exit(0)
2841 elif not cmd:
2842 elif not cmd:
2842 help_(u, 'shortlist')
2843 help_(u, 'shortlist')
2843 sys.exit(0)
2844 sys.exit(0)
2844
2845
2845 if options['cwd']:
2846 if options['cwd']:
2846 try:
2847 try:
2847 os.chdir(options['cwd'])
2848 os.chdir(options['cwd'])
2848 except OSError, inst:
2849 except OSError, inst:
2849 raise util.Abort('%s: %s' %
2850 raise util.Abort('%s: %s' %
2850 (options['cwd'], inst.strerror))
2851 (options['cwd'], inst.strerror))
2851
2852
2852 if cmd not in norepo.split():
2853 if cmd not in norepo.split():
2853 path = options["repository"] or ""
2854 path = options["repository"] or ""
2854 repo = hg.repository(ui=u, path=path)
2855 repo = hg.repository(ui=u, path=path)
2855 for x in external:
2856 for x in external:
2856 if hasattr(x, 'reposetup'):
2857 if hasattr(x, 'reposetup'):
2857 x.reposetup(u, repo)
2858 x.reposetup(u, repo)
2858 d = lambda: func(u, repo, *args, **cmdoptions)
2859 d = lambda: func(u, repo, *args, **cmdoptions)
2859 else:
2860 else:
2860 d = lambda: func(u, *args, **cmdoptions)
2861 d = lambda: func(u, *args, **cmdoptions)
2861
2862
2862 if options['profile']:
2863 if options['profile']:
2863 import hotshot, hotshot.stats
2864 import hotshot, hotshot.stats
2864 prof = hotshot.Profile("hg.prof")
2865 prof = hotshot.Profile("hg.prof")
2865 try:
2866 try:
2866 try:
2867 try:
2867 return prof.runcall(d)
2868 return prof.runcall(d)
2868 except:
2869 except:
2869 try:
2870 try:
2870 u.warn(_('exception raised - generating profile '
2871 u.warn(_('exception raised - generating profile '
2871 'anyway\n'))
2872 'anyway\n'))
2872 except:
2873 except:
2873 pass
2874 pass
2874 raise
2875 raise
2875 finally:
2876 finally:
2876 prof.close()
2877 prof.close()
2877 stats = hotshot.stats.load("hg.prof")
2878 stats = hotshot.stats.load("hg.prof")
2878 stats.strip_dirs()
2879 stats.strip_dirs()
2879 stats.sort_stats('time', 'calls')
2880 stats.sort_stats('time', 'calls')
2880 stats.print_stats(40)
2881 stats.print_stats(40)
2881 else:
2882 else:
2882 return d()
2883 return d()
2883 except:
2884 except:
2884 # enter the debugger when we hit an exception
2885 # enter the debugger when we hit an exception
2885 if options['debugger']:
2886 if options['debugger']:
2886 pdb.post_mortem(sys.exc_info()[2])
2887 pdb.post_mortem(sys.exc_info()[2])
2887 if options['traceback']:
2888 if options['traceback']:
2888 traceback.print_exc()
2889 traceback.print_exc()
2889 raise
2890 raise
2890 except hg.RepoError, inst:
2891 except hg.RepoError, inst:
2891 u.warn(_("abort: "), inst, "!\n")
2892 u.warn(_("abort: "), inst, "!\n")
2892 except revlog.RevlogError, inst:
2893 except revlog.RevlogError, inst:
2893 u.warn(_("abort: "), inst, "!\n")
2894 u.warn(_("abort: "), inst, "!\n")
2894 except SignalInterrupt:
2895 except SignalInterrupt:
2895 u.warn(_("killed!\n"))
2896 u.warn(_("killed!\n"))
2896 except KeyboardInterrupt:
2897 except KeyboardInterrupt:
2897 try:
2898 try:
2898 u.warn(_("interrupted!\n"))
2899 u.warn(_("interrupted!\n"))
2899 except IOError, inst:
2900 except IOError, inst:
2900 if inst.errno == errno.EPIPE:
2901 if inst.errno == errno.EPIPE:
2901 if u.debugflag:
2902 if u.debugflag:
2902 u.warn(_("\nbroken pipe\n"))
2903 u.warn(_("\nbroken pipe\n"))
2903 else:
2904 else:
2904 raise
2905 raise
2905 except IOError, inst:
2906 except IOError, inst:
2906 if hasattr(inst, "code"):
2907 if hasattr(inst, "code"):
2907 u.warn(_("abort: %s\n") % inst)
2908 u.warn(_("abort: %s\n") % inst)
2908 elif hasattr(inst, "reason"):
2909 elif hasattr(inst, "reason"):
2909 u.warn(_("abort: error: %s\n") % inst.reason[1])
2910 u.warn(_("abort: error: %s\n") % inst.reason[1])
2910 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2911 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2911 if u.debugflag:
2912 if u.debugflag:
2912 u.warn(_("broken pipe\n"))
2913 u.warn(_("broken pipe\n"))
2913 elif getattr(inst, "strerror", None):
2914 elif getattr(inst, "strerror", None):
2914 if getattr(inst, "filename", None):
2915 if getattr(inst, "filename", None):
2915 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2916 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2916 else:
2917 else:
2917 u.warn(_("abort: %s\n") % inst.strerror)
2918 u.warn(_("abort: %s\n") % inst.strerror)
2918 else:
2919 else:
2919 raise
2920 raise
2920 except OSError, inst:
2921 except OSError, inst:
2921 if hasattr(inst, "filename"):
2922 if hasattr(inst, "filename"):
2922 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2923 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2923 else:
2924 else:
2924 u.warn(_("abort: %s\n") % inst.strerror)
2925 u.warn(_("abort: %s\n") % inst.strerror)
2925 except util.Abort, inst:
2926 except util.Abort, inst:
2926 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2927 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2927 sys.exit(1)
2928 sys.exit(1)
2928 except TypeError, inst:
2929 except TypeError, inst:
2929 # was this an argument error?
2930 # was this an argument error?
2930 tb = traceback.extract_tb(sys.exc_info()[2])
2931 tb = traceback.extract_tb(sys.exc_info()[2])
2931 if len(tb) > 2: # no
2932 if len(tb) > 2: # no
2932 raise
2933 raise
2933 u.debug(inst, "\n")
2934 u.debug(inst, "\n")
2934 u.warn(_("%s: invalid arguments\n") % cmd)
2935 u.warn(_("%s: invalid arguments\n") % cmd)
2935 help_(u, cmd)
2936 help_(u, cmd)
2936 except AmbiguousCommand, inst:
2937 except AmbiguousCommand, inst:
2937 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2938 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2938 help_(u, 'shortlist')
2939 help_(u, 'shortlist')
2939 except UnknownCommand, inst:
2940 except UnknownCommand, inst:
2940 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2941 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2941 help_(u, 'shortlist')
2942 help_(u, 'shortlist')
2942 except SystemExit:
2943 except SystemExit:
2943 # don't catch this in the catch-all below
2944 # don't catch this in the catch-all below
2944 raise
2945 raise
2945 except:
2946 except:
2946 u.warn(_("** unknown exception encountered, details follow\n"))
2947 u.warn(_("** unknown exception encountered, details follow\n"))
2947 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2948 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2948 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2949 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2949 % version.get_version())
2950 % version.get_version())
2950 raise
2951 raise
2951
2952
2952 sys.exit(-1)
2953 sys.exit(-1)
@@ -1,1897 +1,1898 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository(object):
15 class localrepository(object):
16 def __del__(self):
16 def __del__(self):
17 self.transhandle = None
17 self.transhandle = None
18 def __init__(self, ui, path=None, create=0):
18 def __init__(self, ui, path=None, create=0):
19 if not path:
19 if not path:
20 p = os.getcwd()
20 p = os.getcwd()
21 while not os.path.isdir(os.path.join(p, ".hg")):
21 while not os.path.isdir(os.path.join(p, ".hg")):
22 oldp = p
22 oldp = p
23 p = os.path.dirname(p)
23 p = os.path.dirname(p)
24 if p == oldp:
24 if p == oldp:
25 raise repo.RepoError(_("no repo found"))
25 raise repo.RepoError(_("no repo found"))
26 path = p
26 path = p
27 self.path = os.path.join(path, ".hg")
27 self.path = os.path.join(path, ".hg")
28
28
29 if not create and not os.path.isdir(self.path):
29 if not create and not os.path.isdir(self.path):
30 raise repo.RepoError(_("repository %s not found") % path)
30 raise repo.RepoError(_("repository %s not found") % path)
31
31
32 self.root = os.path.abspath(path)
32 self.root = os.path.abspath(path)
33 self.ui = ui
33 self.ui = ui
34 self.opener = util.opener(self.path)
34 self.opener = util.opener(self.path)
35 self.wopener = util.opener(self.root)
35 self.wopener = util.opener(self.root)
36 self.manifest = manifest.manifest(self.opener)
36 self.manifest = manifest.manifest(self.opener)
37 self.changelog = changelog.changelog(self.opener)
37 self.changelog = changelog.changelog(self.opener)
38 self.tagscache = None
38 self.tagscache = None
39 self.nodetagscache = None
39 self.nodetagscache = None
40 self.encodepats = None
40 self.encodepats = None
41 self.decodepats = None
41 self.decodepats = None
42 self.transhandle = None
42 self.transhandle = None
43
43
44 if create:
44 if create:
45 os.mkdir(self.path)
45 os.mkdir(self.path)
46 os.mkdir(self.join("data"))
46 os.mkdir(self.join("data"))
47
47
48 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
48 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
49 try:
49 try:
50 self.ui.readconfig(self.join("hgrc"))
50 self.ui.readconfig(self.join("hgrc"))
51 except IOError:
51 except IOError:
52 pass
52 pass
53
53
54 def hook(self, name, throw=False, **args):
54 def hook(self, name, throw=False, **args):
55 def runhook(name, cmd):
55 def runhook(name, cmd):
56 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
56 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
57 old = {}
57 old = {}
58 for k, v in args.items():
58 for k, v in args.items():
59 k = k.upper()
59 k = k.upper()
60 old['HG_' + k] = os.environ.get(k, None)
60 old['HG_' + k] = os.environ.get(k, None)
61 old[k] = os.environ.get(k, None)
61 old[k] = os.environ.get(k, None)
62 os.environ['HG_' + k] = str(v)
62 os.environ['HG_' + k] = str(v)
63 os.environ[k] = str(v)
63 os.environ[k] = str(v)
64
64
65 try:
65 try:
66 # Hooks run in the repository root
66 # Hooks run in the repository root
67 olddir = os.getcwd()
67 olddir = os.getcwd()
68 os.chdir(self.root)
68 os.chdir(self.root)
69 r = os.system(cmd)
69 r = os.system(cmd)
70 finally:
70 finally:
71 for k, v in old.items():
71 for k, v in old.items():
72 if v is not None:
72 if v is not None:
73 os.environ[k] = v
73 os.environ[k] = v
74 else:
74 else:
75 del os.environ[k]
75 del os.environ[k]
76
76
77 os.chdir(olddir)
77 os.chdir(olddir)
78
78
79 if r:
79 if r:
80 desc, r = util.explain_exit(r)
80 desc, r = util.explain_exit(r)
81 if throw:
81 if throw:
82 raise util.Abort(_('%s hook %s') % (name, desc))
82 raise util.Abort(_('%s hook %s') % (name, desc))
83 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
83 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
84 return False
84 return False
85 return True
85 return True
86
86
87 r = True
87 r = True
88 for hname, cmd in self.ui.configitems("hooks"):
88 for hname, cmd in self.ui.configitems("hooks"):
89 s = hname.split(".")
89 s = hname.split(".")
90 if s[0] == name and cmd:
90 if s[0] == name and cmd:
91 r = runhook(hname, cmd) and r
91 r = runhook(hname, cmd) and r
92 return r
92 return r
93
93
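Two behaviours of hook() above are worth spelling out: a [hooks] entry fires when the part of its name before the first "." equals the hook being run, so 'commit', 'commit.notify' and 'commit.backup' would all fire for 'commit'; and every keyword argument is uppercased and exported to the hook's environment twice, once bare and once with an HG_ prefix. A self-contained sketch of just the name-matching rule (the sample entries are invented):

    def hooks_to_run(name, configitems):
        # an entry fires when the text before the first "." matches the hook name
        return [(hname, cmd) for hname, cmd in configitems
                if hname.split(".")[0] == name and cmd]

    sample = [("commit", "echo committed"),
              ("commit.notify", "mail -s committed team@example.com"),
              ("precommit.lint", "./check-style")]
    # hooks_to_run("commit", sample) -> the first two entries only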
94 def tags(self):
94 def tags(self):
95 '''return a mapping of tag to node'''
95 '''return a mapping of tag to node'''
96 if not self.tagscache:
96 if not self.tagscache:
97 self.tagscache = {}
97 self.tagscache = {}
98 def addtag(self, k, n):
98 def addtag(self, k, n):
99 try:
99 try:
100 bin_n = bin(n)
100 bin_n = bin(n)
101 except TypeError:
101 except TypeError:
102 bin_n = ''
102 bin_n = ''
103 self.tagscache[k.strip()] = bin_n
103 self.tagscache[k.strip()] = bin_n
104
104
105 try:
105 try:
106 # read each head of the tags file, ending with the tip
106 # read each head of the tags file, ending with the tip
107 # and add each tag found to the map, with "newer" ones
107 # and add each tag found to the map, with "newer" ones
108 # taking precedence
108 # taking precedence
109 fl = self.file(".hgtags")
109 fl = self.file(".hgtags")
110 h = fl.heads()
110 h = fl.heads()
111 h.reverse()
111 h.reverse()
112 for r in h:
112 for r in h:
113 for l in fl.read(r).splitlines():
113 for l in fl.read(r).splitlines():
114 if l:
114 if l:
115 n, k = l.split(" ", 1)
115 n, k = l.split(" ", 1)
116 addtag(self, k, n)
116 addtag(self, k, n)
117 except KeyError:
117 except KeyError:
118 pass
118 pass
119
119
120 try:
120 try:
121 f = self.opener("localtags")
121 f = self.opener("localtags")
122 for l in f:
122 for l in f:
123 n, k = l.split(" ", 1)
123 n, k = l.split(" ", 1)
124 addtag(self, k, n)
124 addtag(self, k, n)
125 except IOError:
125 except IOError:
126 pass
126 pass
127
127
128 self.tagscache['tip'] = self.changelog.tip()
128 self.tagscache['tip'] = self.changelog.tip()
129
129
130 return self.tagscache
130 return self.tagscache
131
131
132 def tagslist(self):
132 def tagslist(self):
133 '''return a list of tags ordered by revision'''
133 '''return a list of tags ordered by revision'''
134 l = []
134 l = []
135 for t, n in self.tags().items():
135 for t, n in self.tags().items():
136 try:
136 try:
137 r = self.changelog.rev(n)
137 r = self.changelog.rev(n)
138 except:
138 except:
139 r = -2 # sort to the beginning of the list if unknown
139 r = -2 # sort to the beginning of the list if unknown
140 l.append((r, t, n))
140 l.append((r, t, n))
141 l.sort()
141 l.sort()
142 return [(t, n) for r, t, n in l]
142 return [(t, n) for r, t, n in l]
143
143
144 def nodetags(self, node):
144 def nodetags(self, node):
145 '''return the tags associated with a node'''
145 '''return the tags associated with a node'''
146 if not self.nodetagscache:
146 if not self.nodetagscache:
147 self.nodetagscache = {}
147 self.nodetagscache = {}
148 for t, n in self.tags().items():
148 for t, n in self.tags().items():
149 self.nodetagscache.setdefault(n, []).append(t)
149 self.nodetagscache.setdefault(n, []).append(t)
150 return self.nodetagscache.get(node, [])
150 return self.nodetagscache.get(node, [])
151
151
152 def lookup(self, key):
152 def lookup(self, key):
153 try:
153 try:
154 return self.tags()[key]
154 return self.tags()[key]
155 except KeyError:
155 except KeyError:
156 try:
156 try:
157 return self.changelog.lookup(key)
157 return self.changelog.lookup(key)
158 except:
158 except:
159 raise repo.RepoError(_("unknown revision '%s'") % key)
159 raise repo.RepoError(_("unknown revision '%s'") % key)
160
160
161 def dev(self):
161 def dev(self):
162 return os.stat(self.path).st_dev
162 return os.stat(self.path).st_dev
163
163
164 def local(self):
164 def local(self):
165 return True
165 return True
166
166
167 def join(self, f):
167 def join(self, f):
168 return os.path.join(self.path, f)
168 return os.path.join(self.path, f)
169
169
170 def wjoin(self, f):
170 def wjoin(self, f):
171 return os.path.join(self.root, f)
171 return os.path.join(self.root, f)
172
172
173 def file(self, f):
173 def file(self, f):
174 if f[0] == '/':
174 if f[0] == '/':
175 f = f[1:]
175 f = f[1:]
176 return filelog.filelog(self.opener, f)
176 return filelog.filelog(self.opener, f)
177
177
178 def getcwd(self):
178 def getcwd(self):
179 return self.dirstate.getcwd()
179 return self.dirstate.getcwd()
180
180
181 def wfile(self, f, mode='r'):
181 def wfile(self, f, mode='r'):
182 return self.wopener(f, mode)
182 return self.wopener(f, mode)
183
183
184 def wread(self, filename):
184 def wread(self, filename):
185 if self.encodepats == None:
185 if self.encodepats == None:
186 l = []
186 l = []
187 for pat, cmd in self.ui.configitems("encode"):
187 for pat, cmd in self.ui.configitems("encode"):
188 mf = util.matcher("", "/", [pat], [], [])[1]
188 mf = util.matcher("", "/", [pat], [], [])[1]
189 l.append((mf, cmd))
189 l.append((mf, cmd))
190 self.encodepats = l
190 self.encodepats = l
191
191
192 data = self.wopener(filename, 'r').read()
192 data = self.wopener(filename, 'r').read()
193
193
194 for mf, cmd in self.encodepats:
194 for mf, cmd in self.encodepats:
195 if mf(filename):
195 if mf(filename):
196 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
196 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
197 data = util.filter(data, cmd)
197 data = util.filter(data, cmd)
198 break
198 break
199
199
200 return data
200 return data
201
201
202 def wwrite(self, filename, data, fd=None):
202 def wwrite(self, filename, data, fd=None):
203 if self.decodepats == None:
203 if self.decodepats == None:
204 l = []
204 l = []
205 for pat, cmd in self.ui.configitems("decode"):
205 for pat, cmd in self.ui.configitems("decode"):
206 mf = util.matcher("", "/", [pat], [], [])[1]
206 mf = util.matcher("", "/", [pat], [], [])[1]
207 l.append((mf, cmd))
207 l.append((mf, cmd))
208 self.decodepats = l
208 self.decodepats = l
209
209
210 for mf, cmd in self.decodepats:
210 for mf, cmd in self.decodepats:
211 if mf(filename):
211 if mf(filename):
212 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
212 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
213 data = util.filter(data, cmd)
213 data = util.filter(data, cmd)
214 break
214 break
215
215
216 if fd:
216 if fd:
217 return fd.write(data)
217 return fd.write(data)
218 return self.wopener(filename, 'w').write(data)
218 return self.wopener(filename, 'w').write(data)
219
219
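wread() and wwrite() above pipe file contents through the first [encode] (reading from the working directory) or [decode] (writing back to it) command whose pattern matches the filename; the break means only one filter ever applies per file. A toy sketch of that selection step, with fnmatch standing in for util.matcher and an invented filter table:

    import fnmatch

    def pick_filter(filename, filters):
        # return the first command whose pattern matches, mirroring the break above
        for pat, cmd in filters:
            if fnmatch.fnmatch(filename, pat):
                return cmd
        return None

    demo = [("*.txt", "dos2unix"), ("*.bin", "uudecode")]
    # pick_filter("notes.txt", demo) -> "dos2unix"
    # pick_filter("image.png", demo) -> None (data passes through unchanged)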
220 def transaction(self):
220 def transaction(self):
221 tr = self.transhandle
221 tr = self.transhandle
222 if tr != None and tr.running():
222 if tr != None and tr.running():
223 return tr.nest()
223 return tr.nest()
224
224
225 # save dirstate for undo
225 # save dirstate for undo
226 try:
226 try:
227 ds = self.opener("dirstate").read()
227 ds = self.opener("dirstate").read()
228 except IOError:
228 except IOError:
229 ds = ""
229 ds = ""
230 self.opener("journal.dirstate", "w").write(ds)
230 self.opener("journal.dirstate", "w").write(ds)
231
231
232 tr = transaction.transaction(self.ui.warn, self.opener,
232 tr = transaction.transaction(self.ui.warn, self.opener,
233 self.join("journal"),
233 self.join("journal"),
234 aftertrans(self.path))
234 aftertrans(self.path))
235 self.transhandle = tr
235 self.transhandle = tr
236 return tr
236 return tr
237
237
238 def recover(self):
238 def recover(self):
239 l = self.lock()
239 l = self.lock()
240 if os.path.exists(self.join("journal")):
240 if os.path.exists(self.join("journal")):
241 self.ui.status(_("rolling back interrupted transaction\n"))
241 self.ui.status(_("rolling back interrupted transaction\n"))
242 transaction.rollback(self.opener, self.join("journal"))
242 transaction.rollback(self.opener, self.join("journal"))
243 self.reload()
243 self.reload()
244 return True
244 return True
245 else:
245 else:
246 self.ui.warn(_("no interrupted transaction available\n"))
246 self.ui.warn(_("no interrupted transaction available\n"))
247 return False
247 return False
248
248
249 def undo(self, wlock=None):
249 def undo(self, wlock=None):
250 if not wlock:
250 if not wlock:
251 wlock = self.wlock()
251 wlock = self.wlock()
252 l = self.lock()
252 l = self.lock()
253 if os.path.exists(self.join("undo")):
253 if os.path.exists(self.join("undo")):
254 self.ui.status(_("rolling back last transaction\n"))
254 self.ui.status(_("rolling back last transaction\n"))
255 transaction.rollback(self.opener, self.join("undo"))
255 transaction.rollback(self.opener, self.join("undo"))
256 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
256 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
257 self.reload()
257 self.reload()
258 self.wreload()
258 self.wreload()
259 else:
259 else:
260 self.ui.warn(_("no undo information available\n"))
260 self.ui.warn(_("no undo information available\n"))
261
261
262 def wreload(self):
262 def wreload(self):
263 self.dirstate.read()
263 self.dirstate.read()
264
264
265 def reload(self):
265 def reload(self):
266 self.changelog.load()
266 self.changelog.load()
267 self.manifest.load()
267 self.manifest.load()
268 self.tagscache = None
268 self.tagscache = None
269 self.nodetagscache = None
269 self.nodetagscache = None
270
270
271 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None):
271 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None):
272 try:
272 try:
273 l = lock.lock(self.join(lockname), 0, releasefn)
273 l = lock.lock(self.join(lockname), 0, releasefn)
274 except lock.LockHeld, inst:
274 except lock.LockHeld, inst:
275 if not wait:
275 if not wait:
276 raise inst
276 raise inst
277 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
277 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
278 try:
278 try:
279 # default to 600 seconds timeout
279 # default to 600 seconds timeout
280 l = lock.lock(self.join(lockname),
280 l = lock.lock(self.join(lockname),
281 int(self.ui.config("ui", "timeout") or 600),
281 int(self.ui.config("ui", "timeout") or 600),
282 releasefn)
282 releasefn)
283 except lock.LockHeld, inst:
283 except lock.LockHeld, inst:
284 raise util.Abort(_("timeout while waiting for "
284 raise util.Abort(_("timeout while waiting for "
285 "lock held by %s") % inst.args[0])
285 "lock held by %s") % inst.args[0])
286 if acquirefn:
286 if acquirefn:
287 acquirefn()
287 acquirefn()
288 return l
288 return l
289
289
290 def lock(self, wait=1):
290 def lock(self, wait=1):
291 return self.do_lock("lock", wait, acquirefn=self.reload)
291 return self.do_lock("lock", wait, acquirefn=self.reload)
292
292
293 def wlock(self, wait=1):
293 def wlock(self, wait=1):
294 return self.do_lock("wlock", wait,
294 return self.do_lock("wlock", wait,
295 self.dirstate.write,
295 self.dirstate.write,
296 self.wreload)
296 self.wreload)
297
297
298 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
298 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
299 "determine whether a new filenode is needed"
299 "determine whether a new filenode is needed"
300 fp1 = manifest1.get(filename, nullid)
300 fp1 = manifest1.get(filename, nullid)
301 fp2 = manifest2.get(filename, nullid)
301 fp2 = manifest2.get(filename, nullid)
302
302
303 if fp2 != nullid:
303 if fp2 != nullid:
304 # is one parent an ancestor of the other?
304 # is one parent an ancestor of the other?
305 fpa = filelog.ancestor(fp1, fp2)
305 fpa = filelog.ancestor(fp1, fp2)
306 if fpa == fp1:
306 if fpa == fp1:
307 fp1, fp2 = fp2, nullid
307 fp1, fp2 = fp2, nullid
308 elif fpa == fp2:
308 elif fpa == fp2:
309 fp2 = nullid
309 fp2 = nullid
310
310
311 # is the file unmodified from the parent? report existing entry
311 # is the file unmodified from the parent? report existing entry
312 if fp2 == nullid and text == filelog.read(fp1):
312 if fp2 == nullid and text == filelog.read(fp1):
313 return (fp1, None, None)
313 return (fp1, None, None)
314
314
315 return (None, fp1, fp2)
315 return (None, fp1, fp2)
316
316
317 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
317 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
318 orig_parent = self.dirstate.parents()[0] or nullid
318 orig_parent = self.dirstate.parents()[0] or nullid
319 p1 = p1 or self.dirstate.parents()[0] or nullid
319 p1 = p1 or self.dirstate.parents()[0] or nullid
320 p2 = p2 or self.dirstate.parents()[1] or nullid
320 p2 = p2 or self.dirstate.parents()[1] or nullid
321 c1 = self.changelog.read(p1)
321 c1 = self.changelog.read(p1)
322 c2 = self.changelog.read(p2)
322 c2 = self.changelog.read(p2)
323 m1 = self.manifest.read(c1[0])
323 m1 = self.manifest.read(c1[0])
324 mf1 = self.manifest.readflags(c1[0])
324 mf1 = self.manifest.readflags(c1[0])
325 m2 = self.manifest.read(c2[0])
325 m2 = self.manifest.read(c2[0])
326 changed = []
326 changed = []
327
327
328 if orig_parent == p1:
328 if orig_parent == p1:
329 update_dirstate = 1
329 update_dirstate = 1
330 else:
330 else:
331 update_dirstate = 0
331 update_dirstate = 0
332
332
333 if not wlock:
333 if not wlock:
334 wlock = self.wlock()
334 wlock = self.wlock()
335 l = self.lock()
335 l = self.lock()
336 tr = self.transaction()
336 tr = self.transaction()
337 mm = m1.copy()
337 mm = m1.copy()
338 mfm = mf1.copy()
338 mfm = mf1.copy()
339 linkrev = self.changelog.count()
339 linkrev = self.changelog.count()
340 for f in files:
340 for f in files:
341 try:
341 try:
342 t = self.wread(f)
342 t = self.wread(f)
343 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
343 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
344 r = self.file(f)
344 r = self.file(f)
345 mfm[f] = tm
345 mfm[f] = tm
346
346
347 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
347 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
348 if entry:
348 if entry:
349 mm[f] = entry
349 mm[f] = entry
350 continue
350 continue
351
351
352 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
352 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
353 changed.append(f)
353 changed.append(f)
354 if update_dirstate:
354 if update_dirstate:
355 self.dirstate.update([f], "n")
355 self.dirstate.update([f], "n")
356 except IOError:
356 except IOError:
357 try:
357 try:
358 del mm[f]
358 del mm[f]
359 del mfm[f]
359 del mfm[f]
360 if update_dirstate:
360 if update_dirstate:
361 self.dirstate.forget([f])
361 self.dirstate.forget([f])
362 except:
362 except:
363 # deleted from p2?
363 # deleted from p2?
364 pass
364 pass
365
365
366 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
366 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
367 user = user or self.ui.username()
367 user = user or self.ui.username()
368 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
368 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
369 tr.close()
369 tr.close()
370 if update_dirstate:
370 if update_dirstate:
371 self.dirstate.setparents(n, nullid)
371 self.dirstate.setparents(n, nullid)
372
372
373 def commit(self, files=None, text="", user=None, date=None,
373 def commit(self, files=None, text="", user=None, date=None,
374 match=util.always, force=False, lock=None, wlock=None):
374 match=util.always, force=False, lock=None, wlock=None):
375 commit = []
375 commit = []
376 remove = []
376 remove = []
377 changed = []
377 changed = []
378
378
379 if files:
379 if files:
380 for f in files:
380 for f in files:
381 s = self.dirstate.state(f)
381 s = self.dirstate.state(f)
382 if s in 'nmai':
382 if s in 'nmai':
383 commit.append(f)
383 commit.append(f)
384 elif s == 'r':
384 elif s == 'r':
385 remove.append(f)
385 remove.append(f)
386 else:
386 else:
387 self.ui.warn(_("%s not tracked!\n") % f)
387 self.ui.warn(_("%s not tracked!\n") % f)
388 else:
388 else:
389 modified, added, removed, deleted, unknown = self.changes(match=match)
389 modified, added, removed, deleted, unknown = self.changes(match=match)
390 commit = modified + added
390 commit = modified + added
391 remove = removed
391 remove = removed
392
392
393 p1, p2 = self.dirstate.parents()
393 p1, p2 = self.dirstate.parents()
394 c1 = self.changelog.read(p1)
394 c1 = self.changelog.read(p1)
395 c2 = self.changelog.read(p2)
395 c2 = self.changelog.read(p2)
396 m1 = self.manifest.read(c1[0])
396 m1 = self.manifest.read(c1[0])
397 mf1 = self.manifest.readflags(c1[0])
397 mf1 = self.manifest.readflags(c1[0])
398 m2 = self.manifest.read(c2[0])
398 m2 = self.manifest.read(c2[0])
399
399
400 if not commit and not remove and not force and p2 == nullid:
400 if not commit and not remove and not force and p2 == nullid:
401 self.ui.status(_("nothing changed\n"))
401 self.ui.status(_("nothing changed\n"))
402 return None
402 return None
403
403
404 xp1 = hex(p1)
404 xp1 = hex(p1)
405 if p2 == nullid: xp2 = ''
405 if p2 == nullid: xp2 = ''
406 else: xp2 = hex(p2)
406 else: xp2 = hex(p2)
407
407
408 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
408 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
409
409
410 if not wlock:
410 if not wlock:
411 wlock = self.wlock()
411 wlock = self.wlock()
412 if not lock:
412 if not lock:
413 lock = self.lock()
413 lock = self.lock()
414 tr = self.transaction()
414 tr = self.transaction()
415
415
416 # check in files
416 # check in files
417 new = {}
417 new = {}
418 linkrev = self.changelog.count()
418 linkrev = self.changelog.count()
419 commit.sort()
419 commit.sort()
420 for f in commit:
420 for f in commit:
421 self.ui.note(f + "\n")
421 self.ui.note(f + "\n")
422 try:
422 try:
423 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
423 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
424 t = self.wread(f)
424 t = self.wread(f)
425 except IOError:
425 except IOError:
426 self.ui.warn(_("trouble committing %s!\n") % f)
426 self.ui.warn(_("trouble committing %s!\n") % f)
427 raise
427 raise
428
428
429 r = self.file(f)
429 r = self.file(f)
430
430
431 meta = {}
431 meta = {}
432 cp = self.dirstate.copied(f)
432 cp = self.dirstate.copied(f)
433 if cp:
433 if cp:
434 meta["copy"] = cp
434 meta["copy"] = cp
435 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
435 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
436 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
436 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
437 fp1, fp2 = nullid, nullid
437 fp1, fp2 = nullid, nullid
438 else:
438 else:
439 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
439 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
440 if entry:
440 if entry:
441 new[f] = entry
441 new[f] = entry
442 continue
442 continue
443
443
444 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
444 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
445 # remember what we've added so that we can later calculate
445 # remember what we've added so that we can later calculate
446 # the files to pull from a set of changesets
446 # the files to pull from a set of changesets
447 changed.append(f)
447 changed.append(f)
448
448
449 # update manifest
449 # update manifest
450 m1 = m1.copy()
450 m1 = m1.copy()
451 m1.update(new)
451 m1.update(new)
452 for f in remove:
452 for f in remove:
453 if f in m1:
453 if f in m1:
454 del m1[f]
454 del m1[f]
455 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
455 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
456 (new, remove))
456 (new, remove))
457
457
458 # add changeset
458 # add changeset
459 new = new.keys()
459 new = new.keys()
460 new.sort()
460 new.sort()
461
461
462 if not text:
462 if not text:
463 edittext = [""]
463 edittext = [""]
464 if p2 != nullid:
464 if p2 != nullid:
465 edittext.append("HG: branch merge")
465 edittext.append("HG: branch merge")
466 edittext.extend(["HG: changed %s" % f for f in changed])
466 edittext.extend(["HG: changed %s" % f for f in changed])
467 edittext.extend(["HG: removed %s" % f for f in remove])
467 edittext.extend(["HG: removed %s" % f for f in remove])
468 if not changed and not remove:
468 if not changed and not remove:
469 edittext.append("HG: no files changed")
469 edittext.append("HG: no files changed")
470 edittext.append("")
470 edittext.append("")
471 # run editor in the repository root
471 # run editor in the repository root
472 olddir = os.getcwd()
472 olddir = os.getcwd()
473 os.chdir(self.root)
473 os.chdir(self.root)
474 edittext = self.ui.edit("\n".join(edittext))
474 edittext = self.ui.edit("\n".join(edittext))
475 os.chdir(olddir)
475 os.chdir(olddir)
476 if not edittext.rstrip():
476 if not edittext.rstrip():
477 return None
477 return None
478 text = edittext
478 text = edittext
479
479
480 user = user or self.ui.username()
480 user = user or self.ui.username()
481 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
481 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
482 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
482 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
483 parent2=xp2)
483 parent2=xp2)
484 tr.close()
484 tr.close()
485
485
486 self.dirstate.setparents(n)
486 self.dirstate.setparents(n)
487 self.dirstate.update(new, "n")
487 self.dirstate.update(new, "n")
488 self.dirstate.forget(remove)
488 self.dirstate.forget(remove)
489
489
490 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
490 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
491 return n
491 return n
492
492
493 def walk(self, node=None, files=[], match=util.always):
493 def walk(self, node=None, files=[], match=util.always):
494 if node:
494 if node:
495 fdict = dict.fromkeys(files)
495 fdict = dict.fromkeys(files)
496 for fn in self.manifest.read(self.changelog.read(node)[0]):
496 for fn in self.manifest.read(self.changelog.read(node)[0]):
497 fdict.pop(fn, None)
497 fdict.pop(fn, None)
498 if match(fn):
498 if match(fn):
499 yield 'm', fn
499 yield 'm', fn
500 for fn in fdict:
500 for fn in fdict:
501 self.ui.warn(_('%s: No such file in rev %s\n') % (
501 self.ui.warn(_('%s: No such file in rev %s\n') % (
502 util.pathto(self.getcwd(), fn), short(node)))
502 util.pathto(self.getcwd(), fn), short(node)))
503 else:
503 else:
504 for src, fn in self.dirstate.walk(files, match):
504 for src, fn in self.dirstate.walk(files, match):
505 yield src, fn
505 yield src, fn
506
506
507 def changes(self, node1=None, node2=None, files=[], match=util.always,
507 def changes(self, node1=None, node2=None, files=[], match=util.always,
508 wlock=None):
508 wlock=None):
509 """return changes between two nodes or node and working directory
509 """return changes between two nodes or node and working directory
510
510
511 If node1 is None, use the first dirstate parent instead.
511 If node1 is None, use the first dirstate parent instead.
512 If node2 is None, compare node1 with working directory.
512 If node2 is None, compare node1 with working directory.
513 """
513 """
514
514
515 def fcmp(fn, mf):
515 def fcmp(fn, mf):
516 t1 = self.wread(fn)
516 t1 = self.wread(fn)
517 t2 = self.file(fn).read(mf.get(fn, nullid))
517 t2 = self.file(fn).read(mf.get(fn, nullid))
518 return cmp(t1, t2)
518 return cmp(t1, t2)
519
519
520 def mfmatches(node):
520 def mfmatches(node):
521 change = self.changelog.read(node)
521 change = self.changelog.read(node)
522 mf = dict(self.manifest.read(change[0]))
522 mf = dict(self.manifest.read(change[0]))
523 for fn in mf.keys():
523 for fn in mf.keys():
524 if not match(fn):
524 if not match(fn):
525 del mf[fn]
525 del mf[fn]
526 return mf
526 return mf
527
527
528 if node1:
528 if node1:
529 # read the manifest from node1 before the manifest from node2,
529 # read the manifest from node1 before the manifest from node2,
530 # so that we'll hit the manifest cache if we're going through
530 # so that we'll hit the manifest cache if we're going through
531 # all the revisions in parent->child order.
531 # all the revisions in parent->child order.
532 mf1 = mfmatches(node1)
532 mf1 = mfmatches(node1)
533
533
534 # are we comparing the working directory?
534 # are we comparing the working directory?
535 if not node2:
535 if not node2:
536 if not wlock:
536 if not wlock:
537 try:
537 try:
538 wlock = self.wlock(wait=0)
538 wlock = self.wlock(wait=0)
539 except lock.LockException:
539 except lock.LockException:
540 wlock = None
540 wlock = None
541 lookup, modified, added, removed, deleted, unknown = (
541 lookup, modified, added, removed, deleted, unknown = (
542 self.dirstate.changes(files, match))
542 self.dirstate.changes(files, match))
543
543
544 # are we comparing working dir against its parent?
544 # are we comparing working dir against its parent?
545 if not node1:
545 if not node1:
546 if lookup:
546 if lookup:
547 # do a full compare of any files that might have changed
547 # do a full compare of any files that might have changed
548 mf2 = mfmatches(self.dirstate.parents()[0])
548 mf2 = mfmatches(self.dirstate.parents()[0])
549 for f in lookup:
549 for f in lookup:
550 if fcmp(f, mf2):
550 if fcmp(f, mf2):
551 modified.append(f)
551 modified.append(f)
552 elif wlock is not None:
552 elif wlock is not None:
553 self.dirstate.update([f], "n")
553 self.dirstate.update([f], "n")
554 else:
554 else:
555 # we are comparing working dir against non-parent
555 # we are comparing working dir against non-parent
556 # generate a pseudo-manifest for the working dir
556 # generate a pseudo-manifest for the working dir
557 mf2 = mfmatches(self.dirstate.parents()[0])
557 mf2 = mfmatches(self.dirstate.parents()[0])
558 for f in lookup + modified + added:
558 for f in lookup + modified + added:
559 mf2[f] = ""
559 mf2[f] = ""
560 for f in removed:
560 for f in removed:
561 if f in mf2:
561 if f in mf2:
562 del mf2[f]
562 del mf2[f]
563 else:
563 else:
564 # we are comparing two revisions
564 # we are comparing two revisions
565 deleted, unknown = [], []
565 deleted, unknown = [], []
566 mf2 = mfmatches(node2)
566 mf2 = mfmatches(node2)
567
567
568 if node1:
568 if node1:
569 # flush lists from dirstate before comparing manifests
569 # flush lists from dirstate before comparing manifests
570 modified, added = [], []
570 modified, added = [], []
571
571
572 for fn in mf2:
572 for fn in mf2:
573 if mf1.has_key(fn):
573 if mf1.has_key(fn):
574 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
574 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
575 modified.append(fn)
575 modified.append(fn)
576 del mf1[fn]
576 del mf1[fn]
577 else:
577 else:
578 added.append(fn)
578 added.append(fn)
579
579
580 removed = mf1.keys()
580 removed = mf1.keys()
581
581
582 # sort and return results:
582 # sort and return results:
583 for l in modified, added, removed, deleted, unknown:
583 for l in modified, added, removed, deleted, unknown:
584 l.sort()
584 l.sort()
585 return (modified, added, removed, deleted, unknown)
585 return (modified, added, removed, deleted, unknown)
586
586
587 def add(self, list, wlock=None):
587 def add(self, list, wlock=None):
588 if not wlock:
588 if not wlock:
589 wlock = self.wlock()
589 wlock = self.wlock()
590 for f in list:
590 for f in list:
591 p = self.wjoin(f)
591 p = self.wjoin(f)
592 if not os.path.exists(p):
592 if not os.path.exists(p):
593 self.ui.warn(_("%s does not exist!\n") % f)
593 self.ui.warn(_("%s does not exist!\n") % f)
594 elif not os.path.isfile(p):
594 elif not os.path.isfile(p):
595 self.ui.warn(_("%s not added: only files supported currently\n")
595 self.ui.warn(_("%s not added: only files supported currently\n")
596 % f)
596 % f)
597 elif self.dirstate.state(f) in 'an':
597 elif self.dirstate.state(f) in 'an':
598 self.ui.warn(_("%s already tracked!\n") % f)
598 self.ui.warn(_("%s already tracked!\n") % f)
599 else:
599 else:
600 self.dirstate.update([f], "a")
600 self.dirstate.update([f], "a")
601
601
602 def forget(self, list, wlock=None):
602 def forget(self, list, wlock=None):
603 if not wlock:
603 if not wlock:
604 wlock = self.wlock()
604 wlock = self.wlock()
605 for f in list:
605 for f in list:
606 if self.dirstate.state(f) not in 'ai':
606 if self.dirstate.state(f) not in 'ai':
607 self.ui.warn(_("%s not added!\n") % f)
607 self.ui.warn(_("%s not added!\n") % f)
608 else:
608 else:
609 self.dirstate.forget([f])
609 self.dirstate.forget([f])
610
610
611 def remove(self, list, unlink=False, wlock=None):
611 def remove(self, list, unlink=False, wlock=None):
612 if unlink:
612 if unlink:
613 for f in list:
613 for f in list:
614 try:
614 try:
615 util.unlink(self.wjoin(f))
615 util.unlink(self.wjoin(f))
616 except OSError, inst:
616 except OSError, inst:
617 if inst.errno != errno.ENOENT:
617 if inst.errno != errno.ENOENT:
618 raise
618 raise
619 if not wlock:
619 if not wlock:
620 wlock = self.wlock()
620 wlock = self.wlock()
621 for f in list:
621 for f in list:
622 p = self.wjoin(f)
622 p = self.wjoin(f)
623 if os.path.exists(p):
623 if os.path.exists(p):
624 self.ui.warn(_("%s still exists!\n") % f)
624 self.ui.warn(_("%s still exists!\n") % f)
625 elif self.dirstate.state(f) == 'a':
625 elif self.dirstate.state(f) == 'a':
626 self.dirstate.forget([f])
626 self.dirstate.forget([f])
627 elif f not in self.dirstate:
627 elif f not in self.dirstate:
628 self.ui.warn(_("%s not tracked!\n") % f)
628 self.ui.warn(_("%s not tracked!\n") % f)
629 else:
629 else:
630 self.dirstate.update([f], "r")
630 self.dirstate.update([f], "r")
631
631
632 def undelete(self, list, wlock=None):
632 def undelete(self, list, wlock=None):
633 p = self.dirstate.parents()[0]
633 p = self.dirstate.parents()[0]
634 mn = self.changelog.read(p)[0]
634 mn = self.changelog.read(p)[0]
635 mf = self.manifest.readflags(mn)
635 mf = self.manifest.readflags(mn)
636 m = self.manifest.read(mn)
636 m = self.manifest.read(mn)
637 if not wlock:
637 if not wlock:
638 wlock = self.wlock()
638 wlock = self.wlock()
639 for f in list:
639 for f in list:
640 if self.dirstate.state(f) not in "r":
640 if self.dirstate.state(f) not in "r":
641 self.ui.warn("%s not removed!\n" % f)
641 self.ui.warn("%s not removed!\n" % f)
642 else:
642 else:
643 t = self.file(f).read(m[f])
643 t = self.file(f).read(m[f])
644 self.wwrite(f, t)
644 self.wwrite(f, t)
645 util.set_exec(self.wjoin(f), mf[f])
645 util.set_exec(self.wjoin(f), mf[f])
646 self.dirstate.update([f], "n")
646 self.dirstate.update([f], "n")
647
647
648 def copy(self, source, dest, wlock=None):
648 def copy(self, source, dest, wlock=None):
649 p = self.wjoin(dest)
649 p = self.wjoin(dest)
650 if not os.path.exists(p):
650 if not os.path.exists(p):
651 self.ui.warn(_("%s does not exist!\n") % dest)
651 self.ui.warn(_("%s does not exist!\n") % dest)
652 elif not os.path.isfile(p):
652 elif not os.path.isfile(p):
653 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
653 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
654 else:
654 else:
655 if not wlock:
655 if not wlock:
656 wlock = self.wlock()
656 wlock = self.wlock()
657 if self.dirstate.state(dest) == '?':
657 if self.dirstate.state(dest) == '?':
658 self.dirstate.update([dest], "a")
658 self.dirstate.update([dest], "a")
659 self.dirstate.copy(source, dest)
659 self.dirstate.copy(source, dest)
660
660
661 def heads(self, start=None):
661 def heads(self, start=None):
662 heads = self.changelog.heads(start)
662 heads = self.changelog.heads(start)
663 # sort the output in rev descending order
663 # sort the output in rev descending order
664 heads = [(-self.changelog.rev(h), h) for h in heads]
664 heads = [(-self.changelog.rev(h), h) for h in heads]
665 heads.sort()
665 heads.sort()
666 return [n for (r, n) in heads]
666 return [n for (r, n) in heads]
667
667
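# A tiny standalone illustration of the decorate-sort-undecorate idiom used in
# heads() above: pairing each head with the negative of its revision number
# lets a plain sort yield the newest head first.  The node names and revision
# numbers below are made up for the example; nothing here touches a real repo.

_revs = {'n1': 3, 'n2': 7, 'n3': 5}
_decorated = [(-_revs[h], h) for h in _revs]
_decorated.sort()
print([n for (r, n) in _decorated])    # ['n2', 'n3', 'n1'] -- highest rev first
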
    # branchlookup returns a dict giving a list of branches for
    # each head.  A branch is defined as the tag of a node or
    # the branch of the node's parents.  If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph:  a->b->c->d->e
    #                       \         /
    #                        aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13.  Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents.  It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited.  This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out

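# A minimal, self-contained sketch of the tag-elimination rule described in the
# comment above branchlookup(), run on the toy graph a->b->c->d->e with a merge
# via aa.  The parent map, tags and helper below are illustrative stand-ins,
# not part of this class's API.

def _ancestors(node, parents):
    # every node reachable from 'node' by following parent links (inclusive)
    seen, stack = set(), [node]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents.get(n, ()))
    return seen

_parents = {'b': ['a'], 'c': ['b'], 'd': ['c'], 'aa': ['a'], 'e': ['d', 'aa']}
_tags = {'a': '2.6.12', 'd': '2.6.13'}

# branch tags seen from head 'e': both tagged nodes are ancestors of 'e' ...
_cands = [n for n in _tags if n in _ancestors('e', _parents)]
# ... but 'a' (2.6.12) is visible from the other tagged node 'd', so it is
# eliminated, leaving only 2.6.13 for this head -- exactly the example above.
print(sorted(_tags[n] for n in _cands
             if not any(n in _ancestors(o, _parents)
                        for o in _cands if o != n)))    # ['2.6.13']
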
    def branches(self, nodes):
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while n:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

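# A small sketch of the sampling pattern produced by between() above: walking
# from 'top' toward 'bottom' it keeps the nodes at distances 1, 2, 4, 8, ...
# from the top, which is what lets findincoming() below binary-search a long
# linear branch with only O(log n) round trips.  Plain integers stand in for
# changelog nodes here, with parent(n) = n - 1; purely illustrative.

def _between_sample(top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f = f * 2
        n, i = n - 1, i + 1
    return l

print(_between_sample(20, 0))    # [19, 18, 16, 12, 4]
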
    def findincoming(self, remote, base=None, heads=None):
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        if not heads:
            heads = remote.heads()

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return None

        rep = {}
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid:
                    break
                if n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                if n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                            base[n[2]] = 1 # latest known
                            continue

                    for a in n[2:4]:
                        if a not in rep:
                            r.append(a)
                            rep[a] = 1

                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                for p in range(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        if b[0] in m:
                            self.ui.debug(_("found base node %s\n")
                                          % short(b[0]))
                            base[b[0]] = 1
                        elif b[0] not in seen:
                            unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            self.ui.warn(_("warning: pulling from an unrelated repository!\n"))

        self.ui.note(_("found new changesets starting at ") +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()

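# An illustrative walk-through of the narrowing loop above, on a toy linear
# history where integer nodes 0..37 exist locally and everything newer exists
# only on the remote.  The helper reuses the logarithmic sampling idea from
# between() to pin down the earliest missing node in a handful of probes; all
# names here are local to the sketch and assume nothing about real revlogs.

def _narrow(head, root, known):
    def sample(top, bottom):
        # logarithmically spaced probes, like remote.between() returns
        n, out, i, f = top, [], 0, 1
        while n != bottom:
            if i == f:
                out.append(n)
                f = f * 2
            n, i = n - 1, i + 1
        return out + [bottom]

    search, fetch = [(head, root)], set()
    while search:
        top, bottom = search.pop(0)
        p, f = top, 1
        for i in sample(top, bottom):
            if known(i):
                if f <= 2:
                    fetch.add(p)             # earliest unknown node found
                else:
                    search.append((p, i))    # keep narrowing inside p..i
                break
            p, f = i, f * 2
    return fetch

print(sorted(_narrow(100, 0, lambda n: n <= 37)))    # [38]
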
    def findoutgoing(self, remote, base=None, heads=None):
        if base == None:
            base = {}
            self.findincoming(remote, base, heads)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)

        # this is the set of all roots we have to push
        return subset

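# A compact sketch of the pruning done above, on a toy DAG: everything the
# remote already has (reachable from the common 'base' heads) is removed, and
# the roots of what remains are the changesets that have to be pushed.  The
# dict-of-parents below is purely illustrative.

_dag = {'a': [], 'b': ['a'], 'c': ['b'],       # shared history a-b-c
        'x': ['c'], 'y': ['x'], 'z': ['c']}    # local-only x, y, z

def _outgoing_roots(parents, base):
    remain = set(parents)
    remove = list(base)
    while remove:                 # prune what the remote has
        n = remove.pop()
        if n in remain:
            remain.remove(n)
            remove.extend(parents[n])
    # roots: nodes none of whose parents survived the pruning
    return sorted(n for n in remain
                  if not any(p in remain for p in parents[n]))

print(_outgoing_roots(_dag, ['c']))    # ['x', 'z']
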
    def pull(self, remote, heads=None):
        l = self.lock()

        # if we have an empty repo, fetch everything
        if self.changelog.tip() == nullid:
            self.ui.status(_("requesting all changes\n"))
            fetch = [nullid]
        else:
            fetch = self.findincoming(remote)

        if not fetch:
            self.ui.status(_("no changes found\n"))
            return 1

        if heads is None:
            cg = remote.changegroup(fetch, 'pull')
        else:
            cg = remote.changegroupsubset(fetch, heads, 'pull')
        return self.addchangegroup(cg)

    def push(self, remote, force=False, revs=None):
        lock = remote.lock()

        base = {}
        heads = remote.heads()
        inc = self.findincoming(remote, base, heads)
        if not force and inc:
            self.ui.warn(_("abort: unsynced remote changes!\n"))
            self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
            return 1

        update = self.findoutgoing(remote, base)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return 1
        elif not force:
            if len(bases) < len(heads):
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return 1

        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return remote.addchangegroup(cg)

    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendants of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, let's remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield struct.pack(">l", len(fname) + 4) + fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield struct.pack(">l", 0)

        self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())

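# A small standalone sketch of the prune_parents() idea used above: once a
# node is known to exist on the receiving side, all of its ancestors must
# exist there too, so they can all be dropped from the "missing" set.  The
# toy linear history and set-based types below are illustrative only.

def _prune_parents(parents, hasset, msngset):
    stack = list(hasset)
    while stack:                       # pull in every ancestor of the has-set
        n = stack.pop()
        for p in parents.get(n, ()):
            if p not in hasset:
                hasset.add(p)
                stack.append(p)
    for n in hasset:                   # none of these need to be sent
        msngset.discard(n)

_history = {2: [1], 3: [2], 4: [3], 5: [4]}    # 1 <- 2 <- 3 <- 4 <- 5
_missing = set([2, 3, 4, 5])
_prune_parents(_history, set([3]), _missing)
print(sorted(_missing))    # [4, 5] -- 3 and its ancestors are already there
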
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])

        def identity(x):
            return x

        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield struct.pack(">l", len(fname) + 4) + fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield struct.pack(">l", 0)
        self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())

    def addchangegroup(self, source):

        def getchunk():
            d = source.read(4)
            if not d:
                return ""
            l = struct.unpack(">l", d)[0]
            if l <= 4:
                return ""
            d = source.read(l - 4)
            if len(d) < l - 4:
                raise repo.RepoError(_("premature EOF reading chunk"
                                       " (got %d bytes, expected %d)")
                                     % (len(d), l - 4))
            return d

        def getgroup():
            while 1:
                c = getchunk()
                if not c:
                    break
                yield c

        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return self.changelog.count()

        def revmap(x):
            return self.changelog.rev(x)

        if not source:
            return

        self.hook('prechangegroup', throw=True)

        changesets = files = revisions = 0

        tr = self.transaction()

        oldheads = len(self.changelog.heads())

        # pull off the changeset group
        self.ui.status(_("adding changesets\n"))
        co = self.changelog.tip()
        cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
        cnr, cor = map(self.changelog.rev, (cn, co))
        if cn == nullid:
            cnr = cor
        changesets = cnr - cor

        # pull off the manifest group
        self.ui.status(_("adding manifests\n"))
        mm = self.manifest.tip()
        mo = self.manifest.addgroup(getgroup(), revmap, tr)

        # process the files
        self.ui.status(_("adding file changes\n"))
        while 1:
            f = getchunk()
            if not f:
                break
            self.ui.debug(_("adding %s revisions\n") % f)
            fl = self.file(f)
            o = fl.count()
            n = fl.addgroup(getgroup(), revmap, tr)
            revisions += fl.count() - o
            files += 1

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads > oldheads:
            heads = _(" (+%d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        self.hook('pretxnchangegroup', throw=True,
                  node=hex(self.changelog.node(cor+1)))

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)))

            for i in range(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)))

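# The chunk framing that getchunk()/getgroup() above consume, shown as a tiny
# round trip: every chunk is a 4-byte big-endian length (counting the length
# field itself) followed by the payload, and a bare zero length terminates a
# group -- the same format the changegroup generators above emit.  BytesIO
# stands in for the real stream; this sketch is not part of the module.

import struct
from io import BytesIO

def _frame(payloads):
    out = b""
    for p in payloads:
        out += struct.pack(">l", len(p) + 4) + p
    return out + struct.pack(">l", 0)       # group terminator

def _unframe(stream):
    while True:
        d = stream.read(4)
        if not d:
            break
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            break                           # end of this group
        yield stream.read(l - 4)

_buf = BytesIO(_frame([b"first chunk", b"second chunk"]))
print(list(_unframe(_buf)))    # the two payloads come back unchanged
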
    def update(self, node, allow=False, force=False, choose=None,
               moddirstate=True, forcemerge=False, wlock=None):
        pl = self.dirstate.parents()
        if not force and pl[1] != nullid:
            self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
            return 1

        err = False

        p1, p2 = pl[0], node
        pa = self.changelog.ancestor(p1, p2)
        m1n = self.changelog.read(p1)[0]
        m2n = self.changelog.read(p2)[0]
        man = self.manifest.ancestor(m1n, m2n)
        m1 = self.manifest.read(m1n)
        mf1 = self.manifest.readflags(m1n)
        m2 = self.manifest.read(m2n).copy()
        mf2 = self.manifest.readflags(m2n)
        ma = self.manifest.read(man)
        mfa = self.manifest.readflags(man)

        modified, added, removed, deleted, unknown = self.changes()

        # is this a jump, or a merge?  i.e. is there a linear path
        # from p1 to p2?
        linear_path = (pa == p1 or pa == p2)

        if allow and linear_path:
            raise util.Abort(_("there is nothing to merge, "
                               "just use 'hg update'"))
        if allow and not forcemerge:
            if modified or added or removed:
                raise util.Abort(_("outstanding uncommitted changes"))
        if not forcemerge and not force:
            for f in unknown:
                if f in m2:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) != 0:
                        raise util.Abort(_("'%s' already exists in the working"
                                           " dir and differs from remote") % f)

        # resolve the manifest to determine which files
        # we care about merging
        self.ui.note(_("resolving manifests\n"))
        self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
                      (force, allow, moddirstate, linear_path))
        self.ui.debug(_(" ancestor %s local %s remote %s\n") %
                      (short(man), short(m1n), short(m2n)))

        merge = {}
        get = {}
        remove = []

        # construct a working dir manifest
        mw = m1.copy()
        mfw = mf1.copy()
        umap = dict.fromkeys(unknown)

        for f in added + modified + unknown:
            mw[f] = ""
            mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))

        if moddirstate and not wlock:
            wlock = self.wlock()

        for f in deleted + removed:
            if f in mw:
                del mw[f]

            # If we're jumping between revisions (as opposed to merging),
            # and if neither the working directory nor the target rev has
            # the file, then we need to remove it from the dirstate, to
            # prevent the dirstate from listing the file when it is no
            # longer in the manifest.
            if moddirstate and linear_path and f not in m2:
                self.dirstate.forget((f,))

        # Compare manifests
        for f, n in mw.iteritems():
            if choose and not choose(f):
                continue
            if f in m2:
                s = 0

                # is the wfile new since m1, and match m2?
                if f not in m1:
                    t1 = self.wread(f)
                    t2 = self.file(f).read(m2[f])
                    if cmp(t1, t2) == 0:
                        n = m2[f]
                    del t1, t2

                # are files different?
                if n != m2[f]:
                    a = ma.get(f, nullid)
                    # are both different from the ancestor?
                    if n != a and m2[f] != a:
                        self.ui.debug(_(" %s versions differ, resolve\n") % f)
                        # merge executable bits
                        # "if we changed or they changed, change in merge"
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        merge[f] = (m1.get(f, nullid), m2[f], mode)
                        s = 1
                    # are we clobbering?
                    # is remote's version newer?
                    # or are we going back in time?
                    elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
                        self.ui.debug(_(" remote %s is newer, get\n") % f)
                        get[f] = m2[f]
                        s = 1
                elif f in umap:
                    # this unknown file is the same as the checkout
                    get[f] = m2[f]

                if not s and mfw[f] != mf2[f]:
                    if force:
                        self.ui.debug(_(" updating permissions for %s\n") % f)
                        util.set_exec(self.wjoin(f), mf2[f])
                    else:
                        a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
                        mode = ((a^b) | (a^c)) ^ a
                        if mode != b:
                            self.ui.debug(_(" updating permissions for %s\n")
                                          % f)
                            util.set_exec(self.wjoin(f), mode)
                del m2[f]
            elif f in ma:
                if n != ma[f]:
                    r = _("d")
                    if not force and (linear_path or allow):
                        r = self.ui.prompt(
                            (_(" local changed %s which remote deleted\n") % f) +
                            _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
                    if r == _("d"):
                        remove.append(f)
                else:
                    self.ui.debug(_("other deleted %s\n") % f)
                    remove.append(f) # other deleted it
            else:
                # file is created on branch or in working directory
                if force and f not in umap:
                    self.ui.debug(_("remote deleted %s, clobbering\n") % f)
                    remove.append(f)
                elif n == m1.get(f, nullid): # same as parent
                    if p2 == pa: # going backwards?
                        self.ui.debug(_("remote deleted %s\n") % f)
                        remove.append(f)
                    else:
                        self.ui.debug(_("local modified %s, keeping\n") % f)
                else:
                    self.ui.debug(_("working dir created %s, keeping\n") % f)
1587
1587
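The executable-bit merge above combines the ancestor's bit a, the working directory's bit b and the remote's bit c as mode = ((a^b) | (a^c)) ^ a: flip the ancestor's bit whenever either side changed it. A minimal sketch of the rule on 0/1 values (illustration only, not part of this changeset):

    def merge_exec_bit(a, b, c):
        # a = ancestor, b = local working dir, c = remote; each 0 or 1
        # "if we changed or they changed, change in merge"
        return ((a ^ b) | (a ^ c)) ^ a

    assert merge_exec_bit(0, 1, 0) == 1   # local set +x, remote untouched: keep +x
    assert merge_exec_bit(1, 1, 0) == 0   # remote cleared +x, local untouched: clear it
    assert merge_exec_bit(1, 1, 1) == 1   # nobody changed it: unchanged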
1588 for f, n in m2.iteritems():
1588 for f, n in m2.iteritems():
1589 if choose and not choose(f):
1589 if choose and not choose(f):
1590 continue
1590 continue
1591 if f[0] == "/":
1591 if f[0] == "/":
1592 continue
1592 continue
1593 if f in ma and n != ma[f]:
1593 if f in ma and n != ma[f]:
1594 r = _("k")
1594 r = _("k")
1595 if not force and (linear_path or allow):
1595 if not force and (linear_path or allow):
1596 r = self.ui.prompt(
1596 r = self.ui.prompt(
1597 (_("remote changed %s which local deleted\n") % f) +
1597 (_("remote changed %s which local deleted\n") % f) +
1598 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1598 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1599 if r == _("k"):
1599 if r == _("k"):
1600 get[f] = n
1600 get[f] = n
1601 elif f not in ma:
1601 elif f not in ma:
1602 self.ui.debug(_("remote created %s\n") % f)
1602 self.ui.debug(_("remote created %s\n") % f)
1603 get[f] = n
1603 get[f] = n
1604 else:
1604 else:
1605 if force or p2 == pa: # going backwards?
1605 if force or p2 == pa: # going backwards?
1606 self.ui.debug(_("local deleted %s, recreating\n") % f)
1606 self.ui.debug(_("local deleted %s, recreating\n") % f)
1607 get[f] = n
1607 get[f] = n
1608 else:
1608 else:
1609 self.ui.debug(_("local deleted %s\n") % f)
1609 self.ui.debug(_("local deleted %s\n") % f)
1610
1610
1611 del mw, m1, m2, ma
1611 del mw, m1, m2, ma
1612
1612
1613 if force:
1613 if force:
1614 for f in merge:
1614 for f in merge:
1615 get[f] = merge[f][1]
1615 get[f] = merge[f][1]
1616 merge = {}
1616 merge = {}
1617
1617
1618 if linear_path or force:
1618 if linear_path or force:
1619 # we don't need to do any magic, just jump to the new rev
1619 # we don't need to do any magic, just jump to the new rev
1620 branch_merge = False
1620 branch_merge = False
1621 p1, p2 = p2, nullid
1621 p1, p2 = p2, nullid
1622 else:
1622 else:
1623 if not allow:
1623 if not allow:
1624 self.ui.status(_("this update spans a branch"
1624 self.ui.status(_("this update spans a branch"
1625 " affecting the following files:\n"))
1625 " affecting the following files:\n"))
1626 fl = merge.keys() + get.keys()
1626 fl = merge.keys() + get.keys()
1627 fl.sort()
1627 fl.sort()
1628 for f in fl:
1628 for f in fl:
1629 cf = ""
1629 cf = ""
1630 if f in merge:
1630 if f in merge:
1631 cf = _(" (resolve)")
1631 cf = _(" (resolve)")
1632 self.ui.status(" %s%s\n" % (f, cf))
1632 self.ui.status(" %s%s\n" % (f, cf))
1633 self.ui.warn(_("aborting update spanning branches!\n"))
1633 self.ui.warn(_("aborting update spanning branches!\n"))
1634 self.ui.status(_("(use update -m to merge across branches"
1634 self.ui.status(_("(use update -m to merge across branches"
1635 " or -C to lose changes)\n"))
1635 " or -C to lose changes)\n"))
1636 return 1
1636 return 1
1637 branch_merge = True
1637 branch_merge = True
1638
1638
1639 # get the files we don't need to change
1639 # get the files we don't need to change
1640 files = get.keys()
1640 files = get.keys()
1641 files.sort()
1641 files.sort()
1642 for f in files:
1642 for f in files:
1643 if f[0] == "/":
1643 if f[0] == "/":
1644 continue
1644 continue
1645 self.ui.note(_("getting %s\n") % f)
1645 self.ui.note(_("getting %s\n") % f)
1646 t = self.file(f).read(get[f])
1646 t = self.file(f).read(get[f])
1647 self.wwrite(f, t)
1647 self.wwrite(f, t)
1648 util.set_exec(self.wjoin(f), mf2[f])
1648 util.set_exec(self.wjoin(f), mf2[f])
1649 if moddirstate:
1649 if moddirstate:
1650 if branch_merge:
1650 if branch_merge:
1651 self.dirstate.update([f], 'n', st_mtime=-1)
1651 self.dirstate.update([f], 'n', st_mtime=-1)
1652 else:
1652 else:
1653 self.dirstate.update([f], 'n')
1653 self.dirstate.update([f], 'n')
1654
1654
1655 # merge the tricky bits
1655 # merge the tricky bits
1656 files = merge.keys()
1656 files = merge.keys()
1657 files.sort()
1657 files.sort()
1658 for f in files:
1658 for f in files:
1659 self.ui.status(_("merging %s\n") % f)
1659 self.ui.status(_("merging %s\n") % f)
1660 my, other, flag = merge[f]
1660 my, other, flag = merge[f]
1661 ret = self.merge3(f, my, other)
1661 ret = self.merge3(f, my, other)
1662 if ret:
1662 if ret:
1663 err = True
1663 err = True
1664 util.set_exec(self.wjoin(f), flag)
1664 util.set_exec(self.wjoin(f), flag)
1665 if moddirstate:
1665 if moddirstate:
1666 if branch_merge:
1666 if branch_merge:
1667 # We've done a branch merge, mark this file as merged
1667 # We've done a branch merge, mark this file as merged
1668 # so that we properly record the merger later
1668 # so that we properly record the merger later
1669 self.dirstate.update([f], 'm')
1669 self.dirstate.update([f], 'm')
1670 else:
1670 else:
1671 # We've update-merged a locally modified file, so
1671 # We've update-merged a locally modified file, so
1672 # we set the dirstate to emulate a normal checkout
1672 # we set the dirstate to emulate a normal checkout
1673 # of that file some time in the past. Thus our
1673 # of that file some time in the past. Thus our
1674 # merge will appear as a normal local file
1674 # merge will appear as a normal local file
1675 # modification.
1675 # modification.
1676 f_len = len(self.file(f).read(other))
1676 f_len = len(self.file(f).read(other))
1677 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1677 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1678
1678
1679 remove.sort()
1679 remove.sort()
1680 for f in remove:
1680 for f in remove:
1681 self.ui.note(_("removing %s\n") % f)
1681 self.ui.note(_("removing %s\n") % f)
1682 util.audit_path(f)
1682 try:
1683 try:
1683 util.unlink(self.wjoin(f))
1684 util.unlink(self.wjoin(f))
1684 except OSError, inst:
1685 except OSError, inst:
1685 if inst.errno != errno.ENOENT:
1686 if inst.errno != errno.ENOENT:
1686 self.ui.warn(_("update failed to remove %s: %s!\n") %
1687 self.ui.warn(_("update failed to remove %s: %s!\n") %
1687 (f, inst.strerror))
1688 (f, inst.strerror))
1688 if moddirstate:
1689 if moddirstate:
1689 if branch_merge:
1690 if branch_merge:
1690 self.dirstate.update(remove, 'r')
1691 self.dirstate.update(remove, 'r')
1691 else:
1692 else:
1692 self.dirstate.forget(remove)
1693 self.dirstate.forget(remove)
1693
1694
1694 if moddirstate:
1695 if moddirstate:
1695 self.dirstate.setparents(p1, p2)
1696 self.dirstate.setparents(p1, p2)
1696 return err
1697 return err
1697
1698
1698 def merge3(self, fn, my, other):
1699 def merge3(self, fn, my, other):
1699 """perform a 3-way merge in the working directory"""
1700 """perform a 3-way merge in the working directory"""
1700
1701
1701 def temp(prefix, node):
1702 def temp(prefix, node):
1702 pre = "%s~%s." % (os.path.basename(fn), prefix)
1703 pre = "%s~%s." % (os.path.basename(fn), prefix)
1703 (fd, name) = tempfile.mkstemp("", pre)
1704 (fd, name) = tempfile.mkstemp("", pre)
1704 f = os.fdopen(fd, "wb")
1705 f = os.fdopen(fd, "wb")
1705 self.wwrite(fn, fl.read(node), f)
1706 self.wwrite(fn, fl.read(node), f)
1706 f.close()
1707 f.close()
1707 return name
1708 return name
1708
1709
1709 fl = self.file(fn)
1710 fl = self.file(fn)
1710 base = fl.ancestor(my, other)
1711 base = fl.ancestor(my, other)
1711 a = self.wjoin(fn)
1712 a = self.wjoin(fn)
1712 b = temp("base", base)
1713 b = temp("base", base)
1713 c = temp("other", other)
1714 c = temp("other", other)
1714
1715
1715 self.ui.note(_("resolving %s\n") % fn)
1716 self.ui.note(_("resolving %s\n") % fn)
1716 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1717 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1717 (fn, short(my), short(other), short(base)))
1718 (fn, short(my), short(other), short(base)))
1718
1719
1719 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1720 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1720 or "hgmerge")
1721 or "hgmerge")
1721 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1722 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1722 if r:
1723 if r:
1723 self.ui.warn(_("merging %s failed!\n") % fn)
1724 self.ui.warn(_("merging %s failed!\n") % fn)
1724
1725
1725 os.unlink(b)
1726 os.unlink(b)
1726 os.unlink(c)
1727 os.unlink(c)
1727 return r
1728 return r
1728
1729
1729 def verify(self):
1730 def verify(self):
1730 filelinkrevs = {}
1731 filelinkrevs = {}
1731 filenodes = {}
1732 filenodes = {}
1732 changesets = revisions = files = 0
1733 changesets = revisions = files = 0
1733 errors = [0]
1734 errors = [0]
1734 neededmanifests = {}
1735 neededmanifests = {}
1735
1736
1736 def err(msg):
1737 def err(msg):
1737 self.ui.warn(msg + "\n")
1738 self.ui.warn(msg + "\n")
1738 errors[0] += 1
1739 errors[0] += 1
1739
1740
1740 def checksize(obj, name):
1741 def checksize(obj, name):
1741 d = obj.checksize()
1742 d = obj.checksize()
1742 if d[0]:
1743 if d[0]:
1743 err(_("%s data length off by %d bytes") % (name, d[0]))
1744 err(_("%s data length off by %d bytes") % (name, d[0]))
1744 if d[1]:
1745 if d[1]:
1745 err(_("%s index contains %d extra bytes") % (name, d[1]))
1746 err(_("%s index contains %d extra bytes") % (name, d[1]))
1746
1747
1747 seen = {}
1748 seen = {}
1748 self.ui.status(_("checking changesets\n"))
1749 self.ui.status(_("checking changesets\n"))
1749 checksize(self.changelog, "changelog")
1750 checksize(self.changelog, "changelog")
1750
1751
1751 for i in range(self.changelog.count()):
1752 for i in range(self.changelog.count()):
1752 changesets += 1
1753 changesets += 1
1753 n = self.changelog.node(i)
1754 n = self.changelog.node(i)
1754 l = self.changelog.linkrev(n)
1755 l = self.changelog.linkrev(n)
1755 if l != i:
1756 if l != i:
1756 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1757 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1757 if n in seen:
1758 if n in seen:
1758 err(_("duplicate changeset at revision %d") % i)
1759 err(_("duplicate changeset at revision %d") % i)
1759 seen[n] = 1
1760 seen[n] = 1
1760
1761
1761 for p in self.changelog.parents(n):
1762 for p in self.changelog.parents(n):
1762 if p not in self.changelog.nodemap:
1763 if p not in self.changelog.nodemap:
1763 err(_("changeset %s has unknown parent %s") %
1764 err(_("changeset %s has unknown parent %s") %
1764 (short(n), short(p)))
1765 (short(n), short(p)))
1765 try:
1766 try:
1766 changes = self.changelog.read(n)
1767 changes = self.changelog.read(n)
1767 except KeyboardInterrupt:
1768 except KeyboardInterrupt:
1768 self.ui.warn(_("interrupted"))
1769 self.ui.warn(_("interrupted"))
1769 raise
1770 raise
1770 except Exception, inst:
1771 except Exception, inst:
1771 err(_("unpacking changeset %s: %s") % (short(n), inst))
1772 err(_("unpacking changeset %s: %s") % (short(n), inst))
1772
1773
1773 neededmanifests[changes[0]] = n
1774 neededmanifests[changes[0]] = n
1774
1775
1775 for f in changes[3]:
1776 for f in changes[3]:
1776 filelinkrevs.setdefault(f, []).append(i)
1777 filelinkrevs.setdefault(f, []).append(i)
1777
1778
1778 seen = {}
1779 seen = {}
1779 self.ui.status(_("checking manifests\n"))
1780 self.ui.status(_("checking manifests\n"))
1780 checksize(self.manifest, "manifest")
1781 checksize(self.manifest, "manifest")
1781
1782
1782 for i in range(self.manifest.count()):
1783 for i in range(self.manifest.count()):
1783 n = self.manifest.node(i)
1784 n = self.manifest.node(i)
1784 l = self.manifest.linkrev(n)
1785 l = self.manifest.linkrev(n)
1785
1786
1786 if l < 0 or l >= self.changelog.count():
1787 if l < 0 or l >= self.changelog.count():
1787 err(_("bad manifest link (%d) at revision %d") % (l, i))
1788 err(_("bad manifest link (%d) at revision %d") % (l, i))
1788
1789
1789 if n in neededmanifests:
1790 if n in neededmanifests:
1790 del neededmanifests[n]
1791 del neededmanifests[n]
1791
1792
1792 if n in seen:
1793 if n in seen:
1793 err(_("duplicate manifest at revision %d") % i)
1794 err(_("duplicate manifest at revision %d") % i)
1794
1795
1795 seen[n] = 1
1796 seen[n] = 1
1796
1797
1797 for p in self.manifest.parents(n):
1798 for p in self.manifest.parents(n):
1798 if p not in self.manifest.nodemap:
1799 if p not in self.manifest.nodemap:
1799 err(_("manifest %s has unknown parent %s") %
1800 err(_("manifest %s has unknown parent %s") %
1800 (short(n), short(p)))
1801 (short(n), short(p)))
1801
1802
1802 try:
1803 try:
1803 delta = mdiff.patchtext(self.manifest.delta(n))
1804 delta = mdiff.patchtext(self.manifest.delta(n))
1804 except KeyboardInterrupt:
1805 except KeyboardInterrupt:
1805 self.ui.warn(_("interrupted"))
1806 self.ui.warn(_("interrupted"))
1806 raise
1807 raise
1807 except Exception, inst:
1808 except Exception, inst:
1808 err(_("unpacking manifest %s: %s") % (short(n), inst))
1809 err(_("unpacking manifest %s: %s") % (short(n), inst))
1809
1810
1810 ff = [ l.split('\0') for l in delta.splitlines() ]
1811 ff = [ l.split('\0') for l in delta.splitlines() ]
1811 for f, fn in ff:
1812 for f, fn in ff:
1812 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1813 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1813
1814
1814 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1815 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1815
1816
1816 for m, c in neededmanifests.items():
1817 for m, c in neededmanifests.items():
1817 err(_("Changeset %s refers to unknown manifest %s") %
1818 err(_("Changeset %s refers to unknown manifest %s") %
1818 (short(m), short(c)))
1819 (short(m), short(c)))
1819 del neededmanifests
1820 del neededmanifests
1820
1821
1821 for f in filenodes:
1822 for f in filenodes:
1822 if f not in filelinkrevs:
1823 if f not in filelinkrevs:
1823 err(_("file %s in manifest but not in changesets") % f)
1824 err(_("file %s in manifest but not in changesets") % f)
1824
1825
1825 for f in filelinkrevs:
1826 for f in filelinkrevs:
1826 if f not in filenodes:
1827 if f not in filenodes:
1827 err(_("file %s in changeset but not in manifest") % f)
1828 err(_("file %s in changeset but not in manifest") % f)
1828
1829
1829 self.ui.status(_("checking files\n"))
1830 self.ui.status(_("checking files\n"))
1830 ff = filenodes.keys()
1831 ff = filenodes.keys()
1831 ff.sort()
1832 ff.sort()
1832 for f in ff:
1833 for f in ff:
1833 if f == "/dev/null":
1834 if f == "/dev/null":
1834 continue
1835 continue
1835 files += 1
1836 files += 1
1836 fl = self.file(f)
1837 fl = self.file(f)
1837 checksize(fl, f)
1838 checksize(fl, f)
1838
1839
1839 nodes = {nullid: 1}
1840 nodes = {nullid: 1}
1840 seen = {}
1841 seen = {}
1841 for i in range(fl.count()):
1842 for i in range(fl.count()):
1842 revisions += 1
1843 revisions += 1
1843 n = fl.node(i)
1844 n = fl.node(i)
1844
1845
1845 if n in seen:
1846 if n in seen:
1846 err(_("%s: duplicate revision %d") % (f, i))
1847 err(_("%s: duplicate revision %d") % (f, i))
1847 if n not in filenodes[f]:
1848 if n not in filenodes[f]:
1848 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1849 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1849 else:
1850 else:
1850 del filenodes[f][n]
1851 del filenodes[f][n]
1851
1852
1852 flr = fl.linkrev(n)
1853 flr = fl.linkrev(n)
1853 if flr not in filelinkrevs[f]:
1854 if flr not in filelinkrevs[f]:
1854 err(_("%s:%s points to unexpected changeset %d")
1855 err(_("%s:%s points to unexpected changeset %d")
1855 % (f, short(n), flr))
1856 % (f, short(n), flr))
1856 else:
1857 else:
1857 filelinkrevs[f].remove(flr)
1858 filelinkrevs[f].remove(flr)
1858
1859
1859 # verify contents
1860 # verify contents
1860 try:
1861 try:
1861 t = fl.read(n)
1862 t = fl.read(n)
1862 except KeyboardInterrupt:
1863 except KeyboardInterrupt:
1863 self.ui.warn(_("interrupted"))
1864 self.ui.warn(_("interrupted"))
1864 raise
1865 raise
1865 except Exception, inst:
1866 except Exception, inst:
1866 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1867 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1867
1868
1868 # verify parents
1869 # verify parents
1869 (p1, p2) = fl.parents(n)
1870 (p1, p2) = fl.parents(n)
1870 if p1 not in nodes:
1871 if p1 not in nodes:
1871 err(_("file %s:%s unknown parent 1 %s") %
1872 err(_("file %s:%s unknown parent 1 %s") %
1872 (f, short(n), short(p1)))
1873 (f, short(n), short(p1)))
1873 if p2 not in nodes:
1874 if p2 not in nodes:
1874 err(_("file %s:%s unknown parent 2 %s") %
1875 err(_("file %s:%s unknown parent 2 %s") %
1875 (f, short(n), short(p2)))
1876 (f, short(n), short(p2)))
1876 nodes[n] = 1
1877 nodes[n] = 1
1877
1878
1878 # cross-check
1879 # cross-check
1879 for node in filenodes[f]:
1880 for node in filenodes[f]:
1880 err(_("node %s in manifests not in %s") % (hex(node), f))
1881 err(_("node %s in manifests not in %s") % (hex(node), f))
1881
1882
1882 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1883 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1883 (files, changesets, revisions))
1884 (files, changesets, revisions))
1884
1885
1885 if errors[0]:
1886 if errors[0]:
1886 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1887 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1887 return 1
1888 return 1
1888
1889
1889 # used to avoid circular references so destructors work
1890 # used to avoid circular references so destructors work
1890 def aftertrans(base):
1891 def aftertrans(base):
1891 p = base
1892 p = base
1892 def a():
1893 def a():
1893 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1894 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1894 util.rename(os.path.join(p, "journal.dirstate"),
1895 util.rename(os.path.join(p, "journal.dirstate"),
1895 os.path.join(p, "undo.dirstate"))
1896 os.path.join(p, "undo.dirstate"))
1896 return a
1897 return a
1897
1898
@@ -1,705 +1,715 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 import os, errno
13 import os, errno
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import *
15 from demandload import *
16 demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
16 demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
17 demandload(globals(), "threading time")
17 demandload(globals(), "threading time")
18
18
19 def pipefilter(s, cmd):
19 def pipefilter(s, cmd):
20 '''filter string S through command CMD, returning its output'''
20 '''filter string S through command CMD, returning its output'''
21 (pout, pin) = popen2.popen2(cmd, -1, 'b')
21 (pout, pin) = popen2.popen2(cmd, -1, 'b')
22 def writer():
22 def writer():
23 pin.write(s)
23 pin.write(s)
24 pin.close()
24 pin.close()
25
25
26 # we should use select instead on UNIX, but this will work on most
26 # we should use select instead on UNIX, but this will work on most
27 # systems, including Windows
27 # systems, including Windows
28 w = threading.Thread(target=writer)
28 w = threading.Thread(target=writer)
29 w.start()
29 w.start()
30 f = pout.read()
30 f = pout.read()
31 pout.close()
31 pout.close()
32 w.join()
32 w.join()
33 return f
33 return f
34
34
35 def tempfilter(s, cmd):
35 def tempfilter(s, cmd):
36 '''filter string S through a pair of temporary files with CMD.
36 '''filter string S through a pair of temporary files with CMD.
37 CMD is used as a template to create the real command to be run,
37 CMD is used as a template to create the real command to be run,
38 with the strings INFILE and OUTFILE replaced by the real names of
38 with the strings INFILE and OUTFILE replaced by the real names of
39 the temporary files generated.'''
39 the temporary files generated.'''
40 inname, outname = None, None
40 inname, outname = None, None
41 try:
41 try:
42 infd, inname = tempfile.mkstemp(prefix='hgfin')
42 infd, inname = tempfile.mkstemp(prefix='hgfin')
43 fp = os.fdopen(infd, 'wb')
43 fp = os.fdopen(infd, 'wb')
44 fp.write(s)
44 fp.write(s)
45 fp.close()
45 fp.close()
46 outfd, outname = tempfile.mkstemp(prefix='hgfout')
46 outfd, outname = tempfile.mkstemp(prefix='hgfout')
47 os.close(outfd)
47 os.close(outfd)
48 cmd = cmd.replace('INFILE', inname)
48 cmd = cmd.replace('INFILE', inname)
49 cmd = cmd.replace('OUTFILE', outname)
49 cmd = cmd.replace('OUTFILE', outname)
50 code = os.system(cmd)
50 code = os.system(cmd)
51 if code: raise Abort(_("command '%s' failed: %s") %
51 if code: raise Abort(_("command '%s' failed: %s") %
52 (cmd, explain_exit(code)[0]))
52 (cmd, explain_exit(code)[0]))
53 return open(outname, 'rb').read()
53 return open(outname, 'rb').read()
54 finally:
54 finally:
55 try:
55 try:
56 if inname: os.unlink(inname)
56 if inname: os.unlink(inname)
57 except: pass
57 except: pass
58 try:
58 try:
59 if outname: os.unlink(outname)
59 if outname: os.unlink(outname)
60 except: pass
60 except: pass
61
61
62 filtertable = {
62 filtertable = {
63 'tempfile:': tempfilter,
63 'tempfile:': tempfilter,
64 'pipe:': pipefilter,
64 'pipe:': pipefilter,
65 }
65 }
66
66
67 def filter(s, cmd):
67 def filter(s, cmd):
68 "filter a string through a command that transforms its input to its output"
68 "filter a string through a command that transforms its input to its output"
69 for name, fn in filtertable.iteritems():
69 for name, fn in filtertable.iteritems():
70 if cmd.startswith(name):
70 if cmd.startswith(name):
71 return fn(s, cmd[len(name):].lstrip())
71 return fn(s, cmd[len(name):].lstrip())
72 return pipefilter(s, cmd)
72 return pipefilter(s, cmd)
73
73
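filter() dispatches on the command prefix: 'pipe:' (and bare commands) stream the data through the command's stdin and stdout, while 'tempfile:' writes the data to INFILE and reads the result back from OUTFILE, which suits tools that cannot act as a pipe. A small usage sketch; the tr and sort commands are assumptions about the surrounding Unix environment, not something util provides:

    from mercurial import util

    # pipe filter: data goes through the command's stdin/stdout
    assert util.filter('hello\n', 'pipe: tr a-z A-Z') == 'HELLO\n'

    # tempfile filter: INFILE/OUTFILE are replaced by real temporary file names
    assert util.filter('b\na\n', 'tempfile: sort INFILE > OUTFILE') == 'a\nb\n'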
74 def patch(strip, patchname, ui):
74 def patch(strip, patchname, ui):
75 """apply the patch <patchname> to the working directory.
75 """apply the patch <patchname> to the working directory.
76 a list of patched files is returned"""
76 a list of patched files is returned"""
77 fp = os.popen('patch -p%d < "%s"' % (strip, patchname))
77 fp = os.popen('patch -p%d < "%s"' % (strip, patchname))
78 files = {}
78 files = {}
79 for line in fp:
79 for line in fp:
80 line = line.rstrip()
80 line = line.rstrip()
81 ui.status("%s\n" % line)
81 ui.status("%s\n" % line)
82 if line.startswith('patching file '):
82 if line.startswith('patching file '):
83 pf = parse_patch_output(line)
83 pf = parse_patch_output(line)
84 files.setdefault(pf, 1)
84 files.setdefault(pf, 1)
85 code = fp.close()
85 code = fp.close()
86 if code:
86 if code:
87 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
87 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
88 return files.keys()
88 return files.keys()
89
89
90 def binary(s):
90 def binary(s):
91 """return true if a string is binary data using diff's heuristic"""
91 """return true if a string is binary data using diff's heuristic"""
92 if s and '\0' in s[:4096]:
92 if s and '\0' in s[:4096]:
93 return True
93 return True
94 return False
94 return False
95
95
96 def unique(g):
96 def unique(g):
97 """return the uniq elements of iterable g"""
97 """return the uniq elements of iterable g"""
98 seen = {}
98 seen = {}
99 for f in g:
99 for f in g:
100 if f not in seen:
100 if f not in seen:
101 seen[f] = 1
101 seen[f] = 1
102 yield f
102 yield f
103
103
104 class Abort(Exception):
104 class Abort(Exception):
105 """Raised if a command needs to print an error and exit."""
105 """Raised if a command needs to print an error and exit."""
106
106
107 def always(fn): return True
107 def always(fn): return True
108 def never(fn): return False
108 def never(fn): return False
109
109
110 def patkind(name, dflt_pat='glob'):
110 def patkind(name, dflt_pat='glob'):
111 """Split a string into an optional pattern kind prefix and the
111 """Split a string into an optional pattern kind prefix and the
112 actual pattern."""
112 actual pattern."""
113 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
113 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
114 if name.startswith(prefix + ':'): return name.split(':', 1)
114 if name.startswith(prefix + ':'): return name.split(':', 1)
115 return dflt_pat, name
115 return dflt_pat, name
116
116
117 def globre(pat, head='^', tail='$'):
117 def globre(pat, head='^', tail='$'):
118 "convert a glob pattern into a regexp"
118 "convert a glob pattern into a regexp"
119 i, n = 0, len(pat)
119 i, n = 0, len(pat)
120 res = ''
120 res = ''
121 group = False
121 group = False
122 def peek(): return i < n and pat[i]
122 def peek(): return i < n and pat[i]
123 while i < n:
123 while i < n:
124 c = pat[i]
124 c = pat[i]
125 i = i+1
125 i = i+1
126 if c == '*':
126 if c == '*':
127 if peek() == '*':
127 if peek() == '*':
128 i += 1
128 i += 1
129 res += '.*'
129 res += '.*'
130 else:
130 else:
131 res += '[^/]*'
131 res += '[^/]*'
132 elif c == '?':
132 elif c == '?':
133 res += '.'
133 res += '.'
134 elif c == '[':
134 elif c == '[':
135 j = i
135 j = i
136 if j < n and pat[j] in '!]':
136 if j < n and pat[j] in '!]':
137 j += 1
137 j += 1
138 while j < n and pat[j] != ']':
138 while j < n and pat[j] != ']':
139 j += 1
139 j += 1
140 if j >= n:
140 if j >= n:
141 res += '\\['
141 res += '\\['
142 else:
142 else:
143 stuff = pat[i:j].replace('\\','\\\\')
143 stuff = pat[i:j].replace('\\','\\\\')
144 i = j + 1
144 i = j + 1
145 if stuff[0] == '!':
145 if stuff[0] == '!':
146 stuff = '^' + stuff[1:]
146 stuff = '^' + stuff[1:]
147 elif stuff[0] == '^':
147 elif stuff[0] == '^':
148 stuff = '\\' + stuff
148 stuff = '\\' + stuff
149 res = '%s[%s]' % (res, stuff)
149 res = '%s[%s]' % (res, stuff)
150 elif c == '{':
150 elif c == '{':
151 group = True
151 group = True
152 res += '(?:'
152 res += '(?:'
153 elif c == '}' and group:
153 elif c == '}' and group:
154 res += ')'
154 res += ')'
155 group = False
155 group = False
156 elif c == ',' and group:
156 elif c == ',' and group:
157 res += '|'
157 res += '|'
158 else:
158 else:
159 res += re.escape(c)
159 res += re.escape(c)
160 return head + res + tail
160 return head + res + tail
161
161
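A couple of concrete patterns make globre easier to read: a single '*' matches within one path component, '**' may cross directory separators, and '{a,b}' becomes an alternation. For example (compiled through the standard re module; the exact escaping in the generated pattern may vary):

    import re
    from mercurial.util import globre

    pat = re.compile(globre('src/**/*.py'))
    assert pat.match('src/pkg/sub/mod.py')      # '**' spans directories
    assert not pat.match('src/pkg/mod.pyc')

    pat = re.compile(globre('*.txt'))
    assert pat.match('notes.txt')
    assert not pat.match('docs/notes.txt')      # a plain '*' stops at '/'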
162 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
162 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
163
163
164 def pathto(n1, n2):
164 def pathto(n1, n2):
165 '''return the relative path from one place to another.
165 '''return the relative path from one place to another.
166 this returns a path in the form used by the local filesystem, not hg.'''
166 this returns a path in the form used by the local filesystem, not hg.'''
167 if not n1: return localpath(n2)
167 if not n1: return localpath(n2)
168 a, b = n1.split('/'), n2.split('/')
168 a, b = n1.split('/'), n2.split('/')
169 a.reverse()
169 a.reverse()
170 b.reverse()
170 b.reverse()
171 while a and b and a[-1] == b[-1]:
171 while a and b and a[-1] == b[-1]:
172 a.pop()
172 a.pop()
173 b.pop()
173 b.pop()
174 b.reverse()
174 b.reverse()
175 return os.sep.join((['..'] * len(a)) + b)
175 return os.sep.join((['..'] * len(a)) + b)
176
176
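pathto strips the common leading components of the two paths and climbs back up with '..' entries, joining the result with os.sep so it can be handed straight to the local filesystem. On a POSIX system, for instance:

    from mercurial.util import pathto

    assert pathto('foo/bar', 'foo/baz/qux') == '../baz/qux'
    assert pathto('', 'foo/bar') == 'foo/bar'   # empty start: just localpath()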
177 def canonpath(root, cwd, myname):
177 def canonpath(root, cwd, myname):
178 """return the canonical path of myname, given cwd and root"""
178 """return the canonical path of myname, given cwd and root"""
179 if root == os.sep:
179 if root == os.sep:
180 rootsep = os.sep
180 rootsep = os.sep
181 else:
181 else:
182 rootsep = root + os.sep
182 rootsep = root + os.sep
183 name = myname
183 name = myname
184 if not name.startswith(os.sep):
184 if not name.startswith(os.sep):
185 name = os.path.join(root, cwd, name)
185 name = os.path.join(root, cwd, name)
186 name = os.path.normpath(name)
186 name = os.path.normpath(name)
187 if name.startswith(rootsep):
187 if name.startswith(rootsep):
188 return pconvert(name[len(rootsep):])
188 return pconvert(name[len(rootsep):])
189 elif name == root:
189 elif name == root:
190 return ''
190 return ''
191 else:
191 else:
192 raise Abort('%s not under root' % myname)
192 raise Abort('%s not under root' % myname)
193
193
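canonpath anchors a user-supplied name at the repository root (joining cwd for relative names), normalizes it, and aborts on anything that falls outside the root, which is the same containment property audit_path enforces for paths read back from the store. Roughly, with a made-up root of /repo on a POSIX system:

    from mercurial import util

    assert util.canonpath('/repo', 'src', 'mod.py') == 'src/mod.py'
    assert util.canonpath('/repo', '', '/repo') == ''     # the root itself
    try:
        util.canonpath('/repo', '', '/etc/passwd')        # not under the root
    except util.Abort:
        pass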
194 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
194 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
195 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
195 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
196
196
197 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
197 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
198 if os.name == 'nt':
198 if os.name == 'nt':
199 dflt_pat = 'glob'
199 dflt_pat = 'glob'
200 else:
200 else:
201 dflt_pat = 'relpath'
201 dflt_pat = 'relpath'
202 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
202 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
203
203
204 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
204 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
205 """build a function to match a set of file patterns
205 """build a function to match a set of file patterns
206
206
207 arguments:
207 arguments:
208 canonroot - the canonical root of the tree you're matching against
208 canonroot - the canonical root of the tree you're matching against
209 cwd - the current working directory, if relevant
209 cwd - the current working directory, if relevant
210 names - patterns to find
210 names - patterns to find
211 inc - patterns to include
211 inc - patterns to include
212 exc - patterns to exclude
212 exc - patterns to exclude
213 head - a regex to prepend to patterns to control whether a match is rooted
213 head - a regex to prepend to patterns to control whether a match is rooted
214
214
215 a pattern is one of:
215 a pattern is one of:
216 'glob:<rooted glob>'
216 'glob:<rooted glob>'
217 're:<rooted regexp>'
217 're:<rooted regexp>'
218 'path:<rooted path>'
218 'path:<rooted path>'
219 'relglob:<relative glob>'
219 'relglob:<relative glob>'
220 'relpath:<relative path>'
220 'relpath:<relative path>'
221 'relre:<relative regexp>'
221 'relre:<relative regexp>'
222 '<rooted path or regexp>'
222 '<rooted path or regexp>'
223
223
224 returns:
224 returns:
225 a 3-tuple containing
225 a 3-tuple containing
226 - list of explicit non-pattern names passed in
226 - list of explicit non-pattern names passed in
227 - a bool match(filename) function
227 - a bool match(filename) function
228 - a bool indicating if any patterns were passed in
228 - a bool indicating if any patterns were passed in
229
229
230 todo:
230 todo:
231 make head regex a rooted bool
231 make head regex a rooted bool
232 """
232 """
233
233
234 def contains_glob(name):
234 def contains_glob(name):
235 for c in name:
235 for c in name:
236 if c in _globchars: return True
236 if c in _globchars: return True
237 return False
237 return False
238
238
239 def regex(kind, name, tail):
239 def regex(kind, name, tail):
240 '''convert a pattern into a regular expression'''
240 '''convert a pattern into a regular expression'''
241 if kind == 're':
241 if kind == 're':
242 return name
242 return name
243 elif kind == 'path':
243 elif kind == 'path':
244 return '^' + re.escape(name) + '(?:/|$)'
244 return '^' + re.escape(name) + '(?:/|$)'
245 elif kind == 'relglob':
245 elif kind == 'relglob':
246 return head + globre(name, '(?:|.*/)', tail)
246 return head + globre(name, '(?:|.*/)', tail)
247 elif kind == 'relpath':
247 elif kind == 'relpath':
248 return head + re.escape(name) + tail
248 return head + re.escape(name) + tail
249 elif kind == 'relre':
249 elif kind == 'relre':
250 if name.startswith('^'):
250 if name.startswith('^'):
251 return name
251 return name
252 return '.*' + name
252 return '.*' + name
253 return head + globre(name, '', tail)
253 return head + globre(name, '', tail)
254
254
255 def matchfn(pats, tail):
255 def matchfn(pats, tail):
256 """build a matching function from a set of patterns"""
256 """build a matching function from a set of patterns"""
257 if not pats:
257 if not pats:
258 return
258 return
259 matches = []
259 matches = []
260 for k, p in pats:
260 for k, p in pats:
261 try:
261 try:
262 pat = '(?:%s)' % regex(k, p, tail)
262 pat = '(?:%s)' % regex(k, p, tail)
263 matches.append(re.compile(pat).match)
263 matches.append(re.compile(pat).match)
264 except re.error:
264 except re.error:
265 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
265 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
266 else: raise Abort("invalid pattern (%s): %s" % (k, p))
266 else: raise Abort("invalid pattern (%s): %s" % (k, p))
267
267
268 def buildfn(text):
268 def buildfn(text):
269 for m in matches:
269 for m in matches:
270 r = m(text)
270 r = m(text)
271 if r:
271 if r:
272 return r
272 return r
273
273
274 return buildfn
274 return buildfn
275
275
276 def globprefix(pat):
276 def globprefix(pat):
277 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
277 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
278 root = []
278 root = []
279 for p in pat.split(os.sep):
279 for p in pat.split(os.sep):
280 if contains_glob(p): break
280 if contains_glob(p): break
281 root.append(p)
281 root.append(p)
282 return '/'.join(root)
282 return '/'.join(root)
283
283
284 pats = []
284 pats = []
285 files = []
285 files = []
286 roots = []
286 roots = []
287 for kind, name in [patkind(p, dflt_pat) for p in names]:
287 for kind, name in [patkind(p, dflt_pat) for p in names]:
288 if kind in ('glob', 'relpath'):
288 if kind in ('glob', 'relpath'):
289 name = canonpath(canonroot, cwd, name)
289 name = canonpath(canonroot, cwd, name)
290 if name == '':
290 if name == '':
291 kind, name = 'glob', '**'
291 kind, name = 'glob', '**'
292 if kind in ('glob', 'path', 're'):
292 if kind in ('glob', 'path', 're'):
293 pats.append((kind, name))
293 pats.append((kind, name))
294 if kind == 'glob':
294 if kind == 'glob':
295 root = globprefix(name)
295 root = globprefix(name)
296 if root: roots.append(root)
296 if root: roots.append(root)
297 elif kind == 'relpath':
297 elif kind == 'relpath':
298 files.append((kind, name))
298 files.append((kind, name))
299 roots.append(name)
299 roots.append(name)
300
300
301 patmatch = matchfn(pats, '$') or always
301 patmatch = matchfn(pats, '$') or always
302 filematch = matchfn(files, '(?:/|$)') or always
302 filematch = matchfn(files, '(?:/|$)') or always
303 incmatch = always
303 incmatch = always
304 if inc:
304 if inc:
305 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
305 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
306 excmatch = lambda fn: False
306 excmatch = lambda fn: False
307 if exc:
307 if exc:
308 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
308 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
309
309
310 return (roots,
310 return (roots,
311 lambda fn: (incmatch(fn) and not excmatch(fn) and
311 lambda fn: (incmatch(fn) and not excmatch(fn) and
312 (fn.endswith('/') or
312 (fn.endswith('/') or
313 (not pats and not files) or
313 (not pats and not files) or
314 (pats and patmatch(fn)) or
314 (pats and patmatch(fn)) or
315 (files and filematch(fn)))),
315 (files and filematch(fn)))),
316 (inc or exc or (pats and pats != [('glob', '**')])) and True)
316 (inc or exc or (pats and pats != [('glob', '**')])) and True)
317
317
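Callers normally reach this machinery through matcher() or cmdmatcher(); the returned triple is (roots worth walking, a match(filename) predicate, whether any real patterns were supplied). A hedged usage sketch against an imaginary repository rooted at /repo on a POSIX system:

    from mercurial import util

    roots, match, anypats = util.matcher('/repo',
                                         names=['glob:src/**/*.py'],
                                         exc=['relglob:test_*'])

    assert roots == ['src']                   # non-glob prefix, limits the walk
    assert match('src/pkg/mod.py')
    assert not match('src/pkg/test_mod.py')   # knocked out by the exclude
    assert anypats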
318 def system(cmd, errprefix=None):
318 def system(cmd, errprefix=None):
319 """execute a shell command that must succeed"""
319 """execute a shell command that must succeed"""
320 rc = os.system(cmd)
320 rc = os.system(cmd)
321 if rc:
321 if rc:
322 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
322 errmsg = "%s %s" % (os.path.basename(cmd.split(None, 1)[0]),
323 explain_exit(rc)[0])
323 explain_exit(rc)[0])
324 if errprefix:
324 if errprefix:
325 errmsg = "%s: %s" % (errprefix, errmsg)
325 errmsg = "%s: %s" % (errprefix, errmsg)
326 raise Abort(errmsg)
326 raise Abort(errmsg)
327
327
328 def rename(src, dst):
328 def rename(src, dst):
329 """forcibly rename a file"""
329 """forcibly rename a file"""
330 try:
330 try:
331 os.rename(src, dst)
331 os.rename(src, dst)
332 except:
332 except:
333 os.unlink(dst)
333 os.unlink(dst)
334 os.rename(src, dst)
334 os.rename(src, dst)
335
335
336 def unlink(f):
336 def unlink(f):
337 """unlink and remove the directory if it is empty"""
337 """unlink and remove the directory if it is empty"""
338 os.unlink(f)
338 os.unlink(f)
339 # try removing directories that might now be empty
339 # try removing directories that might now be empty
340 try: os.removedirs(os.path.dirname(f))
340 try: os.removedirs(os.path.dirname(f))
341 except: pass
341 except: pass
342
342
343 def copyfiles(src, dst, hardlink=None):
343 def copyfiles(src, dst, hardlink=None):
344 """Copy a directory tree using hardlinks if possible"""
344 """Copy a directory tree using hardlinks if possible"""
345
345
346 if hardlink is None:
346 if hardlink is None:
347 hardlink = (os.stat(src).st_dev ==
347 hardlink = (os.stat(src).st_dev ==
348 os.stat(os.path.dirname(dst)).st_dev)
348 os.stat(os.path.dirname(dst)).st_dev)
349
349
350 if os.path.isdir(src):
350 if os.path.isdir(src):
351 os.mkdir(dst)
351 os.mkdir(dst)
352 for name in os.listdir(src):
352 for name in os.listdir(src):
353 srcname = os.path.join(src, name)
353 srcname = os.path.join(src, name)
354 dstname = os.path.join(dst, name)
354 dstname = os.path.join(dst, name)
355 copyfiles(srcname, dstname, hardlink)
355 copyfiles(srcname, dstname, hardlink)
356 else:
356 else:
357 if hardlink:
357 if hardlink:
358 try:
358 try:
359 os_link(src, dst)
359 os_link(src, dst)
360 except:
360 except:
361 hardlink = False
361 hardlink = False
362 shutil.copy(src, dst)
362 shutil.copy(src, dst)
363 else:
363 else:
364 shutil.copy(src, dst)
364 shutil.copy(src, dst)
365
365
366 def opener(base):
366 def audit_path(path):
367 """Abort if path contains dangerous components"""
368 parts = os.path.normcase(path).split(os.sep)
369 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
370 or os.pardir in parts):
371 raise Abort(_("path contains illegal component: %s\n") % path)
372
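audit_path is the new check introduced here: it rejects any repository-relative path that carries a drive letter, starts with an empty or '.hg' component, or climbs out of the tree with '..'. A rough illustration of what it accepts and refuses, assuming a POSIX os.sep and that the mercurial package from this tree is importable:

    from mercurial import util

    for p in ('data/foo.i', 'a/b/c'):
        util.audit_path(p)              # stays inside the repository: accepted

    for p in ('../escape', '.hg/hgrc', '/etc/passwd'):
        try:
            util.audit_path(p)
        except util.Abort:
            pass                        # each of these is refused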
373 def opener(base, audit=True):
367 """
374 """
368 return a function that opens files relative to base
375 return a function that opens files relative to base
369
376
370 this function is used to hide the details of COW semantics and
377 this function is used to hide the details of COW semantics and
371 remote file access from higher level code.
378 remote file access from higher level code.
372 """
379 """
373 p = base
380 p = base
381 audit_p = audit
374
382
375 def mktempcopy(name):
383 def mktempcopy(name):
376 d, fn = os.path.split(name)
384 d, fn = os.path.split(name)
377 fd, temp = tempfile.mkstemp(prefix=fn, dir=d)
385 fd, temp = tempfile.mkstemp(prefix=fn, dir=d)
378 fp = os.fdopen(fd, "wb")
386 fp = os.fdopen(fd, "wb")
379 try:
387 try:
380 fp.write(file(name, "rb").read())
388 fp.write(file(name, "rb").read())
381 except:
389 except:
382 try: os.unlink(temp)
390 try: os.unlink(temp)
383 except: pass
391 except: pass
384 raise
392 raise
385 fp.close()
393 fp.close()
386 st = os.lstat(name)
394 st = os.lstat(name)
387 os.chmod(temp, st.st_mode)
395 os.chmod(temp, st.st_mode)
388 return temp
396 return temp
389
397
390 class atomicfile(file):
398 class atomicfile(file):
391 """the file will only be copied on close"""
399 """the file will only be copied on close"""
392 def __init__(self, name, mode, atomic=False):
400 def __init__(self, name, mode, atomic=False):
393 self.__name = name
401 self.__name = name
394 self.temp = mktempcopy(name)
402 self.temp = mktempcopy(name)
395 file.__init__(self, self.temp, mode)
403 file.__init__(self, self.temp, mode)
396 def close(self):
404 def close(self):
397 if not self.closed:
405 if not self.closed:
398 file.close(self)
406 file.close(self)
399 rename(self.temp, self.__name)
407 rename(self.temp, self.__name)
400 def __del__(self):
408 def __del__(self):
401 self.close()
409 self.close()
402
410
403 def o(path, mode="r", text=False, atomic=False):
411 def o(path, mode="r", text=False, atomic=False):
412 if audit_p:
413 audit_path(path)
404 f = os.path.join(p, path)
414 f = os.path.join(p, path)
405
415
406 if not text:
416 if not text:
407 mode += "b" # for that other OS
417 mode += "b" # for that other OS
408
418
409 if mode[0] != "r":
419 if mode[0] != "r":
410 try:
420 try:
411 nlink = nlinks(f)
421 nlink = nlinks(f)
412 except OSError:
422 except OSError:
413 d = os.path.dirname(f)
423 d = os.path.dirname(f)
414 if not os.path.isdir(d):
424 if not os.path.isdir(d):
415 os.makedirs(d)
425 os.makedirs(d)
416 else:
426 else:
417 if atomic:
427 if atomic:
418 return atomicfile(f, mode)
428 return atomicfile(f, mode)
419 if nlink > 1:
429 if nlink > 1:
420 rename(mktempcopy(f), f)
430 rename(mktempcopy(f), f)
421 return file(f, mode)
431 return file(f, mode)
422
432
423 return o
433 return o
424
434
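With the new audit flag every path handed to an opener is checked by audit_path before being joined to the base directory, so store and working-directory code can no longer be steered outside the tree; audit=False restores the old unchecked behaviour for the few callers that need it. A small sketch using a throwaway directory from tempfile:

    import tempfile
    from mercurial import util

    base = tempfile.mkdtemp()
    op = util.opener(base)                  # auditing opener rooted at base

    f = op('data/notes.txt', 'w')           # parent directories are created on demand
    f.write('hello\n')
    f.close()

    try:
        op('../escape.txt', 'w')            # would land outside base
    except util.Abort:
        pass                                # refused by audit_path

    rawop = util.opener(base, audit=False)  # unchecked variant, use with care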
425 def _makelock_file(info, pathname):
435 def _makelock_file(info, pathname):
426 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
436 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
427 os.write(ld, info)
437 os.write(ld, info)
428 os.close(ld)
438 os.close(ld)
429
439
430 def _readlock_file(pathname):
440 def _readlock_file(pathname):
431 return file(pathname).read()
441 return file(pathname).read()
432
442
433 def nlinks(pathname):
443 def nlinks(pathname):
434 """Return number of hardlinks for the given file."""
444 """Return number of hardlinks for the given file."""
435 return os.stat(pathname).st_nlink
445 return os.stat(pathname).st_nlink
436
446
437 if hasattr(os, 'link'):
447 if hasattr(os, 'link'):
438 os_link = os.link
448 os_link = os.link
439 else:
449 else:
440 def os_link(src, dst):
450 def os_link(src, dst):
441 raise OSError(0, _("Hardlinks not supported"))
451 raise OSError(0, _("Hardlinks not supported"))
442
452
443 # Platform specific variants
453 # Platform specific variants
444 if os.name == 'nt':
454 if os.name == 'nt':
445 demandload(globals(), "msvcrt")
455 demandload(globals(), "msvcrt")
446 nulldev = 'NUL:'
456 nulldev = 'NUL:'
447
457
448 class winstdout:
458 class winstdout:
449 '''stdout on windows misbehaves if sent through a pipe'''
459 '''stdout on windows misbehaves if sent through a pipe'''
450
460
451 def __init__(self, fp):
461 def __init__(self, fp):
452 self.fp = fp
462 self.fp = fp
453
463
454 def __getattr__(self, key):
464 def __getattr__(self, key):
455 return getattr(self.fp, key)
465 return getattr(self.fp, key)
456
466
457 def close(self):
467 def close(self):
458 try:
468 try:
459 self.fp.close()
469 self.fp.close()
460 except: pass
470 except: pass
461
471
462 def write(self, s):
472 def write(self, s):
463 try:
473 try:
464 return self.fp.write(s)
474 return self.fp.write(s)
465 except IOError, inst:
475 except IOError, inst:
466 if inst.errno != 0: raise
476 if inst.errno != 0: raise
467 self.close()
477 self.close()
468 raise IOError(errno.EPIPE, 'Broken pipe')
478 raise IOError(errno.EPIPE, 'Broken pipe')
469
479
470 sys.stdout = winstdout(sys.stdout)
480 sys.stdout = winstdout(sys.stdout)
471
481
472 try:
482 try:
473 import win32api, win32process
483 import win32api, win32process
474 filename = win32process.GetModuleFileNameEx(win32api.GetCurrentProcess(), 0)
484 filename = win32process.GetModuleFileNameEx(win32api.GetCurrentProcess(), 0)
475 systemrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
485 systemrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
476
486
477 except ImportError:
487 except ImportError:
478 systemrc = r'c:\mercurial\mercurial.ini'
488 systemrc = r'c:\mercurial\mercurial.ini'
479 pass
489 pass
480
490
481 rcpath = (systemrc,
491 rcpath = (systemrc,
482 os.path.join(os.path.expanduser('~'), 'mercurial.ini'))
492 os.path.join(os.path.expanduser('~'), 'mercurial.ini'))
483
493
484 def parse_patch_output(output_line):
494 def parse_patch_output(output_line):
485 """parses the output produced by patch and returns the file name"""
495 """parses the output produced by patch and returns the file name"""
486 pf = output_line[14:]
496 pf = output_line[14:]
487 if pf[0] == '`':
497 if pf[0] == '`':
488 pf = pf[1:-1] # Remove the quotes
498 pf = pf[1:-1] # Remove the quotes
489 return pf
499 return pf
490
500
491 try: # ActivePython can create hard links using win32file module
501 try: # ActivePython can create hard links using win32file module
492 import win32file
502 import win32file
493
503
494 def os_link(src, dst): # NB will only succeed on NTFS
504 def os_link(src, dst): # NB will only succeed on NTFS
495 win32file.CreateHardLink(dst, src)
505 win32file.CreateHardLink(dst, src)
496
506
497 def nlinks(pathname):
507 def nlinks(pathname):
498 """Return number of hardlinks for the given file."""
508 """Return number of hardlinks for the given file."""
499 try:
509 try:
500 fh = win32file.CreateFile(pathname,
510 fh = win32file.CreateFile(pathname,
501 win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
511 win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
502 None, win32file.OPEN_EXISTING, 0, None)
512 None, win32file.OPEN_EXISTING, 0, None)
503 res = win32file.GetFileInformationByHandle(fh)
513 res = win32file.GetFileInformationByHandle(fh)
504 fh.Close()
514 fh.Close()
505 return res[7]
515 return res[7]
506 except:
516 except:
507 return os.stat(pathname).st_nlink
517 return os.stat(pathname).st_nlink
508
518
509 except ImportError:
519 except ImportError:
510 pass
520 pass
511
521
512 def is_exec(f, last):
522 def is_exec(f, last):
513 return last
523 return last
514
524
515 def set_exec(f, mode):
525 def set_exec(f, mode):
516 pass
526 pass
517
527
518 def set_binary(fd):
528 def set_binary(fd):
519 msvcrt.setmode(fd.fileno(), os.O_BINARY)
529 msvcrt.setmode(fd.fileno(), os.O_BINARY)
520
530
521 def pconvert(path):
531 def pconvert(path):
522 return path.replace("\\", "/")
532 return path.replace("\\", "/")
523
533
524 def localpath(path):
534 def localpath(path):
525 return path.replace('/', '\\')
535 return path.replace('/', '\\')
526
536
527 def normpath(path):
537 def normpath(path):
528 return pconvert(os.path.normpath(path))
538 return pconvert(os.path.normpath(path))
529
539
530 makelock = _makelock_file
540 makelock = _makelock_file
531 readlock = _readlock_file
541 readlock = _readlock_file
532
542
533 def explain_exit(code):
543 def explain_exit(code):
534 return _("exited with status %d") % code, code
544 return _("exited with status %d") % code, code
535
545
536 else:
546 else:
537 nulldev = '/dev/null'
547 nulldev = '/dev/null'
538
548
539 def rcfiles(path):
549 def rcfiles(path):
540 rcs = [os.path.join(path, 'hgrc')]
550 rcs = [os.path.join(path, 'hgrc')]
541 rcdir = os.path.join(path, 'hgrc.d')
551 rcdir = os.path.join(path, 'hgrc.d')
542 try:
552 try:
543 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
553 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
544 if f.endswith(".rc")])
554 if f.endswith(".rc")])
545 except OSError, inst: pass
555 except OSError, inst: pass
546 return rcs
556 return rcs
547 rcpath = []
557 rcpath = []
548 if len(sys.argv) > 0:
558 if len(sys.argv) > 0:
549 rcpath.extend(rcfiles(os.path.dirname(sys.argv[0]) + '/../etc/mercurial'))
559 rcpath.extend(rcfiles(os.path.dirname(sys.argv[0]) + '/../etc/mercurial'))
550 rcpath.extend(rcfiles('/etc/mercurial'))
560 rcpath.extend(rcfiles('/etc/mercurial'))
551 rcpath.append(os.path.expanduser('~/.hgrc'))
561 rcpath.append(os.path.expanduser('~/.hgrc'))
552 rcpath = [os.path.normpath(f) for f in rcpath]
562 rcpath = [os.path.normpath(f) for f in rcpath]
553
563
554 def parse_patch_output(output_line):
564 def parse_patch_output(output_line):
555 """parses the output produced by patch and returns the file name"""
565 """parses the output produced by patch and returns the file name"""
556 pf = output_line[14:]
566 pf = output_line[14:]
557 if pf.startswith("'") and pf.endswith("'") and pf.find(" ") >= 0:
567 if pf.startswith("'") and pf.endswith("'") and pf.find(" ") >= 0:
558 pf = pf[1:-1] # Remove the quotes
568 pf = pf[1:-1] # Remove the quotes
559 return pf
569 return pf
560
570
561 def is_exec(f, last):
571 def is_exec(f, last):
562 """check whether a file is executable"""
572 """check whether a file is executable"""
563 return (os.stat(f).st_mode & 0100 != 0)
573 return (os.stat(f).st_mode & 0100 != 0)
564
574
565 def set_exec(f, mode):
575 def set_exec(f, mode):
566 s = os.stat(f).st_mode
576 s = os.stat(f).st_mode
567 if (s & 0100 != 0) == mode:
577 if (s & 0100 != 0) == mode:
568 return
578 return
569 if mode:
579 if mode:
570 # Turn on +x for every +r bit when making a file executable
580 # Turn on +x for every +r bit when making a file executable
571 # and obey umask.
581 # and obey umask.
572 umask = os.umask(0)
582 umask = os.umask(0)
573 os.umask(umask)
583 os.umask(umask)
574 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
584 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
575 else:
585 else:
576 os.chmod(f, s & 0666)
586 os.chmod(f, s & 0666)
577
587
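The chmod arithmetic in set_exec copies each readable bit down to the matching execute bit and then honours the caller's umask: (s & 0444) >> 2 turns r bits into x bits. A worked example with octal modes (POSIX semantics assumed):

    s = 0644                                    # rw-r--r--
    assert s | (s & 0444) >> 2 & ~022 == 0755   # umask 022: rwxr-xr-x
    assert s | (s & 0444) >> 2 & ~077 == 0744   # umask 077 blocks group/other x bits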
578 def set_binary(fd):
588 def set_binary(fd):
579 pass
589 pass
580
590
581 def pconvert(path):
591 def pconvert(path):
582 return path
592 return path
583
593
584 def localpath(path):
594 def localpath(path):
585 return path
595 return path
586
596
587 normpath = os.path.normpath
597 normpath = os.path.normpath
588
598
589 def makelock(info, pathname):
599 def makelock(info, pathname):
590 try:
600 try:
591 os.symlink(info, pathname)
601 os.symlink(info, pathname)
592 except OSError, why:
602 except OSError, why:
593 if why.errno == errno.EEXIST:
603 if why.errno == errno.EEXIST:
594 raise
604 raise
595 else:
605 else:
596 _makelock_file(info, pathname)
606 _makelock_file(info, pathname)
597
607
598 def readlock(pathname):
608 def readlock(pathname):
599 try:
609 try:
600 return os.readlink(pathname)
610 return os.readlink(pathname)
601 except OSError, why:
611 except OSError, why:
602 if why.errno == errno.EINVAL:
612 if why.errno == errno.EINVAL:
603 return _readlock_file(pathname)
613 return _readlock_file(pathname)
604 else:
614 else:
605 raise
615 raise
606
616
607 def explain_exit(code):
617 def explain_exit(code):
608 """return a 2-tuple (desc, code) describing a process's status"""
618 """return a 2-tuple (desc, code) describing a process's status"""
609 if os.WIFEXITED(code):
619 if os.WIFEXITED(code):
610 val = os.WEXITSTATUS(code)
620 val = os.WEXITSTATUS(code)
611 return _("exited with status %d") % val, val
621 return _("exited with status %d") % val, val
612 elif os.WIFSIGNALED(code):
622 elif os.WIFSIGNALED(code):
613 val = os.WTERMSIG(code)
623 val = os.WTERMSIG(code)
614 return _("killed by signal %d") % val, val
624 return _("killed by signal %d") % val, val
615 elif os.WIFSTOPPED(code):
625 elif os.WIFSTOPPED(code):
616 val = os.WSTOPSIG(code)
626 val = os.WSTOPSIG(code)
617 return _("stopped by signal %d") % val, val
627 return _("stopped by signal %d") % val, val
618 raise ValueError(_("invalid exit code"))
628 raise ValueError(_("invalid exit code"))
619
629
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter, targetsize = 2**16):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.in_iter = iter(in_iter)
        self.buf = ''
        self.targetsize = int(targetsize)
        if self.targetsize <= 0:
            raise ValueError(_("targetsize must be greater than 0, was %d") %
                             targetsize)
        self.iterempty = False

    def fillbuf(self):
        """Ignore target size; read every chunk from iterator until empty."""
        if not self.iterempty:
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            for ch in self.in_iter:
                collector.write(ch)
            self.buf = collector.getvalue()
            self.iterempty = True

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and not self.iterempty:
            # Clamp to a multiple of self.targetsize
            targetsize = self.targetsize * ((l // self.targetsize) + 1)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.in_iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iterempty = True
            self.buf = collector.getvalue()
        s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

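# Usage sketch (illustrative assumption, not from the original changeset):
# chunkbuffer turns an iterator of arbitrarily sized strings into an object
# that can be read() in exact amounts, buffering about targetsize bytes.
#
#   import util
#   buf = util.chunkbuffer(iter(['ab', 'cdef', 'g']), targetsize=4)
#   buf.read(3)     # -> 'abc'
#   buf.read(10)    # -> 'defg' (shorter than asked: the iterator ran dry)
#   buf.read(1)     # -> ''
#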
def filechunkiter(f, size = 65536):
    """Create a generator that produces the data in the file, size
    (default 65536) bytes at a time.  Chunks may be less than size
    bytes if the chunk is the last chunk in the file, or the file is a
    socket or some other type of file that sometimes reads less data
    than is requested."""
    s = f.read(size)
    while len(s) > 0:
        yield s
        s = f.read(size)

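# Usage sketch (illustrative assumption, not from the original changeset):
# filechunkiter() streams a file in bounded chunks, which keeps memory use
# flat for large files; the path below is hypothetical.
#
#   import util
#   f = open('/tmp/payload.bin', 'rb')
#   total = 0
#   for chunk in util.filechunkiter(f, size=8192):
#       total += len(chunk)                # process incrementally
#   f.close()
#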
def makedate():
    # return the current time as (seconds since the epoch, local zone's
    # offset west of UTC in seconds); lt[8] is tm_isdst, so use the
    # DST-adjusted offset while daylight saving time is in effect
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

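# Usage sketch (illustrative assumption): makedate() captures "now" in the
# form datestr() below expects - a (unixtime, offset) pair, with the offset
# following the time.timezone convention (positive west of UTC).
#
#   import util
#   t, tz = util.makedate()    # e.g. (1133366400.0, -3600) in CET
#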
def datestr(date=None, format='%c'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC."""
    t, tz = date or makedate()
    return ("%s %+03d%02d" %
            (time.strftime(format, time.gmtime(float(t) - tz)),
             -tz / 3600,
             ((-tz % 3600) / 60)))

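# Usage sketch (illustrative assumption, not from the original changeset):
# datestr() renders such a pair as local time plus a +HHMM/-HHMM suffix.
#
#   import util
#   print util.datestr(util.makedate())              # e.g. 'Mon Dec  5 10:30:00 2005 +0100'
#   print util.datestr((0, 0), format='%Y-%m-%d')    # -> '1970-01-01 +0000'
#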
def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err

    for root, dirs, files in os.walk(path, onerror=errhandler):
        for d in dirs:
            if d == '.hg':
                yield root
                dirs[:] = []
                break
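
# Usage sketch (illustrative assumption, not from the original changeset):
# walkrepos() yields the root of every directory containing '.hg' beneath
# the given path and does not descend further into a repository once it is
# found.  The path below is hypothetical.
#
#   import util
#   for root in util.walkrepos('/srv/hg'):
#       print root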