change locate to use relglobs by default...
Alexis S. L. Carvalho
r4195:e8ee8fde (branch: default)
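The diff below threads a new `default` keyword argument through cmdutil.matchpats() and cmdutil.walk() and on to util.cmdmatcher(), so a caller can choose how bare (un-prefixed) patterns are interpreted. Per the commit summary, locate is switched to relative globs this way; the locate change itself is outside this excerpt, so the following is only a minimal sketch of a hypothetical caller, with default='relglob' assumed from the summary rather than taken from the hunk.

    # Hypothetical caller of the new API (assumes the commands.py module
    # context, where cmdutil is available via demandload).  The value
    # 'relglob' is assumed from the commit summary, not shown in this hunk.
    def locate(ui, repo, *pats, **opts):
        node = opts.get('rev') and repo.lookup(opts['rev']) or None
        # With default='relglob', a bare pattern like 'foo.c' matches that
        # name anywhere below the current directory, instead of being read
        # as a single path relative to the cwd (the previous default).
        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                                 default='relglob'):
            ui.write(rel + '\n')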
cmdutil.py
@@ -1,769 +1,770 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), 'os sys')
11 demandload(globals(), 'os sys')
12 demandload(globals(), 'mdiff util templater patch')
12 demandload(globals(), 'mdiff util templater patch')
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def revpair(repo, revs):
16 def revpair(repo, revs):
17 '''return pair of nodes, given list of revisions. second item can
17 '''return pair of nodes, given list of revisions. second item can
18 be None, meaning use working dir.'''
18 be None, meaning use working dir.'''
19
19
20 def revfix(repo, val, defval):
20 def revfix(repo, val, defval):
21 if not val and val != 0 and defval is not None:
21 if not val and val != 0 and defval is not None:
22 val = defval
22 val = defval
23 return repo.lookup(val)
23 return repo.lookup(val)
24
24
25 if not revs:
25 if not revs:
26 return repo.dirstate.parents()[0], None
26 return repo.dirstate.parents()[0], None
27 end = None
27 end = None
28 if len(revs) == 1:
28 if len(revs) == 1:
29 if revrangesep in revs[0]:
29 if revrangesep in revs[0]:
30 start, end = revs[0].split(revrangesep, 1)
30 start, end = revs[0].split(revrangesep, 1)
31 start = revfix(repo, start, 0)
31 start = revfix(repo, start, 0)
32 end = revfix(repo, end, repo.changelog.count() - 1)
32 end = revfix(repo, end, repo.changelog.count() - 1)
33 else:
33 else:
34 start = revfix(repo, revs[0], None)
34 start = revfix(repo, revs[0], None)
35 elif len(revs) == 2:
35 elif len(revs) == 2:
36 if revrangesep in revs[0] or revrangesep in revs[1]:
36 if revrangesep in revs[0] or revrangesep in revs[1]:
37 raise util.Abort(_('too many revisions specified'))
37 raise util.Abort(_('too many revisions specified'))
38 start = revfix(repo, revs[0], None)
38 start = revfix(repo, revs[0], None)
39 end = revfix(repo, revs[1], None)
39 end = revfix(repo, revs[1], None)
40 else:
40 else:
41 raise util.Abort(_('too many revisions specified'))
41 raise util.Abort(_('too many revisions specified'))
42 return start, end
42 return start, end
43
43
44 def revrange(repo, revs):
44 def revrange(repo, revs):
45 """Yield revision as strings from a list of revision specifications."""
45 """Yield revision as strings from a list of revision specifications."""
46
46
47 def revfix(repo, val, defval):
47 def revfix(repo, val, defval):
48 if not val and val != 0 and defval is not None:
48 if not val and val != 0 and defval is not None:
49 return defval
49 return defval
50 return repo.changelog.rev(repo.lookup(val))
50 return repo.changelog.rev(repo.lookup(val))
51
51
52 seen, l = {}, []
52 seen, l = {}, []
53 for spec in revs:
53 for spec in revs:
54 if revrangesep in spec:
54 if revrangesep in spec:
55 start, end = spec.split(revrangesep, 1)
55 start, end = spec.split(revrangesep, 1)
56 start = revfix(repo, start, 0)
56 start = revfix(repo, start, 0)
57 end = revfix(repo, end, repo.changelog.count() - 1)
57 end = revfix(repo, end, repo.changelog.count() - 1)
58 step = start > end and -1 or 1
58 step = start > end and -1 or 1
59 for rev in xrange(start, end+step, step):
59 for rev in xrange(start, end+step, step):
60 if rev in seen:
60 if rev in seen:
61 continue
61 continue
62 seen[rev] = 1
62 seen[rev] = 1
63 l.append(rev)
63 l.append(rev)
64 else:
64 else:
65 rev = revfix(repo, spec, None)
65 rev = revfix(repo, spec, None)
66 if rev in seen:
66 if rev in seen:
67 continue
67 continue
68 seen[rev] = 1
68 seen[rev] = 1
69 l.append(rev)
69 l.append(rev)
70
70
71 return l
71 return l
72
72
73 def make_filename(repo, pat, node,
73 def make_filename(repo, pat, node,
74 total=None, seqno=None, revwidth=None, pathname=None):
74 total=None, seqno=None, revwidth=None, pathname=None):
75 node_expander = {
75 node_expander = {
76 'H': lambda: hex(node),
76 'H': lambda: hex(node),
77 'R': lambda: str(repo.changelog.rev(node)),
77 'R': lambda: str(repo.changelog.rev(node)),
78 'h': lambda: short(node),
78 'h': lambda: short(node),
79 }
79 }
80 expander = {
80 expander = {
81 '%': lambda: '%',
81 '%': lambda: '%',
82 'b': lambda: os.path.basename(repo.root),
82 'b': lambda: os.path.basename(repo.root),
83 }
83 }
84
84
85 try:
85 try:
86 if node:
86 if node:
87 expander.update(node_expander)
87 expander.update(node_expander)
88 if node and revwidth is not None:
88 if node and revwidth is not None:
89 expander['r'] = (lambda:
89 expander['r'] = (lambda:
90 str(repo.changelog.rev(node)).zfill(revwidth))
90 str(repo.changelog.rev(node)).zfill(revwidth))
91 if total is not None:
91 if total is not None:
92 expander['N'] = lambda: str(total)
92 expander['N'] = lambda: str(total)
93 if seqno is not None:
93 if seqno is not None:
94 expander['n'] = lambda: str(seqno)
94 expander['n'] = lambda: str(seqno)
95 if total is not None and seqno is not None:
95 if total is not None and seqno is not None:
96 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
96 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
97 if pathname is not None:
97 if pathname is not None:
98 expander['s'] = lambda: os.path.basename(pathname)
98 expander['s'] = lambda: os.path.basename(pathname)
99 expander['d'] = lambda: os.path.dirname(pathname) or '.'
99 expander['d'] = lambda: os.path.dirname(pathname) or '.'
100 expander['p'] = lambda: pathname
100 expander['p'] = lambda: pathname
101
101
102 newname = []
102 newname = []
103 patlen = len(pat)
103 patlen = len(pat)
104 i = 0
104 i = 0
105 while i < patlen:
105 while i < patlen:
106 c = pat[i]
106 c = pat[i]
107 if c == '%':
107 if c == '%':
108 i += 1
108 i += 1
109 c = pat[i]
109 c = pat[i]
110 c = expander[c]()
110 c = expander[c]()
111 newname.append(c)
111 newname.append(c)
112 i += 1
112 i += 1
113 return ''.join(newname)
113 return ''.join(newname)
114 except KeyError, inst:
114 except KeyError, inst:
115 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
115 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
116 inst.args[0])
116 inst.args[0])
117
117
118 def make_file(repo, pat, node=None,
118 def make_file(repo, pat, node=None,
119 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
119 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
120 if not pat or pat == '-':
120 if not pat or pat == '-':
121 return 'w' in mode and sys.stdout or sys.stdin
121 return 'w' in mode and sys.stdout or sys.stdin
122 if hasattr(pat, 'write') and 'w' in mode:
122 if hasattr(pat, 'write') and 'w' in mode:
123 return pat
123 return pat
124 if hasattr(pat, 'read') and 'r' in mode:
124 if hasattr(pat, 'read') and 'r' in mode:
125 return pat
125 return pat
126 return open(make_filename(repo, pat, node, total, seqno, revwidth,
126 return open(make_filename(repo, pat, node, total, seqno, revwidth,
127 pathname),
127 pathname),
128 mode)
128 mode)
129
129
130 def matchpats(repo, pats=[], opts={}, head='', globbed=False):
130 def matchpats(repo, pats=[], opts={}, head='', globbed=False, default=None):
131 cwd = repo.getcwd()
131 cwd = repo.getcwd()
132 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
132 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
133 opts.get('exclude'), head, globbed=globbed)
133 opts.get('exclude'), head, globbed=globbed,
134 default=default)
134
135
135 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None,
136 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None,
136 globbed=False):
137 globbed=False, default=None):
137 files, matchfn, anypats = matchpats(repo, pats, opts, head,
138 files, matchfn, anypats = matchpats(repo, pats, opts, head,
138 globbed=globbed)
139 globbed=globbed, default=default)
139 exact = dict.fromkeys(files)
140 exact = dict.fromkeys(files)
140 for src, fn in repo.walk(node=node, files=files, match=matchfn,
141 for src, fn in repo.walk(node=node, files=files, match=matchfn,
141 badmatch=badmatch):
142 badmatch=badmatch):
142 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
143 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
143
144
144 def findrenames(repo, added=None, removed=None, threshold=0.5):
145 def findrenames(repo, added=None, removed=None, threshold=0.5):
145 if added is None or removed is None:
146 if added is None or removed is None:
146 added, removed = repo.status()[1:3]
147 added, removed = repo.status()[1:3]
147 changes = repo.changelog.read(repo.dirstate.parents()[0])
148 changes = repo.changelog.read(repo.dirstate.parents()[0])
148 mf = repo.manifest.read(changes[0])
149 mf = repo.manifest.read(changes[0])
149 for a in added:
150 for a in added:
150 aa = repo.wread(a)
151 aa = repo.wread(a)
151 bestscore, bestname = None, None
152 bestscore, bestname = None, None
152 for r in removed:
153 for r in removed:
153 rr = repo.file(r).read(mf[r])
154 rr = repo.file(r).read(mf[r])
154 delta = mdiff.textdiff(aa, rr)
155 delta = mdiff.textdiff(aa, rr)
155 if len(delta) < len(aa):
156 if len(delta) < len(aa):
156 myscore = 1.0 - (float(len(delta)) / len(aa))
157 myscore = 1.0 - (float(len(delta)) / len(aa))
157 if bestscore is None or myscore > bestscore:
158 if bestscore is None or myscore > bestscore:
158 bestscore, bestname = myscore, r
159 bestscore, bestname = myscore, r
159 if bestname and bestscore >= threshold:
160 if bestname and bestscore >= threshold:
160 yield bestname, a, bestscore
161 yield bestname, a, bestscore
161
162
162 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
163 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
163 similarity=None):
164 similarity=None):
164 if dry_run is None:
165 if dry_run is None:
165 dry_run = opts.get('dry_run')
166 dry_run = opts.get('dry_run')
166 if similarity is None:
167 if similarity is None:
167 similarity = float(opts.get('similarity') or 0)
168 similarity = float(opts.get('similarity') or 0)
168 add, remove = [], []
169 add, remove = [], []
169 mapping = {}
170 mapping = {}
170 for src, abs, rel, exact in walk(repo, pats, opts):
171 for src, abs, rel, exact in walk(repo, pats, opts):
171 if src == 'f' and repo.dirstate.state(abs) == '?':
172 if src == 'f' and repo.dirstate.state(abs) == '?':
172 add.append(abs)
173 add.append(abs)
173 mapping[abs] = rel, exact
174 mapping[abs] = rel, exact
174 if repo.ui.verbose or not exact:
175 if repo.ui.verbose or not exact:
175 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
176 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
176 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
177 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
177 remove.append(abs)
178 remove.append(abs)
178 mapping[abs] = rel, exact
179 mapping[abs] = rel, exact
179 if repo.ui.verbose or not exact:
180 if repo.ui.verbose or not exact:
180 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
181 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
181 if not dry_run:
182 if not dry_run:
182 repo.add(add, wlock=wlock)
183 repo.add(add, wlock=wlock)
183 repo.remove(remove, wlock=wlock)
184 repo.remove(remove, wlock=wlock)
184 if similarity > 0:
185 if similarity > 0:
185 for old, new, score in findrenames(repo, add, remove, similarity):
186 for old, new, score in findrenames(repo, add, remove, similarity):
186 oldrel, oldexact = mapping[old]
187 oldrel, oldexact = mapping[old]
187 newrel, newexact = mapping[new]
188 newrel, newexact = mapping[new]
188 if repo.ui.verbose or not oldexact or not newexact:
189 if repo.ui.verbose or not oldexact or not newexact:
189 repo.ui.status(_('recording removal of %s as rename to %s '
190 repo.ui.status(_('recording removal of %s as rename to %s '
190 '(%d%% similar)\n') %
191 '(%d%% similar)\n') %
191 (oldrel, newrel, score * 100))
192 (oldrel, newrel, score * 100))
192 if not dry_run:
193 if not dry_run:
193 repo.copy(old, new, wlock=wlock)
194 repo.copy(old, new, wlock=wlock)
194
195
195 class changeset_printer(object):
196 class changeset_printer(object):
196 '''show changeset information when templating not requested.'''
197 '''show changeset information when templating not requested.'''
197
198
198 def __init__(self, ui, repo, patch, brinfo, buffered):
199 def __init__(self, ui, repo, patch, brinfo, buffered):
199 self.ui = ui
200 self.ui = ui
200 self.repo = repo
201 self.repo = repo
201 self.buffered = buffered
202 self.buffered = buffered
202 self.patch = patch
203 self.patch = patch
203 self.brinfo = brinfo
204 self.brinfo = brinfo
204 self.header = {}
205 self.header = {}
205 self.hunk = {}
206 self.hunk = {}
206 self.lastheader = None
207 self.lastheader = None
207
208
208 def flush(self, rev):
209 def flush(self, rev):
209 if rev in self.header:
210 if rev in self.header:
210 h = self.header[rev]
211 h = self.header[rev]
211 if h != self.lastheader:
212 if h != self.lastheader:
212 self.lastheader = h
213 self.lastheader = h
213 self.ui.write(h)
214 self.ui.write(h)
214 del self.header[rev]
215 del self.header[rev]
215 if rev in self.hunk:
216 if rev in self.hunk:
216 self.ui.write(self.hunk[rev])
217 self.ui.write(self.hunk[rev])
217 del self.hunk[rev]
218 del self.hunk[rev]
218 return 1
219 return 1
219 return 0
220 return 0
220
221
221 def show(self, rev=0, changenode=None, copies=None, **props):
222 def show(self, rev=0, changenode=None, copies=None, **props):
222 if self.buffered:
223 if self.buffered:
223 self.ui.pushbuffer()
224 self.ui.pushbuffer()
224 self._show(rev, changenode, copies, props)
225 self._show(rev, changenode, copies, props)
225 self.hunk[rev] = self.ui.popbuffer()
226 self.hunk[rev] = self.ui.popbuffer()
226 else:
227 else:
227 self._show(rev, changenode, copies, props)
228 self._show(rev, changenode, copies, props)
228
229
229 def _show(self, rev, changenode, copies, props):
230 def _show(self, rev, changenode, copies, props):
230 '''show a single changeset or file revision'''
231 '''show a single changeset or file revision'''
231 log = self.repo.changelog
232 log = self.repo.changelog
232 if changenode is None:
233 if changenode is None:
233 changenode = log.node(rev)
234 changenode = log.node(rev)
234 elif not rev:
235 elif not rev:
235 rev = log.rev(changenode)
236 rev = log.rev(changenode)
236
237
237 if self.ui.quiet:
238 if self.ui.quiet:
238 self.ui.write("%d:%s\n" % (rev, short(changenode)))
239 self.ui.write("%d:%s\n" % (rev, short(changenode)))
239 return
240 return
240
241
241 changes = log.read(changenode)
242 changes = log.read(changenode)
242 date = util.datestr(changes[2])
243 date = util.datestr(changes[2])
243 extra = changes[5]
244 extra = changes[5]
244 branch = extra.get("branch")
245 branch = extra.get("branch")
245
246
246 hexfunc = self.ui.debugflag and hex or short
247 hexfunc = self.ui.debugflag and hex or short
247
248
248 parents = log.parentrevs(rev)
249 parents = log.parentrevs(rev)
249 if not self.ui.debugflag:
250 if not self.ui.debugflag:
250 if parents[1] == nullrev:
251 if parents[1] == nullrev:
251 if parents[0] >= rev - 1:
252 if parents[0] >= rev - 1:
252 parents = []
253 parents = []
253 else:
254 else:
254 parents = [parents[0]]
255 parents = [parents[0]]
255 parents = [(p, hexfunc(log.node(p))) for p in parents]
256 parents = [(p, hexfunc(log.node(p))) for p in parents]
256
257
257 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
258 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
258
259
259 if branch:
260 if branch:
260 branch = util.tolocal(branch)
261 branch = util.tolocal(branch)
261 self.ui.write(_("branch: %s\n") % branch)
262 self.ui.write(_("branch: %s\n") % branch)
262 for tag in self.repo.nodetags(changenode):
263 for tag in self.repo.nodetags(changenode):
263 self.ui.write(_("tag: %s\n") % tag)
264 self.ui.write(_("tag: %s\n") % tag)
264 for parent in parents:
265 for parent in parents:
265 self.ui.write(_("parent: %d:%s\n") % parent)
266 self.ui.write(_("parent: %d:%s\n") % parent)
266
267
267 if self.brinfo:
268 if self.brinfo:
268 br = self.repo.branchlookup([changenode])
269 br = self.repo.branchlookup([changenode])
269 if br:
270 if br:
270 self.ui.write(_("branch: %s\n") % " ".join(br[changenode]))
271 self.ui.write(_("branch: %s\n") % " ".join(br[changenode]))
271
272
272 if self.ui.debugflag:
273 if self.ui.debugflag:
273 self.ui.write(_("manifest: %d:%s\n") %
274 self.ui.write(_("manifest: %d:%s\n") %
274 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
275 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
275 self.ui.write(_("user: %s\n") % changes[1])
276 self.ui.write(_("user: %s\n") % changes[1])
276 self.ui.write(_("date: %s\n") % date)
277 self.ui.write(_("date: %s\n") % date)
277
278
278 if self.ui.debugflag:
279 if self.ui.debugflag:
279 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
280 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
280 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
281 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
281 files):
282 files):
282 if value:
283 if value:
283 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
284 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
284 elif changes[3] and self.ui.verbose:
285 elif changes[3] and self.ui.verbose:
285 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
286 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
286 if copies and self.ui.verbose:
287 if copies and self.ui.verbose:
287 copies = ['%s (%s)' % c for c in copies]
288 copies = ['%s (%s)' % c for c in copies]
288 self.ui.write(_("copies: %s\n") % ' '.join(copies))
289 self.ui.write(_("copies: %s\n") % ' '.join(copies))
289
290
290 if extra and self.ui.debugflag:
291 if extra and self.ui.debugflag:
291 extraitems = extra.items()
292 extraitems = extra.items()
292 extraitems.sort()
293 extraitems.sort()
293 for key, value in extraitems:
294 for key, value in extraitems:
294 self.ui.write(_("extra: %s=%s\n")
295 self.ui.write(_("extra: %s=%s\n")
295 % (key, value.encode('string_escape')))
296 % (key, value.encode('string_escape')))
296
297
297 description = changes[4].strip()
298 description = changes[4].strip()
298 if description:
299 if description:
299 if self.ui.verbose:
300 if self.ui.verbose:
300 self.ui.write(_("description:\n"))
301 self.ui.write(_("description:\n"))
301 self.ui.write(description)
302 self.ui.write(description)
302 self.ui.write("\n\n")
303 self.ui.write("\n\n")
303 else:
304 else:
304 self.ui.write(_("summary: %s\n") %
305 self.ui.write(_("summary: %s\n") %
305 description.splitlines()[0])
306 description.splitlines()[0])
306 self.ui.write("\n")
307 self.ui.write("\n")
307
308
308 self.showpatch(changenode)
309 self.showpatch(changenode)
309
310
310 def showpatch(self, node):
311 def showpatch(self, node):
311 if self.patch:
312 if self.patch:
312 prev = self.repo.changelog.parents(node)[0]
313 prev = self.repo.changelog.parents(node)[0]
313 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
314 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
314 self.ui.write("\n")
315 self.ui.write("\n")
315
316
316 class changeset_templater(changeset_printer):
317 class changeset_templater(changeset_printer):
317 '''format changeset information.'''
318 '''format changeset information.'''
318
319
319 def __init__(self, ui, repo, patch, brinfo, mapfile, buffered):
320 def __init__(self, ui, repo, patch, brinfo, mapfile, buffered):
320 changeset_printer.__init__(self, ui, repo, patch, brinfo, buffered)
321 changeset_printer.__init__(self, ui, repo, patch, brinfo, buffered)
321 self.t = templater.templater(mapfile, templater.common_filters,
322 self.t = templater.templater(mapfile, templater.common_filters,
322 cache={'parent': '{rev}:{node|short} ',
323 cache={'parent': '{rev}:{node|short} ',
323 'manifest': '{rev}:{node|short}',
324 'manifest': '{rev}:{node|short}',
324 'filecopy': '{name} ({source})'})
325 'filecopy': '{name} ({source})'})
325
326
326 def use_template(self, t):
327 def use_template(self, t):
327 '''set template string to use'''
328 '''set template string to use'''
328 self.t.cache['changeset'] = t
329 self.t.cache['changeset'] = t
329
330
330 def _show(self, rev, changenode, copies, props):
331 def _show(self, rev, changenode, copies, props):
331 '''show a single changeset or file revision'''
332 '''show a single changeset or file revision'''
332 log = self.repo.changelog
333 log = self.repo.changelog
333 if changenode is None:
334 if changenode is None:
334 changenode = log.node(rev)
335 changenode = log.node(rev)
335 elif not rev:
336 elif not rev:
336 rev = log.rev(changenode)
337 rev = log.rev(changenode)
337
338
338 changes = log.read(changenode)
339 changes = log.read(changenode)
339
340
340 def showlist(name, values, plural=None, **args):
341 def showlist(name, values, plural=None, **args):
341 '''expand set of values.
342 '''expand set of values.
342 name is name of key in template map.
343 name is name of key in template map.
343 values is list of strings or dicts.
344 values is list of strings or dicts.
344 plural is plural of name, if not simply name + 's'.
345 plural is plural of name, if not simply name + 's'.
345
346
346 expansion works like this, given name 'foo'.
347 expansion works like this, given name 'foo'.
347
348
348 if values is empty, expand 'no_foos'.
349 if values is empty, expand 'no_foos'.
349
350
350 if 'foo' not in template map, return values as a string,
351 if 'foo' not in template map, return values as a string,
351 joined by space.
352 joined by space.
352
353
353 expand 'start_foos'.
354 expand 'start_foos'.
354
355
355 for each value, expand 'foo'. if 'last_foo' in template
356 for each value, expand 'foo'. if 'last_foo' in template
356 map, expand it instead of 'foo' for last key.
357 map, expand it instead of 'foo' for last key.
357
358
358 expand 'end_foos'.
359 expand 'end_foos'.
359 '''
360 '''
360 if plural: names = plural
361 if plural: names = plural
361 else: names = name + 's'
362 else: names = name + 's'
362 if not values:
363 if not values:
363 noname = 'no_' + names
364 noname = 'no_' + names
364 if noname in self.t:
365 if noname in self.t:
365 yield self.t(noname, **args)
366 yield self.t(noname, **args)
366 return
367 return
367 if name not in self.t:
368 if name not in self.t:
368 if isinstance(values[0], str):
369 if isinstance(values[0], str):
369 yield ' '.join(values)
370 yield ' '.join(values)
370 else:
371 else:
371 for v in values:
372 for v in values:
372 yield dict(v, **args)
373 yield dict(v, **args)
373 return
374 return
374 startname = 'start_' + names
375 startname = 'start_' + names
375 if startname in self.t:
376 if startname in self.t:
376 yield self.t(startname, **args)
377 yield self.t(startname, **args)
377 vargs = args.copy()
378 vargs = args.copy()
378 def one(v, tag=name):
379 def one(v, tag=name):
379 try:
380 try:
380 vargs.update(v)
381 vargs.update(v)
381 except (AttributeError, ValueError):
382 except (AttributeError, ValueError):
382 try:
383 try:
383 for a, b in v:
384 for a, b in v:
384 vargs[a] = b
385 vargs[a] = b
385 except ValueError:
386 except ValueError:
386 vargs[name] = v
387 vargs[name] = v
387 return self.t(tag, **vargs)
388 return self.t(tag, **vargs)
388 lastname = 'last_' + name
389 lastname = 'last_' + name
389 if lastname in self.t:
390 if lastname in self.t:
390 last = values.pop()
391 last = values.pop()
391 else:
392 else:
392 last = None
393 last = None
393 for v in values:
394 for v in values:
394 yield one(v)
395 yield one(v)
395 if last is not None:
396 if last is not None:
396 yield one(last, tag=lastname)
397 yield one(last, tag=lastname)
397 endname = 'end_' + names
398 endname = 'end_' + names
398 if endname in self.t:
399 if endname in self.t:
399 yield self.t(endname, **args)
400 yield self.t(endname, **args)
400
401
401 def showbranches(**args):
402 def showbranches(**args):
402 branch = changes[5].get("branch")
403 branch = changes[5].get("branch")
403 if branch:
404 if branch:
404 branch = util.tolocal(branch)
405 branch = util.tolocal(branch)
405 return showlist('branch', [branch], plural='branches', **args)
406 return showlist('branch', [branch], plural='branches', **args)
406 # add old style branches if requested
407 # add old style branches if requested
407 if self.brinfo:
408 if self.brinfo:
408 br = self.repo.branchlookup([changenode])
409 br = self.repo.branchlookup([changenode])
409 if changenode in br:
410 if changenode in br:
410 return showlist('branch', br[changenode],
411 return showlist('branch', br[changenode],
411 plural='branches', **args)
412 plural='branches', **args)
412
413
413 def showparents(**args):
414 def showparents(**args):
414 parents = [[('rev', log.rev(p)), ('node', hex(p))]
415 parents = [[('rev', log.rev(p)), ('node', hex(p))]
415 for p in log.parents(changenode)
416 for p in log.parents(changenode)
416 if self.ui.debugflag or p != nullid]
417 if self.ui.debugflag or p != nullid]
417 if (not self.ui.debugflag and len(parents) == 1 and
418 if (not self.ui.debugflag and len(parents) == 1 and
418 parents[0][0][1] == rev - 1):
419 parents[0][0][1] == rev - 1):
419 return
420 return
420 return showlist('parent', parents, **args)
421 return showlist('parent', parents, **args)
421
422
422 def showtags(**args):
423 def showtags(**args):
423 return showlist('tag', self.repo.nodetags(changenode), **args)
424 return showlist('tag', self.repo.nodetags(changenode), **args)
424
425
425 def showextras(**args):
426 def showextras(**args):
426 extras = changes[5].items()
427 extras = changes[5].items()
427 extras.sort()
428 extras.sort()
428 for key, value in extras:
429 for key, value in extras:
429 args = args.copy()
430 args = args.copy()
430 args.update(dict(key=key, value=value))
431 args.update(dict(key=key, value=value))
431 yield self.t('extra', **args)
432 yield self.t('extra', **args)
432
433
433 def showcopies(**args):
434 def showcopies(**args):
434 c = [{'name': x[0], 'source': x[1]} for x in copies]
435 c = [{'name': x[0], 'source': x[1]} for x in copies]
435 return showlist('file_copy', c, plural='file_copies', **args)
436 return showlist('file_copy', c, plural='file_copies', **args)
436
437
437 if self.ui.debugflag:
438 if self.ui.debugflag:
438 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
439 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
439 def showfiles(**args):
440 def showfiles(**args):
440 return showlist('file', files[0], **args)
441 return showlist('file', files[0], **args)
441 def showadds(**args):
442 def showadds(**args):
442 return showlist('file_add', files[1], **args)
443 return showlist('file_add', files[1], **args)
443 def showdels(**args):
444 def showdels(**args):
444 return showlist('file_del', files[2], **args)
445 return showlist('file_del', files[2], **args)
445 def showmanifest(**args):
446 def showmanifest(**args):
446 args = args.copy()
447 args = args.copy()
447 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
448 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
448 node=hex(changes[0])))
449 node=hex(changes[0])))
449 return self.t('manifest', **args)
450 return self.t('manifest', **args)
450 else:
451 else:
451 def showfiles(**args):
452 def showfiles(**args):
452 return showlist('file', changes[3], **args)
453 return showlist('file', changes[3], **args)
453 showadds = ''
454 showadds = ''
454 showdels = ''
455 showdels = ''
455 showmanifest = ''
456 showmanifest = ''
456
457
457 defprops = {
458 defprops = {
458 'author': changes[1],
459 'author': changes[1],
459 'branches': showbranches,
460 'branches': showbranches,
460 'date': changes[2],
461 'date': changes[2],
461 'desc': changes[4],
462 'desc': changes[4],
462 'file_adds': showadds,
463 'file_adds': showadds,
463 'file_dels': showdels,
464 'file_dels': showdels,
464 'files': showfiles,
465 'files': showfiles,
465 'file_copies': showcopies,
466 'file_copies': showcopies,
466 'manifest': showmanifest,
467 'manifest': showmanifest,
467 'node': hex(changenode),
468 'node': hex(changenode),
468 'parents': showparents,
469 'parents': showparents,
469 'rev': rev,
470 'rev': rev,
470 'tags': showtags,
471 'tags': showtags,
471 'extras': showextras,
472 'extras': showextras,
472 }
473 }
473 props = props.copy()
474 props = props.copy()
474 props.update(defprops)
475 props.update(defprops)
475
476
476 try:
477 try:
477 if self.ui.debugflag and 'header_debug' in self.t:
478 if self.ui.debugflag and 'header_debug' in self.t:
478 key = 'header_debug'
479 key = 'header_debug'
479 elif self.ui.quiet and 'header_quiet' in self.t:
480 elif self.ui.quiet and 'header_quiet' in self.t:
480 key = 'header_quiet'
481 key = 'header_quiet'
481 elif self.ui.verbose and 'header_verbose' in self.t:
482 elif self.ui.verbose and 'header_verbose' in self.t:
482 key = 'header_verbose'
483 key = 'header_verbose'
483 elif 'header' in self.t:
484 elif 'header' in self.t:
484 key = 'header'
485 key = 'header'
485 else:
486 else:
486 key = ''
487 key = ''
487 if key:
488 if key:
488 h = templater.stringify(self.t(key, **props))
489 h = templater.stringify(self.t(key, **props))
489 if self.buffered:
490 if self.buffered:
490 self.header[rev] = h
491 self.header[rev] = h
491 else:
492 else:
492 self.ui.write(h)
493 self.ui.write(h)
493 if self.ui.debugflag and 'changeset_debug' in self.t:
494 if self.ui.debugflag and 'changeset_debug' in self.t:
494 key = 'changeset_debug'
495 key = 'changeset_debug'
495 elif self.ui.quiet and 'changeset_quiet' in self.t:
496 elif self.ui.quiet and 'changeset_quiet' in self.t:
496 key = 'changeset_quiet'
497 key = 'changeset_quiet'
497 elif self.ui.verbose and 'changeset_verbose' in self.t:
498 elif self.ui.verbose and 'changeset_verbose' in self.t:
498 key = 'changeset_verbose'
499 key = 'changeset_verbose'
499 else:
500 else:
500 key = 'changeset'
501 key = 'changeset'
501 self.ui.write(templater.stringify(self.t(key, **props)))
502 self.ui.write(templater.stringify(self.t(key, **props)))
502 self.showpatch(changenode)
503 self.showpatch(changenode)
503 except KeyError, inst:
504 except KeyError, inst:
504 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
505 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
505 inst.args[0]))
506 inst.args[0]))
506 except SyntaxError, inst:
507 except SyntaxError, inst:
507 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
508 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
508
509
509 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
510 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
510 """show one changeset using template or regular display.
511 """show one changeset using template or regular display.
511
512
512 Display format will be the first non-empty hit of:
513 Display format will be the first non-empty hit of:
513 1. option 'template'
514 1. option 'template'
514 2. option 'style'
515 2. option 'style'
515 3. [ui] setting 'logtemplate'
516 3. [ui] setting 'logtemplate'
516 4. [ui] setting 'style'
517 4. [ui] setting 'style'
517 If all of these values are either the unset or the empty string,
518 If all of these values are either the unset or the empty string,
518 regular display via changeset_printer() is done.
519 regular display via changeset_printer() is done.
519 """
520 """
520 # options
521 # options
521 patch = False
522 patch = False
522 if opts.get('patch'):
523 if opts.get('patch'):
523 patch = matchfn or util.always
524 patch = matchfn or util.always
524
525
525 br = None
526 br = None
526 if opts.get('branches'):
527 if opts.get('branches'):
527 ui.warn(_("the --branches option is deprecated, "
528 ui.warn(_("the --branches option is deprecated, "
528 "please use 'hg branches' instead\n"))
529 "please use 'hg branches' instead\n"))
529 br = True
530 br = True
530 tmpl = opts.get('template')
531 tmpl = opts.get('template')
531 mapfile = None
532 mapfile = None
532 if tmpl:
533 if tmpl:
533 tmpl = templater.parsestring(tmpl, quoted=False)
534 tmpl = templater.parsestring(tmpl, quoted=False)
534 else:
535 else:
535 mapfile = opts.get('style')
536 mapfile = opts.get('style')
536 # ui settings
537 # ui settings
537 if not mapfile:
538 if not mapfile:
538 tmpl = ui.config('ui', 'logtemplate')
539 tmpl = ui.config('ui', 'logtemplate')
539 if tmpl:
540 if tmpl:
540 tmpl = templater.parsestring(tmpl)
541 tmpl = templater.parsestring(tmpl)
541 else:
542 else:
542 mapfile = ui.config('ui', 'style')
543 mapfile = ui.config('ui', 'style')
543
544
544 if tmpl or mapfile:
545 if tmpl or mapfile:
545 if mapfile:
546 if mapfile:
546 if not os.path.split(mapfile)[0]:
547 if not os.path.split(mapfile)[0]:
547 mapname = (templater.templatepath('map-cmdline.' + mapfile)
548 mapname = (templater.templatepath('map-cmdline.' + mapfile)
548 or templater.templatepath(mapfile))
549 or templater.templatepath(mapfile))
549 if mapname: mapfile = mapname
550 if mapname: mapfile = mapname
550 try:
551 try:
551 t = changeset_templater(ui, repo, patch, br, mapfile, buffered)
552 t = changeset_templater(ui, repo, patch, br, mapfile, buffered)
552 except SyntaxError, inst:
553 except SyntaxError, inst:
553 raise util.Abort(inst.args[0])
554 raise util.Abort(inst.args[0])
554 if tmpl: t.use_template(tmpl)
555 if tmpl: t.use_template(tmpl)
555 return t
556 return t
556 return changeset_printer(ui, repo, patch, br, buffered)
557 return changeset_printer(ui, repo, patch, br, buffered)
557
558
558 def finddate(ui, repo, date):
559 def finddate(ui, repo, date):
559 """Find the tipmost changeset that matches the given date spec"""
560 """Find the tipmost changeset that matches the given date spec"""
560 df = util.matchdate(date + " to " + date)
561 df = util.matchdate(date + " to " + date)
561 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
562 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
562 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
563 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
563 results = {}
564 results = {}
564 for st, rev, fns in changeiter:
565 for st, rev, fns in changeiter:
565 if st == 'add':
566 if st == 'add':
566 d = get(rev)[2]
567 d = get(rev)[2]
567 if df(d[0]):
568 if df(d[0]):
568 results[rev] = d
569 results[rev] = d
569 elif st == 'iter':
570 elif st == 'iter':
570 if rev in results:
571 if rev in results:
571 ui.status("Found revision %s from %s\n" %
572 ui.status("Found revision %s from %s\n" %
572 (rev, util.datestr(results[rev])))
573 (rev, util.datestr(results[rev])))
573 return str(rev)
574 return str(rev)
574
575
575 raise util.Abort(_("revision matching date not found"))
576 raise util.Abort(_("revision matching date not found"))
576
577
577 def walkchangerevs(ui, repo, pats, change, opts):
578 def walkchangerevs(ui, repo, pats, change, opts):
578 '''Iterate over files and the revs they changed in.
579 '''Iterate over files and the revs they changed in.
579
580
580 Callers most commonly need to iterate backwards over the history
581 Callers most commonly need to iterate backwards over the history
581 it is interested in. Doing so has awful (quadratic-looking)
582 it is interested in. Doing so has awful (quadratic-looking)
582 performance, so we use iterators in a "windowed" way.
583 performance, so we use iterators in a "windowed" way.
583
584
584 We walk a window of revisions in the desired order. Within the
585 We walk a window of revisions in the desired order. Within the
585 window, we first walk forwards to gather data, then in the desired
586 window, we first walk forwards to gather data, then in the desired
586 order (usually backwards) to display it.
587 order (usually backwards) to display it.
587
588
588 This function returns an (iterator, matchfn) tuple. The iterator
589 This function returns an (iterator, matchfn) tuple. The iterator
589 yields 3-tuples. They will be of one of the following forms:
590 yields 3-tuples. They will be of one of the following forms:
590
591
591 "window", incrementing, lastrev: stepping through a window,
592 "window", incrementing, lastrev: stepping through a window,
592 positive if walking forwards through revs, last rev in the
593 positive if walking forwards through revs, last rev in the
593 sequence iterated over - use to reset state for the current window
594 sequence iterated over - use to reset state for the current window
594
595
595 "add", rev, fns: out-of-order traversal of the given file names
596 "add", rev, fns: out-of-order traversal of the given file names
596 fns, which changed during revision rev - use to gather data for
597 fns, which changed during revision rev - use to gather data for
597 possible display
598 possible display
598
599
599 "iter", rev, None: in-order traversal of the revs earlier iterated
600 "iter", rev, None: in-order traversal of the revs earlier iterated
600 over with "add" - use to display data'''
601 over with "add" - use to display data'''
601
602
602 def increasing_windows(start, end, windowsize=8, sizelimit=512):
603 def increasing_windows(start, end, windowsize=8, sizelimit=512):
603 if start < end:
604 if start < end:
604 while start < end:
605 while start < end:
605 yield start, min(windowsize, end-start)
606 yield start, min(windowsize, end-start)
606 start += windowsize
607 start += windowsize
607 if windowsize < sizelimit:
608 if windowsize < sizelimit:
608 windowsize *= 2
609 windowsize *= 2
609 else:
610 else:
610 while start > end:
611 while start > end:
611 yield start, min(windowsize, start-end-1)
612 yield start, min(windowsize, start-end-1)
612 start -= windowsize
613 start -= windowsize
613 if windowsize < sizelimit:
614 if windowsize < sizelimit:
614 windowsize *= 2
615 windowsize *= 2
615
616
616 files, matchfn, anypats = matchpats(repo, pats, opts)
617 files, matchfn, anypats = matchpats(repo, pats, opts)
617 follow = opts.get('follow') or opts.get('follow_first')
618 follow = opts.get('follow') or opts.get('follow_first')
618
619
619 if repo.changelog.count() == 0:
620 if repo.changelog.count() == 0:
620 return [], matchfn
621 return [], matchfn
621
622
622 if follow:
623 if follow:
623 defrange = '%s:0' % repo.changectx().rev()
624 defrange = '%s:0' % repo.changectx().rev()
624 else:
625 else:
625 defrange = 'tip:0'
626 defrange = 'tip:0'
626 revs = revrange(repo, opts['rev'] or [defrange])
627 revs = revrange(repo, opts['rev'] or [defrange])
627 wanted = {}
628 wanted = {}
628 slowpath = anypats or opts.get('removed')
629 slowpath = anypats or opts.get('removed')
629 fncache = {}
630 fncache = {}
630
631
631 if not slowpath and not files:
632 if not slowpath and not files:
632 # No files, no patterns. Display all revs.
633 # No files, no patterns. Display all revs.
633 wanted = dict.fromkeys(revs)
634 wanted = dict.fromkeys(revs)
634 copies = []
635 copies = []
635 if not slowpath:
636 if not slowpath:
636 # Only files, no patterns. Check the history of each file.
637 # Only files, no patterns. Check the history of each file.
637 def filerevgen(filelog, node):
638 def filerevgen(filelog, node):
638 cl_count = repo.changelog.count()
639 cl_count = repo.changelog.count()
639 if node is None:
640 if node is None:
640 last = filelog.count() - 1
641 last = filelog.count() - 1
641 else:
642 else:
642 last = filelog.rev(node)
643 last = filelog.rev(node)
643 for i, window in increasing_windows(last, nullrev):
644 for i, window in increasing_windows(last, nullrev):
644 revs = []
645 revs = []
645 for j in xrange(i - window, i + 1):
646 for j in xrange(i - window, i + 1):
646 n = filelog.node(j)
647 n = filelog.node(j)
647 revs.append((filelog.linkrev(n),
648 revs.append((filelog.linkrev(n),
648 follow and filelog.renamed(n)))
649 follow and filelog.renamed(n)))
649 revs.reverse()
650 revs.reverse()
650 for rev in revs:
651 for rev in revs:
651 # only yield rev for which we have the changelog, it can
652 # only yield rev for which we have the changelog, it can
652 # happen while doing "hg log" during a pull or commit
653 # happen while doing "hg log" during a pull or commit
653 if rev[0] < cl_count:
654 if rev[0] < cl_count:
654 yield rev
655 yield rev
655 def iterfiles():
656 def iterfiles():
656 for filename in files:
657 for filename in files:
657 yield filename, None
658 yield filename, None
658 for filename_node in copies:
659 for filename_node in copies:
659 yield filename_node
660 yield filename_node
660 minrev, maxrev = min(revs), max(revs)
661 minrev, maxrev = min(revs), max(revs)
661 for file_, node in iterfiles():
662 for file_, node in iterfiles():
662 filelog = repo.file(file_)
663 filelog = repo.file(file_)
663 # A zero count may be a directory or deleted file, so
664 # A zero count may be a directory or deleted file, so
664 # try to find matching entries on the slow path.
665 # try to find matching entries on the slow path.
665 if filelog.count() == 0:
666 if filelog.count() == 0:
666 slowpath = True
667 slowpath = True
667 break
668 break
668 for rev, copied in filerevgen(filelog, node):
669 for rev, copied in filerevgen(filelog, node):
669 if rev <= maxrev:
670 if rev <= maxrev:
670 if rev < minrev:
671 if rev < minrev:
671 break
672 break
672 fncache.setdefault(rev, [])
673 fncache.setdefault(rev, [])
673 fncache[rev].append(file_)
674 fncache[rev].append(file_)
674 wanted[rev] = 1
675 wanted[rev] = 1
675 if follow and copied:
676 if follow and copied:
676 copies.append(copied)
677 copies.append(copied)
677 if slowpath:
678 if slowpath:
678 if follow:
679 if follow:
679 raise util.Abort(_('can only follow copies/renames for explicit '
680 raise util.Abort(_('can only follow copies/renames for explicit '
680 'file names'))
681 'file names'))
681
682
682 # The slow path checks files modified in every changeset.
683 # The slow path checks files modified in every changeset.
683 def changerevgen():
684 def changerevgen():
684 for i, window in increasing_windows(repo.changelog.count()-1,
685 for i, window in increasing_windows(repo.changelog.count()-1,
685 nullrev):
686 nullrev):
686 for j in xrange(i - window, i + 1):
687 for j in xrange(i - window, i + 1):
687 yield j, change(j)[3]
688 yield j, change(j)[3]
688
689
689 for rev, changefiles in changerevgen():
690 for rev, changefiles in changerevgen():
690 matches = filter(matchfn, changefiles)
691 matches = filter(matchfn, changefiles)
691 if matches:
692 if matches:
692 fncache[rev] = matches
693 fncache[rev] = matches
693 wanted[rev] = 1
694 wanted[rev] = 1
694
695
695 class followfilter:
696 class followfilter:
696 def __init__(self, onlyfirst=False):
697 def __init__(self, onlyfirst=False):
697 self.startrev = nullrev
698 self.startrev = nullrev
698 self.roots = []
699 self.roots = []
699 self.onlyfirst = onlyfirst
700 self.onlyfirst = onlyfirst
700
701
701 def match(self, rev):
702 def match(self, rev):
702 def realparents(rev):
703 def realparents(rev):
703 if self.onlyfirst:
704 if self.onlyfirst:
704 return repo.changelog.parentrevs(rev)[0:1]
705 return repo.changelog.parentrevs(rev)[0:1]
705 else:
706 else:
706 return filter(lambda x: x != nullrev,
707 return filter(lambda x: x != nullrev,
707 repo.changelog.parentrevs(rev))
708 repo.changelog.parentrevs(rev))
708
709
709 if self.startrev == nullrev:
710 if self.startrev == nullrev:
710 self.startrev = rev
711 self.startrev = rev
711 return True
712 return True
712
713
713 if rev > self.startrev:
714 if rev > self.startrev:
714 # forward: all descendants
715 # forward: all descendants
715 if not self.roots:
716 if not self.roots:
716 self.roots.append(self.startrev)
717 self.roots.append(self.startrev)
717 for parent in realparents(rev):
718 for parent in realparents(rev):
718 if parent in self.roots:
719 if parent in self.roots:
719 self.roots.append(rev)
720 self.roots.append(rev)
720 return True
721 return True
721 else:
722 else:
722 # backwards: all parents
723 # backwards: all parents
723 if not self.roots:
724 if not self.roots:
724 self.roots.extend(realparents(self.startrev))
725 self.roots.extend(realparents(self.startrev))
725 if rev in self.roots:
726 if rev in self.roots:
726 self.roots.remove(rev)
727 self.roots.remove(rev)
727 self.roots.extend(realparents(rev))
728 self.roots.extend(realparents(rev))
728 return True
729 return True
729
730
730 return False
731 return False
731
732
732 # it might be worthwhile to do this in the iterator if the rev range
733 # it might be worthwhile to do this in the iterator if the rev range
733 # is descending and the prune args are all within that range
734 # is descending and the prune args are all within that range
734 for rev in opts.get('prune', ()):
735 for rev in opts.get('prune', ()):
735 rev = repo.changelog.rev(repo.lookup(rev))
736 rev = repo.changelog.rev(repo.lookup(rev))
736 ff = followfilter()
737 ff = followfilter()
737 stop = min(revs[0], revs[-1])
738 stop = min(revs[0], revs[-1])
738 for x in xrange(rev, stop-1, -1):
739 for x in xrange(rev, stop-1, -1):
739 if ff.match(x) and x in wanted:
740 if ff.match(x) and x in wanted:
740 del wanted[x]
741 del wanted[x]
741
742
742 def iterate():
743 def iterate():
743 if follow and not files:
744 if follow and not files:
744 ff = followfilter(onlyfirst=opts.get('follow_first'))
745 ff = followfilter(onlyfirst=opts.get('follow_first'))
745 def want(rev):
746 def want(rev):
746 if ff.match(rev) and rev in wanted:
747 if ff.match(rev) and rev in wanted:
747 return True
748 return True
748 return False
749 return False
749 else:
750 else:
750 def want(rev):
751 def want(rev):
751 return rev in wanted
752 return rev in wanted
752
753
753 for i, window in increasing_windows(0, len(revs)):
754 for i, window in increasing_windows(0, len(revs)):
754 yield 'window', revs[0] < revs[-1], revs[-1]
755 yield 'window', revs[0] < revs[-1], revs[-1]
755 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
756 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
756 srevs = list(nrevs)
757 srevs = list(nrevs)
757 srevs.sort()
758 srevs.sort()
758 for rev in srevs:
759 for rev in srevs:
759 fns = fncache.get(rev)
760 fns = fncache.get(rev)
760 if not fns:
761 if not fns:
761 def fns_generator():
762 def fns_generator():
762 for f in change(rev)[3]:
763 for f in change(rev)[3]:
763 if matchfn(f):
764 if matchfn(f):
764 yield f
765 yield f
765 fns = fns_generator()
766 fns = fns_generator()
766 yield 'add', rev, fns
767 yield 'add', rev, fns
767 for rev in nrevs:
768 for rev in nrevs:
768 yield 'iter', rev, None
769 yield 'iter', rev, None
769 return iterate(), matchfn
770 return iterate(), matchfn
commands.py
@@ -1,3344 +1,3343 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
11 demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
12 demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
13 demandload(globals(), "difflib patch time help mdiff tempfile")
13 demandload(globals(), "difflib patch time help mdiff tempfile")
14 demandload(globals(), "traceback errno version atexit socket")
14 demandload(globals(), "traceback errno version atexit socket")
15 demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
15 demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
16
16
17 class UnknownCommand(Exception):
17 class UnknownCommand(Exception):
18 """Exception raised if command is not in the command table."""
18 """Exception raised if command is not in the command table."""
19 class AmbiguousCommand(Exception):
19 class AmbiguousCommand(Exception):
20 """Exception raised if command shortcut matches more than one command."""
20 """Exception raised if command shortcut matches more than one command."""
21
21
22 def bail_if_changed(repo):
22 def bail_if_changed(repo):
23 modified, added, removed, deleted = repo.status()[:4]
23 modified, added, removed, deleted = repo.status()[:4]
24 if modified or added or removed or deleted:
24 if modified or added or removed or deleted:
25 raise util.Abort(_("outstanding uncommitted changes"))
25 raise util.Abort(_("outstanding uncommitted changes"))
26
26
27 def logmessage(opts):
27 def logmessage(opts):
28 """ get the log message according to -m and -l option """
28 """ get the log message according to -m and -l option """
29 message = opts['message']
29 message = opts['message']
30 logfile = opts['logfile']
30 logfile = opts['logfile']
31
31
32 if message and logfile:
32 if message and logfile:
33 raise util.Abort(_('options --message and --logfile are mutually '
33 raise util.Abort(_('options --message and --logfile are mutually '
34 'exclusive'))
34 'exclusive'))
35 if not message and logfile:
35 if not message and logfile:
36 try:
36 try:
37 if logfile == '-':
37 if logfile == '-':
38 message = sys.stdin.read()
38 message = sys.stdin.read()
39 else:
39 else:
40 message = open(logfile).read()
40 message = open(logfile).read()
41 except IOError, inst:
41 except IOError, inst:
42 raise util.Abort(_("can't read commit message '%s': %s") %
42 raise util.Abort(_("can't read commit message '%s': %s") %
43 (logfile, inst.strerror))
43 (logfile, inst.strerror))
44 return message
44 return message
45
45
46 def setremoteconfig(ui, opts):
46 def setremoteconfig(ui, opts):
47 "copy remote options to ui tree"
47 "copy remote options to ui tree"
48 if opts.get('ssh'):
48 if opts.get('ssh'):
49 ui.setconfig("ui", "ssh", opts['ssh'])
49 ui.setconfig("ui", "ssh", opts['ssh'])
50 if opts.get('remotecmd'):
50 if opts.get('remotecmd'):
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52
52
53 # Commands start here, listed alphabetically
53 # Commands start here, listed alphabetically
54
54
55 def add(ui, repo, *pats, **opts):
55 def add(ui, repo, *pats, **opts):
56 """add the specified files on the next commit
56 """add the specified files on the next commit
57
57
58 Schedule files to be version controlled and added to the repository.
58 Schedule files to be version controlled and added to the repository.
59
59
60 The files will be added to the repository at the next commit. To
60 The files will be added to the repository at the next commit. To
61 undo an add before that, see hg revert.
61 undo an add before that, see hg revert.
62
62
63 If no names are given, add all files in the repository.
63 If no names are given, add all files in the repository.
64 """
64 """
65
65
66 names = []
66 names = []
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 if exact:
68 if exact:
69 if ui.verbose:
69 if ui.verbose:
70 ui.status(_('adding %s\n') % rel)
70 ui.status(_('adding %s\n') % rel)
71 names.append(abs)
71 names.append(abs)
72 elif repo.dirstate.state(abs) == '?':
72 elif repo.dirstate.state(abs) == '?':
73 ui.status(_('adding %s\n') % rel)
73 ui.status(_('adding %s\n') % rel)
74 names.append(abs)
74 names.append(abs)
75 if not opts.get('dry_run'):
75 if not opts.get('dry_run'):
76 repo.add(names)
76 repo.add(names)
77
77
78 def addremove(ui, repo, *pats, **opts):
78 def addremove(ui, repo, *pats, **opts):
79 """add all new files, delete all missing files
79 """add all new files, delete all missing files
80
80
81 Add all new files and remove all missing files from the repository.
81 Add all new files and remove all missing files from the repository.
82
82
83 New files are ignored if they match any of the patterns in .hgignore. As
83 New files are ignored if they match any of the patterns in .hgignore. As
84 with add, these changes take effect at the next commit.
84 with add, these changes take effect at the next commit.
85
85
86 Use the -s option to detect renamed files. With a parameter > 0,
86 Use the -s option to detect renamed files. With a parameter > 0,
87 this compares every removed file with every added file and records
87 this compares every removed file with every added file and records
88 those similar enough as renames. This option takes a percentage
88 those similar enough as renames. This option takes a percentage
89 between 0 (disabled) and 100 (files must be identical) as its
89 between 0 (disabled) and 100 (files must be identical) as its
90 parameter. Detecting renamed files this way can be expensive.
90 parameter. Detecting renamed files this way can be expensive.
91 """
91 """
92 sim = float(opts.get('similarity') or 0)
92 sim = float(opts.get('similarity') or 0)
93 if sim < 0 or sim > 100:
93 if sim < 0 or sim > 100:
94 raise util.Abort(_('similarity must be between 0 and 100'))
94 raise util.Abort(_('similarity must be between 0 and 100'))
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96
96
97 def annotate(ui, repo, *pats, **opts):
97 def annotate(ui, repo, *pats, **opts):
98 """show changeset information per file line
98 """show changeset information per file line
99
99
100 List changes in files, showing the revision id responsible for each line
100 List changes in files, showing the revision id responsible for each line
101
101
102 This command is useful to discover who did a change or when a change took
102 This command is useful to discover who did a change or when a change took
103 place.
103 place.
104
104
105 Without the -a option, annotate will avoid processing files it
105 Without the -a option, annotate will avoid processing files it
106 detects as binary. With -a, annotate will generate an annotation
106 detects as binary. With -a, annotate will generate an annotation
107 anyway, probably with undesirable results.
107 anyway, probably with undesirable results.
108 """
108 """
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110
110
111 if not pats:
111 if not pats:
112 raise util.Abort(_('at least one file name or pattern required'))
112 raise util.Abort(_('at least one file name or pattern required'))
113
113
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 ['number', lambda x: str(x.rev())],
115 ['number', lambda x: str(x.rev())],
116 ['changeset', lambda x: short(x.node())],
116 ['changeset', lambda x: short(x.node())],
117 ['date', getdate], ['follow', lambda x: x.path()]]
117 ['date', getdate], ['follow', lambda x: x.path()]]
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 and not opts['follow']):
119 and not opts['follow']):
120 opts['number'] = 1
120 opts['number'] = 1
121
121
122 ctx = repo.changectx(opts['rev'])
122 ctx = repo.changectx(opts['rev'])
123
123
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 node=ctx.node()):
125 node=ctx.node()):
126 fctx = ctx.filectx(abs)
126 fctx = ctx.filectx(abs)
127 if not opts['text'] and util.binary(fctx.data()):
127 if not opts['text'] and util.binary(fctx.data()):
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 continue
129 continue
130
130
131 lines = fctx.annotate(follow=opts.get('follow'))
131 lines = fctx.annotate(follow=opts.get('follow'))
132 pieces = []
132 pieces = []
133
133
134 for o, f in opmap:
134 for o, f in opmap:
135 if opts[o]:
135 if opts[o]:
136 l = [f(n) for n, dummy in lines]
136 l = [f(n) for n, dummy in lines]
137 if l:
137 if l:
138 m = max(map(len, l))
138 m = max(map(len, l))
139 pieces.append(["%*s" % (m, x) for x in l])
139 pieces.append(["%*s" % (m, x) for x in l])
140
140
141 if pieces:
141 if pieces:
142 for p, l in zip(zip(*pieces), lines):
142 for p, l in zip(zip(*pieces), lines):
143 ui.write("%s: %s" % (" ".join(p), l[1]))
143 ui.write("%s: %s" % (" ".join(p), l[1]))
144
144
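# Editor's sketch (illustrative, not part of Mercurial): annotate() above
# right-pads every enabled field to its column's maximum width with "%*s"
# and then zips the per-field columns back into per-line tuples.  The same
# alignment idea in isolation, with made-up sample data:

def _annotate_alignment_demo():
    import sys
    # two annotated lines: (user, revision, text)
    samples = [("alice", 3, "first line\n"), ("bob", 42, "second line\n")]
    columns = []
    for getfield in (lambda l: l[0], lambda l: str(l[1])):
        vals = [getfield(l) for l in samples]
        width = max([len(v) for v in vals])
        # right-align each value to the widest entry in its column
        columns.append(["%*s" % (width, v) for v in vals])
    # zip(*columns) regroups the per-field columns into per-line tuples
    for pieces, (user, rev, text) in zip(zip(*columns), samples):
        sys.stdout.write("%s: %s" % (" ".join(pieces), text))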
145 def archive(ui, repo, dest, **opts):
145 def archive(ui, repo, dest, **opts):
146 '''create unversioned archive of a repository revision
146 '''create unversioned archive of a repository revision
147
147
148 By default, the revision used is the parent of the working
148 By default, the revision used is the parent of the working
149 directory; use "-r" to specify a different revision.
149 directory; use "-r" to specify a different revision.
150
150
151 To specify the type of archive to create, use "-t". Valid
151 To specify the type of archive to create, use "-t". Valid
152 types are:
152 types are:
153
153
154 "files" (default): a directory full of files
154 "files" (default): a directory full of files
155 "tar": tar archive, uncompressed
155 "tar": tar archive, uncompressed
156 "tbz2": tar archive, compressed using bzip2
156 "tbz2": tar archive, compressed using bzip2
157 "tgz": tar archive, compressed using gzip
157 "tgz": tar archive, compressed using gzip
158 "uzip": zip archive, uncompressed
158 "uzip": zip archive, uncompressed
159 "zip": zip archive, compressed using deflate
159 "zip": zip archive, compressed using deflate
160
160
161 The exact name of the destination archive or directory is given
161 The exact name of the destination archive or directory is given
162 using a format string; see "hg help export" for details.
162 using a format string; see "hg help export" for details.
163
163
164 Each member added to an archive file has a directory prefix
164 Each member added to an archive file has a directory prefix
165 prepended. Use "-p" to specify a format string for the prefix.
165 prepended. Use "-p" to specify a format string for the prefix.
166 The default is the basename of the archive, with suffixes removed.
166 The default is the basename of the archive, with suffixes removed.
167 '''
167 '''
168
168
169 node = repo.changectx(opts['rev']).node()
169 node = repo.changectx(opts['rev']).node()
170 dest = cmdutil.make_filename(repo, dest, node)
170 dest = cmdutil.make_filename(repo, dest, node)
171 if os.path.realpath(dest) == repo.root:
171 if os.path.realpath(dest) == repo.root:
172 raise util.Abort(_('repository root cannot be destination'))
172 raise util.Abort(_('repository root cannot be destination'))
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 kind = opts.get('type') or 'files'
174 kind = opts.get('type') or 'files'
175 prefix = opts['prefix']
175 prefix = opts['prefix']
176 if dest == '-':
176 if dest == '-':
177 if kind == 'files':
177 if kind == 'files':
178 raise util.Abort(_('cannot archive plain files to stdout'))
178 raise util.Abort(_('cannot archive plain files to stdout'))
179 dest = sys.stdout
179 dest = sys.stdout
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 prefix = cmdutil.make_filename(repo, prefix, node)
181 prefix = cmdutil.make_filename(repo, prefix, node)
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 matchfn, prefix)
183 matchfn, prefix)
184
184
185 def backout(ui, repo, rev, **opts):
185 def backout(ui, repo, rev, **opts):
186 '''reverse the effect of an earlier changeset
186 '''reverse the effect of an earlier changeset
187
187
188 Commit the backed out changes as a new changeset. The new
188 Commit the backed out changes as a new changeset. The new
189 changeset is a child of the backed out changeset.
189 changeset is a child of the backed out changeset.
190
190
191 If you back out a changeset other than the tip, a new head is
191 If you back out a changeset other than the tip, a new head is
192 created. This head is the parent of the working directory. If
192 created. This head is the parent of the working directory. If
193 you back out an old changeset, your working directory will appear
193 you back out an old changeset, your working directory will appear
194 old after the backout. You should merge the backout changeset
194 old after the backout. You should merge the backout changeset
195 with another head.
195 with another head.
196
196
197 The --merge option remembers the parent of the working directory
197 The --merge option remembers the parent of the working directory
198 before starting the backout, then merges the new head with that
198 before starting the backout, then merges the new head with that
199 changeset afterwards. This saves you from doing the merge by
199 changeset afterwards. This saves you from doing the merge by
200 hand. The result of this merge is not committed, as for a normal
200 hand. The result of this merge is not committed, as for a normal
201 merge.'''
201 merge.'''
202
202
203 bail_if_changed(repo)
203 bail_if_changed(repo)
204 op1, op2 = repo.dirstate.parents()
204 op1, op2 = repo.dirstate.parents()
205 if op2 != nullid:
205 if op2 != nullid:
206 raise util.Abort(_('outstanding uncommitted merge'))
206 raise util.Abort(_('outstanding uncommitted merge'))
207 node = repo.lookup(rev)
207 node = repo.lookup(rev)
208 p1, p2 = repo.changelog.parents(node)
208 p1, p2 = repo.changelog.parents(node)
209 if p1 == nullid:
209 if p1 == nullid:
210 raise util.Abort(_('cannot back out a change with no parents'))
210 raise util.Abort(_('cannot back out a change with no parents'))
211 if p2 != nullid:
211 if p2 != nullid:
212 if not opts['parent']:
212 if not opts['parent']:
213 raise util.Abort(_('cannot back out a merge changeset without '
213 raise util.Abort(_('cannot back out a merge changeset without '
214 '--parent'))
214 '--parent'))
215 p = repo.lookup(opts['parent'])
215 p = repo.lookup(opts['parent'])
216 if p not in (p1, p2):
216 if p not in (p1, p2):
217 raise util.Abort(_('%s is not a parent of %s') %
217 raise util.Abort(_('%s is not a parent of %s') %
218 (short(p), short(node)))
218 (short(p), short(node)))
219 parent = p
219 parent = p
220 else:
220 else:
221 if opts['parent']:
221 if opts['parent']:
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
223 parent = p1
223 parent = p1
224 hg.clean(repo, node, show_stats=False)
224 hg.clean(repo, node, show_stats=False)
225 revert_opts = opts.copy()
225 revert_opts = opts.copy()
226 revert_opts['date'] = None
226 revert_opts['date'] = None
227 revert_opts['all'] = True
227 revert_opts['all'] = True
228 revert_opts['rev'] = hex(parent)
228 revert_opts['rev'] = hex(parent)
229 revert(ui, repo, **revert_opts)
229 revert(ui, repo, **revert_opts)
230 commit_opts = opts.copy()
230 commit_opts = opts.copy()
231 commit_opts['addremove'] = False
231 commit_opts['addremove'] = False
232 if not commit_opts['message'] and not commit_opts['logfile']:
232 if not commit_opts['message'] and not commit_opts['logfile']:
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234 commit_opts['force_editor'] = True
234 commit_opts['force_editor'] = True
235 commit(ui, repo, **commit_opts)
235 commit(ui, repo, **commit_opts)
236 def nice(node):
236 def nice(node):
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
237 return '%d:%s' % (repo.changelog.rev(node), short(node))
238 ui.status(_('changeset %s backs out changeset %s\n') %
238 ui.status(_('changeset %s backs out changeset %s\n') %
239 (nice(repo.changelog.tip()), nice(node)))
239 (nice(repo.changelog.tip()), nice(node)))
240 if op1 != node:
240 if op1 != node:
241 if opts['merge']:
241 if opts['merge']:
242 ui.status(_('merging with changeset %s\n') % nice(op1))
242 ui.status(_('merging with changeset %s\n') % nice(op1))
243 n = _lookup(repo, hex(op1))
243 n = _lookup(repo, hex(op1))
244 hg.merge(repo, n)
244 hg.merge(repo, n)
245 else:
245 else:
246 ui.status(_('the backout changeset is a new head - '
246 ui.status(_('the backout changeset is a new head - '
247 'do not forget to merge\n'))
247 'do not forget to merge\n'))
248 ui.status(_('(use "backout --merge" '
248 ui.status(_('(use "backout --merge" '
249 'if you want to auto-merge)\n'))
249 'if you want to auto-merge)\n'))
250
250
251 def branch(ui, repo, label=None):
251 def branch(ui, repo, label=None):
252 """set or show the current branch name
252 """set or show the current branch name
253
253
254 With <name>, set the current branch name. Otherwise, show the
254 With <name>, set the current branch name. Otherwise, show the
255 current branch name.
255 current branch name.
256 """
256 """
257
257
258 if label is not None:
258 if label is not None:
259 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
259 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
260 else:
260 else:
261 b = util.tolocal(repo.workingctx().branch())
261 b = util.tolocal(repo.workingctx().branch())
262 if b:
262 if b:
263 ui.write("%s\n" % b)
263 ui.write("%s\n" % b)
264
264
265 def branches(ui, repo):
265 def branches(ui, repo):
266 """list repository named branches
266 """list repository named branches
267
267
268 List the repository's named branches.
268 List the repository's named branches.
269 """
269 """
270 b = repo.branchtags()
270 b = repo.branchtags()
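# negate the revision in the sort key so a plain ascending sort lists
# the most recently committed branch heads first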
271 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
271 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
272 l.sort()
272 l.sort()
273 for r, n, t in l:
273 for r, n, t in l:
274 hexfunc = ui.debugflag and hex or short
274 hexfunc = ui.debugflag and hex or short
275 if ui.quiet:
275 if ui.quiet:
276 ui.write("%s\n" % t)
276 ui.write("%s\n" % t)
277 else:
277 else:
278 spaces = " " * (30 - util.locallen(t))
278 spaces = " " * (30 - util.locallen(t))
279 ui.write("%s%s %s:%s\n" % (t, spaces, -r, hexfunc(n)))
279 ui.write("%s%s %s:%s\n" % (t, spaces, -r, hexfunc(n)))
280
280
281 def bundle(ui, repo, fname, dest=None, **opts):
281 def bundle(ui, repo, fname, dest=None, **opts):
282 """create a changegroup file
282 """create a changegroup file
283
283
284 Generate a compressed changegroup file collecting changesets not
284 Generate a compressed changegroup file collecting changesets not
285 found in the other repository.
285 found in the other repository.
286
286
287 If no destination repository is specified, the destination is assumed
287 If no destination repository is specified, the destination is assumed
288 to have all the nodes specified by one or more --base parameters.
288 to have all the nodes specified by one or more --base parameters.
289
289
290 The bundle file can then be transferred using conventional means and
290 The bundle file can then be transferred using conventional means and
291 applied to another repository with the unbundle or pull command.
291 applied to another repository with the unbundle or pull command.
292 This is useful when direct push and pull are not available or when
292 This is useful when direct push and pull are not available or when
293 exporting an entire repository is undesirable.
293 exporting an entire repository is undesirable.
294
294
295 Applying bundles preserves all changeset contents including
295 Applying bundles preserves all changeset contents including
296 permissions, copy/rename information, and revision history.
296 permissions, copy/rename information, and revision history.
297 """
297 """
298 revs = opts.get('rev') or None
298 revs = opts.get('rev') or None
299 if revs:
299 if revs:
300 revs = [repo.lookup(rev) for rev in revs]
300 revs = [repo.lookup(rev) for rev in revs]
301 base = opts.get('base')
301 base = opts.get('base')
302 if base:
302 if base:
303 if dest:
303 if dest:
304 raise util.Abort(_("--base is incompatible with specifiying "
304 raise util.Abort(_("--base is incompatible with specifiying "
305 "a destination"))
305 "a destination"))
306 base = [repo.lookup(rev) for rev in base]
306 base = [repo.lookup(rev) for rev in base]
307 # create the right base
307 # create the right base
308 # XXX: nodesbetween / changegroup* should be "fixed" instead
308 # XXX: nodesbetween / changegroup* should be "fixed" instead
309 o = []
309 o = []
310 has = {nullid: None}
310 has = {nullid: None}
311 for n in base:
311 for n in base:
312 has.update(repo.changelog.reachable(n))
312 has.update(repo.changelog.reachable(n))
313 if revs:
313 if revs:
314 visit = list(revs)
314 visit = list(revs)
315 else:
315 else:
316 visit = repo.changelog.heads()
316 visit = repo.changelog.heads()
317 seen = {}
317 seen = {}
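# breadth-first walk from the requested heads (or all heads) back toward
# the roots, stopping at anything already reachable from a --base node;
# nodes whose parents are all known become the roots of the changegroup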
318 while visit:
318 while visit:
319 n = visit.pop(0)
319 n = visit.pop(0)
320 parents = [p for p in repo.changelog.parents(n) if p not in has]
320 parents = [p for p in repo.changelog.parents(n) if p not in has]
321 if len(parents) == 0:
321 if len(parents) == 0:
322 o.insert(0, n)
322 o.insert(0, n)
323 else:
323 else:
324 for p in parents:
324 for p in parents:
325 if p not in seen:
325 if p not in seen:
326 seen[p] = 1
326 seen[p] = 1
327 visit.append(p)
327 visit.append(p)
328 else:
328 else:
329 setremoteconfig(ui, opts)
329 setremoteconfig(ui, opts)
330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
331 other = hg.repository(ui, dest)
331 other = hg.repository(ui, dest)
332 o = repo.findoutgoing(other, force=opts['force'])
332 o = repo.findoutgoing(other, force=opts['force'])
333
333
334 if revs:
334 if revs:
335 cg = repo.changegroupsubset(o, revs, 'bundle')
335 cg = repo.changegroupsubset(o, revs, 'bundle')
336 else:
336 else:
337 cg = repo.changegroup(o, 'bundle')
337 cg = repo.changegroup(o, 'bundle')
338 changegroup.writebundle(cg, fname, "HG10BZ")
338 changegroup.writebundle(cg, fname, "HG10BZ")
339
339
340 def cat(ui, repo, file1, *pats, **opts):
340 def cat(ui, repo, file1, *pats, **opts):
341 """output the current or given revision of files
341 """output the current or given revision of files
342
342
343 Print the specified files as they were at the given revision.
343 Print the specified files as they were at the given revision.
344 If no revision is given, the parent of the working directory is used,
344 If no revision is given, the parent of the working directory is used,
345 or tip if no revision is checked out.
345 or tip if no revision is checked out.
346
346
347 Output may be to a file, in which case the name of the file is
347 Output may be to a file, in which case the name of the file is
348 given using a format string. The formatting rules are the same as
348 given using a format string. The formatting rules are the same as
349 for the export command, with the following additions:
349 for the export command, with the following additions:
350
350
351 %s basename of file being printed
351 %s basename of file being printed
352 %d dirname of file being printed, or '.' if in repo root
352 %d dirname of file being printed, or '.' if in repo root
353 %p root-relative path name of file being printed
353 %p root-relative path name of file being printed
354 """
354 """
355 ctx = repo.changectx(opts['rev'])
355 ctx = repo.changectx(opts['rev'])
356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
357 ctx.node()):
357 ctx.node()):
358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
359 fp.write(ctx.filectx(abs).data())
359 fp.write(ctx.filectx(abs).data())
360
360
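# Editor's sketch (illustrative only, not cmdutil.make_file): one way the
# %s/%d/%p substitutions documented above could be expanded for a path.

def _output_name_demo(fmt, path):
    import os
    subs = {'s': os.path.basename(path),
            'd': os.path.dirname(path) or '.',
            'p': path}
    out, i = [], 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt) and fmt[i + 1] in subs:
            out.append(subs[fmt[i + 1]])
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)

# _output_name_demo('out/%d/%s', 'src/module/file.py') -> 'out/src/module/file.py'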
361 def clone(ui, source, dest=None, **opts):
361 def clone(ui, source, dest=None, **opts):
362 """make a copy of an existing repository
362 """make a copy of an existing repository
363
363
364 Create a copy of an existing repository in a new directory.
364 Create a copy of an existing repository in a new directory.
365
365
366 If no destination directory name is specified, it defaults to the
366 If no destination directory name is specified, it defaults to the
367 basename of the source.
367 basename of the source.
368
368
369 The location of the source is added to the new repository's
369 The location of the source is added to the new repository's
370 .hg/hgrc file, as the default to be used for future pulls.
370 .hg/hgrc file, as the default to be used for future pulls.
371
371
372 For efficiency, hardlinks are used for cloning whenever the source
372 For efficiency, hardlinks are used for cloning whenever the source
373 and destination are on the same filesystem (note this applies only
373 and destination are on the same filesystem (note this applies only
374 to the repository data, not to the checked out files). Some
374 to the repository data, not to the checked out files). Some
375 filesystems, such as AFS, implement hardlinking incorrectly, but
375 filesystems, such as AFS, implement hardlinking incorrectly, but
376 do not report errors. In these cases, use the --pull option to
376 do not report errors. In these cases, use the --pull option to
377 avoid hardlinking.
377 avoid hardlinking.
378
378
379 You can safely clone repositories and checked out files using full
379 You can safely clone repositories and checked out files using full
380 hardlinks with
380 hardlinks with
381
381
382 $ cp -al REPO REPOCLONE
382 $ cp -al REPO REPOCLONE
383
383
384 which is the fastest way to clone. However, the operation is not
384 which is the fastest way to clone. However, the operation is not
385 atomic (making sure REPO is not modified during the operation is
385 atomic (making sure REPO is not modified during the operation is
386 up to you) and you have to make sure your editor breaks hardlinks
386 up to you) and you have to make sure your editor breaks hardlinks
387 (Emacs and most Linux kernel tools do so).
387 (Emacs and most Linux kernel tools do so).
388
388
389 If you use the -r option to clone up to a specific revision, no
389 If you use the -r option to clone up to a specific revision, no
390 subsequent revisions will be present in the cloned repository.
390 subsequent revisions will be present in the cloned repository.
391 This option implies --pull, even on local repositories.
391 This option implies --pull, even on local repositories.
392
392
393 See pull for valid source format details.
393 See pull for valid source format details.
394
394
395 It is possible to specify an ssh:// URL as the destination, but no
395 It is possible to specify an ssh:// URL as the destination, but no
396 .hg/hgrc and working directory will be created on the remote side.
396 .hg/hgrc and working directory will be created on the remote side.
397 Look at the help text for the pull command for important details
397 Look at the help text for the pull command for important details
398 about ssh:// URLs.
398 about ssh:// URLs.
399 """
399 """
400 setremoteconfig(ui, opts)
400 setremoteconfig(ui, opts)
401 hg.clone(ui, ui.expandpath(source), dest,
401 hg.clone(ui, ui.expandpath(source), dest,
402 pull=opts['pull'],
402 pull=opts['pull'],
403 stream=opts['uncompressed'],
403 stream=opts['uncompressed'],
404 rev=opts['rev'],
404 rev=opts['rev'],
405 update=not opts['noupdate'])
405 update=not opts['noupdate'])
406
406
407 def commit(ui, repo, *pats, **opts):
407 def commit(ui, repo, *pats, **opts):
408 """commit the specified files or all outstanding changes
408 """commit the specified files or all outstanding changes
409
409
410 Commit changes to the given files into the repository.
410 Commit changes to the given files into the repository.
411
411
412 If a list of files is omitted, all changes reported by "hg status"
412 If a list of files is omitted, all changes reported by "hg status"
413 will be committed.
413 will be committed.
414
414
415 If no commit message is specified, the editor configured in your hgrc
415 If no commit message is specified, the editor configured in your hgrc
416 or in the EDITOR environment variable is started so you can enter a message.
416 or in the EDITOR environment variable is started so you can enter a message.
417 """
417 """
418 message = logmessage(opts)
418 message = logmessage(opts)
419
419
420 if opts['addremove']:
420 if opts['addremove']:
421 cmdutil.addremove(repo, pats, opts)
421 cmdutil.addremove(repo, pats, opts)
422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
423 if pats:
423 if pats:
424 status = repo.status(files=fns, match=match)
424 status = repo.status(files=fns, match=match)
425 modified, added, removed, deleted, unknown = status[:5]
425 modified, added, removed, deleted, unknown = status[:5]
426 files = modified + added + removed
426 files = modified + added + removed
427 slist = None
427 slist = None
428 for f in fns:
428 for f in fns:
429 if f not in files:
429 if f not in files:
430 rf = repo.wjoin(f)
430 rf = repo.wjoin(f)
431 if f in unknown:
431 if f in unknown:
432 raise util.Abort(_("file %s not tracked!") % rf)
432 raise util.Abort(_("file %s not tracked!") % rf)
433 try:
433 try:
434 mode = os.lstat(rf)[stat.ST_MODE]
434 mode = os.lstat(rf)[stat.ST_MODE]
435 except OSError:
435 except OSError:
436 raise util.Abort(_("file %s not found!") % rf)
436 raise util.Abort(_("file %s not found!") % rf)
437 if stat.S_ISDIR(mode):
437 if stat.S_ISDIR(mode):
438 name = f + '/'
438 name = f + '/'
439 if slist is None:
439 if slist is None:
440 slist = list(files)
440 slist = list(files)
441 slist.sort()
441 slist.sort()
442 i = bisect.bisect(slist, name)
442 i = bisect.bisect(slist, name)
443 if i >= len(slist) or not slist[i].startswith(name):
443 if i >= len(slist) or not slist[i].startswith(name):
444 raise util.Abort(_("no match under directory %s!")
444 raise util.Abort(_("no match under directory %s!")
445 % rf)
445 % rf)
446 elif not stat.S_ISREG(mode):
446 elif not stat.S_ISREG(mode):
447 raise util.Abort(_("can't commit %s: "
447 raise util.Abort(_("can't commit %s: "
448 "unsupported file type!") % rf)
448 "unsupported file type!") % rf)
449 else:
449 else:
450 files = []
450 files = []
451 try:
451 try:
452 repo.commit(files, message, opts['user'], opts['date'], match,
452 repo.commit(files, message, opts['user'], opts['date'], match,
453 force_editor=opts.get('force_editor'))
453 force_editor=opts.get('force_editor'))
454 except ValueError, inst:
454 except ValueError, inst:
455 raise util.Abort(str(inst))
455 raise util.Abort(str(inst))
456
456
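# Editor's sketch (illustrative, not part of Mercurial): commit() above uses
# a sorted file list plus bisect to decide whether any tracked file lives
# under a named directory.  The same trick in isolation:

def _has_file_under_demo(sorted_files, dirname):
    import bisect
    # the first entry sorting after "dirname/" must itself start with
    # "dirname/" if anything at all lives under that directory
    name = dirname.rstrip('/') + '/'
    i = bisect.bisect(sorted_files, name)
    return i < len(sorted_files) and sorted_files[i].startswith(name)

# _has_file_under_demo(['a/b.txt', 'a/sub/c.txt', 'z.txt'], 'a') -> True
# _has_file_under_demo(['a/b.txt', 'a/sub/c.txt', 'z.txt'], 'b') -> False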
457 def docopy(ui, repo, pats, opts, wlock):
457 def docopy(ui, repo, pats, opts, wlock):
458 # called with the repo lock held
458 # called with the repo lock held
459 #
459 #
460 # hgsep => pathname that uses "/" to separate directories
460 # hgsep => pathname that uses "/" to separate directories
461 # ossep => pathname that uses os.sep to separate directories
461 # ossep => pathname that uses os.sep to separate directories
462 cwd = repo.getcwd()
462 cwd = repo.getcwd()
463 errors = 0
463 errors = 0
464 copied = []
464 copied = []
465 targets = {}
465 targets = {}
466
466
467 # abs: hgsep
467 # abs: hgsep
468 # rel: ossep
468 # rel: ossep
469 # return: hgsep
469 # return: hgsep
470 def okaytocopy(abs, rel, exact):
470 def okaytocopy(abs, rel, exact):
471 reasons = {'?': _('is not managed'),
471 reasons = {'?': _('is not managed'),
472 'a': _('has been marked for add'),
472 'a': _('has been marked for add'),
473 'r': _('has been marked for remove')}
473 'r': _('has been marked for remove')}
474 state = repo.dirstate.state(abs)
474 state = repo.dirstate.state(abs)
475 reason = reasons.get(state)
475 reason = reasons.get(state)
476 if reason:
476 if reason:
477 if state == 'a':
477 if state == 'a':
478 origsrc = repo.dirstate.copied(abs)
478 origsrc = repo.dirstate.copied(abs)
479 if origsrc is not None:
479 if origsrc is not None:
480 return origsrc
480 return origsrc
481 if exact:
481 if exact:
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
483 else:
483 else:
484 return abs
484 return abs
485
485
486 # origsrc: hgsep
486 # origsrc: hgsep
487 # abssrc: hgsep
487 # abssrc: hgsep
488 # relsrc: ossep
488 # relsrc: ossep
489 # target: ossep
489 # target: ossep
490 def copy(origsrc, abssrc, relsrc, target, exact):
490 def copy(origsrc, abssrc, relsrc, target, exact):
491 abstarget = util.canonpath(repo.root, cwd, target)
491 abstarget = util.canonpath(repo.root, cwd, target)
492 reltarget = util.pathto(cwd, abstarget)
492 reltarget = util.pathto(cwd, abstarget)
493 prevsrc = targets.get(abstarget)
493 prevsrc = targets.get(abstarget)
494 if prevsrc is not None:
494 if prevsrc is not None:
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
496 (reltarget, util.localpath(abssrc),
496 (reltarget, util.localpath(abssrc),
497 util.localpath(prevsrc)))
497 util.localpath(prevsrc)))
498 return
498 return
499 if (not opts['after'] and os.path.exists(reltarget) or
499 if (not opts['after'] and os.path.exists(reltarget) or
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
501 if not opts['force']:
501 if not opts['force']:
502 ui.warn(_('%s: not overwriting - file exists\n') %
502 ui.warn(_('%s: not overwriting - file exists\n') %
503 reltarget)
503 reltarget)
504 return
504 return
505 if not opts['after'] and not opts.get('dry_run'):
505 if not opts['after'] and not opts.get('dry_run'):
506 os.unlink(reltarget)
506 os.unlink(reltarget)
507 if opts['after']:
507 if opts['after']:
508 if not os.path.exists(reltarget):
508 if not os.path.exists(reltarget):
509 return
509 return
510 else:
510 else:
511 targetdir = os.path.dirname(reltarget) or '.'
511 targetdir = os.path.dirname(reltarget) or '.'
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
513 os.makedirs(targetdir)
513 os.makedirs(targetdir)
514 try:
514 try:
515 restore = repo.dirstate.state(abstarget) == 'r'
515 restore = repo.dirstate.state(abstarget) == 'r'
516 if restore and not opts.get('dry_run'):
516 if restore and not opts.get('dry_run'):
517 repo.undelete([abstarget], wlock)
517 repo.undelete([abstarget], wlock)
518 try:
518 try:
519 if not opts.get('dry_run'):
519 if not opts.get('dry_run'):
520 util.copyfile(relsrc, reltarget)
520 util.copyfile(relsrc, reltarget)
521 restore = False
521 restore = False
522 finally:
522 finally:
523 if restore:
523 if restore:
524 repo.remove([abstarget], wlock)
524 repo.remove([abstarget], wlock)
525 except IOError, inst:
525 except IOError, inst:
526 if inst.errno == errno.ENOENT:
526 if inst.errno == errno.ENOENT:
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
528 else:
528 else:
529 ui.warn(_('%s: cannot copy - %s\n') %
529 ui.warn(_('%s: cannot copy - %s\n') %
530 (relsrc, inst.strerror))
530 (relsrc, inst.strerror))
531 errors += 1
531 errors += 1
532 return
532 return
533 if ui.verbose or not exact:
533 if ui.verbose or not exact:
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
535 targets[abstarget] = abssrc
535 targets[abstarget] = abssrc
536 if abstarget != origsrc and not opts.get('dry_run'):
536 if abstarget != origsrc and not opts.get('dry_run'):
537 repo.copy(origsrc, abstarget, wlock)
537 repo.copy(origsrc, abstarget, wlock)
538 copied.append((abssrc, relsrc, exact))
538 copied.append((abssrc, relsrc, exact))
539
539
540 # pat: ossep
540 # pat: ossep
541 # dest ossep
541 # dest ossep
542 # srcs: list of (hgsep, hgsep, ossep, bool)
542 # srcs: list of (hgsep, hgsep, ossep, bool)
543 # return: function that takes hgsep and returns ossep
543 # return: function that takes hgsep and returns ossep
544 def targetpathfn(pat, dest, srcs):
544 def targetpathfn(pat, dest, srcs):
545 if os.path.isdir(pat):
545 if os.path.isdir(pat):
546 abspfx = util.canonpath(repo.root, cwd, pat)
546 abspfx = util.canonpath(repo.root, cwd, pat)
547 abspfx = util.localpath(abspfx)
547 abspfx = util.localpath(abspfx)
548 if destdirexists:
548 if destdirexists:
549 striplen = len(os.path.split(abspfx)[0])
549 striplen = len(os.path.split(abspfx)[0])
550 else:
550 else:
551 striplen = len(abspfx)
551 striplen = len(abspfx)
552 if striplen:
552 if striplen:
553 striplen += len(os.sep)
553 striplen += len(os.sep)
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
555 elif destdirexists:
555 elif destdirexists:
556 res = lambda p: os.path.join(dest,
556 res = lambda p: os.path.join(dest,
557 os.path.basename(util.localpath(p)))
557 os.path.basename(util.localpath(p)))
558 else:
558 else:
559 res = lambda p: dest
559 res = lambda p: dest
560 return res
560 return res
561
561
562 # pat: ossep
562 # pat: ossep
563 # dest ossep
563 # dest ossep
564 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # srcs: list of (hgsep, hgsep, ossep, bool)
565 # return: function that takes hgsep and returns ossep
565 # return: function that takes hgsep and returns ossep
566 def targetpathafterfn(pat, dest, srcs):
566 def targetpathafterfn(pat, dest, srcs):
567 if util.patkind(pat, None)[0]:
567 if util.patkind(pat, None)[0]:
568 # a mercurial pattern
568 # a mercurial pattern
569 res = lambda p: os.path.join(dest,
569 res = lambda p: os.path.join(dest,
570 os.path.basename(util.localpath(p)))
570 os.path.basename(util.localpath(p)))
571 else:
571 else:
572 abspfx = util.canonpath(repo.root, cwd, pat)
572 abspfx = util.canonpath(repo.root, cwd, pat)
573 if len(abspfx) < len(srcs[0][0]):
573 if len(abspfx) < len(srcs[0][0]):
574 # A directory. Either the target path contains the last
574 # A directory. Either the target path contains the last
575 # component of the source path or it does not.
575 # component of the source path or it does not.
576 def evalpath(striplen):
576 def evalpath(striplen):
577 score = 0
577 score = 0
578 for s in srcs:
578 for s in srcs:
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
580 if os.path.exists(t):
580 if os.path.exists(t):
581 score += 1
581 score += 1
582 return score
582 return score
583
583
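# two candidate strip lengths are tried: dropping the whole source
# directory, or keeping its last component when the destination already
# contains a directory of that name; evalpath() scores each by counting
# how many resulting targets already exist, and the higher score wins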
584 abspfx = util.localpath(abspfx)
584 abspfx = util.localpath(abspfx)
585 striplen = len(abspfx)
585 striplen = len(abspfx)
586 if striplen:
586 if striplen:
587 striplen += len(os.sep)
587 striplen += len(os.sep)
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
589 score = evalpath(striplen)
589 score = evalpath(striplen)
590 striplen1 = len(os.path.split(abspfx)[0])
590 striplen1 = len(os.path.split(abspfx)[0])
591 if striplen1:
591 if striplen1:
592 striplen1 += len(os.sep)
592 striplen1 += len(os.sep)
593 if evalpath(striplen1) > score:
593 if evalpath(striplen1) > score:
594 striplen = striplen1
594 striplen = striplen1
595 res = lambda p: os.path.join(dest,
595 res = lambda p: os.path.join(dest,
596 util.localpath(p)[striplen:])
596 util.localpath(p)[striplen:])
597 else:
597 else:
598 # a file
598 # a file
599 if destdirexists:
599 if destdirexists:
600 res = lambda p: os.path.join(dest,
600 res = lambda p: os.path.join(dest,
601 os.path.basename(util.localpath(p)))
601 os.path.basename(util.localpath(p)))
602 else:
602 else:
603 res = lambda p: dest
603 res = lambda p: dest
604 return res
604 return res
605
605
606
606
607 pats = util.expand_glob(pats)
607 pats = util.expand_glob(pats)
608 if not pats:
608 if not pats:
609 raise util.Abort(_('no source or destination specified'))
609 raise util.Abort(_('no source or destination specified'))
610 if len(pats) == 1:
610 if len(pats) == 1:
611 raise util.Abort(_('no destination specified'))
611 raise util.Abort(_('no destination specified'))
612 dest = pats.pop()
612 dest = pats.pop()
613 destdirexists = os.path.isdir(dest)
613 destdirexists = os.path.isdir(dest)
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
615 raise util.Abort(_('with multiple sources, destination must be an '
615 raise util.Abort(_('with multiple sources, destination must be an '
616 'existing directory'))
616 'existing directory'))
617 if opts['after']:
617 if opts['after']:
618 tfn = targetpathafterfn
618 tfn = targetpathafterfn
619 else:
619 else:
620 tfn = targetpathfn
620 tfn = targetpathfn
621 copylist = []
621 copylist = []
622 for pat in pats:
622 for pat in pats:
623 srcs = []
623 srcs = []
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
625 globbed=True):
625 globbed=True):
626 origsrc = okaytocopy(abssrc, relsrc, exact)
626 origsrc = okaytocopy(abssrc, relsrc, exact)
627 if origsrc:
627 if origsrc:
628 srcs.append((origsrc, abssrc, relsrc, exact))
628 srcs.append((origsrc, abssrc, relsrc, exact))
629 if not srcs:
629 if not srcs:
630 continue
630 continue
631 copylist.append((tfn(pat, dest, srcs), srcs))
631 copylist.append((tfn(pat, dest, srcs), srcs))
632 if not copylist:
632 if not copylist:
633 raise util.Abort(_('no files to copy'))
633 raise util.Abort(_('no files to copy'))
634
634
635 for targetpath, srcs in copylist:
635 for targetpath, srcs in copylist:
636 for origsrc, abssrc, relsrc, exact in srcs:
636 for origsrc, abssrc, relsrc, exact in srcs:
637 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
637 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
638
638
639 if errors:
639 if errors:
640 ui.warn(_('(consider using --after)\n'))
640 ui.warn(_('(consider using --after)\n'))
641 return errors, copied
641 return errors, copied
642
642
643 def copy(ui, repo, *pats, **opts):
643 def copy(ui, repo, *pats, **opts):
644 """mark files as copied for the next commit
644 """mark files as copied for the next commit
645
645
646 Mark dest as having copies of source files. If dest is a
646 Mark dest as having copies of source files. If dest is a
647 directory, copies are put in that directory. If dest is a file,
647 directory, copies are put in that directory. If dest is a file,
648 there can only be one source.
648 there can only be one source.
649
649
650 By default, this command copies the contents of files as they
650 By default, this command copies the contents of files as they
651 stand in the working directory. If invoked with --after, the
651 stand in the working directory. If invoked with --after, the
652 operation is recorded, but no copying is performed.
652 operation is recorded, but no copying is performed.
653
653
654 This command takes effect in the next commit. To undo a copy
654 This command takes effect in the next commit. To undo a copy
655 before that, see hg revert.
655 before that, see hg revert.
656 """
656 """
657 wlock = repo.wlock(0)
657 wlock = repo.wlock(0)
658 errs, copied = docopy(ui, repo, pats, opts, wlock)
658 errs, copied = docopy(ui, repo, pats, opts, wlock)
659 return errs
659 return errs
660
660
661 def debugancestor(ui, index, rev1, rev2):
661 def debugancestor(ui, index, rev1, rev2):
662 """find the ancestor revision of two revisions in a given index"""
662 """find the ancestor revision of two revisions in a given index"""
663 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
663 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
664 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
664 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
665 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
665 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
666
666
667 def debugcomplete(ui, cmd='', **opts):
667 def debugcomplete(ui, cmd='', **opts):
668 """returns the completion list associated with the given command"""
668 """returns the completion list associated with the given command"""
669
669
670 if opts['options']:
670 if opts['options']:
671 options = []
671 options = []
672 otables = [globalopts]
672 otables = [globalopts]
673 if cmd:
673 if cmd:
674 aliases, entry = findcmd(ui, cmd)
674 aliases, entry = findcmd(ui, cmd)
675 otables.append(entry[1])
675 otables.append(entry[1])
676 for t in otables:
676 for t in otables:
677 for o in t:
677 for o in t:
678 if o[0]:
678 if o[0]:
679 options.append('-%s' % o[0])
679 options.append('-%s' % o[0])
680 options.append('--%s' % o[1])
680 options.append('--%s' % o[1])
681 ui.write("%s\n" % "\n".join(options))
681 ui.write("%s\n" % "\n".join(options))
682 return
682 return
683
683
684 clist = findpossible(ui, cmd).keys()
684 clist = findpossible(ui, cmd).keys()
685 clist.sort()
685 clist.sort()
686 ui.write("%s\n" % "\n".join(clist))
686 ui.write("%s\n" % "\n".join(clist))
687
687
688 def debugrebuildstate(ui, repo, rev=None):
688 def debugrebuildstate(ui, repo, rev=None):
689 """rebuild the dirstate as it would look like for the given revision"""
689 """rebuild the dirstate as it would look like for the given revision"""
690 if not rev:
690 if not rev:
691 rev = repo.changelog.tip()
691 rev = repo.changelog.tip()
692 else:
692 else:
693 rev = repo.lookup(rev)
693 rev = repo.lookup(rev)
694 change = repo.changelog.read(rev)
694 change = repo.changelog.read(rev)
695 n = change[0]
695 n = change[0]
696 files = repo.manifest.read(n)
696 files = repo.manifest.read(n)
697 wlock = repo.wlock()
697 wlock = repo.wlock()
698 repo.dirstate.rebuild(rev, files)
698 repo.dirstate.rebuild(rev, files)
699
699
700 def debugcheckstate(ui, repo):
700 def debugcheckstate(ui, repo):
701 """validate the correctness of the current dirstate"""
701 """validate the correctness of the current dirstate"""
702 parent1, parent2 = repo.dirstate.parents()
702 parent1, parent2 = repo.dirstate.parents()
703 repo.dirstate.read()
703 repo.dirstate.read()
704 dc = repo.dirstate.map
704 dc = repo.dirstate.map
705 keys = dc.keys()
705 keys = dc.keys()
706 keys.sort()
706 keys.sort()
707 m1n = repo.changelog.read(parent1)[0]
707 m1n = repo.changelog.read(parent1)[0]
708 m2n = repo.changelog.read(parent2)[0]
708 m2n = repo.changelog.read(parent2)[0]
709 m1 = repo.manifest.read(m1n)
709 m1 = repo.manifest.read(m1n)
710 m2 = repo.manifest.read(m2n)
710 m2 = repo.manifest.read(m2n)
711 errors = 0
711 errors = 0
712 for f in dc:
712 for f in dc:
713 state = repo.dirstate.state(f)
713 state = repo.dirstate.state(f)
714 if state in "nr" and f not in m1:
714 if state in "nr" and f not in m1:
715 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
715 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
716 errors += 1
716 errors += 1
717 if state in "a" and f in m1:
717 if state in "a" and f in m1:
718 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
718 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
719 errors += 1
719 errors += 1
720 if state in "m" and f not in m1 and f not in m2:
720 if state in "m" and f not in m1 and f not in m2:
721 ui.warn(_("%s in state %s, but not in either manifest\n") %
721 ui.warn(_("%s in state %s, but not in either manifest\n") %
722 (f, state))
722 (f, state))
723 errors += 1
723 errors += 1
724 for f in m1:
724 for f in m1:
725 state = repo.dirstate.state(f)
725 state = repo.dirstate.state(f)
726 if state not in "nrm":
726 if state not in "nrm":
727 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
727 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
728 errors += 1
728 errors += 1
729 if errors:
729 if errors:
730 error = _(".hg/dirstate inconsistent with current parent's manifest")
730 error = _(".hg/dirstate inconsistent with current parent's manifest")
731 raise util.Abort(error)
731 raise util.Abort(error)
732
732
733 def showconfig(ui, repo, *values, **opts):
733 def showconfig(ui, repo, *values, **opts):
734 """show combined config settings from all hgrc files
734 """show combined config settings from all hgrc files
735
735
736 With no args, print names and values of all config items.
736 With no args, print names and values of all config items.
737
737
738 With one arg of the form section.name, print just the value of
738 With one arg of the form section.name, print just the value of
739 that config item.
739 that config item.
740
740
741 With multiple args, print names and values of all config items
741 With multiple args, print names and values of all config items
742 with matching section names."""
742 with matching section names."""
743
743
744 untrusted = bool(opts.get('untrusted'))
744 untrusted = bool(opts.get('untrusted'))
745 if values:
745 if values:
746 if len([v for v in values if '.' in v]) > 1:
746 if len([v for v in values if '.' in v]) > 1:
747 raise util.Abort(_('only one config item permitted'))
747 raise util.Abort(_('only one config item permitted'))
748 for section, name, value in ui.walkconfig(untrusted=untrusted):
748 for section, name, value in ui.walkconfig(untrusted=untrusted):
749 sectname = section + '.' + name
749 sectname = section + '.' + name
750 if values:
750 if values:
751 for v in values:
751 for v in values:
752 if v == section:
752 if v == section:
753 ui.write('%s=%s\n' % (sectname, value))
753 ui.write('%s=%s\n' % (sectname, value))
754 elif v == sectname:
754 elif v == sectname:
755 ui.write(value, '\n')
755 ui.write(value, '\n')
756 else:
756 else:
757 ui.write('%s=%s\n' % (sectname, value))
757 ui.write('%s=%s\n' % (sectname, value))
758
758
759 def debugsetparents(ui, repo, rev1, rev2=None):
759 def debugsetparents(ui, repo, rev1, rev2=None):
760 """manually set the parents of the current working directory
760 """manually set the parents of the current working directory
761
761
762 This is useful for writing repository conversion tools, but should
762 This is useful for writing repository conversion tools, but should
763 be used with care.
763 be used with care.
764 """
764 """
765
765
766 if not rev2:
766 if not rev2:
767 rev2 = hex(nullid)
767 rev2 = hex(nullid)
768
768
769 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
769 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
770
770
771 def debugstate(ui, repo):
771 def debugstate(ui, repo):
772 """show the contents of the current dirstate"""
772 """show the contents of the current dirstate"""
773 repo.dirstate.read()
773 repo.dirstate.read()
774 dc = repo.dirstate.map
774 dc = repo.dirstate.map
775 keys = dc.keys()
775 keys = dc.keys()
776 keys.sort()
776 keys.sort()
777 for file_ in keys:
777 for file_ in keys:
778 if dc[file_][3] == -1:
778 if dc[file_][3] == -1:
779 # Pad or slice to locale representation
779 # Pad or slice to locale representation
780 locale_len = len(time.strftime("%x %X", time.localtime(0)))
780 locale_len = len(time.strftime("%x %X", time.localtime(0)))
781 timestr = 'unset'
781 timestr = 'unset'
782 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
782 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
783 else:
783 else:
784 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
784 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
785 ui.write("%c %3o %10d %s %s\n"
785 ui.write("%c %3o %10d %s %s\n"
786 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
786 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
787 timestr, file_))
787 timestr, file_))
788 for f in repo.dirstate.copies():
788 for f in repo.dirstate.copies():
789 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
789 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
790
790
791 def debugdata(ui, file_, rev):
791 def debugdata(ui, file_, rev):
792 """dump the contents of an data file revision"""
792 """dump the contents of an data file revision"""
793 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
793 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
794 file_[:-2] + ".i", file_, 0)
794 file_[:-2] + ".i", file_, 0)
795 try:
795 try:
796 ui.write(r.revision(r.lookup(rev)))
796 ui.write(r.revision(r.lookup(rev)))
797 except KeyError:
797 except KeyError:
798 raise util.Abort(_('invalid revision identifier %s') % rev)
798 raise util.Abort(_('invalid revision identifier %s') % rev)
799
799
800 def debugdate(ui, date, range=None, **opts):
800 def debugdate(ui, date, range=None, **opts):
801 """parse and display a date"""
801 """parse and display a date"""
802 if opts["extended"]:
802 if opts["extended"]:
803 d = util.parsedate(date, util.extendeddateformats)
803 d = util.parsedate(date, util.extendeddateformats)
804 else:
804 else:
805 d = util.parsedate(date)
805 d = util.parsedate(date)
806 ui.write("internal: %s %s\n" % d)
806 ui.write("internal: %s %s\n" % d)
807 ui.write("standard: %s\n" % util.datestr(d))
807 ui.write("standard: %s\n" % util.datestr(d))
808 if range:
808 if range:
809 m = util.matchdate(range)
809 m = util.matchdate(range)
810 ui.write("match: %s\n" % m(d[0]))
810 ui.write("match: %s\n" % m(d[0]))
811
811
812 def debugindex(ui, file_):
812 def debugindex(ui, file_):
813 """dump the contents of an index file"""
813 """dump the contents of an index file"""
814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
815 ui.write(" rev offset length base linkrev" +
815 ui.write(" rev offset length base linkrev" +
816 " nodeid p1 p2\n")
816 " nodeid p1 p2\n")
817 for i in xrange(r.count()):
817 for i in xrange(r.count()):
818 node = r.node(i)
818 node = r.node(i)
819 pp = r.parents(node)
819 pp = r.parents(node)
820 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
820 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
821 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
821 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
822 short(node), short(pp[0]), short(pp[1])))
822 short(node), short(pp[0]), short(pp[1])))
823
823
824 def debugindexdot(ui, file_):
824 def debugindexdot(ui, file_):
825 """dump an index DAG as a .dot file"""
825 """dump an index DAG as a .dot file"""
826 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
826 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
827 ui.write("digraph G {\n")
827 ui.write("digraph G {\n")
828 for i in xrange(r.count()):
828 for i in xrange(r.count()):
829 node = r.node(i)
829 node = r.node(i)
830 pp = r.parents(node)
830 pp = r.parents(node)
831 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
831 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
832 if pp[1] != nullid:
832 if pp[1] != nullid:
833 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
833 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
834 ui.write("}\n")
834 ui.write("}\n")
835
835
836 def debuginstall(ui):
836 def debuginstall(ui):
837 '''test Mercurial installation'''
837 '''test Mercurial installation'''
838
838
839 def writetemp(contents):
839 def writetemp(contents):
840 (fd, name) = tempfile.mkstemp()
840 (fd, name) = tempfile.mkstemp()
841 f = os.fdopen(fd, "wb")
841 f = os.fdopen(fd, "wb")
842 f.write(contents)
842 f.write(contents)
843 f.close()
843 f.close()
844 return name
844 return name
845
845
846 problems = 0
846 problems = 0
847
847
848 # encoding
848 # encoding
849 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
849 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
850 try:
850 try:
851 util.fromlocal("test")
851 util.fromlocal("test")
852 except util.Abort, inst:
852 except util.Abort, inst:
853 ui.write(" %s\n" % inst)
853 ui.write(" %s\n" % inst)
854 ui.write(_(" (check that your locale is properly set)\n"))
854 ui.write(_(" (check that your locale is properly set)\n"))
855 problems += 1
855 problems += 1
856
856
857 # compiled modules
857 # compiled modules
858 ui.status(_("Checking extensions...\n"))
858 ui.status(_("Checking extensions...\n"))
859 try:
859 try:
860 import bdiff, mpatch, base85
860 import bdiff, mpatch, base85
861 except Exception, inst:
861 except Exception, inst:
862 ui.write(" %s\n" % inst)
862 ui.write(" %s\n" % inst)
863 ui.write(_(" One or more extensions could not be found"))
863 ui.write(_(" One or more extensions could not be found"))
864 ui.write(_(" (check that you compiled the extensions)\n"))
864 ui.write(_(" (check that you compiled the extensions)\n"))
865 problems += 1
865 problems += 1
866
866
867 # templates
867 # templates
868 ui.status(_("Checking templates...\n"))
868 ui.status(_("Checking templates...\n"))
869 try:
869 try:
870 import templater
870 import templater
871 t = templater.templater(templater.templatepath("map-cmdline.default"))
871 t = templater.templater(templater.templatepath("map-cmdline.default"))
872 except Exception, inst:
872 except Exception, inst:
873 ui.write(" %s\n" % inst)
873 ui.write(" %s\n" % inst)
874 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
874 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
875 problems += 1
875 problems += 1
876
876
877 # patch
877 # patch
878 ui.status(_("Checking patch...\n"))
878 ui.status(_("Checking patch...\n"))
879 path = os.environ.get('PATH', '')
879 path = os.environ.get('PATH', '')
880 patcher = util.find_in_path('gpatch', path,
880 patcher = util.find_in_path('gpatch', path,
881 util.find_in_path('patch', path, None))
881 util.find_in_path('patch', path, None))
882 if not patcher:
882 if not patcher:
883 ui.write(_(" Can't find patch or gpatch in PATH\n"))
883 ui.write(_(" Can't find patch or gpatch in PATH\n"))
884 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
884 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
885 problems += 1
885 problems += 1
886 else:
886 else:
887 # actually attempt a patch here
887 # actually attempt a patch here
888 a = "1\n2\n3\n4\n"
888 a = "1\n2\n3\n4\n"
889 b = "1\n2\n3\ninsert\n4\n"
889 b = "1\n2\n3\ninsert\n4\n"
890 d = mdiff.unidiff(a, None, b, None, "a")
890 d = mdiff.unidiff(a, None, b, None, "a")
891 fa = writetemp(a)
891 fa = writetemp(a)
892 fd = writetemp(d)
892 fd = writetemp(d)
893 fp = os.popen('%s %s %s' % (patcher, fa, fd))
893 fp = os.popen('%s %s %s' % (patcher, fa, fd))
894 files = []
894 files = []
895 output = ""
895 output = ""
896 for line in fp:
896 for line in fp:
897 output += line
897 output += line
898 if line.startswith('patching file '):
898 if line.startswith('patching file '):
899 pf = util.parse_patch_output(line.rstrip())
899 pf = util.parse_patch_output(line.rstrip())
900 files.append(pf)
900 files.append(pf)
901 if files != [fa]:
901 if files != [fa]:
902 ui.write(_(" unexpected patch output!"))
902 ui.write(_(" unexpected patch output!"))
903 ui.write(_(" (you may have an incompatible version of patch)\n"))
903 ui.write(_(" (you may have an incompatible version of patch)\n"))
904 ui.write(output)
904 ui.write(output)
905 problems += 1
905 problems += 1
906 a = file(fa).read()
906 a = file(fa).read()
907 if a != b:
907 if a != b:
908 ui.write(_(" patch test failed!"))
908 ui.write(_(" patch test failed!"))
909 ui.write(_(" (you may have an incompatible version of patch)\n"))
909 ui.write(_(" (you may have an incompatible version of patch)\n"))
910 problems += 1
910 problems += 1
911 os.unlink(fa)
911 os.unlink(fa)
912 os.unlink(fd)
912 os.unlink(fd)
913
913
914 # merge helper
914 # merge helper
915 ui.status(_("Checking merge helper...\n"))
915 ui.status(_("Checking merge helper...\n"))
916 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
916 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
917 or "hgmerge")
917 or "hgmerge")
918 cmdpath = util.find_in_path(cmd, path)
918 cmdpath = util.find_in_path(cmd, path)
919 if not cmdpath:
919 if not cmdpath:
920 cmdpath = util.find_in_path(cmd.split()[0], path)
920 cmdpath = util.find_in_path(cmd.split()[0], path)
921 if not cmdpath:
921 if not cmdpath:
922 if cmd == 'hgmerge':
922 if cmd == 'hgmerge':
923 ui.write(_(" No merge helper set and can't find default"
923 ui.write(_(" No merge helper set and can't find default"
924 " hgmerge script in PATH\n"))
924 " hgmerge script in PATH\n"))
925 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
925 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
926 else:
926 else:
927 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
927 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
928 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
928 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
929 problems += 1
929 problems += 1
930 else:
930 else:
931 # actually attempt a patch here
931 # actually attempt a patch here
932 fa = writetemp("1\n2\n3\n4\n")
932 fa = writetemp("1\n2\n3\n4\n")
933 fl = writetemp("1\n2\n3\ninsert\n4\n")
933 fl = writetemp("1\n2\n3\ninsert\n4\n")
934 fr = writetemp("begin\n1\n2\n3\n4\n")
934 fr = writetemp("begin\n1\n2\n3\n4\n")
935 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
935 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
936 if r:
936 if r:
937 ui.write(_(" got unexpected merge error %d!") % r)
937 ui.write(_(" got unexpected merge error %d!") % r)
938 problems += 1
938 problems += 1
939 m = file(fl).read()
939 m = file(fl).read()
940 if m != "begin\n1\n2\n3\ninsert\n4\n":
940 if m != "begin\n1\n2\n3\ninsert\n4\n":
941 ui.write(_(" got unexpected merge results!") % r)
941 ui.write(_(" got unexpected merge results!") % r)
942 ui.write(_(" (your merge helper may have the"
942 ui.write(_(" (your merge helper may have the"
943 " wrong argument order)\n"))
943 " wrong argument order)\n"))
944 ui.write(m)
944 ui.write(m)
945 os.unlink(fa)
945 os.unlink(fa)
946 os.unlink(fl)
946 os.unlink(fl)
947 os.unlink(fr)
947 os.unlink(fr)
948
948
949 # editor
949 # editor
950 ui.status(_("Checking commit editor...\n"))
950 ui.status(_("Checking commit editor...\n"))
951 editor = (os.environ.get("HGEDITOR") or
951 editor = (os.environ.get("HGEDITOR") or
952 ui.config("ui", "editor") or
952 ui.config("ui", "editor") or
953 os.environ.get("EDITOR", "vi"))
953 os.environ.get("EDITOR", "vi"))
954 cmdpath = util.find_in_path(editor, path)
954 cmdpath = util.find_in_path(editor, path)
955 if not cmdpath:
955 if not cmdpath:
956 cmdpath = util.find_in_path(editor.split()[0], path)
956 cmdpath = util.find_in_path(editor.split()[0], path)
957 if not cmdpath:
957 if not cmdpath:
958 if editor == 'vi':
958 if editor == 'vi':
959 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
959 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
960 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
960 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
961 else:
961 else:
962 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
962 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
963 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
963 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
964 problems += 1
964 problems += 1
965
965
966 # check username
966 # check username
967 ui.status(_("Checking username...\n"))
967 ui.status(_("Checking username...\n"))
968 user = os.environ.get("HGUSER")
968 user = os.environ.get("HGUSER")
969 if user is None:
969 if user is None:
970 user = ui.config("ui", "username")
970 user = ui.config("ui", "username")
971 if user is None:
971 if user is None:
972 user = os.environ.get("EMAIL")
972 user = os.environ.get("EMAIL")
973 if not user:
973 if not user:
974 ui.warn(" ")
974 ui.warn(" ")
975 ui.username()
975 ui.username()
976 ui.write(_(" (specify a username in your .hgrc file)\n"))
976 ui.write(_(" (specify a username in your .hgrc file)\n"))
977
977
978 if not problems:
978 if not problems:
979 ui.status(_("No problems detected\n"))
979 ui.status(_("No problems detected\n"))
980 else:
980 else:
981 ui.write(_("%s problems detected,"
981 ui.write(_("%s problems detected,"
982 " please check your install!\n") % problems)
982 " please check your install!\n") % problems)
983
983
984 return problems
984 return problems
985
985
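# Editor's sketch (illustrative, not Mercurial code): the patch check in
# debuginstall() above builds a one-hunk diff, runs the external patcher on
# a temporary copy of the original, and verifies the result.  The same
# self-test using only the standard library; assumes a POSIX "patch"
# binary on PATH.

def _patch_selftest_demo():
    import difflib, os, subprocess, tempfile
    old = "1\n2\n3\n4\n"
    new = "1\n2\n3\ninsert\n4\n"
    diff = "".join(difflib.unified_diff(old.splitlines(True),
                                        new.splitlines(True), "a", "a"))
    fd, target = tempfile.mkstemp()
    os.write(fd, old.encode())
    os.close(fd)
    # feed the diff on stdin and let patch rewrite the temp file in place
    p = subprocess.Popen(["patch", target], stdin=subprocess.PIPE)
    p.communicate(diff.encode())
    ok = p.returncode == 0 and open(target).read() == new
    os.unlink(target)
    return ok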
986 def debugrename(ui, repo, file1, *pats, **opts):
986 def debugrename(ui, repo, file1, *pats, **opts):
987 """dump rename information"""
987 """dump rename information"""
988
988
989 ctx = repo.changectx(opts.get('rev', 'tip'))
989 ctx = repo.changectx(opts.get('rev', 'tip'))
990 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
990 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
991 ctx.node()):
991 ctx.node()):
992 m = ctx.filectx(abs).renamed()
992 m = ctx.filectx(abs).renamed()
993 if m:
993 if m:
994 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
994 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
995 else:
995 else:
996 ui.write(_("%s not renamed\n") % rel)
996 ui.write(_("%s not renamed\n") % rel)
997
997
998 def debugwalk(ui, repo, *pats, **opts):
998 def debugwalk(ui, repo, *pats, **opts):
999 """show how files match on given patterns"""
999 """show how files match on given patterns"""
1000 items = list(cmdutil.walk(repo, pats, opts))
1000 items = list(cmdutil.walk(repo, pats, opts))
1001 if not items:
1001 if not items:
1002 return
1002 return
1003 fmt = '%%s %%-%ds %%-%ds %%s' % (
1003 fmt = '%%s %%-%ds %%-%ds %%s' % (
1004 max([len(abs) for (src, abs, rel, exact) in items]),
1004 max([len(abs) for (src, abs, rel, exact) in items]),
1005 max([len(rel) for (src, abs, rel, exact) in items]))
1005 max([len(rel) for (src, abs, rel, exact) in items]))
1006 for src, abs, rel, exact in items:
1006 for src, abs, rel, exact in items:
1007 line = fmt % (src, abs, rel, exact and 'exact' or '')
1007 line = fmt % (src, abs, rel, exact and 'exact' or '')
1008 ui.write("%s\n" % line.rstrip())
1008 ui.write("%s\n" % line.rstrip())
1009
1009
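Editorial aside: debugwalk above builds its output format in two passes, computing the column widths from the data and splicing them into a "%%-%ds" template. A minimal standalone sketch of the same idea (the items tuples are made-up sample values, not real walk results):

items = [('f', 'dir/a.txt', 'a.txt'), ('f', 'dir/subdir/b.txt', 'subdir/b.txt')]
fmt = '%%s %%-%ds %%-%ds %%s' % (
    max(len(a) for (src, a, rel) in items),      # widest absolute path
    max(len(rel) for (src, a, rel) in items))    # widest relative path
# fmt is now '%s %-16s %-12s %s'; each row prints with padded columns
for src, a, rel in items:
    print((fmt % (src, a, rel, '')).rstrip())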
1010 def diff(ui, repo, *pats, **opts):
1010 def diff(ui, repo, *pats, **opts):
1011 """diff repository (or selected files)
1011 """diff repository (or selected files)
1012
1012
1013 Show differences between revisions for the specified files.
1013 Show differences between revisions for the specified files.
1014
1014
1015 Differences between files are shown using the unified diff format.
1015 Differences between files are shown using the unified diff format.
1016
1016
1017 NOTE: diff may generate unexpected results for merges, as it will
1017 NOTE: diff may generate unexpected results for merges, as it will
1018 default to comparing against the working directory's first parent
1018 default to comparing against the working directory's first parent
1019 changeset if no revisions are specified.
1019 changeset if no revisions are specified.
1020
1020
1021 When two revision arguments are given, then changes are shown
1021 When two revision arguments are given, then changes are shown
1022 between those revisions. If only one revision is specified then
1022 between those revisions. If only one revision is specified then
1023 that revision is compared to the working directory, and, when no
1023 that revision is compared to the working directory, and, when no
1024 revisions are specified, the working directory files are compared
1024 revisions are specified, the working directory files are compared
1025 to its parent.
1025 to its parent.
1026
1026
1027 Without the -a option, diff will avoid generating diffs of files
1027 Without the -a option, diff will avoid generating diffs of files
1028 it detects as binary. With -a, diff will generate a diff anyway,
1028 it detects as binary. With -a, diff will generate a diff anyway,
1029 probably with undesirable results.
1029 probably with undesirable results.
1030 """
1030 """
1031 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1031 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1032
1032
1033 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1033 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1034
1034
1035 patch.diff(repo, node1, node2, fns, match=matchfn,
1035 patch.diff(repo, node1, node2, fns, match=matchfn,
1036 opts=patch.diffopts(ui, opts))
1036 opts=patch.diffopts(ui, opts))
1037
1037
1038 def export(ui, repo, *changesets, **opts):
1038 def export(ui, repo, *changesets, **opts):
1039 """dump the header and diffs for one or more changesets
1039 """dump the header and diffs for one or more changesets
1040
1040
1041 Print the changeset header and diffs for one or more revisions.
1041 Print the changeset header and diffs for one or more revisions.
1042
1042
1043 The information shown in the changeset header is: author,
1043 The information shown in the changeset header is: author,
1044 changeset hash, parent(s) and commit comment.
1044 changeset hash, parent(s) and commit comment.
1045
1045
1046 NOTE: export may generate unexpected diff output for merge changesets,
1046 NOTE: export may generate unexpected diff output for merge changesets,
1047 as it will compare the merge changeset against its first parent only.
1047 as it will compare the merge changeset against its first parent only.
1048
1048
1049 Output may be to a file, in which case the name of the file is
1049 Output may be to a file, in which case the name of the file is
1050 given using a format string. The formatting rules are as follows:
1050 given using a format string. The formatting rules are as follows:
1051
1051
1052 %% literal "%" character
1052 %% literal "%" character
1053 %H changeset hash (40 bytes of hexadecimal)
1053 %H changeset hash (40 bytes of hexadecimal)
1054 %N number of patches being generated
1054 %N number of patches being generated
1055 %R changeset revision number
1055 %R changeset revision number
1056 %b basename of the exporting repository
1056 %b basename of the exporting repository
1057 %h short-form changeset hash (12 bytes of hexadecimal)
1057 %h short-form changeset hash (12 bytes of hexadecimal)
1058 %n zero-padded sequence number, starting at 1
1058 %n zero-padded sequence number, starting at 1
1059 %r zero-padded changeset revision number
1059 %r zero-padded changeset revision number
1060
1060
1061 Without the -a option, export will avoid generating diffs of files
1061 Without the -a option, export will avoid generating diffs of files
1062 it detects as binary. With -a, export will generate a diff anyway,
1062 it detects as binary. With -a, export will generate a diff anyway,
1063 probably with undesirable results.
1063 probably with undesirable results.
1064
1064
1065 With the --switch-parent option, the diff will be against the second
1065 With the --switch-parent option, the diff will be against the second
1066 parent. This can be useful for reviewing a merge.
1066 parent. This can be useful for reviewing a merge.
1067 """
1067 """
1068 if not changesets:
1068 if not changesets:
1069 raise util.Abort(_("export requires at least one changeset"))
1069 raise util.Abort(_("export requires at least one changeset"))
1070 revs = cmdutil.revrange(repo, changesets)
1070 revs = cmdutil.revrange(repo, changesets)
1071 if len(revs) > 1:
1071 if len(revs) > 1:
1072 ui.note(_('exporting patches:\n'))
1072 ui.note(_('exporting patches:\n'))
1073 else:
1073 else:
1074 ui.note(_('exporting patch:\n'))
1074 ui.note(_('exporting patch:\n'))
1075 patch.export(repo, revs, template=opts['output'],
1075 patch.export(repo, revs, template=opts['output'],
1076 switch_parent=opts['switch_parent'],
1076 switch_parent=opts['switch_parent'],
1077 opts=patch.diffopts(ui, opts))
1077 opts=patch.diffopts(ui, opts))
1078
1078
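Editorial aside: as a rough illustration of the output format codes listed in the export docstring above, here is a hedged sketch of how such a %-template could be expanded. The expand helper, its arguments, and the sample values are hypothetical; the real expansion happens inside patch.export, not in this code:

import os, re

def expand(template, repo_root, node_hex, rev, seqno, total):
    width = len(str(total))
    subs = {
        '%': '%',                            # %% -> literal percent
        'H': node_hex,                       # full 40-digit hash
        'h': node_hex[:12],                  # short hash
        'R': str(rev),                       # revision number
        'r': str(rev).zfill(width),          # zero-padded revision number
        'N': str(total),                     # number of patches being generated
        'n': str(seqno).zfill(width),        # zero-padded sequence number
        'b': os.path.basename(repo_root),    # basename of the exporting repo
    }
    return re.sub(r'%(.)', lambda m: subs.get(m.group(1), m.group(0)), template)

print(expand('%b-%r-%h.patch', '/repos/hg', 'a' * 40, 4195, 1, 3))
# -> hg-4195-aaaaaaaaaaaa.patch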
1079 def grep(ui, repo, pattern, *pats, **opts):
1079 def grep(ui, repo, pattern, *pats, **opts):
1080 """search for a pattern in specified files and revisions
1080 """search for a pattern in specified files and revisions
1081
1081
1082 Search revisions of files for a regular expression.
1082 Search revisions of files for a regular expression.
1083
1083
1084 This command behaves differently than Unix grep. It only accepts
1084 This command behaves differently than Unix grep. It only accepts
1085 Python/Perl regexps. It searches repository history, not the
1085 Python/Perl regexps. It searches repository history, not the
1086 working directory. It always prints the revision number in which
1086 working directory. It always prints the revision number in which
1087 a match appears.
1087 a match appears.
1088
1088
1089 By default, grep only prints output for the first revision of a
1089 By default, grep only prints output for the first revision of a
1090 file in which it finds a match. To get it to print every revision
1090 file in which it finds a match. To get it to print every revision
1091 that contains a change in match status ("-" for a match that
1091 that contains a change in match status ("-" for a match that
1092 becomes a non-match, or "+" for a non-match that becomes a match),
1092 becomes a non-match, or "+" for a non-match that becomes a match),
1093 use the --all flag.
1093 use the --all flag.
1094 """
1094 """
1095 reflags = 0
1095 reflags = 0
1096 if opts['ignore_case']:
1096 if opts['ignore_case']:
1097 reflags |= re.I
1097 reflags |= re.I
1098 regexp = re.compile(pattern, reflags)
1098 regexp = re.compile(pattern, reflags)
1099 sep, eol = ':', '\n'
1099 sep, eol = ':', '\n'
1100 if opts['print0']:
1100 if opts['print0']:
1101 sep = eol = '\0'
1101 sep = eol = '\0'
1102
1102
1103 fcache = {}
1103 fcache = {}
1104 def getfile(fn):
1104 def getfile(fn):
1105 if fn not in fcache:
1105 if fn not in fcache:
1106 fcache[fn] = repo.file(fn)
1106 fcache[fn] = repo.file(fn)
1107 return fcache[fn]
1107 return fcache[fn]
1108
1108
1109 def matchlines(body):
1109 def matchlines(body):
1110 begin = 0
1110 begin = 0
1111 linenum = 0
1111 linenum = 0
1112 while True:
1112 while True:
1113 match = regexp.search(body, begin)
1113 match = regexp.search(body, begin)
1114 if not match:
1114 if not match:
1115 break
1115 break
1116 mstart, mend = match.span()
1116 mstart, mend = match.span()
1117 linenum += body.count('\n', begin, mstart) + 1
1117 linenum += body.count('\n', begin, mstart) + 1
1118 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1118 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1119 lend = body.find('\n', mend)
1119 lend = body.find('\n', mend)
1120 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1120 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1121 begin = lend + 1
1121 begin = lend + 1
1122
1122
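# Editorial sketch (not part of the file): matchlines above walks the buffer
# with regexp.search(body, begin) and reports (line, column, text) per hit.
# Self-contained illustration with a made-up body string:
import re
body = "alpha\nbeta gamma\ngamma delta\n"
regexp = re.compile('gamma')
begin = linenum = 0
while True:
    match = regexp.search(body, begin)
    if not match:
        break
    mstart, mend = match.span()
    linenum += body.count('\n', begin, mstart) + 1
    lstart = body.rfind('\n', begin, mstart) + 1 or begin
    lend = body.find('\n', mend)
    print("%d %d %s" % (linenum, mstart - lstart, body[lstart:lend]))
    begin = lend + 1
# prints: "2 5 beta gamma" then "3 0 gamma delta"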
1123 class linestate(object):
1123 class linestate(object):
1124 def __init__(self, line, linenum, colstart, colend):
1124 def __init__(self, line, linenum, colstart, colend):
1125 self.line = line
1125 self.line = line
1126 self.linenum = linenum
1126 self.linenum = linenum
1127 self.colstart = colstart
1127 self.colstart = colstart
1128 self.colend = colend
1128 self.colend = colend
1129
1129
1130 def __eq__(self, other):
1130 def __eq__(self, other):
1131 return self.line == other.line
1131 return self.line == other.line
1132
1132
1133 matches = {}
1133 matches = {}
1134 copies = {}
1134 copies = {}
1135 def grepbody(fn, rev, body):
1135 def grepbody(fn, rev, body):
1136 matches[rev].setdefault(fn, [])
1136 matches[rev].setdefault(fn, [])
1137 m = matches[rev][fn]
1137 m = matches[rev][fn]
1138 for lnum, cstart, cend, line in matchlines(body):
1138 for lnum, cstart, cend, line in matchlines(body):
1139 s = linestate(line, lnum, cstart, cend)
1139 s = linestate(line, lnum, cstart, cend)
1140 m.append(s)
1140 m.append(s)
1141
1141
1142 def difflinestates(a, b):
1142 def difflinestates(a, b):
1143 sm = difflib.SequenceMatcher(None, a, b)
1143 sm = difflib.SequenceMatcher(None, a, b)
1144 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1144 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1145 if tag == 'insert':
1145 if tag == 'insert':
1146 for i in xrange(blo, bhi):
1146 for i in xrange(blo, bhi):
1147 yield ('+', b[i])
1147 yield ('+', b[i])
1148 elif tag == 'delete':
1148 elif tag == 'delete':
1149 for i in xrange(alo, ahi):
1149 for i in xrange(alo, ahi):
1150 yield ('-', a[i])
1150 yield ('-', a[i])
1151 elif tag == 'replace':
1151 elif tag == 'replace':
1152 for i in xrange(alo, ahi):
1152 for i in xrange(alo, ahi):
1153 yield ('-', a[i])
1153 yield ('-', a[i])
1154 for i in xrange(blo, bhi):
1154 for i in xrange(blo, bhi):
1155 yield ('+', b[i])
1155 yield ('+', b[i])
1156
1156
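# Editorial sketch (not part of the file): difflinestates above maps
# difflib.SequenceMatcher opcodes onto the "+"/"-" change markers that
# grep --all prints. Minimal standalone example with made-up line lists:
import difflib
a = ['x = 1', 'y = 2', 'z = 3']
b = ['x = 1', 'y = 20', 'z = 3', 'w = 4']
for tag, alo, ahi, blo, bhi in difflib.SequenceMatcher(None, a, b).get_opcodes():
    if tag in ('delete', 'replace'):
        for line in a[alo:ahi]:
            print('- ' + line)    # match disappears in the newer revision
    if tag in ('insert', 'replace'):
        for line in b[blo:bhi]:
            print('+ ' + line)    # match appears in the newer revision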
1157 prev = {}
1157 prev = {}
1158 def display(fn, rev, states, prevstates):
1158 def display(fn, rev, states, prevstates):
1159 counts = {'-': 0, '+': 0}
1159 counts = {'-': 0, '+': 0}
1160 filerevmatches = {}
1160 filerevmatches = {}
1161 if incrementing or not opts['all']:
1161 if incrementing or not opts['all']:
1162 a, b, r = prevstates, states, rev
1162 a, b, r = prevstates, states, rev
1163 else:
1163 else:
1164 a, b, r = states, prevstates, prev.get(fn, -1)
1164 a, b, r = states, prevstates, prev.get(fn, -1)
1165 for change, l in difflinestates(a, b):
1165 for change, l in difflinestates(a, b):
1166 cols = [fn, str(r)]
1166 cols = [fn, str(r)]
1167 if opts['line_number']:
1167 if opts['line_number']:
1168 cols.append(str(l.linenum))
1168 cols.append(str(l.linenum))
1169 if opts['all']:
1169 if opts['all']:
1170 cols.append(change)
1170 cols.append(change)
1171 if opts['user']:
1171 if opts['user']:
1172 cols.append(ui.shortuser(get(r)[1]))
1172 cols.append(ui.shortuser(get(r)[1]))
1173 if opts['files_with_matches']:
1173 if opts['files_with_matches']:
1174 c = (fn, r)
1174 c = (fn, r)
1175 if c in filerevmatches:
1175 if c in filerevmatches:
1176 continue
1176 continue
1177 filerevmatches[c] = 1
1177 filerevmatches[c] = 1
1178 else:
1178 else:
1179 cols.append(l.line)
1179 cols.append(l.line)
1180 ui.write(sep.join(cols), eol)
1180 ui.write(sep.join(cols), eol)
1181 counts[change] += 1
1181 counts[change] += 1
1182 return counts['+'], counts['-']
1182 return counts['+'], counts['-']
1183
1183
1184 fstate = {}
1184 fstate = {}
1185 skip = {}
1185 skip = {}
1186 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1186 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1187 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1187 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1188 count = 0
1188 count = 0
1189 incrementing = False
1189 incrementing = False
1190 follow = opts.get('follow')
1190 follow = opts.get('follow')
1191 for st, rev, fns in changeiter:
1191 for st, rev, fns in changeiter:
1192 if st == 'window':
1192 if st == 'window':
1193 incrementing = rev
1193 incrementing = rev
1194 matches.clear()
1194 matches.clear()
1195 elif st == 'add':
1195 elif st == 'add':
1196 mf = repo.changectx(rev).manifest()
1196 mf = repo.changectx(rev).manifest()
1197 matches[rev] = {}
1197 matches[rev] = {}
1198 for fn in fns:
1198 for fn in fns:
1199 if fn in skip:
1199 if fn in skip:
1200 continue
1200 continue
1201 fstate.setdefault(fn, {})
1201 fstate.setdefault(fn, {})
1202 try:
1202 try:
1203 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1203 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1204 if follow:
1204 if follow:
1205 copied = getfile(fn).renamed(mf[fn])
1205 copied = getfile(fn).renamed(mf[fn])
1206 if copied:
1206 if copied:
1207 copies.setdefault(rev, {})[fn] = copied[0]
1207 copies.setdefault(rev, {})[fn] = copied[0]
1208 except KeyError:
1208 except KeyError:
1209 pass
1209 pass
1210 elif st == 'iter':
1210 elif st == 'iter':
1211 states = matches[rev].items()
1211 states = matches[rev].items()
1212 states.sort()
1212 states.sort()
1213 for fn, m in states:
1213 for fn, m in states:
1214 copy = copies.get(rev, {}).get(fn)
1214 copy = copies.get(rev, {}).get(fn)
1215 if fn in skip:
1215 if fn in skip:
1216 if copy:
1216 if copy:
1217 skip[copy] = True
1217 skip[copy] = True
1218 continue
1218 continue
1219 if incrementing or not opts['all'] or fstate[fn]:
1219 if incrementing or not opts['all'] or fstate[fn]:
1220 pos, neg = display(fn, rev, m, fstate[fn])
1220 pos, neg = display(fn, rev, m, fstate[fn])
1221 count += pos + neg
1221 count += pos + neg
1222 if pos and not opts['all']:
1222 if pos and not opts['all']:
1223 skip[fn] = True
1223 skip[fn] = True
1224 if copy:
1224 if copy:
1225 skip[copy] = True
1225 skip[copy] = True
1226 fstate[fn] = m
1226 fstate[fn] = m
1227 if copy:
1227 if copy:
1228 fstate[copy] = m
1228 fstate[copy] = m
1229 prev[fn] = rev
1229 prev[fn] = rev
1230
1230
1231 if not incrementing:
1231 if not incrementing:
1232 fstate = fstate.items()
1232 fstate = fstate.items()
1233 fstate.sort()
1233 fstate.sort()
1234 for fn, state in fstate:
1234 for fn, state in fstate:
1235 if fn in skip:
1235 if fn in skip:
1236 continue
1236 continue
1237 if fn not in copies.get(prev[fn], {}):
1237 if fn not in copies.get(prev[fn], {}):
1238 display(fn, rev, {}, state)
1238 display(fn, rev, {}, state)
1239 return (count == 0 and 1) or 0
1239 return (count == 0 and 1) or 0
1240
1240
1241 def heads(ui, repo, **opts):
1241 def heads(ui, repo, **opts):
1242 """show current repository heads
1242 """show current repository heads
1243
1243
1244 Show all repository head changesets.
1244 Show all repository head changesets.
1245
1245
1246 Repository "heads" are changesets that don't have children
1246 Repository "heads" are changesets that don't have children
1247 changesets. They are where development generally takes place and
1247 changesets. They are where development generally takes place and
1248 are the usual targets for update and merge operations.
1248 are the usual targets for update and merge operations.
1249 """
1249 """
1250 if opts['rev']:
1250 if opts['rev']:
1251 heads = repo.heads(repo.lookup(opts['rev']))
1251 heads = repo.heads(repo.lookup(opts['rev']))
1252 else:
1252 else:
1253 heads = repo.heads()
1253 heads = repo.heads()
1254 displayer = cmdutil.show_changeset(ui, repo, opts)
1254 displayer = cmdutil.show_changeset(ui, repo, opts)
1255 for n in heads:
1255 for n in heads:
1256 displayer.show(changenode=n)
1256 displayer.show(changenode=n)
1257
1257
1258 def help_(ui, name=None, with_version=False):
1258 def help_(ui, name=None, with_version=False):
1259 """show help for a command, extension, or list of commands
1259 """show help for a command, extension, or list of commands
1260
1260
1261 With no arguments, print a list of commands and short help.
1261 With no arguments, print a list of commands and short help.
1262
1262
1263 Given a command name, print help for that command.
1263 Given a command name, print help for that command.
1264
1264
1265 Given an extension name, print help for that extension, and the
1265 Given an extension name, print help for that extension, and the
1266 commands it provides."""
1266 commands it provides."""
1267 option_lists = []
1267 option_lists = []
1268
1268
1269 def helpcmd(name):
1269 def helpcmd(name):
1270 if with_version:
1270 if with_version:
1271 version_(ui)
1271 version_(ui)
1272 ui.write('\n')
1272 ui.write('\n')
1273 aliases, i = findcmd(ui, name)
1273 aliases, i = findcmd(ui, name)
1274 # synopsis
1274 # synopsis
1275 ui.write("%s\n\n" % i[2])
1275 ui.write("%s\n\n" % i[2])
1276
1276
1277 # description
1277 # description
1278 doc = i[0].__doc__
1278 doc = i[0].__doc__
1279 if not doc:
1279 if not doc:
1280 doc = _("(No help text available)")
1280 doc = _("(No help text available)")
1281 if ui.quiet:
1281 if ui.quiet:
1282 doc = doc.splitlines(0)[0]
1282 doc = doc.splitlines(0)[0]
1283 ui.write("%s\n" % doc.rstrip())
1283 ui.write("%s\n" % doc.rstrip())
1284
1284
1285 if not ui.quiet:
1285 if not ui.quiet:
1286 # aliases
1286 # aliases
1287 if len(aliases) > 1:
1287 if len(aliases) > 1:
1288 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1288 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1289
1289
1290 # options
1290 # options
1291 if i[1]:
1291 if i[1]:
1292 option_lists.append(("options", i[1]))
1292 option_lists.append(("options", i[1]))
1293
1293
1294 def helplist(select=None):
1294 def helplist(select=None):
1295 h = {}
1295 h = {}
1296 cmds = {}
1296 cmds = {}
1297 for c, e in table.items():
1297 for c, e in table.items():
1298 f = c.split("|", 1)[0]
1298 f = c.split("|", 1)[0]
1299 if select and not select(f):
1299 if select and not select(f):
1300 continue
1300 continue
1301 if name == "shortlist" and not f.startswith("^"):
1301 if name == "shortlist" and not f.startswith("^"):
1302 continue
1302 continue
1303 f = f.lstrip("^")
1303 f = f.lstrip("^")
1304 if not ui.debugflag and f.startswith("debug"):
1304 if not ui.debugflag and f.startswith("debug"):
1305 continue
1305 continue
1306 doc = e[0].__doc__
1306 doc = e[0].__doc__
1307 if not doc:
1307 if not doc:
1308 doc = _("(No help text available)")
1308 doc = _("(No help text available)")
1309 h[f] = doc.splitlines(0)[0].rstrip()
1309 h[f] = doc.splitlines(0)[0].rstrip()
1310 cmds[f] = c.lstrip("^")
1310 cmds[f] = c.lstrip("^")
1311
1311
1312 fns = h.keys()
1312 fns = h.keys()
1313 fns.sort()
1313 fns.sort()
1314 m = max(map(len, fns))
1314 m = max(map(len, fns))
1315 for f in fns:
1315 for f in fns:
1316 if ui.verbose:
1316 if ui.verbose:
1317 commands = cmds[f].replace("|",", ")
1317 commands = cmds[f].replace("|",", ")
1318 ui.write(" %s:\n %s\n"%(commands, h[f]))
1318 ui.write(" %s:\n %s\n"%(commands, h[f]))
1319 else:
1319 else:
1320 ui.write(' %-*s %s\n' % (m, f, h[f]))
1320 ui.write(' %-*s %s\n' % (m, f, h[f]))
1321
1321
1322 def helptopic(name):
1322 def helptopic(name):
1323 v = None
1323 v = None
1324 for i in help.helptable:
1324 for i in help.helptable:
1325 l = i.split('|')
1325 l = i.split('|')
1326 if name in l:
1326 if name in l:
1327 v = i
1327 v = i
1328 header = l[-1]
1328 header = l[-1]
1329 if not v:
1329 if not v:
1330 raise UnknownCommand(name)
1330 raise UnknownCommand(name)
1331
1331
1332 # description
1332 # description
1333 doc = help.helptable[v]
1333 doc = help.helptable[v]
1334 if not doc:
1334 if not doc:
1335 doc = _("(No help text available)")
1335 doc = _("(No help text available)")
1336 if callable(doc):
1336 if callable(doc):
1337 doc = doc()
1337 doc = doc()
1338
1338
1339 ui.write("%s\n" % header)
1339 ui.write("%s\n" % header)
1340 ui.write("%s\n" % doc.rstrip())
1340 ui.write("%s\n" % doc.rstrip())
1341
1341
1342 def helpext(name):
1342 def helpext(name):
1343 try:
1343 try:
1344 mod = findext(name)
1344 mod = findext(name)
1345 except KeyError:
1345 except KeyError:
1346 raise UnknownCommand(name)
1346 raise UnknownCommand(name)
1347
1347
1348 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1348 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1349 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1349 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1350 for d in doc[1:]:
1350 for d in doc[1:]:
1351 ui.write(d, '\n')
1351 ui.write(d, '\n')
1352
1352
1353 ui.status('\n')
1353 ui.status('\n')
1354
1354
1355 try:
1355 try:
1356 ct = mod.cmdtable
1356 ct = mod.cmdtable
1357 except AttributeError:
1357 except AttributeError:
1358 ui.status(_('no commands defined\n'))
1358 ui.status(_('no commands defined\n'))
1359 return
1359 return
1360
1360
1361 if ui.verbose:
1361 if ui.verbose:
1362 ui.status(_('list of commands:\n\n'))
1362 ui.status(_('list of commands:\n\n'))
1363 else:
1363 else:
1364 ui.status(_('list of commands (use "hg help -v %s" '
1364 ui.status(_('list of commands (use "hg help -v %s" '
1365 'to show aliases and global options):\n\n') % name)
1365 'to show aliases and global options):\n\n') % name)
1366
1366
1367 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1367 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1368 helplist(modcmds.has_key)
1368 helplist(modcmds.has_key)
1369
1369
1370 if name and name != 'shortlist':
1370 if name and name != 'shortlist':
1371 i = None
1371 i = None
1372 for f in (helpcmd, helptopic, helpext):
1372 for f in (helpcmd, helptopic, helpext):
1373 try:
1373 try:
1374 f(name)
1374 f(name)
1375 i = None
1375 i = None
1376 break
1376 break
1377 except UnknownCommand, inst:
1377 except UnknownCommand, inst:
1378 i = inst
1378 i = inst
1379 if i:
1379 if i:
1380 raise i
1380 raise i
1381
1381
1382 else:
1382 else:
1383 # program name
1383 # program name
1384 if ui.verbose or with_version:
1384 if ui.verbose or with_version:
1385 version_(ui)
1385 version_(ui)
1386 else:
1386 else:
1387 ui.status(_("Mercurial Distributed SCM\n"))
1387 ui.status(_("Mercurial Distributed SCM\n"))
1388 ui.status('\n')
1388 ui.status('\n')
1389
1389
1390 # list of commands
1390 # list of commands
1391 if name == "shortlist":
1391 if name == "shortlist":
1392 ui.status(_('basic commands (use "hg help" '
1392 ui.status(_('basic commands (use "hg help" '
1393 'for the full list or option "-v" for details):\n\n'))
1393 'for the full list or option "-v" for details):\n\n'))
1394 elif ui.verbose:
1394 elif ui.verbose:
1395 ui.status(_('list of commands:\n\n'))
1395 ui.status(_('list of commands:\n\n'))
1396 else:
1396 else:
1397 ui.status(_('list of commands (use "hg help -v" '
1397 ui.status(_('list of commands (use "hg help -v" '
1398 'to show aliases and global options):\n\n'))
1398 'to show aliases and global options):\n\n'))
1399
1399
1400 helplist()
1400 helplist()
1401
1401
1402 # global options
1402 # global options
1403 if ui.verbose:
1403 if ui.verbose:
1404 option_lists.append(("global options", globalopts))
1404 option_lists.append(("global options", globalopts))
1405
1405
1406 # list all option lists
1406 # list all option lists
1407 opt_output = []
1407 opt_output = []
1408 for title, options in option_lists:
1408 for title, options in option_lists:
1409 opt_output.append(("\n%s:\n" % title, None))
1409 opt_output.append(("\n%s:\n" % title, None))
1410 for shortopt, longopt, default, desc in options:
1410 for shortopt, longopt, default, desc in options:
1411 if "DEPRECATED" in desc and not ui.verbose: continue
1411 if "DEPRECATED" in desc and not ui.verbose: continue
1412 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1412 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1413 longopt and " --%s" % longopt),
1413 longopt and " --%s" % longopt),
1414 "%s%s" % (desc,
1414 "%s%s" % (desc,
1415 default
1415 default
1416 and _(" (default: %s)") % default
1416 and _(" (default: %s)") % default
1417 or "")))
1417 or "")))
1418
1418
1419 if opt_output:
1419 if opt_output:
1420 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1420 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1421 for first, second in opt_output:
1421 for first, second in opt_output:
1422 if second:
1422 if second:
1423 ui.write(" %-*s %s\n" % (opts_len, first, second))
1423 ui.write(" %-*s %s\n" % (opts_len, first, second))
1424 else:
1424 else:
1425 ui.write("%s\n" % first)
1425 ui.write("%s\n" % first)
1426
1426
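Editorial aside: the aligned option listing at the end of help_ relies on the '%-*s' conversion, which takes the field width from the argument list. A one-line sketch with made-up option text:

print("  %-*s %s" % (15, "-v --verbose", "enable additional output"))
# -> "  -v --verbose    enable additional output"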
1427 def identify(ui, repo):
1427 def identify(ui, repo):
1428 """print information about the working copy
1428 """print information about the working copy
1429
1429
1430 Print a short summary of the current state of the repo.
1430 Print a short summary of the current state of the repo.
1431
1431
1432 This summary identifies the repository state using one or two parent
1432 This summary identifies the repository state using one or two parent
1433 hash identifiers, followed by a "+" if there are uncommitted changes
1433 hash identifiers, followed by a "+" if there are uncommitted changes
1434 in the working directory, followed by a list of tags for this revision.
1434 in the working directory, followed by a list of tags for this revision.
1435 """
1435 """
1436 parents = [p for p in repo.dirstate.parents() if p != nullid]
1436 parents = [p for p in repo.dirstate.parents() if p != nullid]
1437 if not parents:
1437 if not parents:
1438 ui.write(_("unknown\n"))
1438 ui.write(_("unknown\n"))
1439 return
1439 return
1440
1440
1441 hexfunc = ui.debugflag and hex or short
1441 hexfunc = ui.debugflag and hex or short
1442 modified, added, removed, deleted = repo.status()[:4]
1442 modified, added, removed, deleted = repo.status()[:4]
1443 output = ["%s%s" %
1443 output = ["%s%s" %
1444 ('+'.join([hexfunc(parent) for parent in parents]),
1444 ('+'.join([hexfunc(parent) for parent in parents]),
1445 (modified or added or removed or deleted) and "+" or "")]
1445 (modified or added or removed or deleted) and "+" or "")]
1446
1446
1447 if not ui.quiet:
1447 if not ui.quiet:
1448
1448
1449 branch = util.tolocal(repo.workingctx().branch())
1449 branch = util.tolocal(repo.workingctx().branch())
1450 if branch:
1450 if branch:
1451 output.append("(%s)" % branch)
1451 output.append("(%s)" % branch)
1452
1452
1453 # multiple tags for a single parent separated by '/'
1453 # multiple tags for a single parent separated by '/'
1454 parenttags = ['/'.join(tags)
1454 parenttags = ['/'.join(tags)
1455 for tags in map(repo.nodetags, parents) if tags]
1455 for tags in map(repo.nodetags, parents) if tags]
1456 # tags for multiple parents separated by ' + '
1456 # tags for multiple parents separated by ' + '
1457 if parenttags:
1457 if parenttags:
1458 output.append(' + '.join(parenttags))
1458 output.append(' + '.join(parenttags))
1459
1459
1460 ui.write("%s\n" % ' '.join(output))
1460 ui.write("%s\n" % ' '.join(output))
1461
1461
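Editorial aside: a quick sketch of the identifier string identify assembles, with parent hashes joined by '+' and a trailing '+' when the working directory is dirty (the short hashes and branch name below are invented):

parents_hex = ['d5a2cbd9d60b', '1e0b6a1b6dd4']
dirty = True
print('+'.join(parents_hex) + (dirty and '+' or '') + ' (mybranch)')
# -> d5a2cbd9d60b+1e0b6a1b6dd4+ (mybranch)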
1462 def import_(ui, repo, patch1, *patches, **opts):
1462 def import_(ui, repo, patch1, *patches, **opts):
1463 """import an ordered set of patches
1463 """import an ordered set of patches
1464
1464
1465 Import a list of patches and commit them individually.
1465 Import a list of patches and commit them individually.
1466
1466
1467 If there are outstanding changes in the working directory, import
1467 If there are outstanding changes in the working directory, import
1468 will abort unless given the -f flag.
1468 will abort unless given the -f flag.
1469
1469
1470 You can import a patch straight from a mail message. Even patches
1470 You can import a patch straight from a mail message. Even patches
1471 as attachments work (body part must be type text/plain or
1471 as attachments work (body part must be type text/plain or
1472 text/x-patch to be used). From and Subject headers of email
1472 text/x-patch to be used). From and Subject headers of email
1473 message are used as default committer and commit message. All
1473 message are used as default committer and commit message. All
1474 text/plain body parts before first diff are added to commit
1474 text/plain body parts before first diff are added to commit
1475 message.
1475 message.
1476
1476
1477 If imported patch was generated by hg export, user and description
1477 If imported patch was generated by hg export, user and description
1478 from patch override values from message headers and body. Values
1478 from patch override values from message headers and body. Values
1479 given on command line with -m and -u override these.
1479 given on command line with -m and -u override these.
1480
1480
1481 To read a patch from standard input, use patch name "-".
1481 To read a patch from standard input, use patch name "-".
1482 """
1482 """
1483 patches = (patch1,) + patches
1483 patches = (patch1,) + patches
1484
1484
1485 if not opts['force']:
1485 if not opts['force']:
1486 bail_if_changed(repo)
1486 bail_if_changed(repo)
1487
1487
1488 d = opts["base"]
1488 d = opts["base"]
1489 strip = opts["strip"]
1489 strip = opts["strip"]
1490
1490
1491 wlock = repo.wlock()
1491 wlock = repo.wlock()
1492 lock = repo.lock()
1492 lock = repo.lock()
1493
1493
1494 for p in patches:
1494 for p in patches:
1495 pf = os.path.join(d, p)
1495 pf = os.path.join(d, p)
1496
1496
1497 if pf == '-':
1497 if pf == '-':
1498 ui.status(_("applying patch from stdin\n"))
1498 ui.status(_("applying patch from stdin\n"))
1499 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1499 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1500 else:
1500 else:
1501 ui.status(_("applying %s\n") % p)
1501 ui.status(_("applying %s\n") % p)
1502 tmpname, message, user, date = patch.extract(ui, file(pf))
1502 tmpname, message, user, date = patch.extract(ui, file(pf))
1503
1503
1504 if tmpname is None:
1504 if tmpname is None:
1505 raise util.Abort(_('no diffs found'))
1505 raise util.Abort(_('no diffs found'))
1506
1506
1507 try:
1507 try:
1508 cmdline_message = logmessage(opts)
1508 cmdline_message = logmessage(opts)
1509 if cmdline_message:
1509 if cmdline_message:
1510 # pickup the cmdline msg
1510 # pickup the cmdline msg
1511 message = cmdline_message
1511 message = cmdline_message
1512 elif message:
1512 elif message:
1513 # pickup the patch msg
1513 # pickup the patch msg
1514 message = message.strip()
1514 message = message.strip()
1515 else:
1515 else:
1516 # launch the editor
1516 # launch the editor
1517 message = None
1517 message = None
1518 ui.debug(_('message:\n%s\n') % message)
1518 ui.debug(_('message:\n%s\n') % message)
1519
1519
1520 files = {}
1520 files = {}
1521 try:
1521 try:
1522 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1522 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1523 files=files)
1523 files=files)
1524 finally:
1524 finally:
1525 files = patch.updatedir(ui, repo, files, wlock=wlock)
1525 files = patch.updatedir(ui, repo, files, wlock=wlock)
1526 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1526 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1527 finally:
1527 finally:
1528 os.unlink(tmpname)
1528 os.unlink(tmpname)
1529
1529
1530 def incoming(ui, repo, source="default", **opts):
1530 def incoming(ui, repo, source="default", **opts):
1531 """show new changesets found in source
1531 """show new changesets found in source
1532
1532
1533 Show new changesets found in the specified path/URL or the default
1533 Show new changesets found in the specified path/URL or the default
1534 pull location. These are the changesets that would be pulled if a pull
1534 pull location. These are the changesets that would be pulled if a pull
1535 was requested.
1535 was requested.
1536
1536
1537 For remote repositories, using --bundle avoids downloading the changesets
1537 For remote repositories, using --bundle avoids downloading the changesets
1538 twice if the incoming is followed by a pull.
1538 twice if the incoming is followed by a pull.
1539
1539
1540 See pull for valid source format details.
1540 See pull for valid source format details.
1541 """
1541 """
1542 source = ui.expandpath(source)
1542 source = ui.expandpath(source)
1543 setremoteconfig(ui, opts)
1543 setremoteconfig(ui, opts)
1544
1544
1545 other = hg.repository(ui, source)
1545 other = hg.repository(ui, source)
1546 incoming = repo.findincoming(other, force=opts["force"])
1546 incoming = repo.findincoming(other, force=opts["force"])
1547 if not incoming:
1547 if not incoming:
1548 ui.status(_("no changes found\n"))
1548 ui.status(_("no changes found\n"))
1549 return
1549 return
1550
1550
1551 cleanup = None
1551 cleanup = None
1552 try:
1552 try:
1553 fname = opts["bundle"]
1553 fname = opts["bundle"]
1554 if fname or not other.local():
1554 if fname or not other.local():
1555 # create a bundle (uncompressed if other repo is not local)
1555 # create a bundle (uncompressed if other repo is not local)
1556 cg = other.changegroup(incoming, "incoming")
1556 cg = other.changegroup(incoming, "incoming")
1557 bundletype = other.local() and "HG10BZ" or "HG10UN"
1557 bundletype = other.local() and "HG10BZ" or "HG10UN"
1558 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1558 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1559 # keep written bundle?
1559 # keep written bundle?
1560 if opts["bundle"]:
1560 if opts["bundle"]:
1561 cleanup = None
1561 cleanup = None
1562 if not other.local():
1562 if not other.local():
1563 # use the created uncompressed bundlerepo
1563 # use the created uncompressed bundlerepo
1564 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1564 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1565
1565
1566 revs = None
1566 revs = None
1567 if opts['rev']:
1567 if opts['rev']:
1568 revs = [other.lookup(rev) for rev in opts['rev']]
1568 revs = [other.lookup(rev) for rev in opts['rev']]
1569 o = other.changelog.nodesbetween(incoming, revs)[0]
1569 o = other.changelog.nodesbetween(incoming, revs)[0]
1570 if opts['newest_first']:
1570 if opts['newest_first']:
1571 o.reverse()
1571 o.reverse()
1572 displayer = cmdutil.show_changeset(ui, other, opts)
1572 displayer = cmdutil.show_changeset(ui, other, opts)
1573 for n in o:
1573 for n in o:
1574 parents = [p for p in other.changelog.parents(n) if p != nullid]
1574 parents = [p for p in other.changelog.parents(n) if p != nullid]
1575 if opts['no_merges'] and len(parents) == 2:
1575 if opts['no_merges'] and len(parents) == 2:
1576 continue
1576 continue
1577 displayer.show(changenode=n)
1577 displayer.show(changenode=n)
1578 finally:
1578 finally:
1579 if hasattr(other, 'close'):
1579 if hasattr(other, 'close'):
1580 other.close()
1580 other.close()
1581 if cleanup:
1581 if cleanup:
1582 os.unlink(cleanup)
1582 os.unlink(cleanup)
1583
1583
1584 def init(ui, dest=".", **opts):
1584 def init(ui, dest=".", **opts):
1585 """create a new repository in the given directory
1585 """create a new repository in the given directory
1586
1586
1587 Initialize a new repository in the given directory. If the given
1587 Initialize a new repository in the given directory. If the given
1588 directory does not exist, it is created.
1588 directory does not exist, it is created.
1589
1589
1590 If no directory is given, the current directory is used.
1590 If no directory is given, the current directory is used.
1591
1591
1592 It is possible to specify an ssh:// URL as the destination.
1592 It is possible to specify an ssh:// URL as the destination.
1593 Look at the help text for the pull command for important details
1593 Look at the help text for the pull command for important details
1594 about ssh:// URLs.
1594 about ssh:// URLs.
1595 """
1595 """
1596 setremoteconfig(ui, opts)
1596 setremoteconfig(ui, opts)
1597 hg.repository(ui, dest, create=1)
1597 hg.repository(ui, dest, create=1)
1598
1598
1599 def locate(ui, repo, *pats, **opts):
1599 def locate(ui, repo, *pats, **opts):
1600 """locate files matching specific patterns
1600 """locate files matching specific patterns
1601
1601
1602 Print all files under Mercurial control whose names match the
1602 Print all files under Mercurial control whose names match the
1603 given patterns.
1603 given patterns.
1604
1604
1605 This command searches the current directory and its
1605 This command searches the entire repository by default. To search
1606 subdirectories. To search an entire repository, move to the root
1606 just the current directory and its subdirectories, use "--include .".
1607 of the repository.
1608
1607
1609 If no patterns are given to match, this command prints all file
1608 If no patterns are given to match, this command prints all file
1610 names.
1609 names.
1611
1610
1612 If you want to feed the output of this command into the "xargs"
1611 If you want to feed the output of this command into the "xargs"
1613 command, use the "-0" option to both this command and "xargs".
1612 command, use the "-0" option to both this command and "xargs".
1614 This will avoid the problem of "xargs" treating single filenames
1613 This will avoid the problem of "xargs" treating single filenames
1615 that contain white space as multiple filenames.
1614 that contain white space as multiple filenames.
1616 """
1615 """
1617 end = opts['print0'] and '\0' or '\n'
1616 end = opts['print0'] and '\0' or '\n'
1618 rev = opts['rev']
1617 rev = opts['rev']
1619 if rev:
1618 if rev:
1620 node = repo.lookup(rev)
1619 node = repo.lookup(rev)
1621 else:
1620 else:
1622 node = None
1621 node = None
1623
1622
1624 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1623 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1625 head='(?:.*/|)'):
1624 default='relglob'):
1626 if not node and repo.dirstate.state(abs) == '?':
1625 if not node and repo.dirstate.state(abs) == '?':
1627 continue
1626 continue
1628 if opts['fullpath']:
1627 if opts['fullpath']:
1629 ui.write(os.path.join(repo.root, abs), end)
1628 ui.write(os.path.join(repo.root, abs), end)
1630 else:
1629 else:
1631 ui.write(((pats and rel) or abs), end)
1630 ui.write(((pats and rel) or abs), end)
1632
1631
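Editorial aside: the locate docstring above recommends pairing "-0" with "xargs -0". The point is that NUL-separated output keeps filenames containing whitespace intact; a tiny sketch with invented names:

names = ['plain.txt', 'name with spaces.txt']
stream = '\0'.join(names) + '\0'          # shape of "-0"/print0 style output
print(stream.split('\0')[:-1])            # a NUL-aware consumer recovers the exact names
# -> ['plain.txt', 'name with spaces.txt']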
1633 def log(ui, repo, *pats, **opts):
1632 def log(ui, repo, *pats, **opts):
1634 """show revision history of entire repository or files
1633 """show revision history of entire repository or files
1635
1634
1636 Print the revision history of the specified files or the entire
1635 Print the revision history of the specified files or the entire
1637 project.
1636 project.
1638
1637
1639 File history is shown without following rename or copy history of
1638 File history is shown without following rename or copy history of
1640 files. Use -f/--follow with a file name to follow history across
1639 files. Use -f/--follow with a file name to follow history across
1641 renames and copies. --follow without a file name will only show
1640 renames and copies. --follow without a file name will only show
1642 ancestors or descendants of the starting revision. --follow-first
1641 ancestors or descendants of the starting revision. --follow-first
1643 only follows the first parent of merge revisions.
1642 only follows the first parent of merge revisions.
1644
1643
1645 If no revision range is specified, the default is tip:0 unless
1644 If no revision range is specified, the default is tip:0 unless
1646 --follow is set, in which case the working directory parent is
1645 --follow is set, in which case the working directory parent is
1647 used as the starting revision.
1646 used as the starting revision.
1648
1647
1649 By default this command outputs: changeset id and hash, tags,
1648 By default this command outputs: changeset id and hash, tags,
1650 non-trivial parents, user, date and time, and a summary for each
1649 non-trivial parents, user, date and time, and a summary for each
1651 commit. When the -v/--verbose switch is used, the list of changed
1650 commit. When the -v/--verbose switch is used, the list of changed
1652 files and full commit message is shown.
1651 files and full commit message is shown.
1653
1652
1654 NOTE: log -p may generate unexpected diff output for merge
1653 NOTE: log -p may generate unexpected diff output for merge
1655 changesets, as it will compare the merge changeset against its
1654 changesets, as it will compare the merge changeset against its
1656 first parent only. Also, the files: list will only reflect files
1655 first parent only. Also, the files: list will only reflect files
1657 that are different from BOTH parents.
1656 that are different from BOTH parents.
1658
1657
1659 """
1658 """
1660
1659
1661 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1660 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1662 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1661 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1663
1662
1664 if opts['limit']:
1663 if opts['limit']:
1665 try:
1664 try:
1666 limit = int(opts['limit'])
1665 limit = int(opts['limit'])
1667 except ValueError:
1666 except ValueError:
1668 raise util.Abort(_('limit must be a positive integer'))
1667 raise util.Abort(_('limit must be a positive integer'))
1669 if limit <= 0: raise util.Abort(_('limit must be positive'))
1668 if limit <= 0: raise util.Abort(_('limit must be positive'))
1670 else:
1669 else:
1671 limit = sys.maxint
1670 limit = sys.maxint
1672 count = 0
1671 count = 0
1673
1672
1674 if opts['copies'] and opts['rev']:
1673 if opts['copies'] and opts['rev']:
1675 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1674 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1676 else:
1675 else:
1677 endrev = repo.changelog.count()
1676 endrev = repo.changelog.count()
1678 rcache = {}
1677 rcache = {}
1679 ncache = {}
1678 ncache = {}
1680 dcache = []
1679 dcache = []
1681 def getrenamed(fn, rev, man):
1680 def getrenamed(fn, rev, man):
1682 '''looks up all renames for a file (up to endrev) the first
1681 '''looks up all renames for a file (up to endrev) the first
1683 time the file is given. It indexes on the changerev and only
1682 time the file is given. It indexes on the changerev and only
1684 parses the manifest if linkrev != changerev.
1683 parses the manifest if linkrev != changerev.
1685 Returns rename info for fn at changerev rev.'''
1684 Returns rename info for fn at changerev rev.'''
1686 if fn not in rcache:
1685 if fn not in rcache:
1687 rcache[fn] = {}
1686 rcache[fn] = {}
1688 ncache[fn] = {}
1687 ncache[fn] = {}
1689 fl = repo.file(fn)
1688 fl = repo.file(fn)
1690 for i in xrange(fl.count()):
1689 for i in xrange(fl.count()):
1691 node = fl.node(i)
1690 node = fl.node(i)
1692 lr = fl.linkrev(node)
1691 lr = fl.linkrev(node)
1693 renamed = fl.renamed(node)
1692 renamed = fl.renamed(node)
1694 rcache[fn][lr] = renamed
1693 rcache[fn][lr] = renamed
1695 if renamed:
1694 if renamed:
1696 ncache[fn][node] = renamed
1695 ncache[fn][node] = renamed
1697 if lr >= endrev:
1696 if lr >= endrev:
1698 break
1697 break
1699 if rev in rcache[fn]:
1698 if rev in rcache[fn]:
1700 return rcache[fn][rev]
1699 return rcache[fn][rev]
1701 mr = repo.manifest.rev(man)
1700 mr = repo.manifest.rev(man)
1702 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1701 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1703 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1702 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1704 if not dcache or dcache[0] != man:
1703 if not dcache or dcache[0] != man:
1705 dcache[:] = [man, repo.manifest.readdelta(man)]
1704 dcache[:] = [man, repo.manifest.readdelta(man)]
1706 if fn in dcache[1]:
1705 if fn in dcache[1]:
1707 return ncache[fn].get(dcache[1][fn])
1706 return ncache[fn].get(dcache[1][fn])
1708 return None
1707 return None
1709
1708
1710 df = False
1709 df = False
1711 if opts["date"]:
1710 if opts["date"]:
1712 df = util.matchdate(opts["date"])
1711 df = util.matchdate(opts["date"])
1713
1712
1714
1713
1715 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1714 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1716 for st, rev, fns in changeiter:
1715 for st, rev, fns in changeiter:
1717 if st == 'add':
1716 if st == 'add':
1718 changenode = repo.changelog.node(rev)
1717 changenode = repo.changelog.node(rev)
1719 parents = [p for p in repo.changelog.parentrevs(rev)
1718 parents = [p for p in repo.changelog.parentrevs(rev)
1720 if p != nullrev]
1719 if p != nullrev]
1721 if opts['no_merges'] and len(parents) == 2:
1720 if opts['no_merges'] and len(parents) == 2:
1722 continue
1721 continue
1723 if opts['only_merges'] and len(parents) != 2:
1722 if opts['only_merges'] and len(parents) != 2:
1724 continue
1723 continue
1725
1724
1726 if df:
1725 if df:
1727 changes = get(rev)
1726 changes = get(rev)
1728 if not df(changes[2][0]):
1727 if not df(changes[2][0]):
1729 continue
1728 continue
1730
1729
1731 if opts['keyword']:
1730 if opts['keyword']:
1732 changes = get(rev)
1731 changes = get(rev)
1733 miss = 0
1732 miss = 0
1734 for k in [kw.lower() for kw in opts['keyword']]:
1733 for k in [kw.lower() for kw in opts['keyword']]:
1735 if not (k in changes[1].lower() or
1734 if not (k in changes[1].lower() or
1736 k in changes[4].lower() or
1735 k in changes[4].lower() or
1737 k in " ".join(changes[3][:20]).lower()):
1736 k in " ".join(changes[3][:20]).lower()):
1738 miss = 1
1737 miss = 1
1739 break
1738 break
1740 if miss:
1739 if miss:
1741 continue
1740 continue
1742
1741
1743 copies = []
1742 copies = []
1744 if opts.get('copies') and rev:
1743 if opts.get('copies') and rev:
1745 mf = get(rev)[0]
1744 mf = get(rev)[0]
1746 for fn in get(rev)[3]:
1745 for fn in get(rev)[3]:
1747 rename = getrenamed(fn, rev, mf)
1746 rename = getrenamed(fn, rev, mf)
1748 if rename:
1747 if rename:
1749 copies.append((fn, rename[0]))
1748 copies.append((fn, rename[0]))
1750 displayer.show(rev, changenode, copies=copies)
1749 displayer.show(rev, changenode, copies=copies)
1751 elif st == 'iter':
1750 elif st == 'iter':
1752 if count == limit: break
1751 if count == limit: break
1753 if displayer.flush(rev):
1752 if displayer.flush(rev):
1754 count += 1
1753 count += 1
1755
1754
1756 def manifest(ui, repo, rev=None):
1755 def manifest(ui, repo, rev=None):
1757 """output the current or given revision of the project manifest
1756 """output the current or given revision of the project manifest
1758
1757
1759 Print a list of version controlled files for the given revision.
1758 Print a list of version controlled files for the given revision.
1760 If no revision is given, the parent of the working directory is used,
1759 If no revision is given, the parent of the working directory is used,
1761 or tip if no revision is checked out.
1760 or tip if no revision is checked out.
1762
1761
1763 The manifest is the list of files being version controlled.
1762 The manifest is the list of files being version controlled.
1765
1764
1766 With -v flag, print file permissions. With --debug flag, print
1765 With -v flag, print file permissions. With --debug flag, print
1767 file revision hashes.
1766 file revision hashes.
1768 """
1767 """
1769
1768
1770 m = repo.changectx(rev).manifest()
1769 m = repo.changectx(rev).manifest()
1771 files = m.keys()
1770 files = m.keys()
1772 files.sort()
1771 files.sort()
1773
1772
1774 for f in files:
1773 for f in files:
1775 if ui.debugflag:
1774 if ui.debugflag:
1776 ui.write("%40s " % hex(m[f]))
1775 ui.write("%40s " % hex(m[f]))
1777 if ui.verbose:
1776 if ui.verbose:
1778 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1777 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1779 ui.write("%s\n" % f)
1778 ui.write("%s\n" % f)
1780
1779
1781 def merge(ui, repo, node=None, force=None, branch=None):
1780 def merge(ui, repo, node=None, force=None, branch=None):
1782 """merge working directory with another revision
1781 """merge working directory with another revision
1783
1782
1784 Merge the contents of the current working directory and the
1783 Merge the contents of the current working directory and the
1785 requested revision. Files that changed between either parent are
1784 requested revision. Files that changed between either parent are
1786 marked as changed for the next commit and a commit must be
1785 marked as changed for the next commit and a commit must be
1787 performed before any further updates are allowed.
1786 performed before any further updates are allowed.
1788
1787
1789 If no revision is specified, the working directory's parent is a
1788 If no revision is specified, the working directory's parent is a
1790 head revision, and the repository contains exactly one other head,
1789 head revision, and the repository contains exactly one other head,
1791 the other head is merged with by default. Otherwise, an explicit
1790 the other head is merged with by default. Otherwise, an explicit
1792 revision to merge with must be provided.
1791 revision to merge with must be provided.
1793 """
1792 """
1794
1793
1795 if node or branch:
1794 if node or branch:
1796 node = _lookup(repo, node, branch)
1795 node = _lookup(repo, node, branch)
1797 else:
1796 else:
1798 heads = repo.heads()
1797 heads = repo.heads()
1799 if len(heads) > 2:
1798 if len(heads) > 2:
1800 raise util.Abort(_('repo has %d heads - '
1799 raise util.Abort(_('repo has %d heads - '
1801 'please merge with an explicit rev') %
1800 'please merge with an explicit rev') %
1802 len(heads))
1801 len(heads))
1803 if len(heads) == 1:
1802 if len(heads) == 1:
1804 raise util.Abort(_('there is nothing to merge - '
1803 raise util.Abort(_('there is nothing to merge - '
1805 'use "hg update" instead'))
1804 'use "hg update" instead'))
1806 parent = repo.dirstate.parents()[0]
1805 parent = repo.dirstate.parents()[0]
1807 if parent not in heads:
1806 if parent not in heads:
1808 raise util.Abort(_('working dir not at a head rev - '
1807 raise util.Abort(_('working dir not at a head rev - '
1809 'use "hg update" or merge with an explicit rev'))
1808 'use "hg update" or merge with an explicit rev'))
1810 node = parent == heads[0] and heads[-1] or heads[0]
1809 node = parent == heads[0] and heads[-1] or heads[0]
1811 return hg.merge(repo, node, force=force)
1810 return hg.merge(repo, node, force=force)
1812
1811
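Editorial aside: the head selection at the end of merge uses the old and/or idiom to pick "the other" head when exactly two exist. A short sketch with placeholder node names:

heads = ['nodeA', 'nodeB']                # assume exactly two heads
parent = 'nodeA'                          # working directory parent
node = parent == heads[0] and heads[-1] or heads[0]
print(node)                               # -> 'nodeB'; with parent == 'nodeB' it picks 'nodeA'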
1813 def outgoing(ui, repo, dest=None, **opts):
1812 def outgoing(ui, repo, dest=None, **opts):
1814 """show changesets not found in destination
1813 """show changesets not found in destination
1815
1814
1816 Show changesets not found in the specified destination repository or
1815 Show changesets not found in the specified destination repository or
1817 the default push location. These are the changesets that would be pushed
1816 the default push location. These are the changesets that would be pushed
1818 if a push was requested.
1817 if a push was requested.
1819
1818
1820 See pull for valid destination format details.
1819 See pull for valid destination format details.
1821 """
1820 """
1822 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1821 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1823 setremoteconfig(ui, opts)
1822 setremoteconfig(ui, opts)
1824 revs = None
1823 revs = None
1825 if opts['rev']:
1824 if opts['rev']:
1826 revs = [repo.lookup(rev) for rev in opts['rev']]
1825 revs = [repo.lookup(rev) for rev in opts['rev']]
1827
1826
1828 other = hg.repository(ui, dest)
1827 other = hg.repository(ui, dest)
1829 o = repo.findoutgoing(other, force=opts['force'])
1828 o = repo.findoutgoing(other, force=opts['force'])
1830 if not o:
1829 if not o:
1831 ui.status(_("no changes found\n"))
1830 ui.status(_("no changes found\n"))
1832 return
1831 return
1833 o = repo.changelog.nodesbetween(o, revs)[0]
1832 o = repo.changelog.nodesbetween(o, revs)[0]
1834 if opts['newest_first']:
1833 if opts['newest_first']:
1835 o.reverse()
1834 o.reverse()
1836 displayer = cmdutil.show_changeset(ui, repo, opts)
1835 displayer = cmdutil.show_changeset(ui, repo, opts)
1837 for n in o:
1836 for n in o:
1838 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1837 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1839 if opts['no_merges'] and len(parents) == 2:
1838 if opts['no_merges'] and len(parents) == 2:
1840 continue
1839 continue
1841 displayer.show(changenode=n)
1840 displayer.show(changenode=n)
1842
1841
1843 def parents(ui, repo, file_=None, **opts):
1842 def parents(ui, repo, file_=None, **opts):
1844 """show the parents of the working dir or revision
1843 """show the parents of the working dir or revision
1845
1844
1846 Print the working directory's parent revisions.
1845 Print the working directory's parent revisions.
1847 """
1846 """
1848 rev = opts.get('rev')
1847 rev = opts.get('rev')
1849 if rev:
1848 if rev:
1850 if file_:
1849 if file_:
1851 ctx = repo.filectx(file_, changeid=rev)
1850 ctx = repo.filectx(file_, changeid=rev)
1852 else:
1851 else:
1853 ctx = repo.changectx(rev)
1852 ctx = repo.changectx(rev)
1854 p = [cp.node() for cp in ctx.parents()]
1853 p = [cp.node() for cp in ctx.parents()]
1855 else:
1854 else:
1856 p = repo.dirstate.parents()
1855 p = repo.dirstate.parents()
1857
1856
1858 displayer = cmdutil.show_changeset(ui, repo, opts)
1857 displayer = cmdutil.show_changeset(ui, repo, opts)
1859 for n in p:
1858 for n in p:
1860 if n != nullid:
1859 if n != nullid:
1861 displayer.show(changenode=n)
1860 displayer.show(changenode=n)
1862
1861
1863 def paths(ui, repo, search=None):
1862 def paths(ui, repo, search=None):
1864 """show definition of symbolic path names
1863 """show definition of symbolic path names
1865
1864
1866 Show definition of symbolic path name NAME. If no name is given, show
1865 Show definition of symbolic path name NAME. If no name is given, show
1867 definition of available names.
1866 definition of available names.
1868
1867
1869 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1868 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1870 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1869 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1871 """
1870 """
1872 if search:
1871 if search:
1873 for name, path in ui.configitems("paths"):
1872 for name, path in ui.configitems("paths"):
1874 if name == search:
1873 if name == search:
1875 ui.write("%s\n" % path)
1874 ui.write("%s\n" % path)
1876 return
1875 return
1877 ui.warn(_("not found!\n"))
1876 ui.warn(_("not found!\n"))
1878 return 1
1877 return 1
1879 else:
1878 else:
1880 for name, path in ui.configitems("paths"):
1879 for name, path in ui.configitems("paths"):
1881 ui.write("%s = %s\n" % (name, path))
1880 ui.write("%s = %s\n" % (name, path))
1882
1881
def postincoming(ui, repo, modheads, optupdate):
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with the remotecmd
      option.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev']:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in opts['rev']]
        else:
            error = _("Other repository doesn't support revision lookup, "
                      "so a rev cannot be specified.")
            raise util.Abort(error)
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])

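# Usage sketches for pull (hypothetical URLs and revision names; long option
# names taken from the options this function reads):
#
#   $ hg pull                                    # pull from the 'default' path
#   $ hg pull --update http://hg.example.com/project
#   $ hg pull --rev mytag ssh://user@example.com//tmp/repository
#
# Note that --rev needs the remote side to support revision lookup, as
# checked against other.capabilities above.
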
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0

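# Usage sketches for push (hypothetical destinations; --force is only
# needed when deliberately creating new remote heads):
#
#   $ hg push                                  # push to 'default-push' or 'default'
#   $ hg push ssh://hg@example.com/project
#   $ hg push --rev tip --force ../other-clone
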
def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Low-level commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release; please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        files += open(opts['files']).read().splitlines()

    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])

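# Usage sketches for remove (hypothetical file names):
#
#   $ hg remove foo.c            # delete foo.c and schedule its removal
#   $ hg remove --after foo.c    # record a removal done outside of hg
#   $ hg remove -f modified.c    # force removal of a locally modified file
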
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs

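# Usage sketches for rename (hypothetical file names):
#
#   $ hg rename old.c new.c          # copy, then schedule removal of old.c
#   $ hg rename --after old.c new.c  # record a rename already done on disk
#   $ hg rename *.h include/         # with a directory as the destination
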
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    node = repo.changectx(opts['rev']).node()
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r

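# Usage sketches for revert (hypothetical files and revision numbers):
#
#   $ hg revert foo.c                 # back to the working dir's parent
#   $ hg revert -r 42 foo.c           # back to its contents in revision 42
#   $ hg revert --all --no-backup     # whole repo, without .orig backups
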
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    httpd = hgweb.server.create_server(parentui, repo)

    if ui.verbose:
        if httpd.port != 80:
            ui.status(_('listening at http://%s:%d/\n') %
                      (httpd.addr, httpd.port))
        else:
            ui.status(_('listening at http://%s/\n') % httpd.addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

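# Usage sketch for serve (hypothetical port and file names; -A/-E are the
# log-file options mentioned in the docstring, the rest use the long names
# of the options read above):
#
#   $ hg serve --port 8000 --daemon --pid-file hg.pid \
#              -A access.log -E error.log
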
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=all or opts['ignored'],
                               list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % util.pathto(cwd, f))
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (util.pathto(cwd, copied), end))

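# Usage sketches for status (output lines follow the "<code> <path>" format
# built above; file names and revisions are hypothetical):
#
#   $ hg status                  # e.g. prints "M mercurial/commands.py"
#   $ hg status --all            # also list clean (C) and ignored (I) files
#   $ hg status --rev 100:105    # changes between two revisions
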
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    message = opts['message']
    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])

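# Usage sketches for tag (hypothetical tag names and revision; the -r form
# is the one the deprecation warning above recommends):
#
#   $ hg tag v1.0                 # tag the working directory's parent
#   $ hg tag -r 42 v1.0           # tag revision 42
#   $ hg tag --local snapshot     # record only in .hg/localtags
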
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
        except KeyError:
            r = " ?:?"
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s\n" % (t, spaces, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    if os.path.exists(fname):
        f = open(fname, "rb")
    else:
        f = urllib.urlopen(fname)
    gen = changegroup.readbundle(f, fname)
    modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])

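# Usage sketch pairing bundle and unbundle (hypothetical file and repository
# names), matching the docstring above:
#
#   $ hg bundle changes.hg ../other-clone     # in the source repository
#   $ hg unbundle changes.hg                  # in the receiving repository
#   $ hg unbundle --update changes.hg         # and update afterwards
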
def update(ui, repo, node=None, clean=False, branch=None, date=None):
    """update working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    if date:
        if node:
            raise util.Abort(_("you can't specify a revision and a date"))
        node = cmdutil.finddate(ui, repo, date)

    node = _lookup(repo, node, branch)
    if clean:
        return hg.clean(repo, node)
    else:
        return hg.update(repo, node)

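# Usage sketches for update (hypothetical revision identifiers; long option
# names assumed from the function parameters above):
#
#   $ hg update                # update to the head of the current branch
#   $ hg update 42             # update to revision 42
#   $ hg update --clean tip    # discard local changes and update to tip
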
def _lookup(repo, node, branch=None):
    if branch:
        repo.ui.warn(_("the --branch option is deprecated, "
                       "please use 'hg branch' instead\n"))
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            repo.ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                cmdutil.show_changeset(repo.ui, repo, {}).show(changenode=x)
            raise util.Abort("")
        if len(found) == 1:
            node = found[0]
            repo.ui.warn(_("Using head %s for branch %s\n")
                         % (short(node), branch))
        else:
            raise util.Abort(_("branch %s not found") % branch)
    else:
        if node:
            node = repo.lookup(node)
        else:
            wc = repo.workingctx()
            node = repo.branchtags()[wc.branch()]
    return node

def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

# Command options and aliases are listed here, alphabetically

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

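# A sketch of how to read the table below: each key is a command name
# (a leading "^" appears to mark commands listed in the short help, and "|"
# separates aliases, e.g. "^commit|ci"); each value is a tuple of
# (command function, list of (short, long, default, help) option tuples,
#  usage synopsis).
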
2618 table = {
2617 table = {
2619 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2618 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2620 "addremove":
2619 "addremove":
2621 (addremove,
2620 (addremove,
2622 [('s', 'similarity', '',
2621 [('s', 'similarity', '',
2623 _('guess renamed files by similarity (0<=s<=100)')),
2622 _('guess renamed files by similarity (0<=s<=100)')),
2624 ] + walkopts + dryrunopts,
2623 ] + walkopts + dryrunopts,
2625 _('hg addremove [OPTION]... [FILE]...')),
2624 _('hg addremove [OPTION]... [FILE]...')),
2626 "^annotate":
2625 "^annotate":
2627 (annotate,
2626 (annotate,
2628 [('r', 'rev', '', _('annotate the specified revision')),
2627 [('r', 'rev', '', _('annotate the specified revision')),
2629 ('f', 'follow', None, _('follow file copies and renames')),
2628 ('f', 'follow', None, _('follow file copies and renames')),
2630 ('a', 'text', None, _('treat all files as text')),
2629 ('a', 'text', None, _('treat all files as text')),
2631 ('u', 'user', None, _('list the author')),
2630 ('u', 'user', None, _('list the author')),
2632 ('d', 'date', None, _('list the date')),
2631 ('d', 'date', None, _('list the date')),
2633 ('n', 'number', None, _('list the revision number (default)')),
2632 ('n', 'number', None, _('list the revision number (default)')),
2634 ('c', 'changeset', None, _('list the changeset')),
2633 ('c', 'changeset', None, _('list the changeset')),
2635 ] + walkopts,
2634 ] + walkopts,
2636 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2635 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2637 "archive":
2636 "archive":
2638 (archive,
2637 (archive,
2639 [('', 'no-decode', None, _('do not pass files through decoders')),
2638 [('', 'no-decode', None, _('do not pass files through decoders')),
2640 ('p', 'prefix', '', _('directory prefix for files in archive')),
2639 ('p', 'prefix', '', _('directory prefix for files in archive')),
2641 ('r', 'rev', '', _('revision to distribute')),
2640 ('r', 'rev', '', _('revision to distribute')),
2642 ('t', 'type', '', _('type of distribution to create')),
2641 ('t', 'type', '', _('type of distribution to create')),
2643 ] + walkopts,
2642 ] + walkopts,
2644 _('hg archive [OPTION]... DEST')),
2643 _('hg archive [OPTION]... DEST')),
2645 "backout":
2644 "backout":
2646 (backout,
2645 (backout,
2647 [('', 'merge', None,
2646 [('', 'merge', None,
2648 _('merge with old dirstate parent after backout')),
2647 _('merge with old dirstate parent after backout')),
2649 ('d', 'date', '', _('record datecode as commit date')),
2648 ('d', 'date', '', _('record datecode as commit date')),
2650 ('', 'parent', '', _('parent to choose when backing out merge')),
2649 ('', 'parent', '', _('parent to choose when backing out merge')),
2651 ('u', 'user', '', _('record user as committer')),
2650 ('u', 'user', '', _('record user as committer')),
2652 ] + walkopts + commitopts,
2651 ] + walkopts + commitopts,
2653 _('hg backout [OPTION]... REV')),
2652 _('hg backout [OPTION]... REV')),
2654 "branch": (branch, [], _('hg branch [NAME]')),
2653 "branch": (branch, [], _('hg branch [NAME]')),
2655 "branches": (branches, [], _('hg branches')),
2654 "branches": (branches, [], _('hg branches')),
2656 "bundle":
2655 "bundle":
2657 (bundle,
2656 (bundle,
2658 [('f', 'force', None,
2657 [('f', 'force', None,
2659 _('run even when remote repository is unrelated')),
2658 _('run even when remote repository is unrelated')),
2660 ('r', 'rev', [],
2659 ('r', 'rev', [],
2661 _('a changeset you would like to bundle')),
2660 _('a changeset you would like to bundle')),
2662 ('', 'base', [],
2661 ('', 'base', [],
2663 _('a base changeset to specify instead of a destination')),
2662 _('a base changeset to specify instead of a destination')),
2664 ] + remoteopts,
2663 ] + remoteopts,
2665 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2664 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2666 "cat":
2665 "cat":
2667 (cat,
2666 (cat,
2668 [('o', 'output', '', _('print output to file with formatted name')),
2667 [('o', 'output', '', _('print output to file with formatted name')),
2669 ('r', 'rev', '', _('print the given revision')),
2668 ('r', 'rev', '', _('print the given revision')),
2670 ] + walkopts,
2669 ] + walkopts,
2671 _('hg cat [OPTION]... FILE...')),
2670 _('hg cat [OPTION]... FILE...')),
2672 "^clone":
2671 "^clone":
2673 (clone,
2672 (clone,
2674 [('U', 'noupdate', None, _('do not update the new working directory')),
2673 [('U', 'noupdate', None, _('do not update the new working directory')),
2675 ('r', 'rev', [],
2674 ('r', 'rev', [],
2676 _('a changeset you would like to have after cloning')),
2675 _('a changeset you would like to have after cloning')),
2677 ('', 'pull', None, _('use pull protocol to copy metadata')),
2676 ('', 'pull', None, _('use pull protocol to copy metadata')),
2678 ('', 'uncompressed', None,
2677 ('', 'uncompressed', None,
2679 _('use uncompressed transfer (fast over LAN)')),
2678 _('use uncompressed transfer (fast over LAN)')),
2680 ] + remoteopts,
2679 ] + remoteopts,
2681 _('hg clone [OPTION]... SOURCE [DEST]')),
2680 _('hg clone [OPTION]... SOURCE [DEST]')),
2682 "^commit|ci":
2681 "^commit|ci":
2683 (commit,
2682 (commit,
2684 [('A', 'addremove', None,
2683 [('A', 'addremove', None,
2685 _('mark new/missing files as added/removed before committing')),
2684 _('mark new/missing files as added/removed before committing')),
2686 ('d', 'date', '', _('record datecode as commit date')),
2685 ('d', 'date', '', _('record datecode as commit date')),
2687 ('u', 'user', '', _('record user as committer')),
2686 ('u', 'user', '', _('record user as committer')),
2688 ] + walkopts + commitopts,
2687 ] + walkopts + commitopts,
2689 _('hg commit [OPTION]... [FILE]...')),
2688 _('hg commit [OPTION]... [FILE]...')),
2690 "copy|cp":
2689 "copy|cp":
2691 (copy,
2690 (copy,
2692 [('A', 'after', None, _('record a copy that has already occurred')),
2691 [('A', 'after', None, _('record a copy that has already occurred')),
2693 ('f', 'force', None,
2692 ('f', 'force', None,
2694 _('forcibly copy over an existing managed file')),
2693 _('forcibly copy over an existing managed file')),
2695 ] + walkopts + dryrunopts,
2694 ] + walkopts + dryrunopts,
2696 _('hg copy [OPTION]... [SOURCE]... DEST')),
2695 _('hg copy [OPTION]... [SOURCE]... DEST')),
2697 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2696 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2698 "debugcomplete":
2697 "debugcomplete":
2699 (debugcomplete,
2698 (debugcomplete,
2700 [('o', 'options', None, _('show the command options'))],
2699 [('o', 'options', None, _('show the command options'))],
2701 _('debugcomplete [-o] CMD')),
2700 _('debugcomplete [-o] CMD')),
2702 "debuginstall": (debuginstall, [], _('debuginstall')),
2701 "debuginstall": (debuginstall, [], _('debuginstall')),
2703 "debugrebuildstate":
2702 "debugrebuildstate":
2704 (debugrebuildstate,
2703 (debugrebuildstate,
2705 [('r', 'rev', '', _('revision to rebuild to'))],
2704 [('r', 'rev', '', _('revision to rebuild to'))],
2706 _('debugrebuildstate [-r REV] [REV]')),
2705 _('debugrebuildstate [-r REV] [REV]')),
2707 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2706 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2708 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2707 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2709 "debugstate": (debugstate, [], _('debugstate')),
2708 "debugstate": (debugstate, [], _('debugstate')),
2710 "debugdate":
2709 "debugdate":
2711 (debugdate,
2710 (debugdate,
2712 [('e', 'extended', None, _('try extended date formats'))],
2711 [('e', 'extended', None, _('try extended date formats'))],
2713 _('debugdate [-e] DATE [RANGE]')),
2712 _('debugdate [-e] DATE [RANGE]')),
2714 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2713 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2715 "debugindex": (debugindex, [], _('debugindex FILE')),
2714 "debugindex": (debugindex, [], _('debugindex FILE')),
2716 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2715 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2717 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2716 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2718 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2717 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2719 "^diff":
2718 "^diff":
2720 (diff,
2719 (diff,
2721 [('r', 'rev', [], _('revision')),
2720 [('r', 'rev', [], _('revision')),
2722 ('a', 'text', None, _('treat all files as text')),
2721 ('a', 'text', None, _('treat all files as text')),
2723 ('p', 'show-function', None,
2722 ('p', 'show-function', None,
2724 _('show which function each change is in')),
2723 _('show which function each change is in')),
2725 ('g', 'git', None, _('use git extended diff format')),
2724 ('g', 'git', None, _('use git extended diff format')),
2726 ('', 'nodates', None, _("don't include dates in diff headers")),
2725 ('', 'nodates', None, _("don't include dates in diff headers")),
2727 ('w', 'ignore-all-space', None,
2726 ('w', 'ignore-all-space', None,
2728 _('ignore white space when comparing lines')),
2727 _('ignore white space when comparing lines')),
2729 ('b', 'ignore-space-change', None,
2728 ('b', 'ignore-space-change', None,
2730 _('ignore changes in the amount of white space')),
2729 _('ignore changes in the amount of white space')),
2731 ('B', 'ignore-blank-lines', None,
2730 ('B', 'ignore-blank-lines', None,
2732 _('ignore changes whose lines are all blank')),
2731 _('ignore changes whose lines are all blank')),
2733 ] + walkopts,
2732 ] + walkopts,
2734 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2733 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2735 "^export":
2734 "^export":
2736 (export,
2735 (export,
2737 [('o', 'output', '', _('print output to file with formatted name')),
2736 [('o', 'output', '', _('print output to file with formatted name')),
2738 ('a', 'text', None, _('treat all files as text')),
2737 ('a', 'text', None, _('treat all files as text')),
2739 ('g', 'git', None, _('use git extended diff format')),
2738 ('g', 'git', None, _('use git extended diff format')),
2740 ('', 'nodates', None, _("don't include dates in diff headers")),
2739 ('', 'nodates', None, _("don't include dates in diff headers")),
2741 ('', 'switch-parent', None, _('diff against the second parent'))],
2740 ('', 'switch-parent', None, _('diff against the second parent'))],
2742 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2741 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2743 "grep":
2742 "grep":
2744 (grep,
2743 (grep,
2745 [('0', 'print0', None, _('end fields with NUL')),
2744 [('0', 'print0', None, _('end fields with NUL')),
2746 ('', 'all', None, _('print all revisions that match')),
2745 ('', 'all', None, _('print all revisions that match')),
2747 ('f', 'follow', None,
2746 ('f', 'follow', None,
2748 _('follow changeset history, or file history across copies and renames')),
2747 _('follow changeset history, or file history across copies and renames')),
2749 ('i', 'ignore-case', None, _('ignore case when matching')),
2748 ('i', 'ignore-case', None, _('ignore case when matching')),
2750 ('l', 'files-with-matches', None,
2749 ('l', 'files-with-matches', None,
2751 _('print only filenames and revs that match')),
2750 _('print only filenames and revs that match')),
2752 ('n', 'line-number', None, _('print matching line numbers')),
2751 ('n', 'line-number', None, _('print matching line numbers')),
2753 ('r', 'rev', [], _('search in given revision range')),
2752 ('r', 'rev', [], _('search in given revision range')),
2754 ('u', 'user', None, _('print user who committed change')),
2753 ('u', 'user', None, _('print user who committed change')),
2755 ] + walkopts,
2754 ] + walkopts,
2756 _('hg grep [OPTION]... PATTERN [FILE]...')),
2755 _('hg grep [OPTION]... PATTERN [FILE]...')),
2757 "heads":
2756 "heads":
2758 (heads,
2757 (heads,
2759 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2758 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2760 ('', 'style', '', _('display using template map file')),
2759 ('', 'style', '', _('display using template map file')),
2761 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2760 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2762 ('', 'template', '', _('display with template'))],
2761 ('', 'template', '', _('display with template'))],
2763 _('hg heads [-r REV]')),
2762 _('hg heads [-r REV]')),
2764 "help": (help_, [], _('hg help [COMMAND]')),
2763 "help": (help_, [], _('hg help [COMMAND]')),
2765 "identify|id": (identify, [], _('hg identify')),
2764 "identify|id": (identify, [], _('hg identify')),
2766 "import|patch":
2765 "import|patch":
2767 (import_,
2766 (import_,
2768 [('p', 'strip', 1,
2767 [('p', 'strip', 1,
2769 _('directory strip option for patch. This has the same\n'
2768 _('directory strip option for patch. This has the same\n'
2770 'meaning as the corresponding patch option')),
2769 'meaning as the corresponding patch option')),
2771 ('b', 'base', '', _('base path (DEPRECATED)')),
2770 ('b', 'base', '', _('base path (DEPRECATED)')),
2772 ('f', 'force', None,
2771 ('f', 'force', None,
2773 _('skip check for outstanding uncommitted changes'))] + commitopts,
2772 _('skip check for outstanding uncommitted changes'))] + commitopts,
2774 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2773 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2775 "incoming|in": (incoming,
2774 "incoming|in": (incoming,
2776 [('M', 'no-merges', None, _('do not show merges')),
2775 [('M', 'no-merges', None, _('do not show merges')),
2777 ('f', 'force', None,
2776 ('f', 'force', None,
2778 _('run even when remote repository is unrelated')),
2777 _('run even when remote repository is unrelated')),
2779 ('', 'style', '', _('display using template map file')),
2778 ('', 'style', '', _('display using template map file')),
2780 ('n', 'newest-first', None, _('show newest record first')),
2779 ('n', 'newest-first', None, _('show newest record first')),
2781 ('', 'bundle', '', _('file to store the bundles into')),
2780 ('', 'bundle', '', _('file to store the bundles into')),
2782 ('p', 'patch', None, _('show patch')),
2781 ('p', 'patch', None, _('show patch')),
2783 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2782 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2784 ('', 'template', '', _('display with template')),
2783 ('', 'template', '', _('display with template')),
2785 ] + remoteopts,
2784 ] + remoteopts,
2786 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2785 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2787 ' [--bundle FILENAME] [SOURCE]')),
2786 ' [--bundle FILENAME] [SOURCE]')),
2788 "^init":
2787 "^init":
2789 (init,
2788 (init,
2790 remoteopts,
2789 remoteopts,
2791 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2790 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2792 "locate":
2791 "locate":
2793 (locate,
2792 (locate,
2794 [('r', 'rev', '', _('search the repository as it stood at rev')),
2793 [('r', 'rev', '', _('search the repository as it stood at rev')),
2795 ('0', 'print0', None,
2794 ('0', 'print0', None,
2796 _('end filenames with NUL, for use with xargs')),
2795 _('end filenames with NUL, for use with xargs')),
2797 ('f', 'fullpath', None,
2796 ('f', 'fullpath', None,
2798 _('print complete paths from the filesystem root')),
2797 _('print complete paths from the filesystem root')),
2799 ] + walkopts,
2798 ] + walkopts,
2800 _('hg locate [OPTION]... [PATTERN]...')),
2799 _('hg locate [OPTION]... [PATTERN]...')),
2801 "^log|history":
2800 "^log|history":
2802 (log,
2801 (log,
2803 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2802 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2804 ('f', 'follow', None,
2803 ('f', 'follow', None,
2805 _('follow changeset history, or file history across copies and renames')),
2804 _('follow changeset history, or file history across copies and renames')),
2806 ('', 'follow-first', None,
2805 ('', 'follow-first', None,
2807 _('only follow the first parent of merge changesets')),
2806 _('only follow the first parent of merge changesets')),
2808 ('d', 'date', '', _('show revs matching date spec')),
2807 ('d', 'date', '', _('show revs matching date spec')),
2809 ('C', 'copies', None, _('show copied files')),
2808 ('C', 'copies', None, _('show copied files')),
2810 ('k', 'keyword', [], _('search for a keyword')),
2809 ('k', 'keyword', [], _('search for a keyword')),
2811 ('l', 'limit', '', _('limit number of changes displayed')),
2810 ('l', 'limit', '', _('limit number of changes displayed')),
2812 ('r', 'rev', [], _('show the specified revision or range')),
2811 ('r', 'rev', [], _('show the specified revision or range')),
2813 ('', 'removed', None, _('include revs where files were removed')),
2812 ('', 'removed', None, _('include revs where files were removed')),
2814 ('M', 'no-merges', None, _('do not show merges')),
2813 ('M', 'no-merges', None, _('do not show merges')),
2815 ('', 'style', '', _('display using template map file')),
2814 ('', 'style', '', _('display using template map file')),
2816 ('m', 'only-merges', None, _('show only merges')),
2815 ('m', 'only-merges', None, _('show only merges')),
2817 ('p', 'patch', None, _('show patch')),
2816 ('p', 'patch', None, _('show patch')),
2818 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2817 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2819 ('', 'template', '', _('display with template')),
2818 ('', 'template', '', _('display with template')),
2820 ] + walkopts,
2819 ] + walkopts,
2821 _('hg log [OPTION]... [FILE]')),
2820 _('hg log [OPTION]... [FILE]')),
2822 "manifest": (manifest, [], _('hg manifest [REV]')),
2821 "manifest": (manifest, [], _('hg manifest [REV]')),
2823 "^merge":
2822 "^merge":
2824 (merge,
2823 (merge,
2825 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2824 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2826 ('f', 'force', None, _('force a merge with outstanding changes'))],
2825 ('f', 'force', None, _('force a merge with outstanding changes'))],
2827 _('hg merge [-f] [REV]')),
2826 _('hg merge [-f] [REV]')),
2828 "outgoing|out": (outgoing,
2827 "outgoing|out": (outgoing,
2829 [('M', 'no-merges', None, _('do not show merges')),
2828 [('M', 'no-merges', None, _('do not show merges')),
2830 ('f', 'force', None,
2829 ('f', 'force', None,
2831 _('run even when remote repository is unrelated')),
2830 _('run even when remote repository is unrelated')),
2832 ('p', 'patch', None, _('show patch')),
2831 ('p', 'patch', None, _('show patch')),
2833 ('', 'style', '', _('display using template map file')),
2832 ('', 'style', '', _('display using template map file')),
2834 ('r', 'rev', [], _('a specific revision you would like to push')),
2833 ('r', 'rev', [], _('a specific revision you would like to push')),
2835 ('n', 'newest-first', None, _('show newest record first')),
2834 ('n', 'newest-first', None, _('show newest record first')),
2836 ('', 'template', '', _('display with template')),
2835 ('', 'template', '', _('display with template')),
2837 ] + remoteopts,
2836 ] + remoteopts,
2838 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2837 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2839 "^parents":
2838 "^parents":
2840 (parents,
2839 (parents,
2841 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2840 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2842 ('r', 'rev', '', _('show parents from the specified rev')),
2841 ('r', 'rev', '', _('show parents from the specified rev')),
2843 ('', 'style', '', _('display using template map file')),
2842 ('', 'style', '', _('display using template map file')),
2844 ('', 'template', '', _('display with template'))],
2843 ('', 'template', '', _('display with template'))],
2845 _('hg parents [-r REV] [FILE]')),
2844 _('hg parents [-r REV] [FILE]')),
2846 "paths": (paths, [], _('hg paths [NAME]')),
2845 "paths": (paths, [], _('hg paths [NAME]')),
2847 "^pull":
2846 "^pull":
2848 (pull,
2847 (pull,
2849 [('u', 'update', None,
2848 [('u', 'update', None,
2850 _('update to new tip if changesets were pulled')),
2849 _('update to new tip if changesets were pulled')),
2851 ('f', 'force', None,
2850 ('f', 'force', None,
2852 _('run even when remote repository is unrelated')),
2851 _('run even when remote repository is unrelated')),
2853 ('r', 'rev', [],
2852 ('r', 'rev', [],
2854 _('a specific revision up to which you would like to pull')),
2853 _('a specific revision up to which you would like to pull')),
2855 ] + remoteopts,
2854 ] + remoteopts,
2856 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2855 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2857 "^push":
2856 "^push":
2858 (push,
2857 (push,
2859 [('f', 'force', None, _('force push')),
2858 [('f', 'force', None, _('force push')),
2860 ('r', 'rev', [], _('a specific revision you would like to push')),
2859 ('r', 'rev', [], _('a specific revision you would like to push')),
2861 ] + remoteopts,
2860 ] + remoteopts,
2862 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2861 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2863 "debugrawcommit|rawcommit":
2862 "debugrawcommit|rawcommit":
2864 (rawcommit,
2863 (rawcommit,
2865 [('p', 'parent', [], _('parent')),
2864 [('p', 'parent', [], _('parent')),
2866 ('d', 'date', '', _('date code')),
2865 ('d', 'date', '', _('date code')),
2867 ('u', 'user', '', _('user')),
2866 ('u', 'user', '', _('user')),
2868 ('F', 'files', '', _('file list'))
2867 ('F', 'files', '', _('file list'))
2869 ] + commitopts,
2868 ] + commitopts,
2870 _('hg debugrawcommit [OPTION]... [FILE]...')),
2869 _('hg debugrawcommit [OPTION]... [FILE]...')),
2871 "recover": (recover, [], _('hg recover')),
2870 "recover": (recover, [], _('hg recover')),
2872 "^remove|rm":
2871 "^remove|rm":
2873 (remove,
2872 (remove,
2874 [('A', 'after', None, _('record remove that has already occurred')),
2873 [('A', 'after', None, _('record remove that has already occurred')),
2875 ('f', 'force', None, _('remove file even if modified')),
2874 ('f', 'force', None, _('remove file even if modified')),
2876 ] + walkopts,
2875 ] + walkopts,
2877 _('hg remove [OPTION]... FILE...')),
2876 _('hg remove [OPTION]... FILE...')),
2878 "rename|mv":
2877 "rename|mv":
2879 (rename,
2878 (rename,
2880 [('A', 'after', None, _('record a rename that has already occurred')),
2879 [('A', 'after', None, _('record a rename that has already occurred')),
2881 ('f', 'force', None,
2880 ('f', 'force', None,
2882 _('forcibly copy over an existing managed file')),
2881 _('forcibly copy over an existing managed file')),
2883 ] + walkopts + dryrunopts,
2882 ] + walkopts + dryrunopts,
2884 _('hg rename [OPTION]... SOURCE... DEST')),
2883 _('hg rename [OPTION]... SOURCE... DEST')),
2885 "^revert":
2884 "^revert":
2886 (revert,
2885 (revert,
2887 [('a', 'all', None, _('revert all changes when no arguments given')),
2886 [('a', 'all', None, _('revert all changes when no arguments given')),
2888 ('d', 'date', '', _('tipmost revision matching date')),
2887 ('d', 'date', '', _('tipmost revision matching date')),
2889 ('r', 'rev', '', _('revision to revert to')),
2888 ('r', 'rev', '', _('revision to revert to')),
2890 ('', 'no-backup', None, _('do not save backup copies of files')),
2889 ('', 'no-backup', None, _('do not save backup copies of files')),
2891 ] + walkopts + dryrunopts,
2890 ] + walkopts + dryrunopts,
2892 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2891 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2893 "rollback": (rollback, [], _('hg rollback')),
2892 "rollback": (rollback, [], _('hg rollback')),
2894 "root": (root, [], _('hg root')),
2893 "root": (root, [], _('hg root')),
2895 "showconfig|debugconfig":
2894 "showconfig|debugconfig":
2896 (showconfig,
2895 (showconfig,
2897 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2896 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2898 _('showconfig [-u] [NAME]...')),
2897 _('showconfig [-u] [NAME]...')),
2899 "^serve":
2898 "^serve":
2900 (serve,
2899 (serve,
2901 [('A', 'accesslog', '', _('name of access log file to write to')),
2900 [('A', 'accesslog', '', _('name of access log file to write to')),
2902 ('d', 'daemon', None, _('run server in background')),
2901 ('d', 'daemon', None, _('run server in background')),
2903 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2902 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2904 ('E', 'errorlog', '', _('name of error log file to write to')),
2903 ('E', 'errorlog', '', _('name of error log file to write to')),
2905 ('p', 'port', 0, _('port to use (default: 8000)')),
2904 ('p', 'port', 0, _('port to use (default: 8000)')),
2906 ('a', 'address', '', _('address to use')),
2905 ('a', 'address', '', _('address to use')),
2907 ('n', 'name', '',
2906 ('n', 'name', '',
2908 _('name to show in web pages (default: working dir)')),
2907 _('name to show in web pages (default: working dir)')),
2909 ('', 'webdir-conf', '', _('name of the webdir config file'
2908 ('', 'webdir-conf', '', _('name of the webdir config file'
2910 ' (serve more than one repo)')),
2909 ' (serve more than one repo)')),
2911 ('', 'pid-file', '', _('name of file to write process ID to')),
2910 ('', 'pid-file', '', _('name of file to write process ID to')),
2912 ('', 'stdio', None, _('for remote clients')),
2911 ('', 'stdio', None, _('for remote clients')),
2913 ('t', 'templates', '', _('web templates to use')),
2912 ('t', 'templates', '', _('web templates to use')),
2914 ('', 'style', '', _('template style to use')),
2913 ('', 'style', '', _('template style to use')),
2915 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2914 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2916 _('hg serve [OPTION]...')),
2915 _('hg serve [OPTION]...')),
2917 "^status|st":
2916 "^status|st":
2918 (status,
2917 (status,
2919 [('A', 'all', None, _('show status of all files')),
2918 [('A', 'all', None, _('show status of all files')),
2920 ('m', 'modified', None, _('show only modified files')),
2919 ('m', 'modified', None, _('show only modified files')),
2921 ('a', 'added', None, _('show only added files')),
2920 ('a', 'added', None, _('show only added files')),
2922 ('r', 'removed', None, _('show only removed files')),
2921 ('r', 'removed', None, _('show only removed files')),
2923 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2922 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2924 ('c', 'clean', None, _('show only files without changes')),
2923 ('c', 'clean', None, _('show only files without changes')),
2925 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2924 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2926 ('i', 'ignored', None, _('show only ignored files')),
2925 ('i', 'ignored', None, _('show only ignored files')),
2927 ('n', 'no-status', None, _('hide status prefix')),
2926 ('n', 'no-status', None, _('hide status prefix')),
2928 ('C', 'copies', None, _('show source of copied files')),
2927 ('C', 'copies', None, _('show source of copied files')),
2929 ('0', 'print0', None,
2928 ('0', 'print0', None,
2930 _('end filenames with NUL, for use with xargs')),
2929 _('end filenames with NUL, for use with xargs')),
2931 ('', 'rev', [], _('show difference from revision')),
2930 ('', 'rev', [], _('show difference from revision')),
2932 ] + walkopts,
2931 ] + walkopts,
2933 _('hg status [OPTION]... [FILE]...')),
2932 _('hg status [OPTION]... [FILE]...')),
2934 "tag":
2933 "tag":
2935 (tag,
2934 (tag,
2936 [('l', 'local', None, _('make the tag local')),
2935 [('l', 'local', None, _('make the tag local')),
2937 ('m', 'message', '', _('message for tag commit log entry')),
2936 ('m', 'message', '', _('message for tag commit log entry')),
2938 ('d', 'date', '', _('record datecode as commit date')),
2937 ('d', 'date', '', _('record datecode as commit date')),
2939 ('u', 'user', '', _('record user as committer')),
2938 ('u', 'user', '', _('record user as committer')),
2940 ('r', 'rev', '', _('revision to tag'))],
2939 ('r', 'rev', '', _('revision to tag'))],
2941 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2940 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2942 "tags": (tags, [], _('hg tags')),
2941 "tags": (tags, [], _('hg tags')),
2943 "tip":
2942 "tip":
2944 (tip,
2943 (tip,
2945 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2944 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2946 ('', 'style', '', _('display using template map file')),
2945 ('', 'style', '', _('display using template map file')),
2947 ('p', 'patch', None, _('show patch')),
2946 ('p', 'patch', None, _('show patch')),
2948 ('', 'template', '', _('display with template'))],
2947 ('', 'template', '', _('display with template'))],
2949 _('hg tip [-p]')),
2948 _('hg tip [-p]')),
2950 "unbundle":
2949 "unbundle":
2951 (unbundle,
2950 (unbundle,
2952 [('u', 'update', None,
2951 [('u', 'update', None,
2953 _('update to new tip if changesets were unbundled'))],
2952 _('update to new tip if changesets were unbundled'))],
2954 _('hg unbundle [-u] FILE')),
2953 _('hg unbundle [-u] FILE')),
2955 "^update|up|checkout|co":
2954 "^update|up|checkout|co":
2956 (update,
2955 (update,
2957 [('b', 'branch', '',
2956 [('b', 'branch', '',
2958 _('checkout the head of a specific branch (DEPRECATED)')),
2957 _('checkout the head of a specific branch (DEPRECATED)')),
2959 ('C', 'clean', None, _('overwrite locally modified files')),
2958 ('C', 'clean', None, _('overwrite locally modified files')),
2960 ('d', 'date', '', _('tipmost revision matching date'))],
2959 ('d', 'date', '', _('tipmost revision matching date'))],
2961 _('hg update [-C] [-d DATE] [REV]')),
2960 _('hg update [-C] [-d DATE] [REV]')),
2962 "verify": (verify, [], _('hg verify')),
2961 "verify": (verify, [], _('hg verify')),
2963 "version": (version_, [], _('hg version')),
2962 "version": (version_, [], _('hg version')),
2964 }
2963 }
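Each entry in the table above maps a command key (a leading "^" marks it for the short help list, "|" separates aliases) to a tuple of handler function, option list and synopsis string; each option is itself a (short, long, default, help) tuple. A minimal standalone sketch of that shape, using a hypothetical do_hello handler and option purely for illustration:

# Sketch of the command-table entry shape used above; "hello" and do_hello
# are hypothetical and exist only to show the structure.
def do_hello(ui, repo, *pats, **opts):
    # handlers receive the ui, the repository, positional args and the
    # keyword options built from the option list below
    ui.status("hello from %s\n" % repo.root)

table_sketch = {
    # "^" = show in the short help list; "|" separates command aliases
    "^hello|hi":
        (do_hello,
         [('g', 'greeting', '', 'greeting to use')],  # (short, long, default, help)
         'hg hello [-g TEXT]'),
}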
2965
2964
2966 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2965 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2967 " debugindex debugindexdot debugdate debuginstall")
2966 " debugindex debugindexdot debugdate debuginstall")
2968 optionalrepo = ("paths serve showconfig")
2967 optionalrepo = ("paths serve showconfig")
2969
2968
2970 def findpossible(ui, cmd):
2969 def findpossible(ui, cmd):
2971 """
2970 """
2972 Return cmd -> (aliases, command table entry)
2971 Return cmd -> (aliases, command table entry)
2973 for each matching command.
2972 for each matching command.
2974 Return debug commands (or their aliases) only if no normal command matches.
2973 Return debug commands (or their aliases) only if no normal command matches.
2975 """
2974 """
2976 choice = {}
2975 choice = {}
2977 debugchoice = {}
2976 debugchoice = {}
2978 for e in table.keys():
2977 for e in table.keys():
2979 aliases = e.lstrip("^").split("|")
2978 aliases = e.lstrip("^").split("|")
2980 found = None
2979 found = None
2981 if cmd in aliases:
2980 if cmd in aliases:
2982 found = cmd
2981 found = cmd
2983 elif not ui.config("ui", "strict"):
2982 elif not ui.config("ui", "strict"):
2984 for a in aliases:
2983 for a in aliases:
2985 if a.startswith(cmd):
2984 if a.startswith(cmd):
2986 found = a
2985 found = a
2987 break
2986 break
2988 if found is not None:
2987 if found is not None:
2989 if aliases[0].startswith("debug") or found.startswith("debug"):
2988 if aliases[0].startswith("debug") or found.startswith("debug"):
2990 debugchoice[found] = (aliases, table[e])
2989 debugchoice[found] = (aliases, table[e])
2991 else:
2990 else:
2992 choice[found] = (aliases, table[e])
2991 choice[found] = (aliases, table[e])
2993
2992
2994 if not choice and debugchoice:
2993 if not choice and debugchoice:
2995 choice = debugchoice
2994 choice = debugchoice
2996
2995
2997 return choice
2996 return choice
2998
2997
2999 def findcmd(ui, cmd):
2998 def findcmd(ui, cmd):
3000 """Return (aliases, command table entry) for command string."""
2999 """Return (aliases, command table entry) for command string."""
3001 choice = findpossible(ui, cmd)
3000 choice = findpossible(ui, cmd)
3002
3001
3003 if choice.has_key(cmd):
3002 if choice.has_key(cmd):
3004 return choice[cmd]
3003 return choice[cmd]
3005
3004
3006 if len(choice) > 1:
3005 if len(choice) > 1:
3007 clist = choice.keys()
3006 clist = choice.keys()
3008 clist.sort()
3007 clist.sort()
3009 raise AmbiguousCommand(cmd, clist)
3008 raise AmbiguousCommand(cmd, clist)
3010
3009
3011 if choice:
3010 if choice:
3012 return choice.values()[0]
3011 return choice.values()[0]
3013
3012
3014 raise UnknownCommand(cmd)
3013 raise UnknownCommand(cmd)
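findpossible and findcmd above implement Mercurial's command abbreviation: an exact alias match wins, otherwise any alias prefix is accepted (unless ui.strict is set), debug commands are only offered when nothing else matches, and several surviving candidates raise AmbiguousCommand. A self-contained sketch of the core prefix-resolution idea, with a simplified table and plain exceptions standing in for the real ones:

# Simplified stand-in for prefix-based command resolution; the real code also
# honours ui.config("ui", "strict") and keeps debug commands in a second pool.
def resolve(cmd, table):
    """Return the canonical name for cmd, which may be a unique prefix."""
    matches = set()
    for key in table:
        aliases = key.lstrip("^").split("|")
        if cmd in aliases:
            return aliases[0]              # exact alias match wins outright
        for a in aliases:
            if a.startswith(cmd):
                matches.add(aliases[0])    # remember the canonical name
    if len(matches) == 1:
        return matches.pop()
    if matches:
        raise ValueError("ambiguous command: %s" % ", ".join(sorted(matches)))
    raise KeyError(cmd)

# resolve("st", {"^status|st": None, "^serve": None}) -> "status"
# resolve("s", ...) with both entries raises ValueError (ambiguous)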
3015
3014
3016 def catchterm(*args):
3015 def catchterm(*args):
3017 raise util.SignalInterrupt
3016 raise util.SignalInterrupt
3018
3017
3019 def run():
3018 def run():
3020 sys.exit(dispatch(sys.argv[1:]))
3019 sys.exit(dispatch(sys.argv[1:]))
3021
3020
3022 class ParseError(Exception):
3021 class ParseError(Exception):
3023 """Exception raised on errors in parsing the command line."""
3022 """Exception raised on errors in parsing the command line."""
3024
3023
3025 def parse(ui, args):
3024 def parse(ui, args):
3026 options = {}
3025 options = {}
3027 cmdoptions = {}
3026 cmdoptions = {}
3028
3027
3029 try:
3028 try:
3030 args = fancyopts.fancyopts(args, globalopts, options)
3029 args = fancyopts.fancyopts(args, globalopts, options)
3031 except fancyopts.getopt.GetoptError, inst:
3030 except fancyopts.getopt.GetoptError, inst:
3032 raise ParseError(None, inst)
3031 raise ParseError(None, inst)
3033
3032
3034 if args:
3033 if args:
3035 cmd, args = args[0], args[1:]
3034 cmd, args = args[0], args[1:]
3036 aliases, i = findcmd(ui, cmd)
3035 aliases, i = findcmd(ui, cmd)
3037 cmd = aliases[0]
3036 cmd = aliases[0]
3038 defaults = ui.config("defaults", cmd)
3037 defaults = ui.config("defaults", cmd)
3039 if defaults:
3038 if defaults:
3040 args = shlex.split(defaults) + args
3039 args = shlex.split(defaults) + args
3041 c = list(i[1])
3040 c = list(i[1])
3042 else:
3041 else:
3043 cmd = None
3042 cmd = None
3044 c = []
3043 c = []
3045
3044
3046 # combine global options into local
3045 # combine global options into local
3047 for o in globalopts:
3046 for o in globalopts:
3048 c.append((o[0], o[1], options[o[1]], o[3]))
3047 c.append((o[0], o[1], options[o[1]], o[3]))
3049
3048
3050 try:
3049 try:
3051 args = fancyopts.fancyopts(args, c, cmdoptions)
3050 args = fancyopts.fancyopts(args, c, cmdoptions)
3052 except fancyopts.getopt.GetoptError, inst:
3051 except fancyopts.getopt.GetoptError, inst:
3053 raise ParseError(cmd, inst)
3052 raise ParseError(cmd, inst)
3054
3053
3055 # separate global options back out
3054 # separate global options back out
3056 for o in globalopts:
3055 for o in globalopts:
3057 n = o[1]
3056 n = o[1]
3058 options[n] = cmdoptions[n]
3057 options[n] = cmdoptions[n]
3059 del cmdoptions[n]
3058 del cmdoptions[n]
3060
3059
3061 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3060 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3062
3061
3063 external = {}
3062 external = {}
3064
3063
3065 def findext(name):
3064 def findext(name):
3066 '''return module with given extension name'''
3065 '''return module with given extension name'''
3067 try:
3066 try:
3068 return sys.modules[external[name]]
3067 return sys.modules[external[name]]
3069 except KeyError:
3068 except KeyError:
3070 for k, v in external.iteritems():
3069 for k, v in external.iteritems():
3071 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3070 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3072 return sys.modules[v]
3071 return sys.modules[v]
3073 raise KeyError(name)
3072 raise KeyError(name)
3074
3073
3075 def load_extensions(ui):
3074 def load_extensions(ui):
3076 added = []
3075 added = []
3077 for ext_name, load_from_name in ui.extensions():
3076 for ext_name, load_from_name in ui.extensions():
3078 if ext_name in external:
3077 if ext_name in external:
3079 continue
3078 continue
3080 try:
3079 try:
3081 if load_from_name:
3080 if load_from_name:
3082 # the module will be loaded in sys.modules
3081 # the module will be loaded in sys.modules
3083 # choose a unique name so that it doesn't
3082 # choose a unique name so that it doesn't
3084 # conflict with other modules
3083 # conflict with other modules
3085 module_name = "hgext_%s" % ext_name.replace('.', '_')
3084 module_name = "hgext_%s" % ext_name.replace('.', '_')
3086 mod = imp.load_source(module_name, load_from_name)
3085 mod = imp.load_source(module_name, load_from_name)
3087 else:
3086 else:
3088 def importh(name):
3087 def importh(name):
3089 mod = __import__(name)
3088 mod = __import__(name)
3090 components = name.split('.')
3089 components = name.split('.')
3091 for comp in components[1:]:
3090 for comp in components[1:]:
3092 mod = getattr(mod, comp)
3091 mod = getattr(mod, comp)
3093 return mod
3092 return mod
3094 try:
3093 try:
3095 mod = importh("hgext.%s" % ext_name)
3094 mod = importh("hgext.%s" % ext_name)
3096 except ImportError:
3095 except ImportError:
3097 mod = importh(ext_name)
3096 mod = importh(ext_name)
3098 external[ext_name] = mod.__name__
3097 external[ext_name] = mod.__name__
3099 added.append((mod, ext_name))
3098 added.append((mod, ext_name))
3100 except (util.SignalInterrupt, KeyboardInterrupt):
3099 except (util.SignalInterrupt, KeyboardInterrupt):
3101 raise
3100 raise
3102 except Exception, inst:
3101 except Exception, inst:
3103 ui.warn(_("*** failed to import extension %s: %s\n") %
3102 ui.warn(_("*** failed to import extension %s: %s\n") %
3104 (ext_name, inst))
3103 (ext_name, inst))
3105 if ui.print_exc():
3104 if ui.print_exc():
3106 return 1
3105 return 1
3107
3106
3108 for mod, name in added:
3107 for mod, name in added:
3109 uisetup = getattr(mod, 'uisetup', None)
3108 uisetup = getattr(mod, 'uisetup', None)
3110 if uisetup:
3109 if uisetup:
3111 uisetup(ui)
3110 uisetup(ui)
3112 reposetup = getattr(mod, 'reposetup', None)
3111 reposetup = getattr(mod, 'reposetup', None)
3113 if reposetup:
3112 if reposetup:
3114 hg.repo_setup_hooks.append(reposetup)
3113 hg.repo_setup_hooks.append(reposetup)
3115 cmdtable = getattr(mod, 'cmdtable', {})
3114 cmdtable = getattr(mod, 'cmdtable', {})
3116 for t in cmdtable:
3115 for t in cmdtable:
3117 if t in table:
3116 if t in table:
3118 ui.warn(_("module %s overrides %s\n") % (name, t))
3117 ui.warn(_("module %s overrides %s\n") % (name, t))
3119 table.update(cmdtable)
3118 table.update(cmdtable)
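load_extensions above expects each extension module to optionally provide uisetup(ui), reposetup(ui, repo) and a cmdtable dict that is merged into the main command table (with a warning on name clashes). A minimal, hypothetical extension module following that protocol might look like:

# hypothetical_ext.py - sketch of the extension hooks consulted above;
# the "shout" command and its behaviour are invented for illustration.
def uisetup(ui):
    ui.note("hypothetical extension loaded\n")   # run once, before any repo

def reposetup(ui, repo):
    pass                                         # run for each repository

def shout(ui, repo, *pats, **opts):
    ui.write("SHOUTING in %s\n" % repo.root)

cmdtable = {
    "shout": (shout, [], "hg shout"),            # merged into the main table
}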
3120
3119
3121 def parseconfig(config):
3120 def parseconfig(config):
3122 """parse the --config options from the command line"""
3121 """parse the --config options from the command line"""
3123 parsed = []
3122 parsed = []
3124 for cfg in config:
3123 for cfg in config:
3125 try:
3124 try:
3126 name, value = cfg.split('=', 1)
3125 name, value = cfg.split('=', 1)
3127 section, name = name.split('.', 1)
3126 section, name = name.split('.', 1)
3128 if not section or not name:
3127 if not section or not name:
3129 raise IndexError
3128 raise IndexError
3130 parsed.append((section, name, value))
3129 parsed.append((section, name, value))
3131 except (IndexError, ValueError):
3130 except (IndexError, ValueError):
3132 raise util.Abort(_('malformed --config option: %s') % cfg)
3131 raise util.Abort(_('malformed --config option: %s') % cfg)
3133 return parsed
3132 return parsed
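parseconfig above turns each --config argument of the form section.name=value into a (section, name, value) tuple and aborts on anything malformed. The same parsing rule as a standalone sketch, raising a plain ValueError where the real code uses util.Abort:

# Standalone sketch of --config parsing; ValueError stands in for util.Abort.
def parse_config_options(config):
    parsed = []
    for cfg in config:
        try:
            name, value = cfg.split('=', 1)       # split off the value first
            section, name = name.split('.', 1)    # then the section from the key
            if not section or not name:
                raise ValueError
            parsed.append((section, name, value))
        except ValueError:
            raise ValueError('malformed --config option: %s' % cfg)
    return parsed

# parse_config_options(['ui.username=alice']) -> [('ui', 'username', 'alice')]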
3134
3133
3135 def dispatch(args):
3134 def dispatch(args):
3136 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3135 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3137 num = getattr(signal, name, None)
3136 num = getattr(signal, name, None)
3138 if num: signal.signal(num, catchterm)
3137 if num: signal.signal(num, catchterm)
3139
3138
3140 try:
3139 try:
3141 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3140 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3142 except util.Abort, inst:
3141 except util.Abort, inst:
3143 sys.stderr.write(_("abort: %s\n") % inst)
3142 sys.stderr.write(_("abort: %s\n") % inst)
3144 return -1
3143 return -1
3145
3144
3146 load_extensions(u)
3145 load_extensions(u)
3147 u.addreadhook(load_extensions)
3146 u.addreadhook(load_extensions)
3148
3147
3149 try:
3148 try:
3150 cmd, func, args, options, cmdoptions = parse(u, args)
3149 cmd, func, args, options, cmdoptions = parse(u, args)
3151 if options["encoding"]:
3150 if options["encoding"]:
3152 util._encoding = options["encoding"]
3151 util._encoding = options["encoding"]
3153 if options["encodingmode"]:
3152 if options["encodingmode"]:
3154 util._encodingmode = options["encodingmode"]
3153 util._encodingmode = options["encodingmode"]
3155 if options["time"]:
3154 if options["time"]:
3156 def get_times():
3155 def get_times():
3157 t = os.times()
3156 t = os.times()
3158 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3157 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3159 t = (t[0], t[1], t[2], t[3], time.clock())
3158 t = (t[0], t[1], t[2], t[3], time.clock())
3160 return t
3159 return t
3161 s = get_times()
3160 s = get_times()
3162 def print_time():
3161 def print_time():
3163 t = get_times()
3162 t = get_times()
3164 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3163 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3165 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3164 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3166 atexit.register(print_time)
3165 atexit.register(print_time)
3167
3166
3168 # enter the debugger before command execution
3167 # enter the debugger before command execution
3169 if options['debugger']:
3168 if options['debugger']:
3170 pdb.set_trace()
3169 pdb.set_trace()
3171
3170
3172 try:
3171 try:
3173 if options['cwd']:
3172 if options['cwd']:
3174 os.chdir(options['cwd'])
3173 os.chdir(options['cwd'])
3175
3174
3176 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3175 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3177 not options["noninteractive"], options["traceback"],
3176 not options["noninteractive"], options["traceback"],
3178 parseconfig(options["config"]))
3177 parseconfig(options["config"]))
3179
3178
3180 path = u.expandpath(options["repository"]) or ""
3179 path = u.expandpath(options["repository"]) or ""
3181 repo = path and hg.repository(u, path=path) or None
3180 repo = path and hg.repository(u, path=path) or None
3182 if repo and not repo.local():
3181 if repo and not repo.local():
3183 raise util.Abort(_("repository '%s' is not local") % path)
3182 raise util.Abort(_("repository '%s' is not local") % path)
3184
3183
3185 if options['help']:
3184 if options['help']:
3186 return help_(u, cmd, options['version'])
3185 return help_(u, cmd, options['version'])
3187 elif options['version']:
3186 elif options['version']:
3188 return version_(u)
3187 return version_(u)
3189 elif not cmd:
3188 elif not cmd:
3190 return help_(u, 'shortlist')
3189 return help_(u, 'shortlist')
3191
3190
3192 if cmd not in norepo.split():
3191 if cmd not in norepo.split():
3193 try:
3192 try:
3194 if not repo:
3193 if not repo:
3195 repo = hg.repository(u, path=path)
3194 repo = hg.repository(u, path=path)
3196 u = repo.ui
3195 u = repo.ui
3197 except hg.RepoError:
3196 except hg.RepoError:
3198 if cmd not in optionalrepo.split():
3197 if cmd not in optionalrepo.split():
3199 raise
3198 raise
3200 d = lambda: func(u, repo, *args, **cmdoptions)
3199 d = lambda: func(u, repo, *args, **cmdoptions)
3201 else:
3200 else:
3202 d = lambda: func(u, *args, **cmdoptions)
3201 d = lambda: func(u, *args, **cmdoptions)
3203
3202
3204 try:
3203 try:
3205 if options['profile']:
3204 if options['profile']:
3206 import hotshot, hotshot.stats
3205 import hotshot, hotshot.stats
3207 prof = hotshot.Profile("hg.prof")
3206 prof = hotshot.Profile("hg.prof")
3208 try:
3207 try:
3209 try:
3208 try:
3210 return prof.runcall(d)
3209 return prof.runcall(d)
3211 except:
3210 except:
3212 try:
3211 try:
3213 u.warn(_('exception raised - generating '
3212 u.warn(_('exception raised - generating '
3214 'profile anyway\n'))
3213 'profile anyway\n'))
3215 except:
3214 except:
3216 pass
3215 pass
3217 raise
3216 raise
3218 finally:
3217 finally:
3219 prof.close()
3218 prof.close()
3220 stats = hotshot.stats.load("hg.prof")
3219 stats = hotshot.stats.load("hg.prof")
3221 stats.strip_dirs()
3220 stats.strip_dirs()
3222 stats.sort_stats('time', 'calls')
3221 stats.sort_stats('time', 'calls')
3223 stats.print_stats(40)
3222 stats.print_stats(40)
3224 elif options['lsprof']:
3223 elif options['lsprof']:
3225 try:
3224 try:
3226 from mercurial import lsprof
3225 from mercurial import lsprof
3227 except ImportError:
3226 except ImportError:
3228 raise util.Abort(_(
3227 raise util.Abort(_(
3229 'lsprof not available - install from '
3228 'lsprof not available - install from '
3230 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3229 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3231 p = lsprof.Profiler()
3230 p = lsprof.Profiler()
3232 p.enable(subcalls=True)
3231 p.enable(subcalls=True)
3233 try:
3232 try:
3234 return d()
3233 return d()
3235 finally:
3234 finally:
3236 p.disable()
3235 p.disable()
3237 stats = lsprof.Stats(p.getstats())
3236 stats = lsprof.Stats(p.getstats())
3238 stats.sort()
3237 stats.sort()
3239 stats.pprint(top=10, file=sys.stderr, climit=5)
3238 stats.pprint(top=10, file=sys.stderr, climit=5)
3240 else:
3239 else:
3241 return d()
3240 return d()
3242 finally:
3241 finally:
3243 u.flush()
3242 u.flush()
3244 except:
3243 except:
3245 # enter the debugger when we hit an exception
3244 # enter the debugger when we hit an exception
3246 if options['debugger']:
3245 if options['debugger']:
3247 pdb.post_mortem(sys.exc_info()[2])
3246 pdb.post_mortem(sys.exc_info()[2])
3248 u.print_exc()
3247 u.print_exc()
3249 raise
3248 raise
3250 except ParseError, inst:
3249 except ParseError, inst:
3251 if inst.args[0]:
3250 if inst.args[0]:
3252 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3251 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3253 help_(u, inst.args[0])
3252 help_(u, inst.args[0])
3254 else:
3253 else:
3255 u.warn(_("hg: %s\n") % inst.args[1])
3254 u.warn(_("hg: %s\n") % inst.args[1])
3256 help_(u, 'shortlist')
3255 help_(u, 'shortlist')
3257 except AmbiguousCommand, inst:
3256 except AmbiguousCommand, inst:
3258 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3257 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3259 (inst.args[0], " ".join(inst.args[1])))
3258 (inst.args[0], " ".join(inst.args[1])))
3260 except UnknownCommand, inst:
3259 except UnknownCommand, inst:
3261 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3260 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3262 help_(u, 'shortlist')
3261 help_(u, 'shortlist')
3263 except hg.RepoError, inst:
3262 except hg.RepoError, inst:
3264 u.warn(_("abort: %s!\n") % inst)
3263 u.warn(_("abort: %s!\n") % inst)
3265 except lock.LockHeld, inst:
3264 except lock.LockHeld, inst:
3266 if inst.errno == errno.ETIMEDOUT:
3265 if inst.errno == errno.ETIMEDOUT:
3267 reason = _('timed out waiting for lock held by %s') % inst.locker
3266 reason = _('timed out waiting for lock held by %s') % inst.locker
3268 else:
3267 else:
3269 reason = _('lock held by %s') % inst.locker
3268 reason = _('lock held by %s') % inst.locker
3270 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3269 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3271 except lock.LockUnavailable, inst:
3270 except lock.LockUnavailable, inst:
3272 u.warn(_("abort: could not lock %s: %s\n") %
3271 u.warn(_("abort: could not lock %s: %s\n") %
3273 (inst.desc or inst.filename, inst.strerror))
3272 (inst.desc or inst.filename, inst.strerror))
3274 except revlog.RevlogError, inst:
3273 except revlog.RevlogError, inst:
3275 u.warn(_("abort: %s!\n") % inst)
3274 u.warn(_("abort: %s!\n") % inst)
3276 except util.SignalInterrupt:
3275 except util.SignalInterrupt:
3277 u.warn(_("killed!\n"))
3276 u.warn(_("killed!\n"))
3278 except KeyboardInterrupt:
3277 except KeyboardInterrupt:
3279 try:
3278 try:
3280 u.warn(_("interrupted!\n"))
3279 u.warn(_("interrupted!\n"))
3281 except IOError, inst:
3280 except IOError, inst:
3282 if inst.errno == errno.EPIPE:
3281 if inst.errno == errno.EPIPE:
3283 if u.debugflag:
3282 if u.debugflag:
3284 u.warn(_("\nbroken pipe\n"))
3283 u.warn(_("\nbroken pipe\n"))
3285 else:
3284 else:
3286 raise
3285 raise
3287 except socket.error, inst:
3286 except socket.error, inst:
3288 u.warn(_("abort: %s\n") % inst[1])
3287 u.warn(_("abort: %s\n") % inst[1])
3289 except IOError, inst:
3288 except IOError, inst:
3290 if hasattr(inst, "code"):
3289 if hasattr(inst, "code"):
3291 u.warn(_("abort: %s\n") % inst)
3290 u.warn(_("abort: %s\n") % inst)
3292 elif hasattr(inst, "reason"):
3291 elif hasattr(inst, "reason"):
3293 try: # usually it is in the form (errno, strerror)
3292 try: # usually it is in the form (errno, strerror)
3294 reason = inst.reason.args[1]
3293 reason = inst.reason.args[1]
3295 except: # it might be anything, for example a string
3294 except: # it might be anything, for example a string
3296 reason = inst.reason
3295 reason = inst.reason
3297 u.warn(_("abort: error: %s\n") % reason)
3296 u.warn(_("abort: error: %s\n") % reason)
3298 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3297 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3299 if u.debugflag:
3298 if u.debugflag:
3300 u.warn(_("broken pipe\n"))
3299 u.warn(_("broken pipe\n"))
3301 elif getattr(inst, "strerror", None):
3300 elif getattr(inst, "strerror", None):
3302 if getattr(inst, "filename", None):
3301 if getattr(inst, "filename", None):
3303 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3302 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3304 else:
3303 else:
3305 u.warn(_("abort: %s\n") % inst.strerror)
3304 u.warn(_("abort: %s\n") % inst.strerror)
3306 else:
3305 else:
3307 raise
3306 raise
3308 except OSError, inst:
3307 except OSError, inst:
3309 if getattr(inst, "filename", None):
3308 if getattr(inst, "filename", None):
3310 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3309 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3311 else:
3310 else:
3312 u.warn(_("abort: %s\n") % inst.strerror)
3311 u.warn(_("abort: %s\n") % inst.strerror)
3313 except util.UnexpectedOutput, inst:
3312 except util.UnexpectedOutput, inst:
3314 u.warn(_("abort: %s") % inst[0])
3313 u.warn(_("abort: %s") % inst[0])
3315 if not isinstance(inst[1], basestring):
3314 if not isinstance(inst[1], basestring):
3316 u.warn(" %r\n" % (inst[1],))
3315 u.warn(" %r\n" % (inst[1],))
3317 elif not inst[1]:
3316 elif not inst[1]:
3318 u.warn(_(" empty string\n"))
3317 u.warn(_(" empty string\n"))
3319 else:
3318 else:
3320 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3319 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3321 except util.Abort, inst:
3320 except util.Abort, inst:
3322 u.warn(_("abort: %s\n") % inst)
3321 u.warn(_("abort: %s\n") % inst)
3323 except TypeError, inst:
3322 except TypeError, inst:
3324 # was this an argument error?
3323 # was this an argument error?
3325 tb = traceback.extract_tb(sys.exc_info()[2])
3324 tb = traceback.extract_tb(sys.exc_info()[2])
3326 if len(tb) > 2: # no
3325 if len(tb) > 2: # no
3327 raise
3326 raise
3328 u.debug(inst, "\n")
3327 u.debug(inst, "\n")
3329 u.warn(_("%s: invalid arguments\n") % cmd)
3328 u.warn(_("%s: invalid arguments\n") % cmd)
3330 help_(u, cmd)
3329 help_(u, cmd)
3331 except SystemExit, inst:
3330 except SystemExit, inst:
3332 # Commands shouldn't sys.exit directly, but give a return code.
3331 # Commands shouldn't sys.exit directly, but give a return code.
3333 # Just in case, catch this and pass the exit code to the caller.
3332 # Just in case, catch this and pass the exit code to the caller.
3334 return inst.code
3333 return inst.code
3335 except:
3334 except:
3336 u.warn(_("** unknown exception encountered, details follow\n"))
3335 u.warn(_("** unknown exception encountered, details follow\n"))
3337 u.warn(_("** report bug details to "
3336 u.warn(_("** report bug details to "
3338 "http://www.selenic.com/mercurial/bts\n"))
3337 "http://www.selenic.com/mercurial/bts\n"))
3339 u.warn(_("** or mercurial@selenic.com\n"))
3338 u.warn(_("** or mercurial@selenic.com\n"))
3340 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3339 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3341 % version.get_version())
3340 % version.get_version())
3342 raise
3341 raise
3343
3342
3344 return -1
3343 return -1
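With --time, dispatch records os.times() before running the command and prints the real/user/sys deltas from an atexit handler, substituting time.clock() for the wall-clock slot that Windows leaves at zero. The same measurement pattern in isolation, as a rough sketch:

# Isolated sketch of the --time pattern used in dispatch(); on Windows
# os.times()[4] is 0.0, so wall-clock time falls back to time.clock().
import atexit, os, time

def get_times():
    t = os.times()
    if t[4] == 0.0:
        t = (t[0], t[1], t[2], t[3], time.clock())
    return t

_start = get_times()

def print_time():
    t = get_times()
    print("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)"
          % (t[4] - _start[4], t[0] - _start[0], t[2] - _start[2],
             t[1] - _start[1], t[3] - _start[3]))

atexit.register(print_time)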
@@ -1,2013 +1,2016 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ('lookup', 'changegroupsubset')
18 capabilities = ('lookup', 'changegroupsubset')
19 supported = ('revlogv1', 'store')
19 supported = ('revlogv1', 'store')
20 branchcache_features = ('unnamed',)
20 branchcache_features = ('unnamed',)
21
21
22 def __del__(self):
22 def __del__(self):
23 self.transhandle = None
23 self.transhandle = None
24 def __init__(self, parentui, path=None, create=0):
24 def __init__(self, parentui, path=None, create=0):
25 repo.repository.__init__(self)
25 repo.repository.__init__(self)
26 if not path:
26 if not path:
27 p = os.getcwd()
27 p = os.getcwd()
28 while not os.path.isdir(os.path.join(p, ".hg")):
28 while not os.path.isdir(os.path.join(p, ".hg")):
29 oldp = p
29 oldp = p
30 p = os.path.dirname(p)
30 p = os.path.dirname(p)
31 if p == oldp:
31 if p == oldp:
32 raise repo.RepoError(_("There is no Mercurial repository"
32 raise repo.RepoError(_("There is no Mercurial repository"
33 " here (.hg not found)"))
33 " here (.hg not found)"))
34 path = p
34 path = p
35
35
36 self.root = os.path.realpath(path)
36 self.root = os.path.realpath(path)
37 self.path = os.path.join(self.root, ".hg")
37 self.path = os.path.join(self.root, ".hg")
38 self.origroot = path
38 self.origroot = path
39 self.opener = util.opener(self.path)
39 self.opener = util.opener(self.path)
40 self.wopener = util.opener(self.root)
40 self.wopener = util.opener(self.root)
41
41
42 if not os.path.isdir(self.path):
42 if not os.path.isdir(self.path):
43 if create:
43 if create:
44 if not os.path.exists(path):
44 if not os.path.exists(path):
45 os.mkdir(path)
45 os.mkdir(path)
46 os.mkdir(self.path)
46 os.mkdir(self.path)
47 os.mkdir(os.path.join(self.path, "store"))
47 os.mkdir(os.path.join(self.path, "store"))
48 requirements = ("revlogv1", "store")
48 requirements = ("revlogv1", "store")
49 reqfile = self.opener("requires", "w")
49 reqfile = self.opener("requires", "w")
50 for r in requirements:
50 for r in requirements:
51 reqfile.write("%s\n" % r)
51 reqfile.write("%s\n" % r)
52 reqfile.close()
52 reqfile.close()
53 # create an invalid changelog
53 # create an invalid changelog
54 self.opener("00changelog.i", "a").write(
54 self.opener("00changelog.i", "a").write(
55 '\0\0\0\2' # represents revlogv2
55 '\0\0\0\2' # represents revlogv2
56 ' dummy changelog to prevent using the old repo layout'
56 ' dummy changelog to prevent using the old repo layout'
57 )
57 )
58 else:
58 else:
59 raise repo.RepoError(_("repository %s not found") % path)
59 raise repo.RepoError(_("repository %s not found") % path)
60 elif create:
60 elif create:
61 raise repo.RepoError(_("repository %s already exists") % path)
61 raise repo.RepoError(_("repository %s already exists") % path)
62 else:
62 else:
63 # find requirements
63 # find requirements
64 try:
64 try:
65 requirements = self.opener("requires").read().splitlines()
65 requirements = self.opener("requires").read().splitlines()
66 except IOError, inst:
66 except IOError, inst:
67 if inst.errno != errno.ENOENT:
67 if inst.errno != errno.ENOENT:
68 raise
68 raise
69 requirements = []
69 requirements = []
70 # check them
70 # check them
71 for r in requirements:
71 for r in requirements:
72 if r not in self.supported:
72 if r not in self.supported:
73 raise repo.RepoError(_("requirement '%s' not supported") % r)
73 raise repo.RepoError(_("requirement '%s' not supported") % r)
74
74
75 # setup store
75 # setup store
76 if "store" in requirements:
76 if "store" in requirements:
77 self.encodefn = util.encodefilename
77 self.encodefn = util.encodefilename
78 self.decodefn = util.decodefilename
78 self.decodefn = util.decodefilename
79 self.spath = os.path.join(self.path, "store")
79 self.spath = os.path.join(self.path, "store")
80 else:
80 else:
81 self.encodefn = lambda x: x
81 self.encodefn = lambda x: x
82 self.decodefn = lambda x: x
82 self.decodefn = lambda x: x
83 self.spath = self.path
83 self.spath = self.path
84 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
84 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
85
85
86 self.ui = ui.ui(parentui=parentui)
86 self.ui = ui.ui(parentui=parentui)
87 try:
87 try:
88 self.ui.readconfig(self.join("hgrc"), self.root)
88 self.ui.readconfig(self.join("hgrc"), self.root)
89 except IOError:
89 except IOError:
90 pass
90 pass
91
91
92 v = self.ui.configrevlog()
92 v = self.ui.configrevlog()
93 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
93 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
94 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
94 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
95 fl = v.get('flags', None)
95 fl = v.get('flags', None)
96 flags = 0
96 flags = 0
97 if fl != None:
97 if fl != None:
98 for x in fl.split():
98 for x in fl.split():
99 flags |= revlog.flagstr(x)
99 flags |= revlog.flagstr(x)
100 elif self.revlogv1:
100 elif self.revlogv1:
101 flags = revlog.REVLOG_DEFAULT_FLAGS
101 flags = revlog.REVLOG_DEFAULT_FLAGS
102
102
103 v = self.revlogversion | flags
103 v = self.revlogversion | flags
104 self.manifest = manifest.manifest(self.sopener, v)
104 self.manifest = manifest.manifest(self.sopener, v)
105 self.changelog = changelog.changelog(self.sopener, v)
105 self.changelog = changelog.changelog(self.sopener, v)
106
106
107 fallback = self.ui.config('ui', 'fallbackencoding')
107 fallback = self.ui.config('ui', 'fallbackencoding')
108 if fallback:
108 if fallback:
109 util._fallbackencoding = fallback
109 util._fallbackencoding = fallback
110
110
111 # the changelog might not have the inline index flag
111 # the changelog might not have the inline index flag
112 # on. If the format of the changelog is the same as found in
112 # on. If the format of the changelog is the same as found in
113 # .hgrc, apply any flags found in the .hgrc as well.
113 # .hgrc, apply any flags found in the .hgrc as well.
114 # Otherwise, just use the version from the changelog.
114 # Otherwise, just use the version from the changelog.
115 v = self.changelog.version
115 v = self.changelog.version
116 if v == self.revlogversion:
116 if v == self.revlogversion:
117 v |= flags
117 v |= flags
118 self.revlogversion = v
118 self.revlogversion = v
119
119
120 self.tagscache = None
120 self.tagscache = None
121 self.branchcache = None
121 self.branchcache = None
122 self.nodetagscache = None
122 self.nodetagscache = None
123 self.encodepats = None
123 self.encodepats = None
124 self.decodepats = None
124 self.decodepats = None
125 self.transhandle = None
125 self.transhandle = None
126
126
127 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
127 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
128
128
129 def url(self):
129 def url(self):
130 return 'file:' + self.root
130 return 'file:' + self.root
131
131
132 def hook(self, name, throw=False, **args):
132 def hook(self, name, throw=False, **args):
133 def callhook(hname, funcname):
133 def callhook(hname, funcname):
134 '''call python hook. hook is callable object, looked up as
134 '''call python hook. hook is callable object, looked up as
135 name in python module. if callable returns "true", hook
135 name in python module. if callable returns "true", hook
136 fails, else passes. if hook raises exception, treated as
136 fails, else passes. if hook raises exception, treated as
137 hook failure. exception propagates if throw is "true".
137 hook failure. exception propagates if throw is "true".
138
138
139 reason for "true" meaning "hook failed" is so that
139 reason for "true" meaning "hook failed" is so that
140 unmodified commands (e.g. mercurial.commands.update) can
140 unmodified commands (e.g. mercurial.commands.update) can
141 be run as hooks without wrappers to convert return values.'''
141 be run as hooks without wrappers to convert return values.'''
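# Illustrative sketch (annotation, not part of the original file): per the
# lookup logic above, a Python hook is configured with the "python:" prefix
# handled further below and resolved as a dotted module.function path. The
# module and function names here are hypothetical.
#
#   [hooks]
#   pretxncommit.check = python:mymod.checkhook
#
#   # mymod.py
#   def checkhook(ui, repo, hooktype, **kwargs):
#       ui.note("checking %s\n" % kwargs.get('node', ''))
#       return False   # a false/None return means the hook passed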
142
142
143 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
143 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
144 d = funcname.rfind('.')
144 d = funcname.rfind('.')
145 if d == -1:
145 if d == -1:
146 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
146 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
147 % (hname, funcname))
147 % (hname, funcname))
148 modname = funcname[:d]
148 modname = funcname[:d]
149 try:
149 try:
150 obj = __import__(modname)
150 obj = __import__(modname)
151 except ImportError:
151 except ImportError:
152 try:
152 try:
153 # extensions are loaded with hgext_ prefix
153 # extensions are loaded with hgext_ prefix
154 obj = __import__("hgext_%s" % modname)
154 obj = __import__("hgext_%s" % modname)
155 except ImportError:
155 except ImportError:
156 raise util.Abort(_('%s hook is invalid '
156 raise util.Abort(_('%s hook is invalid '
157 '(import of "%s" failed)') %
157 '(import of "%s" failed)') %
158 (hname, modname))
158 (hname, modname))
159 try:
159 try:
160 for p in funcname.split('.')[1:]:
160 for p in funcname.split('.')[1:]:
161 obj = getattr(obj, p)
161 obj = getattr(obj, p)
162 except AttributeError, err:
162 except AttributeError, err:
163 raise util.Abort(_('%s hook is invalid '
163 raise util.Abort(_('%s hook is invalid '
164 '("%s" is not defined)') %
164 '("%s" is not defined)') %
165 (hname, funcname))
165 (hname, funcname))
166 if not callable(obj):
166 if not callable(obj):
167 raise util.Abort(_('%s hook is invalid '
167 raise util.Abort(_('%s hook is invalid '
168 '("%s" is not callable)') %
168 '("%s" is not callable)') %
169 (hname, funcname))
169 (hname, funcname))
170 try:
170 try:
171 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
171 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
172 except (KeyboardInterrupt, util.SignalInterrupt):
172 except (KeyboardInterrupt, util.SignalInterrupt):
173 raise
173 raise
174 except Exception, exc:
174 except Exception, exc:
175 if isinstance(exc, util.Abort):
175 if isinstance(exc, util.Abort):
176 self.ui.warn(_('error: %s hook failed: %s\n') %
176 self.ui.warn(_('error: %s hook failed: %s\n') %
177 (hname, exc.args[0]))
177 (hname, exc.args[0]))
178 else:
178 else:
179 self.ui.warn(_('error: %s hook raised an exception: '
179 self.ui.warn(_('error: %s hook raised an exception: '
180 '%s\n') % (hname, exc))
180 '%s\n') % (hname, exc))
181 if throw:
181 if throw:
182 raise
182 raise
183 self.ui.print_exc()
183 self.ui.print_exc()
184 return True
184 return True
185 if r:
185 if r:
186 if throw:
186 if throw:
187 raise util.Abort(_('%s hook failed') % hname)
187 raise util.Abort(_('%s hook failed') % hname)
188 self.ui.warn(_('warning: %s hook failed\n') % hname)
188 self.ui.warn(_('warning: %s hook failed\n') % hname)
189 return r
189 return r
190
190
191 def runhook(name, cmd):
191 def runhook(name, cmd):
192 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
192 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
193 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
193 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
194 r = util.system(cmd, environ=env, cwd=self.root)
194 r = util.system(cmd, environ=env, cwd=self.root)
195 if r:
195 if r:
196 desc, r = util.explain_exit(r)
196 desc, r = util.explain_exit(r)
197 if throw:
197 if throw:
198 raise util.Abort(_('%s hook %s') % (name, desc))
198 raise util.Abort(_('%s hook %s') % (name, desc))
199 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
199 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
200 return r
200 return r
201
201
202 r = False
202 r = False
203 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
203 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
204 if hname.split(".", 1)[0] == name and cmd]
204 if hname.split(".", 1)[0] == name and cmd]
205 hooks.sort()
205 hooks.sort()
206 for hname, cmd in hooks:
206 for hname, cmd in hooks:
207 if cmd.startswith('python:'):
207 if cmd.startswith('python:'):
208 r = callhook(hname, cmd[7:].strip()) or r
208 r = callhook(hname, cmd[7:].strip()) or r
209 else:
209 else:
210 r = runhook(hname, cmd) or r
210 r = runhook(hname, cmd) or r
211 return r
211 return r
212
212
213 tag_disallowed = ':\r\n'
213 tag_disallowed = ':\r\n'
214
214
215 def tag(self, name, node, message, local, user, date):
215 def tag(self, name, node, message, local, user, date):
216 '''tag a revision with a symbolic name.
216 '''tag a revision with a symbolic name.
217
217
218 if local is True, the tag is stored in a per-repository file.
218 if local is True, the tag is stored in a per-repository file.
219 otherwise, it is stored in the .hgtags file, and a new
219 otherwise, it is stored in the .hgtags file, and a new
220 changeset is committed with the change.
220 changeset is committed with the change.
221
221
222 keyword arguments:
222 keyword arguments:
223
223
224 local: whether to store tag in non-version-controlled file
224 local: whether to store tag in non-version-controlled file
225 (default False)
225 (default False)
226
226
227 message: commit message to use if committing
227 message: commit message to use if committing
228
228
229 user: name of user to use if committing
229 user: name of user to use if committing
230
230
231 date: date tuple to use if committing'''
231 date: date tuple to use if committing'''
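# Illustrative sketch (annotation, not part of the original file): a caller
# holding a binary node would invoke this method roughly as below; the tag
# name, user, and message are made up.
#
#   node = repo.lookup('tip')
#   repo.tag('v1.0', node, 'Added tag v1.0', local=False,
#            user='someone@example.com', date=None)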
232
232
233 for c in self.tag_disallowed:
233 for c in self.tag_disallowed:
234 if c in name:
234 if c in name:
235 raise util.Abort(_('%r cannot be used in a tag name') % c)
235 raise util.Abort(_('%r cannot be used in a tag name') % c)
236
236
237 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
237 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
238
238
239 if local:
239 if local:
240 # local tags are stored in the current charset
240 # local tags are stored in the current charset
241 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
241 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
242 self.hook('tag', node=hex(node), tag=name, local=local)
242 self.hook('tag', node=hex(node), tag=name, local=local)
243 return
243 return
244
244
245 for x in self.status()[:5]:
245 for x in self.status()[:5]:
246 if '.hgtags' in x:
246 if '.hgtags' in x:
247 raise util.Abort(_('working copy of .hgtags is changed '
247 raise util.Abort(_('working copy of .hgtags is changed '
248 '(please commit .hgtags manually)'))
248 '(please commit .hgtags manually)'))
249
249
250 # committed tags are stored in UTF-8
250 # committed tags are stored in UTF-8
251 line = '%s %s\n' % (hex(node), util.fromlocal(name))
251 line = '%s %s\n' % (hex(node), util.fromlocal(name))
252 self.wfile('.hgtags', 'ab').write(line)
252 self.wfile('.hgtags', 'ab').write(line)
253 if self.dirstate.state('.hgtags') == '?':
253 if self.dirstate.state('.hgtags') == '?':
254 self.add(['.hgtags'])
254 self.add(['.hgtags'])
255
255
256 self.commit(['.hgtags'], message, user, date)
256 self.commit(['.hgtags'], message, user, date)
257 self.hook('tag', node=hex(node), tag=name, local=local)
257 self.hook('tag', node=hex(node), tag=name, local=local)
258
258
259 def tags(self):
259 def tags(self):
260 '''return a mapping of tag to node'''
260 '''return a mapping of tag to node'''
261 if not self.tagscache:
261 if not self.tagscache:
262 self.tagscache = {}
262 self.tagscache = {}
263
263
264 def parsetag(line, context):
264 def parsetag(line, context):
265 if not line:
265 if not line:
266 return
266 return
267 s = line.split(" ", 1)
267 s = line.split(" ", 1)
268 if len(s) != 2:
268 if len(s) != 2:
269 self.ui.warn(_("%s: cannot parse entry\n") % context)
269 self.ui.warn(_("%s: cannot parse entry\n") % context)
270 return
270 return
271 node, key = s
271 node, key = s
272 key = util.tolocal(key.strip()) # stored in UTF-8
272 key = util.tolocal(key.strip()) # stored in UTF-8
273 try:
273 try:
274 bin_n = bin(node)
274 bin_n = bin(node)
275 except TypeError:
275 except TypeError:
276 self.ui.warn(_("%s: node '%s' is not well formed\n") %
276 self.ui.warn(_("%s: node '%s' is not well formed\n") %
277 (context, node))
277 (context, node))
278 return
278 return
279 if bin_n not in self.changelog.nodemap:
279 if bin_n not in self.changelog.nodemap:
280 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
280 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
281 (context, key))
281 (context, key))
282 return
282 return
283 self.tagscache[key] = bin_n
283 self.tagscache[key] = bin_n
284
284
285 # read the tags file from each head, ending with the tip,
285 # read the tags file from each head, ending with the tip,
286 # and add each tag found to the map, with "newer" ones
286 # and add each tag found to the map, with "newer" ones
287 # taking precedence
287 # taking precedence
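# Annotation (not part of the original file): each line handed to parsetag()
# has the form "<40-char hex changeset id> <tag name>", e.g.
#
#   0123456789abcdef0123456789abcdef01234567 v1.0
#
# Committed .hgtags entries are stored in UTF-8, while localtags entries use
# the local character set, as noted in the code below.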
288 f = None
288 f = None
289 for rev, node, fnode in self._hgtagsnodes():
289 for rev, node, fnode in self._hgtagsnodes():
290 f = (f and f.filectx(fnode) or
290 f = (f and f.filectx(fnode) or
291 self.filectx('.hgtags', fileid=fnode))
291 self.filectx('.hgtags', fileid=fnode))
292 count = 0
292 count = 0
293 for l in f.data().splitlines():
293 for l in f.data().splitlines():
294 count += 1
294 count += 1
295 parsetag(l, _("%s, line %d") % (str(f), count))
295 parsetag(l, _("%s, line %d") % (str(f), count))
296
296
297 try:
297 try:
298 f = self.opener("localtags")
298 f = self.opener("localtags")
299 count = 0
299 count = 0
300 for l in f:
300 for l in f:
301 # localtags are stored in the local character set
301 # localtags are stored in the local character set
302 # while the internal tag table is stored in UTF-8
302 # while the internal tag table is stored in UTF-8
303 l = util.fromlocal(l)
303 l = util.fromlocal(l)
304 count += 1
304 count += 1
305 parsetag(l, _("localtags, line %d") % count)
305 parsetag(l, _("localtags, line %d") % count)
306 except IOError:
306 except IOError:
307 pass
307 pass
308
308
309 self.tagscache['tip'] = self.changelog.tip()
309 self.tagscache['tip'] = self.changelog.tip()
310
310
311 return self.tagscache
311 return self.tagscache
312
312
313 def _hgtagsnodes(self):
313 def _hgtagsnodes(self):
314 heads = self.heads()
314 heads = self.heads()
315 heads.reverse()
315 heads.reverse()
316 last = {}
316 last = {}
317 ret = []
317 ret = []
318 for node in heads:
318 for node in heads:
319 c = self.changectx(node)
319 c = self.changectx(node)
320 rev = c.rev()
320 rev = c.rev()
321 try:
321 try:
322 fnode = c.filenode('.hgtags')
322 fnode = c.filenode('.hgtags')
323 except repo.LookupError:
323 except repo.LookupError:
324 continue
324 continue
325 ret.append((rev, node, fnode))
325 ret.append((rev, node, fnode))
326 if fnode in last:
326 if fnode in last:
327 ret[last[fnode]] = None
327 ret[last[fnode]] = None
328 last[fnode] = len(ret) - 1
328 last[fnode] = len(ret) - 1
329 return [item for item in ret if item]
329 return [item for item in ret if item]
330
330
331 def tagslist(self):
331 def tagslist(self):
332 '''return a list of tags ordered by revision'''
332 '''return a list of tags ordered by revision'''
333 l = []
333 l = []
334 for t, n in self.tags().items():
334 for t, n in self.tags().items():
335 try:
335 try:
336 r = self.changelog.rev(n)
336 r = self.changelog.rev(n)
337 except:
337 except:
338 r = -2 # sort to the beginning of the list if unknown
338 r = -2 # sort to the beginning of the list if unknown
339 l.append((r, t, n))
339 l.append((r, t, n))
340 l.sort()
340 l.sort()
341 return [(t, n) for r, t, n in l]
341 return [(t, n) for r, t, n in l]
342
342
343 def nodetags(self, node):
343 def nodetags(self, node):
344 '''return the tags associated with a node'''
344 '''return the tags associated with a node'''
345 if not self.nodetagscache:
345 if not self.nodetagscache:
346 self.nodetagscache = {}
346 self.nodetagscache = {}
347 for t, n in self.tags().items():
347 for t, n in self.tags().items():
348 self.nodetagscache.setdefault(n, []).append(t)
348 self.nodetagscache.setdefault(n, []).append(t)
349 return self.nodetagscache.get(node, [])
349 return self.nodetagscache.get(node, [])
350
350
351 def _branchtags(self):
351 def _branchtags(self):
352 partial, last, lrev = self._readbranchcache()
352 partial, last, lrev = self._readbranchcache()
353
353
354 tiprev = self.changelog.count() - 1
354 tiprev = self.changelog.count() - 1
355 if lrev != tiprev:
355 if lrev != tiprev:
356 self._updatebranchcache(partial, lrev+1, tiprev+1)
356 self._updatebranchcache(partial, lrev+1, tiprev+1)
357 self._writebranchcache(partial, self.changelog.tip(), tiprev)
357 self._writebranchcache(partial, self.changelog.tip(), tiprev)
358
358
359 return partial
359 return partial
360
360
361 def branchtags(self):
361 def branchtags(self):
362 if self.branchcache is not None:
362 if self.branchcache is not None:
363 return self.branchcache
363 return self.branchcache
364
364
365 self.branchcache = {} # avoid recursion in changectx
365 self.branchcache = {} # avoid recursion in changectx
366 partial = self._branchtags()
366 partial = self._branchtags()
367
367
368 # the branch cache is stored on disk as UTF-8, but in the local
368 # the branch cache is stored on disk as UTF-8, but in the local
369 # charset internally
369 # charset internally
370 for k, v in partial.items():
370 for k, v in partial.items():
371 self.branchcache[util.tolocal(k)] = v
371 self.branchcache[util.tolocal(k)] = v
372 return self.branchcache
372 return self.branchcache
373
373
374 def _readbranchcache(self):
374 def _readbranchcache(self):
375 partial = {}
375 partial = {}
376 try:
376 try:
377 f = self.opener("branches.cache")
377 f = self.opener("branches.cache")
378 lines = f.read().split('\n')
378 lines = f.read().split('\n')
379 f.close()
379 f.close()
380 features = lines.pop(0).strip()
380 features = lines.pop(0).strip()
381 if not features.startswith('features: '):
381 if not features.startswith('features: '):
382 raise ValueError(_('branch cache: no features specified'))
382 raise ValueError(_('branch cache: no features specified'))
383 features = features.split(' ', 1)[1].split()
383 features = features.split(' ', 1)[1].split()
384 missing_features = []
384 missing_features = []
385 for feature in self.branchcache_features:
385 for feature in self.branchcache_features:
386 try:
386 try:
387 features.remove(feature)
387 features.remove(feature)
388 except ValueError, inst:
388 except ValueError, inst:
389 missing_features.append(feature)
389 missing_features.append(feature)
390 if missing_features:
390 if missing_features:
391 raise ValueError(_('branch cache: missing features: %s')
391 raise ValueError(_('branch cache: missing features: %s')
392 % ', '.join(missing_features))
392 % ', '.join(missing_features))
393 if features:
393 if features:
394 raise ValueError(_('branch cache: unknown features: %s')
394 raise ValueError(_('branch cache: unknown features: %s')
395 % ', '.join(features))
395 % ', '.join(features))
396 last, lrev = lines.pop(0).split(" ", 1)
396 last, lrev = lines.pop(0).split(" ", 1)
397 last, lrev = bin(last), int(lrev)
397 last, lrev = bin(last), int(lrev)
398 if not (lrev < self.changelog.count() and
398 if not (lrev < self.changelog.count() and
399 self.changelog.node(lrev) == last): # sanity check
399 self.changelog.node(lrev) == last): # sanity check
400 # invalidate the cache
400 # invalidate the cache
401 raise ValueError('Invalid branch cache: unknown tip')
401 raise ValueError('Invalid branch cache: unknown tip')
402 for l in lines:
402 for l in lines:
403 if not l: continue
403 if not l: continue
404 node, label = l.split(" ", 1)
404 node, label = l.split(" ", 1)
405 partial[label.strip()] = bin(node)
405 partial[label.strip()] = bin(node)
406 except (KeyboardInterrupt, util.SignalInterrupt):
406 except (KeyboardInterrupt, util.SignalInterrupt):
407 raise
407 raise
408 except Exception, inst:
408 except Exception, inst:
409 if self.ui.debugflag:
409 if self.ui.debugflag:
410 self.ui.warn(str(inst), '\n')
410 self.ui.warn(str(inst), '\n')
411 partial, last, lrev = {}, nullid, nullrev
411 partial, last, lrev = {}, nullid, nullrev
412 return partial, last, lrev
412 return partial, last, lrev
413
413
414 def _writebranchcache(self, branches, tip, tiprev):
414 def _writebranchcache(self, branches, tip, tiprev):
415 try:
415 try:
416 f = self.opener("branches.cache", "w")
416 f = self.opener("branches.cache", "w")
417 f.write(" features: %s\n" % ' '.join(self.branchcache_features))
417 f.write(" features: %s\n" % ' '.join(self.branchcache_features))
418 f.write("%s %s\n" % (hex(tip), tiprev))
418 f.write("%s %s\n" % (hex(tip), tiprev))
419 for label, node in branches.iteritems():
419 for label, node in branches.iteritems():
420 f.write("%s %s\n" % (hex(node), label))
420 f.write("%s %s\n" % (hex(node), label))
421 except IOError:
421 except IOError:
422 pass
422 pass
423
423
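# Annotation (not part of the original file): as written by the method above,
# the branches.cache file has the layout
#
#    features: <space-separated feature names>
#   <tip hex node> <tip rev number>
#   <hex node> <branch label>
#   ...
#
# and _readbranchcache() discards the cache when the feature list or the
# recorded tip no longer matches the repository.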
424 def _updatebranchcache(self, partial, start, end):
424 def _updatebranchcache(self, partial, start, end):
425 for r in xrange(start, end):
425 for r in xrange(start, end):
426 c = self.changectx(r)
426 c = self.changectx(r)
427 b = c.branch()
427 b = c.branch()
428 partial[b] = c.node()
428 partial[b] = c.node()
429
429
430 def lookup(self, key):
430 def lookup(self, key):
431 if key == '.':
431 if key == '.':
432 key = self.dirstate.parents()[0]
432 key = self.dirstate.parents()[0]
433 if key == nullid:
433 if key == nullid:
434 raise repo.RepoError(_("no revision checked out"))
434 raise repo.RepoError(_("no revision checked out"))
435 elif key == 'null':
435 elif key == 'null':
436 return nullid
436 return nullid
437 n = self.changelog._match(key)
437 n = self.changelog._match(key)
438 if n:
438 if n:
439 return n
439 return n
440 if key in self.tags():
440 if key in self.tags():
441 return self.tags()[key]
441 return self.tags()[key]
442 if key in self.branchtags():
442 if key in self.branchtags():
443 return self.branchtags()[key]
443 return self.branchtags()[key]
444 n = self.changelog._partialmatch(key)
444 n = self.changelog._partialmatch(key)
445 if n:
445 if n:
446 return n
446 return n
447 raise repo.RepoError(_("unknown revision '%s'") % key)
447 raise repo.RepoError(_("unknown revision '%s'") % key)
448
448
449 def dev(self):
449 def dev(self):
450 return os.lstat(self.path).st_dev
450 return os.lstat(self.path).st_dev
451
451
452 def local(self):
452 def local(self):
453 return True
453 return True
454
454
455 def join(self, f):
455 def join(self, f):
456 return os.path.join(self.path, f)
456 return os.path.join(self.path, f)
457
457
458 def sjoin(self, f):
458 def sjoin(self, f):
459 f = self.encodefn(f)
459 f = self.encodefn(f)
460 return os.path.join(self.spath, f)
460 return os.path.join(self.spath, f)
461
461
462 def wjoin(self, f):
462 def wjoin(self, f):
463 return os.path.join(self.root, f)
463 return os.path.join(self.root, f)
464
464
465 def file(self, f):
465 def file(self, f):
466 if f[0] == '/':
466 if f[0] == '/':
467 f = f[1:]
467 f = f[1:]
468 return filelog.filelog(self.sopener, f, self.revlogversion)
468 return filelog.filelog(self.sopener, f, self.revlogversion)
469
469
470 def changectx(self, changeid=None):
470 def changectx(self, changeid=None):
471 return context.changectx(self, changeid)
471 return context.changectx(self, changeid)
472
472
473 def workingctx(self):
473 def workingctx(self):
474 return context.workingctx(self)
474 return context.workingctx(self)
475
475
476 def parents(self, changeid=None):
476 def parents(self, changeid=None):
477 '''
477 '''
478 get list of changectxs for parents of changeid or working directory
478 get list of changectxs for parents of changeid or working directory
479 '''
479 '''
480 if changeid is None:
480 if changeid is None:
481 pl = self.dirstate.parents()
481 pl = self.dirstate.parents()
482 else:
482 else:
483 n = self.changelog.lookup(changeid)
483 n = self.changelog.lookup(changeid)
484 pl = self.changelog.parents(n)
484 pl = self.changelog.parents(n)
485 if pl[1] == nullid:
485 if pl[1] == nullid:
486 return [self.changectx(pl[0])]
486 return [self.changectx(pl[0])]
487 return [self.changectx(pl[0]), self.changectx(pl[1])]
487 return [self.changectx(pl[0]), self.changectx(pl[1])]
488
488
489 def filectx(self, path, changeid=None, fileid=None):
489 def filectx(self, path, changeid=None, fileid=None):
490 """changeid can be a changeset revision, node, or tag.
490 """changeid can be a changeset revision, node, or tag.
491 fileid can be a file revision or node."""
491 fileid can be a file revision or node."""
492 return context.filectx(self, path, changeid, fileid)
492 return context.filectx(self, path, changeid, fileid)
493
493
494 def getcwd(self):
494 def getcwd(self):
495 return self.dirstate.getcwd()
495 return self.dirstate.getcwd()
496
496
497 def wfile(self, f, mode='r'):
497 def wfile(self, f, mode='r'):
498 return self.wopener(f, mode)
498 return self.wopener(f, mode)
499
499
500 def wread(self, filename):
500 def wread(self, filename):
501 if self.encodepats == None:
501 if self.encodepats == None:
502 l = []
502 l = []
503 for pat, cmd in self.ui.configitems("encode"):
503 for pat, cmd in self.ui.configitems("encode"):
504 mf = util.matcher(self.root, "", [pat], [], [])[1]
504 mf = util.matcher(self.root, "", [pat], [], [])[1]
505 l.append((mf, cmd))
505 l.append((mf, cmd))
506 self.encodepats = l
506 self.encodepats = l
507
507
508 data = self.wopener(filename, 'r').read()
508 data = self.wopener(filename, 'r').read()
509
509
510 for mf, cmd in self.encodepats:
510 for mf, cmd in self.encodepats:
511 if mf(filename):
511 if mf(filename):
512 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
512 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
513 data = util.filter(data, cmd)
513 data = util.filter(data, cmd)
514 break
514 break
515
515
516 return data
516 return data
517
517
518 def wwrite(self, filename, data, fd=None):
518 def wwrite(self, filename, data, fd=None):
519 if self.decodepats == None:
519 if self.decodepats == None:
520 l = []
520 l = []
521 for pat, cmd in self.ui.configitems("decode"):
521 for pat, cmd in self.ui.configitems("decode"):
522 mf = util.matcher(self.root, "", [pat], [], [])[1]
522 mf = util.matcher(self.root, "", [pat], [], [])[1]
523 l.append((mf, cmd))
523 l.append((mf, cmd))
524 self.decodepats = l
524 self.decodepats = l
525
525
526 for mf, cmd in self.decodepats:
526 for mf, cmd in self.decodepats:
527 if mf(filename):
527 if mf(filename):
528 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
528 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
529 data = util.filter(data, cmd)
529 data = util.filter(data, cmd)
530 break
530 break
531
531
532 if fd:
532 if fd:
533 return fd.write(data)
533 return fd.write(data)
534 return self.wopener(filename, 'w').write(data)
534 return self.wopener(filename, 'w').write(data)
535
535
536 def transaction(self):
536 def transaction(self):
537 tr = self.transhandle
537 tr = self.transhandle
538 if tr != None and tr.running():
538 if tr != None and tr.running():
539 return tr.nest()
539 return tr.nest()
540
540
541 # save dirstate for rollback
541 # save dirstate for rollback
542 try:
542 try:
543 ds = self.opener("dirstate").read()
543 ds = self.opener("dirstate").read()
544 except IOError:
544 except IOError:
545 ds = ""
545 ds = ""
546 self.opener("journal.dirstate", "w").write(ds)
546 self.opener("journal.dirstate", "w").write(ds)
547
547
548 renames = [(self.sjoin("journal"), self.sjoin("undo")),
548 renames = [(self.sjoin("journal"), self.sjoin("undo")),
549 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
549 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
550 tr = transaction.transaction(self.ui.warn, self.sopener,
550 tr = transaction.transaction(self.ui.warn, self.sopener,
551 self.sjoin("journal"),
551 self.sjoin("journal"),
552 aftertrans(renames))
552 aftertrans(renames))
553 self.transhandle = tr
553 self.transhandle = tr
554 return tr
554 return tr
555
555
556 def recover(self):
556 def recover(self):
557 l = self.lock()
557 l = self.lock()
558 if os.path.exists(self.sjoin("journal")):
558 if os.path.exists(self.sjoin("journal")):
559 self.ui.status(_("rolling back interrupted transaction\n"))
559 self.ui.status(_("rolling back interrupted transaction\n"))
560 transaction.rollback(self.sopener, self.sjoin("journal"))
560 transaction.rollback(self.sopener, self.sjoin("journal"))
561 self.reload()
561 self.reload()
562 return True
562 return True
563 else:
563 else:
564 self.ui.warn(_("no interrupted transaction available\n"))
564 self.ui.warn(_("no interrupted transaction available\n"))
565 return False
565 return False
566
566
567 def rollback(self, wlock=None):
567 def rollback(self, wlock=None):
568 if not wlock:
568 if not wlock:
569 wlock = self.wlock()
569 wlock = self.wlock()
570 l = self.lock()
570 l = self.lock()
571 if os.path.exists(self.sjoin("undo")):
571 if os.path.exists(self.sjoin("undo")):
572 self.ui.status(_("rolling back last transaction\n"))
572 self.ui.status(_("rolling back last transaction\n"))
573 transaction.rollback(self.sopener, self.sjoin("undo"))
573 transaction.rollback(self.sopener, self.sjoin("undo"))
574 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
574 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
575 self.reload()
575 self.reload()
576 self.wreload()
576 self.wreload()
577 else:
577 else:
578 self.ui.warn(_("no rollback information available\n"))
578 self.ui.warn(_("no rollback information available\n"))
579
579
580 def wreload(self):
580 def wreload(self):
581 self.dirstate.read()
581 self.dirstate.read()
582
582
583 def reload(self):
583 def reload(self):
584 self.changelog.load()
584 self.changelog.load()
585 self.manifest.load()
585 self.manifest.load()
586 self.tagscache = None
586 self.tagscache = None
587 self.nodetagscache = None
587 self.nodetagscache = None
588
588
589 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
589 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
590 desc=None):
590 desc=None):
591 try:
591 try:
592 l = lock.lock(lockname, 0, releasefn, desc=desc)
592 l = lock.lock(lockname, 0, releasefn, desc=desc)
593 except lock.LockHeld, inst:
593 except lock.LockHeld, inst:
594 if not wait:
594 if not wait:
595 raise
595 raise
596 self.ui.warn(_("waiting for lock on %s held by %r\n") %
596 self.ui.warn(_("waiting for lock on %s held by %r\n") %
597 (desc, inst.locker))
597 (desc, inst.locker))
598 # default to 600 seconds timeout
598 # default to 600 seconds timeout
599 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
599 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
600 releasefn, desc=desc)
600 releasefn, desc=desc)
601 if acquirefn:
601 if acquirefn:
602 acquirefn()
602 acquirefn()
603 return l
603 return l
604
604
605 def lock(self, wait=1):
605 def lock(self, wait=1):
606 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
606 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
607 desc=_('repository %s') % self.origroot)
607 desc=_('repository %s') % self.origroot)
608
608
609 def wlock(self, wait=1):
609 def wlock(self, wait=1):
610 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
610 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
611 self.wreload,
611 self.wreload,
612 desc=_('working directory of %s') % self.origroot)
612 desc=_('working directory of %s') % self.origroot)
613
613
614 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
614 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
615 """
615 """
616 commit an individual file as part of a larger transaction
616 commit an individual file as part of a larger transaction
617 """
617 """
618
618
619 t = self.wread(fn)
619 t = self.wread(fn)
620 fl = self.file(fn)
620 fl = self.file(fn)
621 fp1 = manifest1.get(fn, nullid)
621 fp1 = manifest1.get(fn, nullid)
622 fp2 = manifest2.get(fn, nullid)
622 fp2 = manifest2.get(fn, nullid)
623
623
624 meta = {}
624 meta = {}
625 cp = self.dirstate.copied(fn)
625 cp = self.dirstate.copied(fn)
626 if cp:
626 if cp:
627 # Mark the new revision of this file as a copy of another
627 # Mark the new revision of this file as a copy of another
628 # file. This copy data will effectively act as a parent
628 # file. This copy data will effectively act as a parent
629 # of this new revision. If this is a merge, the first
629 # of this new revision. If this is a merge, the first
630 # parent will be the nullid (meaning "look up the copy data")
630 # parent will be the nullid (meaning "look up the copy data")
631 # and the second one will be the other parent. For example:
631 # and the second one will be the other parent. For example:
632 #
632 #
633 # 0 --- 1 --- 3 rev1 changes file foo
633 # 0 --- 1 --- 3 rev1 changes file foo
634 # \ / rev2 renames foo to bar and changes it
634 # \ / rev2 renames foo to bar and changes it
635 # \- 2 -/ rev3 should have bar with all changes and
635 # \- 2 -/ rev3 should have bar with all changes and
636 # should record that bar descends from
636 # should record that bar descends from
637 # bar in rev2 and foo in rev1
637 # bar in rev2 and foo in rev1
638 #
638 #
639 # this allows this merge to succeed:
639 # this allows this merge to succeed:
640 #
640 #
641 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
641 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
642 # \ / merging rev3 and rev4 should use bar@rev2
642 # \ / merging rev3 and rev4 should use bar@rev2
643 # \- 2 --- 4 as the merge base
643 # \- 2 --- 4 as the merge base
644 #
644 #
645 meta["copy"] = cp
645 meta["copy"] = cp
646 if not manifest2: # not a branch merge
646 if not manifest2: # not a branch merge
647 meta["copyrev"] = hex(manifest1.get(cp, nullid))
647 meta["copyrev"] = hex(manifest1.get(cp, nullid))
648 fp2 = nullid
648 fp2 = nullid
649 elif fp2 != nullid: # copied on remote side
649 elif fp2 != nullid: # copied on remote side
650 meta["copyrev"] = hex(manifest1.get(cp, nullid))
650 meta["copyrev"] = hex(manifest1.get(cp, nullid))
651 elif fp1 != nullid: # copied on local side, reversed
651 elif fp1 != nullid: # copied on local side, reversed
652 meta["copyrev"] = hex(manifest2.get(cp))
652 meta["copyrev"] = hex(manifest2.get(cp))
653 fp2 = fp1
653 fp2 = fp1
654 else: # directory rename
654 else: # directory rename
655 meta["copyrev"] = hex(manifest1.get(cp, nullid))
655 meta["copyrev"] = hex(manifest1.get(cp, nullid))
656 self.ui.debug(_(" %s: copy %s:%s\n") %
656 self.ui.debug(_(" %s: copy %s:%s\n") %
657 (fn, cp, meta["copyrev"]))
657 (fn, cp, meta["copyrev"]))
658 fp1 = nullid
658 fp1 = nullid
659 elif fp2 != nullid:
659 elif fp2 != nullid:
660 # is one parent an ancestor of the other?
660 # is one parent an ancestor of the other?
661 fpa = fl.ancestor(fp1, fp2)
661 fpa = fl.ancestor(fp1, fp2)
662 if fpa == fp1:
662 if fpa == fp1:
663 fp1, fp2 = fp2, nullid
663 fp1, fp2 = fp2, nullid
664 elif fpa == fp2:
664 elif fpa == fp2:
665 fp2 = nullid
665 fp2 = nullid
666
666
667 # is the file unmodified from the parent? report existing entry
667 # is the file unmodified from the parent? report existing entry
668 if fp2 == nullid and not fl.cmp(fp1, t):
668 if fp2 == nullid and not fl.cmp(fp1, t):
669 return fp1
669 return fp1
670
670
671 changelist.append(fn)
671 changelist.append(fn)
672 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
672 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
673
673
674 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
674 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
675 if p1 is None:
675 if p1 is None:
676 p1, p2 = self.dirstate.parents()
676 p1, p2 = self.dirstate.parents()
677 return self.commit(files=files, text=text, user=user, date=date,
677 return self.commit(files=files, text=text, user=user, date=date,
678 p1=p1, p2=p2, wlock=wlock)
678 p1=p1, p2=p2, wlock=wlock)
679
679
680 def commit(self, files=None, text="", user=None, date=None,
680 def commit(self, files=None, text="", user=None, date=None,
681 match=util.always, force=False, lock=None, wlock=None,
681 match=util.always, force=False, lock=None, wlock=None,
682 force_editor=False, p1=None, p2=None, extra={}):
682 force_editor=False, p1=None, p2=None, extra={}):
683
683
684 commit = []
684 commit = []
685 remove = []
685 remove = []
686 changed = []
686 changed = []
687 use_dirstate = (p1 is None) # not rawcommit
687 use_dirstate = (p1 is None) # not rawcommit
688 extra = extra.copy()
688 extra = extra.copy()
689
689
690 if use_dirstate:
690 if use_dirstate:
691 if files:
691 if files:
692 for f in files:
692 for f in files:
693 s = self.dirstate.state(f)
693 s = self.dirstate.state(f)
694 if s in 'nmai':
694 if s in 'nmai':
695 commit.append(f)
695 commit.append(f)
696 elif s == 'r':
696 elif s == 'r':
697 remove.append(f)
697 remove.append(f)
698 else:
698 else:
699 self.ui.warn(_("%s not tracked!\n") % f)
699 self.ui.warn(_("%s not tracked!\n") % f)
700 else:
700 else:
701 changes = self.status(match=match)[:5]
701 changes = self.status(match=match)[:5]
702 modified, added, removed, deleted, unknown = changes
702 modified, added, removed, deleted, unknown = changes
703 commit = modified + added
703 commit = modified + added
704 remove = removed
704 remove = removed
705 else:
705 else:
706 commit = files
706 commit = files
707
707
708 if use_dirstate:
708 if use_dirstate:
709 p1, p2 = self.dirstate.parents()
709 p1, p2 = self.dirstate.parents()
710 update_dirstate = True
710 update_dirstate = True
711 else:
711 else:
712 p1, p2 = p1, p2 or nullid
712 p1, p2 = p1, p2 or nullid
713 update_dirstate = (self.dirstate.parents()[0] == p1)
713 update_dirstate = (self.dirstate.parents()[0] == p1)
714
714
715 c1 = self.changelog.read(p1)
715 c1 = self.changelog.read(p1)
716 c2 = self.changelog.read(p2)
716 c2 = self.changelog.read(p2)
717 m1 = self.manifest.read(c1[0]).copy()
717 m1 = self.manifest.read(c1[0]).copy()
718 m2 = self.manifest.read(c2[0])
718 m2 = self.manifest.read(c2[0])
719
719
720 if use_dirstate:
720 if use_dirstate:
721 branchname = self.workingctx().branch()
721 branchname = self.workingctx().branch()
722 try:
722 try:
723 branchname = branchname.decode('UTF-8').encode('UTF-8')
723 branchname = branchname.decode('UTF-8').encode('UTF-8')
724 except UnicodeDecodeError:
724 except UnicodeDecodeError:
725 raise util.Abort(_('branch name not in UTF-8!'))
725 raise util.Abort(_('branch name not in UTF-8!'))
726 else:
726 else:
727 branchname = ""
727 branchname = ""
728
728
729 if use_dirstate:
729 if use_dirstate:
730 oldname = c1[5].get("branch", "") # stored in UTF-8
730 oldname = c1[5].get("branch", "") # stored in UTF-8
731 if not commit and not remove and not force and p2 == nullid and \
731 if not commit and not remove and not force and p2 == nullid and \
732 branchname == oldname:
732 branchname == oldname:
733 self.ui.status(_("nothing changed\n"))
733 self.ui.status(_("nothing changed\n"))
734 return None
734 return None
735
735
736 xp1 = hex(p1)
736 xp1 = hex(p1)
737 if p2 == nullid: xp2 = ''
737 if p2 == nullid: xp2 = ''
738 else: xp2 = hex(p2)
738 else: xp2 = hex(p2)
739
739
740 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
740 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
741
741
742 if not wlock:
742 if not wlock:
743 wlock = self.wlock()
743 wlock = self.wlock()
744 if not lock:
744 if not lock:
745 lock = self.lock()
745 lock = self.lock()
746 tr = self.transaction()
746 tr = self.transaction()
747
747
748 # check in files
748 # check in files
749 new = {}
749 new = {}
750 linkrev = self.changelog.count()
750 linkrev = self.changelog.count()
751 commit.sort()
751 commit.sort()
752 for f in commit:
752 for f in commit:
753 self.ui.note(f + "\n")
753 self.ui.note(f + "\n")
754 try:
754 try:
755 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
755 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
756 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
756 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
757 except IOError:
757 except IOError:
758 if use_dirstate:
758 if use_dirstate:
759 self.ui.warn(_("trouble committing %s!\n") % f)
759 self.ui.warn(_("trouble committing %s!\n") % f)
760 raise
760 raise
761 else:
761 else:
762 remove.append(f)
762 remove.append(f)
763
763
764 # update manifest
764 # update manifest
765 m1.update(new)
765 m1.update(new)
766 remove.sort()
766 remove.sort()
767
767
768 for f in remove:
768 for f in remove:
769 if f in m1:
769 if f in m1:
770 del m1[f]
770 del m1[f]
771 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
771 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
772
772
773 # add changeset
773 # add changeset
774 new = new.keys()
774 new = new.keys()
775 new.sort()
775 new.sort()
776
776
777 user = user or self.ui.username()
777 user = user or self.ui.username()
778 if not text or force_editor:
778 if not text or force_editor:
779 edittext = []
779 edittext = []
780 if text:
780 if text:
781 edittext.append(text)
781 edittext.append(text)
782 edittext.append("")
782 edittext.append("")
783 edittext.append("HG: user: %s" % user)
783 edittext.append("HG: user: %s" % user)
784 if p2 != nullid:
784 if p2 != nullid:
785 edittext.append("HG: branch merge")
785 edittext.append("HG: branch merge")
786 edittext.extend(["HG: changed %s" % f for f in changed])
786 edittext.extend(["HG: changed %s" % f for f in changed])
787 edittext.extend(["HG: removed %s" % f for f in remove])
787 edittext.extend(["HG: removed %s" % f for f in remove])
788 if not changed and not remove:
788 if not changed and not remove:
789 edittext.append("HG: no files changed")
789 edittext.append("HG: no files changed")
790 edittext.append("")
790 edittext.append("")
791 # run editor in the repository root
791 # run editor in the repository root
792 olddir = os.getcwd()
792 olddir = os.getcwd()
793 os.chdir(self.root)
793 os.chdir(self.root)
794 text = self.ui.edit("\n".join(edittext), user)
794 text = self.ui.edit("\n".join(edittext), user)
795 os.chdir(olddir)
795 os.chdir(olddir)
796
796
797 lines = [line.rstrip() for line in text.rstrip().splitlines()]
797 lines = [line.rstrip() for line in text.rstrip().splitlines()]
798 while lines and not lines[0]:
798 while lines and not lines[0]:
799 del lines[0]
799 del lines[0]
800 if not lines:
800 if not lines:
801 return None
801 return None
802 text = '\n'.join(lines)
802 text = '\n'.join(lines)
803 if branchname:
803 if branchname:
804 extra["branch"] = branchname
804 extra["branch"] = branchname
805 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
805 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
806 user, date, extra)
806 user, date, extra)
807 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
807 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
808 parent2=xp2)
808 parent2=xp2)
809 tr.close()
809 tr.close()
810
810
811 if use_dirstate or update_dirstate:
811 if use_dirstate or update_dirstate:
812 self.dirstate.setparents(n)
812 self.dirstate.setparents(n)
813 if use_dirstate:
813 if use_dirstate:
814 self.dirstate.update(new, "n")
814 self.dirstate.update(new, "n")
815 self.dirstate.forget(remove)
815 self.dirstate.forget(remove)
816
816
817 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
817 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
818 return n
818 return n
819
819
820 def walk(self, node=None, files=[], match=util.always, badmatch=None):
820 def walk(self, node=None, files=[], match=util.always, badmatch=None):
821 '''
821 '''
822 walk recursively through the directory tree or a given
822 walk recursively through the directory tree or a given
823 changeset, finding all files matched by the match
823 changeset, finding all files matched by the match
824 function
824 function
825
825
826 results are yielded in a tuple (src, filename), where src
826 results are yielded in a tuple (src, filename), where src
827 is one of:
827 is one of:
828 'f' the file was found in the directory tree
828 'f' the file was found in the directory tree
829 'm' the file was only in the dirstate and not in the tree
829 'm' the file was only in the dirstate and not in the tree
830 'b' file was not found and matched badmatch
830 'b' file was not found and matched badmatch
831 '''
831 '''
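# Illustrative sketch (annotation, not part of the original file): callers
# typically iterate the yielded (src, filename) pairs, e.g.
#
#   for src, fn in repo.walk(files=['.'], match=util.always):
#       if src == 'f':
#           process(fn)   # process() is a hypothetical callback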
832
832
833 if node:
833 if node:
834 fdict = dict.fromkeys(files)
834 fdict = dict.fromkeys(files)
835 # for dirstate.walk, files=['.'] means "walk the whole tree".
836 # follow that here, too
837 fdict.pop('.', None)
835 mdict = self.manifest.read(self.changelog.read(node)[0])
838 mdict = self.manifest.read(self.changelog.read(node)[0])
836 mfiles = mdict.keys()
839 mfiles = mdict.keys()
837 mfiles.sort()
840 mfiles.sort()
838 for fn in mfiles:
841 for fn in mfiles:
839 for ffn in fdict:
842 for ffn in fdict:
840 # match if the file is the exact name or a directory
843 # match if the file is the exact name or a directory
841 if ffn == fn or fn.startswith("%s/" % ffn):
844 if ffn == fn or fn.startswith("%s/" % ffn):
842 del fdict[ffn]
845 del fdict[ffn]
843 break
846 break
844 if match(fn):
847 if match(fn):
845 yield 'm', fn
848 yield 'm', fn
846 ffiles = fdict.keys()
849 ffiles = fdict.keys()
847 ffiles.sort()
850 ffiles.sort()
848 for fn in ffiles:
851 for fn in ffiles:
849 if badmatch and badmatch(fn):
852 if badmatch and badmatch(fn):
850 if match(fn):
853 if match(fn):
851 yield 'b', fn
854 yield 'b', fn
852 else:
855 else:
853 self.ui.warn(_('%s: No such file in rev %s\n') % (
856 self.ui.warn(_('%s: No such file in rev %s\n') % (
854 util.pathto(self.getcwd(), fn), short(node)))
857 util.pathto(self.getcwd(), fn), short(node)))
855 else:
858 else:
856 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
859 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
857 yield src, fn
860 yield src, fn
858
861
859 def status(self, node1=None, node2=None, files=[], match=util.always,
862 def status(self, node1=None, node2=None, files=[], match=util.always,
860 wlock=None, list_ignored=False, list_clean=False):
863 wlock=None, list_ignored=False, list_clean=False):
861 """return status of files between two nodes or node and working directory
864 """return status of files between two nodes or node and working directory
862
865
863 If node1 is None, use the first dirstate parent instead.
866 If node1 is None, use the first dirstate parent instead.
864 If node2 is None, compare node1 with working directory.
867 If node2 is None, compare node1 with working directory.
865 """
868 """
866
869
867 def fcmp(fn, mf):
870 def fcmp(fn, mf):
868 t1 = self.wread(fn)
871 t1 = self.wread(fn)
869 return self.file(fn).cmp(mf.get(fn, nullid), t1)
872 return self.file(fn).cmp(mf.get(fn, nullid), t1)
870
873
871 def mfmatches(node):
874 def mfmatches(node):
872 change = self.changelog.read(node)
875 change = self.changelog.read(node)
873 mf = self.manifest.read(change[0]).copy()
876 mf = self.manifest.read(change[0]).copy()
874 for fn in mf.keys():
877 for fn in mf.keys():
875 if not match(fn):
878 if not match(fn):
876 del mf[fn]
879 del mf[fn]
877 return mf
880 return mf
878
881
879 modified, added, removed, deleted, unknown = [], [], [], [], []
882 modified, added, removed, deleted, unknown = [], [], [], [], []
880 ignored, clean = [], []
883 ignored, clean = [], []
881
884
882 compareworking = False
885 compareworking = False
883 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
886 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
884 compareworking = True
887 compareworking = True
885
888
886 if not compareworking:
889 if not compareworking:
887 # read the manifest from node1 before the manifest from node2,
890 # read the manifest from node1 before the manifest from node2,
888 # so that we'll hit the manifest cache if we're going through
891 # so that we'll hit the manifest cache if we're going through
889 # all the revisions in parent->child order.
892 # all the revisions in parent->child order.
890 mf1 = mfmatches(node1)
893 mf1 = mfmatches(node1)
891
894
892 # are we comparing the working directory?
895 # are we comparing the working directory?
893 if not node2:
896 if not node2:
894 if not wlock:
897 if not wlock:
895 try:
898 try:
896 wlock = self.wlock(wait=0)
899 wlock = self.wlock(wait=0)
897 except lock.LockException:
900 except lock.LockException:
898 wlock = None
901 wlock = None
899 (lookup, modified, added, removed, deleted, unknown,
902 (lookup, modified, added, removed, deleted, unknown,
900 ignored, clean) = self.dirstate.status(files, match,
903 ignored, clean) = self.dirstate.status(files, match,
901 list_ignored, list_clean)
904 list_ignored, list_clean)
902
905
903 # are we comparing working dir against its parent?
906 # are we comparing working dir against its parent?
904 if compareworking:
907 if compareworking:
905 if lookup:
908 if lookup:
906 # do a full compare of any files that might have changed
909 # do a full compare of any files that might have changed
907 mf2 = mfmatches(self.dirstate.parents()[0])
910 mf2 = mfmatches(self.dirstate.parents()[0])
908 for f in lookup:
911 for f in lookup:
909 if fcmp(f, mf2):
912 if fcmp(f, mf2):
910 modified.append(f)
913 modified.append(f)
911 else:
914 else:
912 clean.append(f)
915 clean.append(f)
913 if wlock is not None:
916 if wlock is not None:
914 self.dirstate.update([f], "n")
917 self.dirstate.update([f], "n")
915 else:
918 else:
916 # we are comparing working dir against non-parent
919 # we are comparing working dir against non-parent
917 # generate a pseudo-manifest for the working dir
920 # generate a pseudo-manifest for the working dir
918 # XXX: create it in dirstate.py ?
921 # XXX: create it in dirstate.py ?
919 mf2 = mfmatches(self.dirstate.parents()[0])
922 mf2 = mfmatches(self.dirstate.parents()[0])
920 for f in lookup + modified + added:
923 for f in lookup + modified + added:
921 mf2[f] = ""
924 mf2[f] = ""
922 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
925 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
923 for f in removed:
926 for f in removed:
924 if f in mf2:
927 if f in mf2:
925 del mf2[f]
928 del mf2[f]
926 else:
929 else:
927 # we are comparing two revisions
930 # we are comparing two revisions
928 mf2 = mfmatches(node2)
931 mf2 = mfmatches(node2)
929
932
930 if not compareworking:
933 if not compareworking:
931 # flush lists from dirstate before comparing manifests
934 # flush lists from dirstate before comparing manifests
932 modified, added, clean = [], [], []
935 modified, added, clean = [], [], []
933
936
934 # make sure to sort the files so we talk to the disk in a
937 # make sure to sort the files so we talk to the disk in a
935 # reasonable order
938 # reasonable order
936 mf2keys = mf2.keys()
939 mf2keys = mf2.keys()
937 mf2keys.sort()
940 mf2keys.sort()
938 for fn in mf2keys:
941 for fn in mf2keys:
939 if mf1.has_key(fn):
942 if mf1.has_key(fn):
940 if mf1.flags(fn) != mf2.flags(fn) or \
943 if mf1.flags(fn) != mf2.flags(fn) or \
941 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
944 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
942 modified.append(fn)
945 modified.append(fn)
943 elif list_clean:
946 elif list_clean:
944 clean.append(fn)
947 clean.append(fn)
945 del mf1[fn]
948 del mf1[fn]
946 else:
949 else:
947 added.append(fn)
950 added.append(fn)
948
951
949 removed = mf1.keys()
952 removed = mf1.keys()
950
953
951 # sort and return results:
954 # sort and return results:
952 for l in modified, added, removed, deleted, unknown, ignored, clean:
955 for l in modified, added, removed, deleted, unknown, ignored, clean:
953 l.sort()
956 l.sort()
954 return (modified, added, removed, deleted, unknown, ignored, clean)
957 return (modified, added, removed, deleted, unknown, ignored, clean)
955
958
956 def add(self, list, wlock=None):
959 def add(self, list, wlock=None):
957 if not wlock:
960 if not wlock:
958 wlock = self.wlock()
961 wlock = self.wlock()
959 for f in list:
962 for f in list:
960 p = self.wjoin(f)
963 p = self.wjoin(f)
961 if not os.path.exists(p):
964 if not os.path.exists(p):
962 self.ui.warn(_("%s does not exist!\n") % f)
965 self.ui.warn(_("%s does not exist!\n") % f)
963 elif not os.path.isfile(p):
966 elif not os.path.isfile(p):
964 self.ui.warn(_("%s not added: only files supported currently\n")
967 self.ui.warn(_("%s not added: only files supported currently\n")
965 % f)
968 % f)
966 elif self.dirstate.state(f) in 'an':
969 elif self.dirstate.state(f) in 'an':
967 self.ui.warn(_("%s already tracked!\n") % f)
970 self.ui.warn(_("%s already tracked!\n") % f)
968 else:
971 else:
969 self.dirstate.update([f], "a")
972 self.dirstate.update([f], "a")
970
973
971 def forget(self, list, wlock=None):
974 def forget(self, list, wlock=None):
972 if not wlock:
975 if not wlock:
973 wlock = self.wlock()
976 wlock = self.wlock()
974 for f in list:
977 for f in list:
975 if self.dirstate.state(f) not in 'ai':
978 if self.dirstate.state(f) not in 'ai':
976 self.ui.warn(_("%s not added!\n") % f)
979 self.ui.warn(_("%s not added!\n") % f)
977 else:
980 else:
978 self.dirstate.forget([f])
981 self.dirstate.forget([f])
979
982
980 def remove(self, list, unlink=False, wlock=None):
983 def remove(self, list, unlink=False, wlock=None):
981 if unlink:
984 if unlink:
982 for f in list:
985 for f in list:
983 try:
986 try:
984 util.unlink(self.wjoin(f))
987 util.unlink(self.wjoin(f))
985 except OSError, inst:
988 except OSError, inst:
986 if inst.errno != errno.ENOENT:
989 if inst.errno != errno.ENOENT:
987 raise
990 raise
988 if not wlock:
991 if not wlock:
989 wlock = self.wlock()
992 wlock = self.wlock()
990 for f in list:
993 for f in list:
991 p = self.wjoin(f)
994 p = self.wjoin(f)
992 if os.path.exists(p):
995 if os.path.exists(p):
993 self.ui.warn(_("%s still exists!\n") % f)
996 self.ui.warn(_("%s still exists!\n") % f)
994 elif self.dirstate.state(f) == 'a':
997 elif self.dirstate.state(f) == 'a':
995 self.dirstate.forget([f])
998 self.dirstate.forget([f])
996 elif f not in self.dirstate:
999 elif f not in self.dirstate:
997 self.ui.warn(_("%s not tracked!\n") % f)
1000 self.ui.warn(_("%s not tracked!\n") % f)
998 else:
1001 else:
999 self.dirstate.update([f], "r")
1002 self.dirstate.update([f], "r")
1000
1003
1001 def undelete(self, list, wlock=None):
1004 def undelete(self, list, wlock=None):
1002 p = self.dirstate.parents()[0]
1005 p = self.dirstate.parents()[0]
1003 mn = self.changelog.read(p)[0]
1006 mn = self.changelog.read(p)[0]
1004 m = self.manifest.read(mn)
1007 m = self.manifest.read(mn)
1005 if not wlock:
1008 if not wlock:
1006 wlock = self.wlock()
1009 wlock = self.wlock()
1007 for f in list:
1010 for f in list:
1008 if self.dirstate.state(f) not in "r":
1011 if self.dirstate.state(f) not in "r":
1009 self.ui.warn(_("%s not removed!\n") % f)
1012 self.ui.warn(_("%s not removed!\n") % f)
1010 else:
1013 else:
1011 t = self.file(f).read(m[f])
1014 t = self.file(f).read(m[f])
1012 self.wwrite(f, t)
1015 self.wwrite(f, t)
1013 util.set_exec(self.wjoin(f), m.execf(f))
1016 util.set_exec(self.wjoin(f), m.execf(f))
1014 self.dirstate.update([f], "n")
1017 self.dirstate.update([f], "n")
1015
1018
1016 def copy(self, source, dest, wlock=None):
1019 def copy(self, source, dest, wlock=None):
1017 p = self.wjoin(dest)
1020 p = self.wjoin(dest)
1018 if not os.path.exists(p):
1021 if not os.path.exists(p):
1019 self.ui.warn(_("%s does not exist!\n") % dest)
1022 self.ui.warn(_("%s does not exist!\n") % dest)
1020 elif not os.path.isfile(p):
1023 elif not os.path.isfile(p):
1021 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1024 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1022 else:
1025 else:
1023 if not wlock:
1026 if not wlock:
1024 wlock = self.wlock()
1027 wlock = self.wlock()
1025 if self.dirstate.state(dest) == '?':
1028 if self.dirstate.state(dest) == '?':
1026 self.dirstate.update([dest], "a")
1029 self.dirstate.update([dest], "a")
1027 self.dirstate.copy(source, dest)
1030 self.dirstate.copy(source, dest)
1028
1031
1029 def heads(self, start=None):
1032 def heads(self, start=None):
1030 heads = self.changelog.heads(start)
1033 heads = self.changelog.heads(start)
1031 # sort the output in rev descending order
1034 # sort the output in rev descending order
1032 heads = [(-self.changelog.rev(h), h) for h in heads]
1035 heads = [(-self.changelog.rev(h), h) for h in heads]
1033 heads.sort()
1036 heads.sort()
1034 return [n for (r, n) in heads]
1037 return [n for (r, n) in heads]
1035
1038
1036 # branchlookup returns a dict giving a list of branches for
1039 # branchlookup returns a dict giving a list of branches for
1037 # each head. A branch is defined as the tag of a node or
1040 # each head. A branch is defined as the tag of a node or
1038 # the branch of the node's parents. If a node has multiple
1041 # the branch of the node's parents. If a node has multiple
1039 # branch tags, tags are eliminated if they are visible from other
1042 # branch tags, tags are eliminated if they are visible from other
1040 # branch tags.
1043 # branch tags.
1041 #
1044 #
1042 # So, for this graph: a->b->c->d->e
1045 # So, for this graph: a->b->c->d->e
1043 # \ /
1046 # \ /
1044 # aa -----/
1047 # aa -----/
1045 # a has tag 2.6.12
1048 # a has tag 2.6.12
1046 # d has tag 2.6.13
1049 # d has tag 2.6.13
1047 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1050 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1048 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is eliminated
1051 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is eliminated
1049 # from the list.
1052 # from the list.
1050 #
1053 #
1051 # It is possible that more than one head will have the same branch tag.
1054 # It is possible that more than one head will have the same branch tag.
1052 # callers need to check the result for multiple heads under the same
1055 # callers need to check the result for multiple heads under the same
1053 # branch tag if that is a problem for them (i.e. a checkout of a specific
1056 # branch tag if that is a problem for them (i.e. a checkout of a specific
1054 # branch).
1057 # branch).
1055 #
1058 #
1056 # passing in a specific branch will limit the depth of the search
1059 # passing in a specific branch will limit the depth of the search
1057 # through the parents. It won't limit the branches returned in the
1060 # through the parents. It won't limit the branches returned in the
1058 # result though.
1061 # result though.
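# Illustrative sketch (annotation, not part of the original file): the result
# maps head nodes to the branch tags visible from them, e.g.
#
#   for head, tags in repo.branchlookup().items():
#       ui.write("%s: %s\n" % (short(head), ', '.join(tags)))
#
# where short() is the node-abbreviating helper from mercurial's node module.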
1059 def branchlookup(self, heads=None, branch=None):
1062 def branchlookup(self, heads=None, branch=None):
1060 if not heads:
1063 if not heads:
1061 heads = self.heads()
1064 heads = self.heads()
1062 headt = [ h for h in heads ]
1065 headt = [ h for h in heads ]
1063 chlog = self.changelog
1066 chlog = self.changelog
1064 branches = {}
1067 branches = {}
1065 merges = []
1068 merges = []
1066 seenmerge = {}
1069 seenmerge = {}
1067
1070
1068 # traverse the tree once for each head, recording in the branches
1071 # traverse the tree once for each head, recording in the branches
1069 # dict which tags are visible from this head. The branches
1072 # dict which tags are visible from this head. The branches
1070 # dict also records which tags are visible from each tag
1073 # dict also records which tags are visible from each tag
1071 # while we traverse.
1074 # while we traverse.
1072 while headt or merges:
1075 while headt or merges:
1073 if merges:
1076 if merges:
1074 n, found = merges.pop()
1077 n, found = merges.pop()
1075 visit = [n]
1078 visit = [n]
1076 else:
1079 else:
1077 h = headt.pop()
1080 h = headt.pop()
1078 visit = [h]
1081 visit = [h]
1079 found = [h]
1082 found = [h]
1080 seen = {}
1083 seen = {}
1081 while visit:
1084 while visit:
1082 n = visit.pop()
1085 n = visit.pop()
1083 if n in seen:
1086 if n in seen:
1084 continue
1087 continue
1085 pp = chlog.parents(n)
1088 pp = chlog.parents(n)
1086 tags = self.nodetags(n)
1089 tags = self.nodetags(n)
1087 if tags:
1090 if tags:
1088 for x in tags:
1091 for x in tags:
1089 if x == 'tip':
1092 if x == 'tip':
1090 continue
1093 continue
1091 for f in found:
1094 for f in found:
1092 branches.setdefault(f, {})[n] = 1
1095 branches.setdefault(f, {})[n] = 1
1093 branches.setdefault(n, {})[n] = 1
1096 branches.setdefault(n, {})[n] = 1
1094 break
1097 break
1095 if n not in found:
1098 if n not in found:
1096 found.append(n)
1099 found.append(n)
1097 if branch in tags:
1100 if branch in tags:
1098 continue
1101 continue
1099 seen[n] = 1
1102 seen[n] = 1
1100 if pp[1] != nullid and n not in seenmerge:
1103 if pp[1] != nullid and n not in seenmerge:
1101 merges.append((pp[1], [x for x in found]))
1104 merges.append((pp[1], [x for x in found]))
1102 seenmerge[n] = 1
1105 seenmerge[n] = 1
1103 if pp[0] != nullid:
1106 if pp[0] != nullid:
1104 visit.append(pp[0])
1107 visit.append(pp[0])
1105 # traverse the branches dict, eliminating branch tags from each
1108 # traverse the branches dict, eliminating branch tags from each
1106 # head that are visible from another branch tag for that head.
1109 # head that are visible from another branch tag for that head.
1107 out = {}
1110 out = {}
1108 viscache = {}
1111 viscache = {}
1109 for h in heads:
1112 for h in heads:
1110 def visible(node):
1113 def visible(node):
1111 if node in viscache:
1114 if node in viscache:
1112 return viscache[node]
1115 return viscache[node]
1113 ret = {}
1116 ret = {}
1114 visit = [node]
1117 visit = [node]
1115 while visit:
1118 while visit:
1116 x = visit.pop()
1119 x = visit.pop()
1117 if x in viscache:
1120 if x in viscache:
1118 ret.update(viscache[x])
1121 ret.update(viscache[x])
1119 elif x not in ret:
1122 elif x not in ret:
1120 ret[x] = 1
1123 ret[x] = 1
1121 if x in branches:
1124 if x in branches:
1122 visit[len(visit):] = branches[x].keys()
1125 visit[len(visit):] = branches[x].keys()
1123 viscache[node] = ret
1126 viscache[node] = ret
1124 return ret
1127 return ret
1125 if h not in branches:
1128 if h not in branches:
1126 continue
1129 continue
1127 # O(n^2), but somewhat limited. This only searches the
1130 # O(n^2), but somewhat limited. This only searches the
1128 # tags visible from a specific head, not all the tags in the
1131 # tags visible from a specific head, not all the tags in the
1129 # whole repo.
1132 # whole repo.
1130 for b in branches[h]:
1133 for b in branches[h]:
1131 vis = False
1134 vis = False
1132 for bb in branches[h].keys():
1135 for bb in branches[h].keys():
1133 if b != bb:
1136 if b != bb:
1134 if b in visible(bb):
1137 if b in visible(bb):
1135 vis = True
1138 vis = True
1136 break
1139 break
1137 if not vis:
1140 if not vis:
1138 l = out.setdefault(h, [])
1141 l = out.setdefault(h, [])
1139 l[len(l):] = self.nodetags(b)
1142 l[len(l):] = self.nodetags(b)
1140 return out
1143 return out
1141
1144
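# Illustrative sketch of the tag-elimination rule branchlookup applies: for a
# given head, a branch tag is dropped when its node is reachable from another
# tagged node reported for the same head.  The toy graph and tag names below
# are invented and mirror the a->b->c->d->e / aa example in the comment above.
parents = {'e': ['d', 'aa'], 'd': ['c'], 'c': ['b'], 'b': ['a'],
           'aa': ['a'], 'a': []}
tagged = {'a': '2.6.12', 'd': '2.6.13'}

def ancestors(node):
    seen, stack = set(), [node]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents[n])
    return seen

def visible_tags(head):
    cand = [n for n in ancestors(head) if n in tagged]
    keep = [n for n in cand
            if not any(n != other and n in ancestors(other) for other in cand)]
    return sorted(tagged[n] for n in keep)

assert visible_tags('e') == ['2.6.13']   # 2.6.12 is eliminated: d reaches a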
1142 def branches(self, nodes):
1145 def branches(self, nodes):
1143 if not nodes:
1146 if not nodes:
1144 nodes = [self.changelog.tip()]
1147 nodes = [self.changelog.tip()]
1145 b = []
1148 b = []
1146 for n in nodes:
1149 for n in nodes:
1147 t = n
1150 t = n
1148 while 1:
1151 while 1:
1149 p = self.changelog.parents(n)
1152 p = self.changelog.parents(n)
1150 if p[1] != nullid or p[0] == nullid:
1153 if p[1] != nullid or p[0] == nullid:
1151 b.append((t, n, p[0], p[1]))
1154 b.append((t, n, p[0], p[1]))
1152 break
1155 break
1153 n = p[0]
1156 n = p[0]
1154 return b
1157 return b
1155
1158
1156 def between(self, pairs):
1159 def between(self, pairs):
1157 r = []
1160 r = []
1158
1161
1159 for top, bottom in pairs:
1162 for top, bottom in pairs:
1160 n, l, i = top, [], 0
1163 n, l, i = top, [], 0
1161 f = 1
1164 f = 1
1162
1165
1163 while n != bottom:
1166 while n != bottom:
1164 p = self.changelog.parents(n)[0]
1167 p = self.changelog.parents(n)[0]
1165 if i == f:
1168 if i == f:
1166 l.append(n)
1169 l.append(n)
1167 f = f * 2
1170 f = f * 2
1168 n = p
1171 n = p
1169 i += 1
1172 i += 1
1170
1173
1171 r.append(l)
1174 r.append(l)
1172
1175
1173 return r
1176 return r
1174
1177
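# Illustrative sketch of the sampling pattern between() produces: following
# first parents down from `top`, it records the nodes that lie 1, 2, 4, 8, ...
# steps away, stopping at `bottom`.  Plain integers stand in for nodes here.
def between_sample(top, bottom):
    l, n, i, f = [], top, 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f *= 2
        n -= 1          # "first parent" of n in this toy linear history
        i += 1
    return l

assert between_sample(10, 0) == [9, 8, 6, 2]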
1175 def findincoming(self, remote, base=None, heads=None, force=False):
1178 def findincoming(self, remote, base=None, heads=None, force=False):
1176 """Return list of roots of the subsets of missing nodes from remote
1179 """Return list of roots of the subsets of missing nodes from remote
1177
1180
1178 If base dict is specified, assume that these nodes and their parents
1181 If base dict is specified, assume that these nodes and their parents
1179 exist on the remote side and that no child of a node of base exists
1182 exist on the remote side and that no child of a node of base exists
1180 in both remote and self.
1183 in both remote and self.
1181 Furthermore, base will be updated to include the nodes that exist
1184 Furthermore, base will be updated to include the nodes that exist
1182 in both self and remote but none of whose children exist in both.
1185 in both self and remote but none of whose children exist in both.
1183 If a list of heads is specified, return only nodes which are heads
1186 If a list of heads is specified, return only nodes which are heads
1184 or ancestors of these heads.
1187 or ancestors of these heads.
1185
1188
1186 All the ancestors of base are in self and in remote.
1189 All the ancestors of base are in self and in remote.
1187 All the descendants of the list returned are missing in self.
1190 All the descendants of the list returned are missing in self.
1188 (and so we know that the rest of the nodes are missing in remote, see
1191 (and so we know that the rest of the nodes are missing in remote, see
1189 outgoing)
1192 outgoing)
1190 """
1193 """
1191 m = self.changelog.nodemap
1194 m = self.changelog.nodemap
1192 search = []
1195 search = []
1193 fetch = {}
1196 fetch = {}
1194 seen = {}
1197 seen = {}
1195 seenbranch = {}
1198 seenbranch = {}
1196 if base is None:
1199 if base is None:
1197 base = {}
1200 base = {}
1198
1201
1199 if not heads:
1202 if not heads:
1200 heads = remote.heads()
1203 heads = remote.heads()
1201
1204
1202 if self.changelog.tip() == nullid:
1205 if self.changelog.tip() == nullid:
1203 base[nullid] = 1
1206 base[nullid] = 1
1204 if heads != [nullid]:
1207 if heads != [nullid]:
1205 return [nullid]
1208 return [nullid]
1206 return []
1209 return []
1207
1210
1208 # assume we're closer to the tip than the root
1211 # assume we're closer to the tip than the root
1209 # and start by examining the heads
1212 # and start by examining the heads
1210 self.ui.status(_("searching for changes\n"))
1213 self.ui.status(_("searching for changes\n"))
1211
1214
1212 unknown = []
1215 unknown = []
1213 for h in heads:
1216 for h in heads:
1214 if h not in m:
1217 if h not in m:
1215 unknown.append(h)
1218 unknown.append(h)
1216 else:
1219 else:
1217 base[h] = 1
1220 base[h] = 1
1218
1221
1219 if not unknown:
1222 if not unknown:
1220 return []
1223 return []
1221
1224
1222 req = dict.fromkeys(unknown)
1225 req = dict.fromkeys(unknown)
1223 reqcnt = 0
1226 reqcnt = 0
1224
1227
1225 # search through remote branches
1228 # search through remote branches
1226 # a 'branch' here is a linear segment of history, with four parts:
1229 # a 'branch' here is a linear segment of history, with four parts:
1227 # head, root, first parent, second parent
1230 # head, root, first parent, second parent
1228 # (a branch always has two parents (or none) by definition)
1231 # (a branch always has two parents (or none) by definition)
1229 unknown = remote.branches(unknown)
1232 unknown = remote.branches(unknown)
1230 while unknown:
1233 while unknown:
1231 r = []
1234 r = []
1232 while unknown:
1235 while unknown:
1233 n = unknown.pop(0)
1236 n = unknown.pop(0)
1234 if n[0] in seen:
1237 if n[0] in seen:
1235 continue
1238 continue
1236
1239
1237 self.ui.debug(_("examining %s:%s\n")
1240 self.ui.debug(_("examining %s:%s\n")
1238 % (short(n[0]), short(n[1])))
1241 % (short(n[0]), short(n[1])))
1239 if n[0] == nullid: # found the end of the branch
1242 if n[0] == nullid: # found the end of the branch
1240 pass
1243 pass
1241 elif n in seenbranch:
1244 elif n in seenbranch:
1242 self.ui.debug(_("branch already found\n"))
1245 self.ui.debug(_("branch already found\n"))
1243 continue
1246 continue
1244 elif n[1] and n[1] in m: # do we know the base?
1247 elif n[1] and n[1] in m: # do we know the base?
1245 self.ui.debug(_("found incomplete branch %s:%s\n")
1248 self.ui.debug(_("found incomplete branch %s:%s\n")
1246 % (short(n[0]), short(n[1])))
1249 % (short(n[0]), short(n[1])))
1247 search.append(n) # schedule branch range for scanning
1250 search.append(n) # schedule branch range for scanning
1248 seenbranch[n] = 1
1251 seenbranch[n] = 1
1249 else:
1252 else:
1250 if n[1] not in seen and n[1] not in fetch:
1253 if n[1] not in seen and n[1] not in fetch:
1251 if n[2] in m and n[3] in m:
1254 if n[2] in m and n[3] in m:
1252 self.ui.debug(_("found new changeset %s\n") %
1255 self.ui.debug(_("found new changeset %s\n") %
1253 short(n[1]))
1256 short(n[1]))
1254 fetch[n[1]] = 1 # earliest unknown
1257 fetch[n[1]] = 1 # earliest unknown
1255 for p in n[2:4]:
1258 for p in n[2:4]:
1256 if p in m:
1259 if p in m:
1257 base[p] = 1 # latest known
1260 base[p] = 1 # latest known
1258
1261
1259 for p in n[2:4]:
1262 for p in n[2:4]:
1260 if p not in req and p not in m:
1263 if p not in req and p not in m:
1261 r.append(p)
1264 r.append(p)
1262 req[p] = 1
1265 req[p] = 1
1263 seen[n[0]] = 1
1266 seen[n[0]] = 1
1264
1267
1265 if r:
1268 if r:
1266 reqcnt += 1
1269 reqcnt += 1
1267 self.ui.debug(_("request %d: %s\n") %
1270 self.ui.debug(_("request %d: %s\n") %
1268 (reqcnt, " ".join(map(short, r))))
1271 (reqcnt, " ".join(map(short, r))))
1269 for p in xrange(0, len(r), 10):
1272 for p in xrange(0, len(r), 10):
1270 for b in remote.branches(r[p:p+10]):
1273 for b in remote.branches(r[p:p+10]):
1271 self.ui.debug(_("received %s:%s\n") %
1274 self.ui.debug(_("received %s:%s\n") %
1272 (short(b[0]), short(b[1])))
1275 (short(b[0]), short(b[1])))
1273 unknown.append(b)
1276 unknown.append(b)
1274
1277
1275 # do binary search on the branches we found
1278 # do binary search on the branches we found
1276 while search:
1279 while search:
1277 n = search.pop(0)
1280 n = search.pop(0)
1278 reqcnt += 1
1281 reqcnt += 1
1279 l = remote.between([(n[0], n[1])])[0]
1282 l = remote.between([(n[0], n[1])])[0]
1280 l.append(n[1])
1283 l.append(n[1])
1281 p = n[0]
1284 p = n[0]
1282 f = 1
1285 f = 1
1283 for i in l:
1286 for i in l:
1284 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1287 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1285 if i in m:
1288 if i in m:
1286 if f <= 2:
1289 if f <= 2:
1287 self.ui.debug(_("found new branch changeset %s\n") %
1290 self.ui.debug(_("found new branch changeset %s\n") %
1288 short(p))
1291 short(p))
1289 fetch[p] = 1
1292 fetch[p] = 1
1290 base[i] = 1
1293 base[i] = 1
1291 else:
1294 else:
1292 self.ui.debug(_("narrowed branch search to %s:%s\n")
1295 self.ui.debug(_("narrowed branch search to %s:%s\n")
1293 % (short(p), short(i)))
1296 % (short(p), short(i)))
1294 search.append((p, i))
1297 search.append((p, i))
1295 break
1298 break
1296 p, f = i, f * 2
1299 p, f = i, f * 2
1297
1300
1298 # sanity check our fetch list
1301 # sanity check our fetch list
1299 for f in fetch.keys():
1302 for f in fetch.keys():
1300 if f in m:
1303 if f in m:
1301 raise repo.RepoError(_("already have changeset ") + short(f))
1304 raise repo.RepoError(_("already have changeset ") + short(f))
1302
1305
1303 if base.keys() == [nullid]:
1306 if base.keys() == [nullid]:
1304 if force:
1307 if force:
1305 self.ui.warn(_("warning: repository is unrelated\n"))
1308 self.ui.warn(_("warning: repository is unrelated\n"))
1306 else:
1309 else:
1307 raise util.Abort(_("repository is unrelated"))
1310 raise util.Abort(_("repository is unrelated"))
1308
1311
1309 self.ui.debug(_("found new changesets starting at ") +
1312 self.ui.debug(_("found new changesets starting at ") +
1310 " ".join([short(f) for f in fetch]) + "\n")
1313 " ".join([short(f) for f in fetch]) + "\n")
1311
1314
1312 self.ui.debug(_("%d total queries\n") % reqcnt)
1315 self.ui.debug(_("%d total queries\n") % reqcnt)
1313
1316
1314 return fetch.keys()
1317 return fetch.keys()
1315
1318
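# Illustrative sketch of the narrowing idea used by the binary search loop in
# findincoming above: on a linear run where `top` is unknown locally and
# `bottom` is known, repeatedly sample at power-of-two spacing and shrink the
# interval until the boundary is found.  `known` is a hypothetical predicate
# standing in for "node is in self.changelog"; integers stand in for nodes.
def sample(top, bottom):
    l, n, i, f = [], top, 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f *= 2
        n -= 1
        i += 1
    return l

def earliest_unknown(top, bottom, known):
    while True:
        prev, step = top, 1
        for s in sample(top, bottom) + [bottom]:
            if known(s):
                if step <= 2:
                    return prev            # boundary pinned down
                top, bottom = prev, s      # narrow the interval and resample
                break
            prev, step = s, step * 2

assert earliest_unknown(100, 0, lambda r: r <= 60) == 61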
1316 def findoutgoing(self, remote, base=None, heads=None, force=False):
1319 def findoutgoing(self, remote, base=None, heads=None, force=False):
1317 """Return list of nodes that are roots of subsets not in remote
1320 """Return list of nodes that are roots of subsets not in remote
1318
1321
1319 If base dict is specified, assume that these nodes and their parents
1322 If base dict is specified, assume that these nodes and their parents
1320 exist on the remote side.
1323 exist on the remote side.
1321 If a list of heads is specified, return only nodes which are heads
1324 If a list of heads is specified, return only nodes which are heads
1322 or ancestors of these heads, and return a second element which
1325 or ancestors of these heads, and return a second element which
1323 contains all remote heads which get new children.
1326 contains all remote heads which get new children.
1324 """
1327 """
1325 if base is None:
1328 if base is None:
1326 base = {}
1329 base = {}
1327 self.findincoming(remote, base, heads, force=force)
1330 self.findincoming(remote, base, heads, force=force)
1328
1331
1329 self.ui.debug(_("common changesets up to ")
1332 self.ui.debug(_("common changesets up to ")
1330 + " ".join(map(short, base.keys())) + "\n")
1333 + " ".join(map(short, base.keys())) + "\n")
1331
1334
1332 remain = dict.fromkeys(self.changelog.nodemap)
1335 remain = dict.fromkeys(self.changelog.nodemap)
1333
1336
1334 # prune everything remote has from the tree
1337 # prune everything remote has from the tree
1335 del remain[nullid]
1338 del remain[nullid]
1336 remove = base.keys()
1339 remove = base.keys()
1337 while remove:
1340 while remove:
1338 n = remove.pop(0)
1341 n = remove.pop(0)
1339 if n in remain:
1342 if n in remain:
1340 del remain[n]
1343 del remain[n]
1341 for p in self.changelog.parents(n):
1344 for p in self.changelog.parents(n):
1342 remove.append(p)
1345 remove.append(p)
1343
1346
1344 # find every node whose parents have been pruned
1347 # find every node whose parents have been pruned
1345 subset = []
1348 subset = []
1346 # find every remote head that will get new children
1349 # find every remote head that will get new children
1347 updated_heads = {}
1350 updated_heads = {}
1348 for n in remain:
1351 for n in remain:
1349 p1, p2 = self.changelog.parents(n)
1352 p1, p2 = self.changelog.parents(n)
1350 if p1 not in remain and p2 not in remain:
1353 if p1 not in remain and p2 not in remain:
1351 subset.append(n)
1354 subset.append(n)
1352 if heads:
1355 if heads:
1353 if p1 in heads:
1356 if p1 in heads:
1354 updated_heads[p1] = True
1357 updated_heads[p1] = True
1355 if p2 in heads:
1358 if p2 in heads:
1356 updated_heads[p2] = True
1359 updated_heads[p2] = True
1357
1360
1358 # this is the set of all roots we have to push
1361 # this is the set of all roots we have to push
1359 if heads:
1362 if heads:
1360 return subset, updated_heads.keys()
1363 return subset, updated_heads.keys()
1361 else:
1364 else:
1362 return subset
1365 return subset
1363
1366
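# Illustrative sketch of the pruning step in findoutgoing above: drop
# everything reachable from the common `base` nodes, then the roots of what
# remains (nodes whose parents were all pruned) are the changesets missing on
# the remote side.  The graph below is invented for the sketch.
parentmap = {'r0': [], 'r1': ['r0'], 'r2': ['r1'],      # shared history
             'x1': ['r2'], 'x2': ['x1'], 'x3': ['x2']}  # local-only work
base = ['r2']

remain = set(parentmap)
remove = list(base)
while remove:
    n = remove.pop()
    if n in remain:
        remain.discard(n)
        remove.extend(parentmap[n])

roots = sorted(n for n in remain
               if all(p not in remain for p in parentmap[n]))
assert roots == ['x1']          # x1, x2, x3 need pushing; x1 is the root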
1364 def pull(self, remote, heads=None, force=False, lock=None):
1367 def pull(self, remote, heads=None, force=False, lock=None):
1365 mylock = False
1368 mylock = False
1366 if not lock:
1369 if not lock:
1367 lock = self.lock()
1370 lock = self.lock()
1368 mylock = True
1371 mylock = True
1369
1372
1370 try:
1373 try:
1371 fetch = self.findincoming(remote, force=force)
1374 fetch = self.findincoming(remote, force=force)
1372 if fetch == [nullid]:
1375 if fetch == [nullid]:
1373 self.ui.status(_("requesting all changes\n"))
1376 self.ui.status(_("requesting all changes\n"))
1374
1377
1375 if not fetch:
1378 if not fetch:
1376 self.ui.status(_("no changes found\n"))
1379 self.ui.status(_("no changes found\n"))
1377 return 0
1380 return 0
1378
1381
1379 if heads is None:
1382 if heads is None:
1380 cg = remote.changegroup(fetch, 'pull')
1383 cg = remote.changegroup(fetch, 'pull')
1381 else:
1384 else:
1382 if 'changegroupsubset' not in remote.capabilities:
1385 if 'changegroupsubset' not in remote.capabilities:
1383 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1386 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1384 cg = remote.changegroupsubset(fetch, heads, 'pull')
1387 cg = remote.changegroupsubset(fetch, heads, 'pull')
1385 return self.addchangegroup(cg, 'pull', remote.url())
1388 return self.addchangegroup(cg, 'pull', remote.url())
1386 finally:
1389 finally:
1387 if mylock:
1390 if mylock:
1388 lock.release()
1391 lock.release()
1389
1392
1390 def push(self, remote, force=False, revs=None):
1393 def push(self, remote, force=False, revs=None):
1391 # there are two ways to push to remote repo:
1394 # there are two ways to push to remote repo:
1392 #
1395 #
1393 # addchangegroup assumes local user can lock remote
1396 # addchangegroup assumes local user can lock remote
1394 # repo (local filesystem, old ssh servers).
1397 # repo (local filesystem, old ssh servers).
1395 #
1398 #
1396 # unbundle assumes local user cannot lock remote repo (new ssh
1399 # unbundle assumes local user cannot lock remote repo (new ssh
1397 # servers, http servers).
1400 # servers, http servers).
1398
1401
1399 if remote.capable('unbundle'):
1402 if remote.capable('unbundle'):
1400 return self.push_unbundle(remote, force, revs)
1403 return self.push_unbundle(remote, force, revs)
1401 return self.push_addchangegroup(remote, force, revs)
1404 return self.push_addchangegroup(remote, force, revs)
1402
1405
1403 def prepush(self, remote, force, revs):
1406 def prepush(self, remote, force, revs):
1404 base = {}
1407 base = {}
1405 remote_heads = remote.heads()
1408 remote_heads = remote.heads()
1406 inc = self.findincoming(remote, base, remote_heads, force=force)
1409 inc = self.findincoming(remote, base, remote_heads, force=force)
1407
1410
1408 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1411 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1409 if revs is not None:
1412 if revs is not None:
1410 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1413 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1411 else:
1414 else:
1412 bases, heads = update, self.changelog.heads()
1415 bases, heads = update, self.changelog.heads()
1413
1416
1414 if not bases:
1417 if not bases:
1415 self.ui.status(_("no changes found\n"))
1418 self.ui.status(_("no changes found\n"))
1416 return None, 1
1419 return None, 1
1417 elif not force:
1420 elif not force:
1418 # check if we're creating new remote heads
1421 # check if we're creating new remote heads
1419 # to be a remote head after push, node must be either
1422 # to be a remote head after push, node must be either
1420 # - unknown locally
1423 # - unknown locally
1421 # - a local outgoing head descended from update
1424 # - a local outgoing head descended from update
1422 # - a remote head that's known locally and not
1425 # - a remote head that's known locally and not
1423 # ancestral to an outgoing head
1426 # ancestral to an outgoing head
1424
1427
1425 warn = 0
1428 warn = 0
1426
1429
1427 if remote_heads == [nullid]:
1430 if remote_heads == [nullid]:
1428 warn = 0
1431 warn = 0
1429 elif not revs and len(heads) > len(remote_heads):
1432 elif not revs and len(heads) > len(remote_heads):
1430 warn = 1
1433 warn = 1
1431 else:
1434 else:
1432 newheads = list(heads)
1435 newheads = list(heads)
1433 for r in remote_heads:
1436 for r in remote_heads:
1434 if r in self.changelog.nodemap:
1437 if r in self.changelog.nodemap:
1435 desc = self.changelog.heads(r, heads)
1438 desc = self.changelog.heads(r, heads)
1436 l = [h for h in heads if h in desc]
1439 l = [h for h in heads if h in desc]
1437 if not l:
1440 if not l:
1438 newheads.append(r)
1441 newheads.append(r)
1439 else:
1442 else:
1440 newheads.append(r)
1443 newheads.append(r)
1441 if len(newheads) > len(remote_heads):
1444 if len(newheads) > len(remote_heads):
1442 warn = 1
1445 warn = 1
1443
1446
1444 if warn:
1447 if warn:
1445 self.ui.warn(_("abort: push creates new remote heads!\n"))
1448 self.ui.warn(_("abort: push creates new remote heads!\n"))
1446 self.ui.status(_("(did you forget to merge?"
1449 self.ui.status(_("(did you forget to merge?"
1447 " use push -f to force)\n"))
1450 " use push -f to force)\n"))
1448 return None, 1
1451 return None, 1
1449 elif inc:
1452 elif inc:
1450 self.ui.warn(_("note: unsynced remote changes!\n"))
1453 self.ui.warn(_("note: unsynced remote changes!\n"))
1451
1454
1452
1455
1453 if revs is None:
1456 if revs is None:
1454 cg = self.changegroup(update, 'push')
1457 cg = self.changegroup(update, 'push')
1455 else:
1458 else:
1456 cg = self.changegroupsubset(update, revs, 'push')
1459 cg = self.changegroupsubset(update, revs, 'push')
1457 return cg, remote_heads
1460 return cg, remote_heads
1458
1461
1459 def push_addchangegroup(self, remote, force, revs):
1462 def push_addchangegroup(self, remote, force, revs):
1460 lock = remote.lock()
1463 lock = remote.lock()
1461
1464
1462 ret = self.prepush(remote, force, revs)
1465 ret = self.prepush(remote, force, revs)
1463 if ret[0] is not None:
1466 if ret[0] is not None:
1464 cg, remote_heads = ret
1467 cg, remote_heads = ret
1465 return remote.addchangegroup(cg, 'push', self.url())
1468 return remote.addchangegroup(cg, 'push', self.url())
1466 return ret[1]
1469 return ret[1]
1467
1470
1468 def push_unbundle(self, remote, force, revs):
1471 def push_unbundle(self, remote, force, revs):
1469 # local repo finds heads on server, finds out what revs it
1472 # local repo finds heads on server, finds out what revs it
1470 # must push. once revs transferred, if server finds it has
1473 # must push. once revs transferred, if server finds it has
1471 # different heads (someone else won commit/push race), server
1474 # different heads (someone else won commit/push race), server
1472 # aborts.
1475 # aborts.
1473
1476
1474 ret = self.prepush(remote, force, revs)
1477 ret = self.prepush(remote, force, revs)
1475 if ret[0] is not None:
1478 if ret[0] is not None:
1476 cg, remote_heads = ret
1479 cg, remote_heads = ret
1477 if force: remote_heads = ['force']
1480 if force: remote_heads = ['force']
1478 return remote.unbundle(cg, remote_heads, 'push')
1481 return remote.unbundle(cg, remote_heads, 'push')
1479 return ret[1]
1482 return ret[1]
1480
1483
1481 def changegroupinfo(self, nodes):
1484 def changegroupinfo(self, nodes):
1482 self.ui.note(_("%d changesets found\n") % len(nodes))
1485 self.ui.note(_("%d changesets found\n") % len(nodes))
1483 if self.ui.debugflag:
1486 if self.ui.debugflag:
1484 self.ui.debug(_("List of changesets:\n"))
1487 self.ui.debug(_("List of changesets:\n"))
1485 for node in nodes:
1488 for node in nodes:
1486 self.ui.debug("%s\n" % hex(node))
1489 self.ui.debug("%s\n" % hex(node))
1487
1490
1488 def changegroupsubset(self, bases, heads, source):
1491 def changegroupsubset(self, bases, heads, source):
1489 """This function generates a changegroup consisting of all the nodes
1492 """This function generates a changegroup consisting of all the nodes
1490 that are descendants of any of the bases, and ancestors of any of
1493 that are descendants of any of the bases, and ancestors of any of
1491 the heads.
1494 the heads.
1492
1495
1493 It is fairly complex as determining which filenodes and which
1496 It is fairly complex as determining which filenodes and which
1494 manifest nodes need to be included for the changeset to be complete
1497 manifest nodes need to be included for the changeset to be complete
1495 is non-trivial.
1498 is non-trivial.
1496
1499
1497 Another wrinkle is doing the reverse, figuring out which changeset in
1500 Another wrinkle is doing the reverse, figuring out which changeset in
1498 the changegroup a particular filenode or manifestnode belongs to."""
1501 the changegroup a particular filenode or manifestnode belongs to."""
1499
1502
1500 self.hook('preoutgoing', throw=True, source=source)
1503 self.hook('preoutgoing', throw=True, source=source)
1501
1504
1502 # Set up some initial variables
1505 # Set up some initial variables
1503 # Make it easy to refer to self.changelog
1506 # Make it easy to refer to self.changelog
1504 cl = self.changelog
1507 cl = self.changelog
1505 # msng is short for missing - compute the list of changesets in this
1508 # msng is short for missing - compute the list of changesets in this
1506 # changegroup.
1509 # changegroup.
1507 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1510 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1508 self.changegroupinfo(msng_cl_lst)
1511 self.changegroupinfo(msng_cl_lst)
1509 # Some bases may turn out to be superfluous, and some heads may be
1512 # Some bases may turn out to be superfluous, and some heads may be
1510 # too. nodesbetween will return the minimal set of bases and heads
1513 # too. nodesbetween will return the minimal set of bases and heads
1511 # necessary to re-create the changegroup.
1514 # necessary to re-create the changegroup.
1512
1515
1513 # Known heads are the list of heads that it is assumed the recipient
1516 # Known heads are the list of heads that it is assumed the recipient
1514 # of this changegroup will know about.
1517 # of this changegroup will know about.
1515 knownheads = {}
1518 knownheads = {}
1516 # We assume that all parents of bases are known heads.
1519 # We assume that all parents of bases are known heads.
1517 for n in bases:
1520 for n in bases:
1518 for p in cl.parents(n):
1521 for p in cl.parents(n):
1519 if p != nullid:
1522 if p != nullid:
1520 knownheads[p] = 1
1523 knownheads[p] = 1
1521 knownheads = knownheads.keys()
1524 knownheads = knownheads.keys()
1522 if knownheads:
1525 if knownheads:
1523 # Now that we know what heads are known, we can compute which
1526 # Now that we know what heads are known, we can compute which
1524 # changesets are known. The recipient must know about all
1527 # changesets are known. The recipient must know about all
1525 # changesets required to reach the known heads from the null
1528 # changesets required to reach the known heads from the null
1526 # changeset.
1529 # changeset.
1527 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1530 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1528 junk = None
1531 junk = None
1529 # Transform the list into an ersatz set.
1532 # Transform the list into an ersatz set.
1530 has_cl_set = dict.fromkeys(has_cl_set)
1533 has_cl_set = dict.fromkeys(has_cl_set)
1531 else:
1534 else:
1532 # If there were no known heads, the recipient cannot be assumed to
1535 # If there were no known heads, the recipient cannot be assumed to
1533 # know about any changesets.
1536 # know about any changesets.
1534 has_cl_set = {}
1537 has_cl_set = {}
1535
1538
1536 # Make it easy to refer to self.manifest
1539 # Make it easy to refer to self.manifest
1537 mnfst = self.manifest
1540 mnfst = self.manifest
1538 # We don't know which manifests are missing yet
1541 # We don't know which manifests are missing yet
1539 msng_mnfst_set = {}
1542 msng_mnfst_set = {}
1540 # Nor do we know which filenodes are missing.
1543 # Nor do we know which filenodes are missing.
1541 msng_filenode_set = {}
1544 msng_filenode_set = {}
1542
1545
1543 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1546 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1544 junk = None
1547 junk = None
1545
1548
1546 # A changeset always belongs to itself, so the changenode lookup
1549 # A changeset always belongs to itself, so the changenode lookup
1547 # function for a changenode is identity.
1550 # function for a changenode is identity.
1548 def identity(x):
1551 def identity(x):
1549 return x
1552 return x
1550
1553
1551 # A function generating function. Sets up an environment for the
1554 # A function generating function. Sets up an environment for the
1552 # inner function.
1555 # inner function.
1553 def cmp_by_rev_func(revlog):
1556 def cmp_by_rev_func(revlog):
1554 # Compare two nodes by their revision number in the environment's
1557 # Compare two nodes by their revision number in the environment's
1555 # revision history. Since the revision number both represents the
1558 # revision history. Since the revision number both represents the
1556 # most efficient order to read the nodes in, and represents a
1559 # most efficient order to read the nodes in, and represents a
1557 # topological sorting of the nodes, this function is often useful.
1560 # topological sorting of the nodes, this function is often useful.
1558 def cmp_by_rev(a, b):
1561 def cmp_by_rev(a, b):
1559 return cmp(revlog.rev(a), revlog.rev(b))
1562 return cmp(revlog.rev(a), revlog.rev(b))
1560 return cmp_by_rev
1563 return cmp_by_rev
1561
1564
1562 # If we determine that a particular file or manifest node must be a
1565 # If we determine that a particular file or manifest node must be a
1563 # node that the recipient of the changegroup will already have, we can
1566 # node that the recipient of the changegroup will already have, we can
1564 # also assume the recipient will have all the parents. This function
1567 # also assume the recipient will have all the parents. This function
1565 # prunes them from the set of missing nodes.
1568 # prunes them from the set of missing nodes.
1566 def prune_parents(revlog, hasset, msngset):
1569 def prune_parents(revlog, hasset, msngset):
1567 haslst = hasset.keys()
1570 haslst = hasset.keys()
1568 haslst.sort(cmp_by_rev_func(revlog))
1571 haslst.sort(cmp_by_rev_func(revlog))
1569 for node in haslst:
1572 for node in haslst:
1570 parentlst = [p for p in revlog.parents(node) if p != nullid]
1573 parentlst = [p for p in revlog.parents(node) if p != nullid]
1571 while parentlst:
1574 while parentlst:
1572 n = parentlst.pop()
1575 n = parentlst.pop()
1573 if n not in hasset:
1576 if n not in hasset:
1574 hasset[n] = 1
1577 hasset[n] = 1
1575 p = [p for p in revlog.parents(n) if p != nullid]
1578 p = [p for p in revlog.parents(n) if p != nullid]
1576 parentlst.extend(p)
1579 parentlst.extend(p)
1577 for n in hasset:
1580 for n in hasset:
1578 msngset.pop(n, None)
1581 msngset.pop(n, None)
1579
1582
1580 # This is a function generating function used to set up an environment
1583 # This is a function generating function used to set up an environment
1581 # for the inner function to execute in.
1584 # for the inner function to execute in.
1582 def manifest_and_file_collector(changedfileset):
1585 def manifest_and_file_collector(changedfileset):
1583 # This is an information gathering function that gathers
1586 # This is an information gathering function that gathers
1584 # information from each changeset node that goes out as part of
1587 # information from each changeset node that goes out as part of
1585 # the changegroup. The information gathered is a list of which
1588 # the changegroup. The information gathered is a list of which
1586 # manifest nodes are potentially required (the recipient may
1589 # manifest nodes are potentially required (the recipient may
1587 # already have them) and the total list of all files which were
1590 # already have them) and the total list of all files which were
1588 # changed in any changeset in the changegroup.
1591 # changed in any changeset in the changegroup.
1589 #
1592 #
1590 # We also remember the first changenode each manifest was
1593 # We also remember the first changenode each manifest was
1591 # referenced by, so we can later determine which changenode 'owns'
1594 # referenced by, so we can later determine which changenode 'owns'
1592 # the manifest.
1595 # the manifest.
1593 def collect_manifests_and_files(clnode):
1596 def collect_manifests_and_files(clnode):
1594 c = cl.read(clnode)
1597 c = cl.read(clnode)
1595 for f in c[3]:
1598 for f in c[3]:
1596 # This is to make sure we only have one instance of each
1599 # This is to make sure we only have one instance of each
1597 # filename string for each filename.
1600 # filename string for each filename.
1598 changedfileset.setdefault(f, f)
1601 changedfileset.setdefault(f, f)
1599 msng_mnfst_set.setdefault(c[0], clnode)
1602 msng_mnfst_set.setdefault(c[0], clnode)
1600 return collect_manifests_and_files
1603 return collect_manifests_and_files
1601
1604
1602 # Figure out which manifest nodes (of the ones we think might be part
1605 # Figure out which manifest nodes (of the ones we think might be part
1603 # of the changegroup) the recipient must know about and remove them
1606 # of the changegroup) the recipient must know about and remove them
1604 # from the changegroup.
1607 # from the changegroup.
1605 def prune_manifests():
1608 def prune_manifests():
1606 has_mnfst_set = {}
1609 has_mnfst_set = {}
1607 for n in msng_mnfst_set:
1610 for n in msng_mnfst_set:
1608 # If a 'missing' manifest thinks it belongs to a changenode
1611 # If a 'missing' manifest thinks it belongs to a changenode
1609 # the recipient is assumed to have, obviously the recipient
1612 # the recipient is assumed to have, obviously the recipient
1610 # must have that manifest.
1613 # must have that manifest.
1611 linknode = cl.node(mnfst.linkrev(n))
1614 linknode = cl.node(mnfst.linkrev(n))
1612 if linknode in has_cl_set:
1615 if linknode in has_cl_set:
1613 has_mnfst_set[n] = 1
1616 has_mnfst_set[n] = 1
1614 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1617 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1615
1618
1616 # Use the information collected in collect_manifests_and_files to say
1619 # Use the information collected in collect_manifests_and_files to say
1617 # which changenode any manifestnode belongs to.
1620 # which changenode any manifestnode belongs to.
1618 def lookup_manifest_link(mnfstnode):
1621 def lookup_manifest_link(mnfstnode):
1619 return msng_mnfst_set[mnfstnode]
1622 return msng_mnfst_set[mnfstnode]
1620
1623
1621 # A function generating function that sets up the initial environment
1624 # A function generating function that sets up the initial environment
1622 # the inner function.
1625 # the inner function.
1623 def filenode_collector(changedfiles):
1626 def filenode_collector(changedfiles):
1624 next_rev = [0]
1627 next_rev = [0]
1625 # This gathers information from each manifestnode included in the
1628 # This gathers information from each manifestnode included in the
1626 # changegroup about which filenodes the manifest node references
1629 # changegroup about which filenodes the manifest node references
1627 # so we can include those in the changegroup too.
1630 # so we can include those in the changegroup too.
1628 #
1631 #
1629 # It also remembers which changenode each filenode belongs to. It
1632 # It also remembers which changenode each filenode belongs to. It
1630 # does this by assuming that a filenode belongs to the changenode
1633 # does this by assuming that a filenode belongs to the changenode
1631 # the first manifest that references it belongs to.
1634 # the first manifest that references it belongs to.
1632 def collect_msng_filenodes(mnfstnode):
1635 def collect_msng_filenodes(mnfstnode):
1633 r = mnfst.rev(mnfstnode)
1636 r = mnfst.rev(mnfstnode)
1634 if r == next_rev[0]:
1637 if r == next_rev[0]:
1635 # If the last rev we looked at was the one just previous,
1638 # If the last rev we looked at was the one just previous,
1636 # we only need to see a diff.
1639 # we only need to see a diff.
1637 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1640 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1638 # For each line in the delta
1641 # For each line in the delta
1639 for dline in delta.splitlines():
1642 for dline in delta.splitlines():
1640 # get the filename and filenode for that line
1643 # get the filename and filenode for that line
1641 f, fnode = dline.split('\0')
1644 f, fnode = dline.split('\0')
1642 fnode = bin(fnode[:40])
1645 fnode = bin(fnode[:40])
1643 f = changedfiles.get(f, None)
1646 f = changedfiles.get(f, None)
1644 # And if the file is in the list of files we care
1647 # And if the file is in the list of files we care
1645 # about.
1648 # about.
1646 if f is not None:
1649 if f is not None:
1647 # Get the changenode this manifest belongs to
1650 # Get the changenode this manifest belongs to
1648 clnode = msng_mnfst_set[mnfstnode]
1651 clnode = msng_mnfst_set[mnfstnode]
1649 # Create the set of filenodes for the file if
1652 # Create the set of filenodes for the file if
1650 # there isn't one already.
1653 # there isn't one already.
1651 ndset = msng_filenode_set.setdefault(f, {})
1654 ndset = msng_filenode_set.setdefault(f, {})
1652 # And set the filenode's changelog node to the
1655 # And set the filenode's changelog node to the
1653 # manifest's if it hasn't been set already.
1656 # manifest's if it hasn't been set already.
1654 ndset.setdefault(fnode, clnode)
1657 ndset.setdefault(fnode, clnode)
1655 else:
1658 else:
1656 # Otherwise we need a full manifest.
1659 # Otherwise we need a full manifest.
1657 m = mnfst.read(mnfstnode)
1660 m = mnfst.read(mnfstnode)
1658 # For every file we care about.
1661 # For every file we care about.
1659 for f in changedfiles:
1662 for f in changedfiles:
1660 fnode = m.get(f, None)
1663 fnode = m.get(f, None)
1661 # If it's in the manifest
1664 # If it's in the manifest
1662 if fnode is not None:
1665 if fnode is not None:
1663 # See comments above.
1666 # See comments above.
1664 clnode = msng_mnfst_set[mnfstnode]
1667 clnode = msng_mnfst_set[mnfstnode]
1665 ndset = msng_filenode_set.setdefault(f, {})
1668 ndset = msng_filenode_set.setdefault(f, {})
1666 ndset.setdefault(fnode, clnode)
1669 ndset.setdefault(fnode, clnode)
1667 # Remember the revision we hope to see next.
1670 # Remember the revision we hope to see next.
1668 next_rev[0] = r + 1
1671 next_rev[0] = r + 1
1669 return collect_msng_filenodes
1672 return collect_msng_filenodes
1670
1673
1671 # We have a list of filenodes we think we need for a file, let's remove
1674 # We have a list of filenodes we think we need for a file, let's remove
1672 # all those we know the recipient must have.
1675 # all those we know the recipient must have.
1673 def prune_filenodes(f, filerevlog):
1676 def prune_filenodes(f, filerevlog):
1674 msngset = msng_filenode_set[f]
1677 msngset = msng_filenode_set[f]
1675 hasset = {}
1678 hasset = {}
1676 # If a 'missing' filenode thinks it belongs to a changenode we
1679 # If a 'missing' filenode thinks it belongs to a changenode we
1677 # assume the recipient must have, then the recipient must have
1680 # assume the recipient must have, then the recipient must have
1678 # that filenode.
1681 # that filenode.
1679 for n in msngset:
1682 for n in msngset:
1680 clnode = cl.node(filerevlog.linkrev(n))
1683 clnode = cl.node(filerevlog.linkrev(n))
1681 if clnode in has_cl_set:
1684 if clnode in has_cl_set:
1682 hasset[n] = 1
1685 hasset[n] = 1
1683 prune_parents(filerevlog, hasset, msngset)
1686 prune_parents(filerevlog, hasset, msngset)
1684
1687
1685 # A function generating function that sets up a context for the
1688 # A function generating function that sets up a context for the
1686 # inner function.
1689 # inner function.
1687 def lookup_filenode_link_func(fname):
1690 def lookup_filenode_link_func(fname):
1688 msngset = msng_filenode_set[fname]
1691 msngset = msng_filenode_set[fname]
1689 # Lookup the changenode the filenode belongs to.
1692 # Lookup the changenode the filenode belongs to.
1690 def lookup_filenode_link(fnode):
1693 def lookup_filenode_link(fnode):
1691 return msngset[fnode]
1694 return msngset[fnode]
1692 return lookup_filenode_link
1695 return lookup_filenode_link
1693
1696
1694 # Now that we have all these utility functions to help out and
1697 # Now that we have all these utility functions to help out and
1695 # logically divide up the task, generate the group.
1698 # logically divide up the task, generate the group.
1696 def gengroup():
1699 def gengroup():
1697 # The set of changed files starts empty.
1700 # The set of changed files starts empty.
1698 changedfiles = {}
1701 changedfiles = {}
1699 # Create a changenode group generator that will call our functions
1702 # Create a changenode group generator that will call our functions
1700 # back to lookup the owning changenode and collect information.
1703 # back to lookup the owning changenode and collect information.
1701 group = cl.group(msng_cl_lst, identity,
1704 group = cl.group(msng_cl_lst, identity,
1702 manifest_and_file_collector(changedfiles))
1705 manifest_and_file_collector(changedfiles))
1703 for chnk in group:
1706 for chnk in group:
1704 yield chnk
1707 yield chnk
1705
1708
1706 # The list of manifests has been collected by the generator
1709 # The list of manifests has been collected by the generator
1707 # calling our functions back.
1710 # calling our functions back.
1708 prune_manifests()
1711 prune_manifests()
1709 msng_mnfst_lst = msng_mnfst_set.keys()
1712 msng_mnfst_lst = msng_mnfst_set.keys()
1710 # Sort the manifestnodes by revision number.
1713 # Sort the manifestnodes by revision number.
1711 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1714 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1712 # Create a generator for the manifestnodes that calls our lookup
1715 # Create a generator for the manifestnodes that calls our lookup
1713 # and data collection functions back.
1716 # and data collection functions back.
1714 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1717 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1715 filenode_collector(changedfiles))
1718 filenode_collector(changedfiles))
1716 for chnk in group:
1719 for chnk in group:
1717 yield chnk
1720 yield chnk
1718
1721
1719 # These are no longer needed, dereference and toss the memory for
1722 # These are no longer needed, dereference and toss the memory for
1720 # them.
1723 # them.
1721 msng_mnfst_lst = None
1724 msng_mnfst_lst = None
1722 msng_mnfst_set.clear()
1725 msng_mnfst_set.clear()
1723
1726
1724 changedfiles = changedfiles.keys()
1727 changedfiles = changedfiles.keys()
1725 changedfiles.sort()
1728 changedfiles.sort()
1726 # Go through all our files in order sorted by name.
1729 # Go through all our files in order sorted by name.
1727 for fname in changedfiles:
1730 for fname in changedfiles:
1728 filerevlog = self.file(fname)
1731 filerevlog = self.file(fname)
1729 # Toss out the filenodes that the recipient isn't really
1732 # Toss out the filenodes that the recipient isn't really
1730 # missing.
1733 # missing.
1731 if msng_filenode_set.has_key(fname):
1734 if msng_filenode_set.has_key(fname):
1732 prune_filenodes(fname, filerevlog)
1735 prune_filenodes(fname, filerevlog)
1733 msng_filenode_lst = msng_filenode_set[fname].keys()
1736 msng_filenode_lst = msng_filenode_set[fname].keys()
1734 else:
1737 else:
1735 msng_filenode_lst = []
1738 msng_filenode_lst = []
1736 # If any filenodes are left, generate the group for them,
1739 # If any filenodes are left, generate the group for them,
1737 # otherwise don't bother.
1740 # otherwise don't bother.
1738 if len(msng_filenode_lst) > 0:
1741 if len(msng_filenode_lst) > 0:
1739 yield changegroup.genchunk(fname)
1742 yield changegroup.genchunk(fname)
1740 # Sort the filenodes by their revision #
1743 # Sort the filenodes by their revision #
1741 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1744 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1742 # Create a group generator and only pass in a changenode
1745 # Create a group generator and only pass in a changenode
1743 # lookup function as we need to collect no information
1746 # lookup function as we need to collect no information
1744 # from filenodes.
1747 # from filenodes.
1745 group = filerevlog.group(msng_filenode_lst,
1748 group = filerevlog.group(msng_filenode_lst,
1746 lookup_filenode_link_func(fname))
1749 lookup_filenode_link_func(fname))
1747 for chnk in group:
1750 for chnk in group:
1748 yield chnk
1751 yield chnk
1749 if msng_filenode_set.has_key(fname):
1752 if msng_filenode_set.has_key(fname):
1750 # Don't need this anymore, toss it to free memory.
1753 # Don't need this anymore, toss it to free memory.
1751 del msng_filenode_set[fname]
1754 del msng_filenode_set[fname]
1752 # Signal that no more groups are left.
1755 # Signal that no more groups are left.
1753 yield changegroup.closechunk()
1756 yield changegroup.closechunk()
1754
1757
1755 if msng_cl_lst:
1758 if msng_cl_lst:
1756 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1759 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1757
1760
1758 return util.chunkbuffer(gengroup())
1761 return util.chunkbuffer(gengroup())
1759
1762
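# Illustrative sketch of the linkrev-based pruning in changegroupsubset above
# (prune_filenodes/prune_parents): a candidate filenode is dropped when the
# changeset it links to is one the recipient already has, and so are all of
# its ancestors.  The linknode/parent data below are invented for the sketch.
linknode = {'f1': 'c1', 'f2': 'c2', 'f3': 'c3'}    # filenode -> changeset
fparents = {'f1': [], 'f2': ['f1'], 'f3': ['f2']}
has_cl_set = set(['c1', 'c2'])                     # changesets recipient has

msngset = dict.fromkeys(linknode)                  # start: everything missing
hasset = set(n for n in msngset if linknode[n] in has_cl_set)
stack = list(hasset)
while stack:                                       # recipient has ancestors too
    for p in fparents[stack.pop()]:
        if p not in hasset:
            hasset.add(p)
            stack.append(p)
for n in hasset:
    msngset.pop(n, None)

assert list(msngset) == ['f3']                     # only f3 has to be sent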
1760 def changegroup(self, basenodes, source):
1763 def changegroup(self, basenodes, source):
1761 """Generate a changegroup of all nodes that we have that a recipient
1764 """Generate a changegroup of all nodes that we have that a recipient
1762 doesn't.
1765 doesn't.
1763
1766
1764 This is much easier than the previous function as we can assume that
1767 This is much easier than the previous function as we can assume that
1765 the recipient has any changenode we aren't sending them."""
1768 the recipient has any changenode we aren't sending them."""
1766
1769
1767 self.hook('preoutgoing', throw=True, source=source)
1770 self.hook('preoutgoing', throw=True, source=source)
1768
1771
1769 cl = self.changelog
1772 cl = self.changelog
1770 nodes = cl.nodesbetween(basenodes, None)[0]
1773 nodes = cl.nodesbetween(basenodes, None)[0]
1771 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1774 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1772 self.changegroupinfo(nodes)
1775 self.changegroupinfo(nodes)
1773
1776
1774 def identity(x):
1777 def identity(x):
1775 return x
1778 return x
1776
1779
1777 def gennodelst(revlog):
1780 def gennodelst(revlog):
1778 for r in xrange(0, revlog.count()):
1781 for r in xrange(0, revlog.count()):
1779 n = revlog.node(r)
1782 n = revlog.node(r)
1780 if revlog.linkrev(n) in revset:
1783 if revlog.linkrev(n) in revset:
1781 yield n
1784 yield n
1782
1785
1783 def changed_file_collector(changedfileset):
1786 def changed_file_collector(changedfileset):
1784 def collect_changed_files(clnode):
1787 def collect_changed_files(clnode):
1785 c = cl.read(clnode)
1788 c = cl.read(clnode)
1786 for fname in c[3]:
1789 for fname in c[3]:
1787 changedfileset[fname] = 1
1790 changedfileset[fname] = 1
1788 return collect_changed_files
1791 return collect_changed_files
1789
1792
1790 def lookuprevlink_func(revlog):
1793 def lookuprevlink_func(revlog):
1791 def lookuprevlink(n):
1794 def lookuprevlink(n):
1792 return cl.node(revlog.linkrev(n))
1795 return cl.node(revlog.linkrev(n))
1793 return lookuprevlink
1796 return lookuprevlink
1794
1797
1795 def gengroup():
1798 def gengroup():
1796 # construct a list of all changed files
1799 # construct a list of all changed files
1797 changedfiles = {}
1800 changedfiles = {}
1798
1801
1799 for chnk in cl.group(nodes, identity,
1802 for chnk in cl.group(nodes, identity,
1800 changed_file_collector(changedfiles)):
1803 changed_file_collector(changedfiles)):
1801 yield chnk
1804 yield chnk
1802 changedfiles = changedfiles.keys()
1805 changedfiles = changedfiles.keys()
1803 changedfiles.sort()
1806 changedfiles.sort()
1804
1807
1805 mnfst = self.manifest
1808 mnfst = self.manifest
1806 nodeiter = gennodelst(mnfst)
1809 nodeiter = gennodelst(mnfst)
1807 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1810 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1808 yield chnk
1811 yield chnk
1809
1812
1810 for fname in changedfiles:
1813 for fname in changedfiles:
1811 filerevlog = self.file(fname)
1814 filerevlog = self.file(fname)
1812 nodeiter = gennodelst(filerevlog)
1815 nodeiter = gennodelst(filerevlog)
1813 nodeiter = list(nodeiter)
1816 nodeiter = list(nodeiter)
1814 if nodeiter:
1817 if nodeiter:
1815 yield changegroup.genchunk(fname)
1818 yield changegroup.genchunk(fname)
1816 lookup = lookuprevlink_func(filerevlog)
1819 lookup = lookuprevlink_func(filerevlog)
1817 for chnk in filerevlog.group(nodeiter, lookup):
1820 for chnk in filerevlog.group(nodeiter, lookup):
1818 yield chnk
1821 yield chnk
1819
1822
1820 yield changegroup.closechunk()
1823 yield changegroup.closechunk()
1821
1824
1822 if nodes:
1825 if nodes:
1823 self.hook('outgoing', node=hex(nodes[0]), source=source)
1826 self.hook('outgoing', node=hex(nodes[0]), source=source)
1824
1827
1825 return util.chunkbuffer(gengroup())
1828 return util.chunkbuffer(gengroup())
1826
1829
1827 def addchangegroup(self, source, srctype, url):
1830 def addchangegroup(self, source, srctype, url):
1828 """add changegroup to repo.
1831 """add changegroup to repo.
1829
1832
1830 return values:
1833 return values:
1831 - nothing changed or no source: 0
1834 - nothing changed or no source: 0
1832 - more heads than before: 1+added heads (2..n)
1835 - more heads than before: 1+added heads (2..n)
1833 - fewer heads than before: -1-removed heads (-2..-n)
1836 - fewer heads than before: -1-removed heads (-2..-n)
1834 - number of heads stays the same: 1
1837 - number of heads stays the same: 1
1835 """
1838 """
1836 def csmap(x):
1839 def csmap(x):
1837 self.ui.debug(_("add changeset %s\n") % short(x))
1840 self.ui.debug(_("add changeset %s\n") % short(x))
1838 return cl.count()
1841 return cl.count()
1839
1842
1840 def revmap(x):
1843 def revmap(x):
1841 return cl.rev(x)
1844 return cl.rev(x)
1842
1845
1843 if not source:
1846 if not source:
1844 return 0
1847 return 0
1845
1848
1846 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1849 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1847
1850
1848 changesets = files = revisions = 0
1851 changesets = files = revisions = 0
1849
1852
1850 tr = self.transaction()
1853 tr = self.transaction()
1851
1854
1852 # write changelog data to temp files so concurrent readers will not see
1855 # write changelog data to temp files so concurrent readers will not see
1853 # an inconsistent view
1856 # an inconsistent view
1854 cl = None
1857 cl = None
1855 try:
1858 try:
1856 cl = appendfile.appendchangelog(self.sopener,
1859 cl = appendfile.appendchangelog(self.sopener,
1857 self.changelog.version)
1860 self.changelog.version)
1858
1861
1859 oldheads = len(cl.heads())
1862 oldheads = len(cl.heads())
1860
1863
1861 # pull off the changeset group
1864 # pull off the changeset group
1862 self.ui.status(_("adding changesets\n"))
1865 self.ui.status(_("adding changesets\n"))
1863 cor = cl.count() - 1
1866 cor = cl.count() - 1
1864 chunkiter = changegroup.chunkiter(source)
1867 chunkiter = changegroup.chunkiter(source)
1865 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1868 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1866 raise util.Abort(_("received changelog group is empty"))
1869 raise util.Abort(_("received changelog group is empty"))
1867 cnr = cl.count() - 1
1870 cnr = cl.count() - 1
1868 changesets = cnr - cor
1871 changesets = cnr - cor
1869
1872
1870 # pull off the manifest group
1873 # pull off the manifest group
1871 self.ui.status(_("adding manifests\n"))
1874 self.ui.status(_("adding manifests\n"))
1872 chunkiter = changegroup.chunkiter(source)
1875 chunkiter = changegroup.chunkiter(source)
1873 # no need to check for empty manifest group here:
1876 # no need to check for empty manifest group here:
1874 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1877 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1875 # no new manifest will be created and the manifest group will
1878 # no new manifest will be created and the manifest group will
1876 # be empty during the pull
1879 # be empty during the pull
1877 self.manifest.addgroup(chunkiter, revmap, tr)
1880 self.manifest.addgroup(chunkiter, revmap, tr)
1878
1881
1879 # process the files
1882 # process the files
1880 self.ui.status(_("adding file changes\n"))
1883 self.ui.status(_("adding file changes\n"))
1881 while 1:
1884 while 1:
1882 f = changegroup.getchunk(source)
1885 f = changegroup.getchunk(source)
1883 if not f:
1886 if not f:
1884 break
1887 break
1885 self.ui.debug(_("adding %s revisions\n") % f)
1888 self.ui.debug(_("adding %s revisions\n") % f)
1886 fl = self.file(f)
1889 fl = self.file(f)
1887 o = fl.count()
1890 o = fl.count()
1888 chunkiter = changegroup.chunkiter(source)
1891 chunkiter = changegroup.chunkiter(source)
1889 if fl.addgroup(chunkiter, revmap, tr) is None:
1892 if fl.addgroup(chunkiter, revmap, tr) is None:
1890 raise util.Abort(_("received file revlog group is empty"))
1893 raise util.Abort(_("received file revlog group is empty"))
1891 revisions += fl.count() - o
1894 revisions += fl.count() - o
1892 files += 1
1895 files += 1
1893
1896
1894 cl.writedata()
1897 cl.writedata()
1895 finally:
1898 finally:
1896 if cl:
1899 if cl:
1897 cl.cleanup()
1900 cl.cleanup()
1898
1901
1899 # make changelog see real files again
1902 # make changelog see real files again
1900 self.changelog = changelog.changelog(self.sopener,
1903 self.changelog = changelog.changelog(self.sopener,
1901 self.changelog.version)
1904 self.changelog.version)
1902 self.changelog.checkinlinesize(tr)
1905 self.changelog.checkinlinesize(tr)
1903
1906
1904 newheads = len(self.changelog.heads())
1907 newheads = len(self.changelog.heads())
1905 heads = ""
1908 heads = ""
1906 if oldheads and newheads != oldheads:
1909 if oldheads and newheads != oldheads:
1907 heads = _(" (%+d heads)") % (newheads - oldheads)
1910 heads = _(" (%+d heads)") % (newheads - oldheads)
1908
1911
1909 self.ui.status(_("added %d changesets"
1912 self.ui.status(_("added %d changesets"
1910 " with %d changes to %d files%s\n")
1913 " with %d changes to %d files%s\n")
1911 % (changesets, revisions, files, heads))
1914 % (changesets, revisions, files, heads))
1912
1915
1913 if changesets > 0:
1916 if changesets > 0:
1914 self.hook('pretxnchangegroup', throw=True,
1917 self.hook('pretxnchangegroup', throw=True,
1915 node=hex(self.changelog.node(cor+1)), source=srctype,
1918 node=hex(self.changelog.node(cor+1)), source=srctype,
1916 url=url)
1919 url=url)
1917
1920
1918 tr.close()
1921 tr.close()
1919
1922
1920 if changesets > 0:
1923 if changesets > 0:
1921 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1924 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1922 source=srctype, url=url)
1925 source=srctype, url=url)
1923
1926
1924 for i in xrange(cor + 1, cnr + 1):
1927 for i in xrange(cor + 1, cnr + 1):
1925 self.hook("incoming", node=hex(self.changelog.node(i)),
1928 self.hook("incoming", node=hex(self.changelog.node(i)),
1926 source=srctype, url=url)
1929 source=srctype, url=url)
1927
1930
1928 # never return 0 here:
1931 # never return 0 here:
1929 if newheads < oldheads:
1932 if newheads < oldheads:
1930 return newheads - oldheads - 1
1933 return newheads - oldheads - 1
1931 else:
1934 else:
1932 return newheads - oldheads + 1
1935 return newheads - oldheads + 1
1933
1936
1934
1937
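# Illustrative helper (hypothetical, not part of the original file) decoding
# the return value convention documented in addchangegroup above.
def decode_addchangegroup(ret):
    if ret == 0:
        return "nothing changed (or no source)"
    if ret > 1:
        return "%d new head(s)" % (ret - 1)
    if ret == 1:
        return "head count unchanged"
    return "%d head(s) removed" % (-ret - 1)

assert decode_addchangegroup(3) == "2 new head(s)"
assert decode_addchangegroup(1) == "head count unchanged"
assert decode_addchangegroup(-2) == "1 head(s) removed"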
1935 def stream_in(self, remote):
1938 def stream_in(self, remote):
1936 fp = remote.stream_out()
1939 fp = remote.stream_out()
1937 l = fp.readline()
1940 l = fp.readline()
1938 try:
1941 try:
1939 resp = int(l)
1942 resp = int(l)
1940 except ValueError:
1943 except ValueError:
1941 raise util.UnexpectedOutput(
1944 raise util.UnexpectedOutput(
1942 _('Unexpected response from remote server:'), l)
1945 _('Unexpected response from remote server:'), l)
1943 if resp == 1:
1946 if resp == 1:
1944 raise util.Abort(_('operation forbidden by server'))
1947 raise util.Abort(_('operation forbidden by server'))
1945 elif resp == 2:
1948 elif resp == 2:
1946 raise util.Abort(_('locking the remote repository failed'))
1949 raise util.Abort(_('locking the remote repository failed'))
1947 elif resp != 0:
1950 elif resp != 0:
1948 raise util.Abort(_('the server sent an unknown error code'))
1951 raise util.Abort(_('the server sent an unknown error code'))
1949 self.ui.status(_('streaming all changes\n'))
1952 self.ui.status(_('streaming all changes\n'))
1950 l = fp.readline()
1953 l = fp.readline()
1951 try:
1954 try:
1952 total_files, total_bytes = map(int, l.split(' ', 1))
1955 total_files, total_bytes = map(int, l.split(' ', 1))
1953 except (ValueError, TypeError):
1956 except (ValueError, TypeError):
1954 raise util.UnexpectedOutput(
1957 raise util.UnexpectedOutput(
1955 _('Unexpected response from remote server:'), l)
1958 _('Unexpected response from remote server:'), l)
1956 self.ui.status(_('%d files to transfer, %s of data\n') %
1959 self.ui.status(_('%d files to transfer, %s of data\n') %
1957 (total_files, util.bytecount(total_bytes)))
1960 (total_files, util.bytecount(total_bytes)))
1958 start = time.time()
1961 start = time.time()
1959 for i in xrange(total_files):
1962 for i in xrange(total_files):
1960 # XXX doesn't support '\n' or '\r' in filenames
1963 # XXX doesn't support '\n' or '\r' in filenames
1961 l = fp.readline()
1964 l = fp.readline()
1962 try:
1965 try:
1963 name, size = l.split('\0', 1)
1966 name, size = l.split('\0', 1)
1964 size = int(size)
1967 size = int(size)
1965 except (ValueError, TypeError):
1968 except (ValueError, TypeError):
1966 raise util.UnexpectedOutput(
1969 raise util.UnexpectedOutput(
1967 _('Unexpected response from remote server:'), l)
1970 _('Unexpected response from remote server:'), l)
1968 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1971 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1969 ofp = self.sopener(name, 'w')
1972 ofp = self.sopener(name, 'w')
1970 for chunk in util.filechunkiter(fp, limit=size):
1973 for chunk in util.filechunkiter(fp, limit=size):
1971 ofp.write(chunk)
1974 ofp.write(chunk)
1972 ofp.close()
1975 ofp.close()
1973 elapsed = time.time() - start
1976 elapsed = time.time() - start
1974 if elapsed <= 0:
1977 if elapsed <= 0:
1975 elapsed = 0.001
1978 elapsed = 0.001
1976 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1979 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1977 (util.bytecount(total_bytes), elapsed,
1980 (util.bytecount(total_bytes), elapsed,
1978 util.bytecount(total_bytes / elapsed)))
1981 util.bytecount(total_bytes / elapsed)))
1979 self.reload()
1982 self.reload()
1980 return len(self.heads()) + 1
1983 return len(self.heads()) + 1
1981
1984
1982 def clone(self, remote, heads=[], stream=False):
1985 def clone(self, remote, heads=[], stream=False):
1983 '''clone remote repository.
1986 '''clone remote repository.
1984
1987
1985 keyword arguments:
1988 keyword arguments:
1986 heads: list of revs to clone (forces use of pull)
1989 heads: list of revs to clone (forces use of pull)
1987 stream: use streaming clone if possible'''
1990 stream: use streaming clone if possible'''
1988
1991
1989 # now, all clients that can request uncompressed clones can
1992 # now, all clients that can request uncompressed clones can
1990 # read repo formats supported by all servers that can serve
1993 # read repo formats supported by all servers that can serve
1991 # them.
1994 # them.
1992
1995
1993 # if revlog format changes, client will have to check version
1996 # if revlog format changes, client will have to check version
1994 # and format flags on "stream" capability, and use
1997 # and format flags on "stream" capability, and use
1995 # uncompressed only if compatible.
1998 # uncompressed only if compatible.
1996
1999
1997 if stream and not heads and remote.capable('stream'):
2000 if stream and not heads and remote.capable('stream'):
1998 return self.stream_in(remote)
2001 return self.stream_in(remote)
1999 return self.pull(remote, heads)
2002 return self.pull(remote, heads)
2000
2003
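A short usage sketch of the choice clone() makes; the repo/remote objects and the revision below are hypothetical placeholders:

    repo.clone(remote, stream=True)       # stream_in() only if the server advertises 'stream'
    repo.clone(remote, heads=[tip_node])  # explicit heads (hypothetical node) always go through pull()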
2001 # used to avoid circular references so destructors work
2004 # used to avoid circular references so destructors work
2002 def aftertrans(files):
2005 def aftertrans(files):
2003 renamefiles = [tuple(t) for t in files]
2006 renamefiles = [tuple(t) for t in files]
2004 def a():
2007 def a():
2005 for src, dest in renamefiles:
2008 for src, dest in renamefiles:
2006 util.rename(src, dest)
2009 util.rename(src, dest)
2007 return a
2010 return a
2008
2011
2009 def instance(ui, path, create):
2012 def instance(ui, path, create):
2010 return localrepository(ui, util.drop_scheme('file', path), create)
2013 return localrepository(ui, util.drop_scheme('file', path), create)
2011
2014
2012 def islocal(path):
2015 def islocal(path):
2013 return True
2016 return True
@@ -1,1378 +1,1379 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import gettext as _
15 from i18n import gettext as _
16 from demandload import *
16 from demandload import *
17 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
17 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
18 demandload(globals(), "os threading time calendar ConfigParser locale glob")
18 demandload(globals(), "os threading time calendar ConfigParser locale glob")
19
19
20 try:
20 try:
21 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
21 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
22 or "ascii"
22 or "ascii"
23 except locale.Error:
23 except locale.Error:
24 _encoding = 'ascii'
24 _encoding = 'ascii'
25 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
25 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
26 _fallbackencoding = 'ISO-8859-1'
26 _fallbackencoding = 'ISO-8859-1'
27
27
28 def tolocal(s):
28 def tolocal(s):
29 """
29 """
30 Convert a string from internal UTF-8 to local encoding
30 Convert a string from internal UTF-8 to local encoding
31
31
32 All internal strings should be UTF-8 but some repos before the
32 All internal strings should be UTF-8 but some repos before the
33 implementation of locale support may contain latin1 or possibly
33 implementation of locale support may contain latin1 or possibly
34 other character sets. We attempt to decode everything strictly
34 other character sets. We attempt to decode everything strictly
35 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
35 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
36 replace unknown characters.
36 replace unknown characters.
37 """
37 """
38 for e in ('UTF-8', _fallbackencoding):
38 for e in ('UTF-8', _fallbackencoding):
39 try:
39 try:
40 u = s.decode(e) # attempt strict decoding
40 u = s.decode(e) # attempt strict decoding
41 return u.encode(_encoding, "replace")
41 return u.encode(_encoding, "replace")
42 except LookupError, k:
42 except LookupError, k:
43 raise Abort(_("%s, please check your locale settings") % k)
43 raise Abort(_("%s, please check your locale settings") % k)
44 except UnicodeDecodeError:
44 except UnicodeDecodeError:
45 pass
45 pass
46 u = s.decode("utf-8", "replace") # last ditch
46 u = s.decode("utf-8", "replace") # last ditch
47 return u.encode(_encoding, "replace")
47 return u.encode(_encoding, "replace")
48
48
49 def fromlocal(s):
49 def fromlocal(s):
50 """
50 """
51 Convert a string from the local character encoding to UTF-8
51 Convert a string from the local character encoding to UTF-8
52
52
53 We attempt to decode strings using the encoding mode set by
53 We attempt to decode strings using the encoding mode set by
54 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
54 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
55 characters will cause an error message. Other modes include
55 characters will cause an error message. Other modes include
56 'replace', which replaces unknown characters with a special
56 'replace', which replaces unknown characters with a special
57 Unicode character, and 'ignore', which drops the character.
57 Unicode character, and 'ignore', which drops the character.
58 """
58 """
59 try:
59 try:
60 return s.decode(_encoding, _encodingmode).encode("utf-8")
60 return s.decode(_encoding, _encodingmode).encode("utf-8")
61 except UnicodeDecodeError, inst:
61 except UnicodeDecodeError, inst:
62 sub = s[max(0, inst.start-10):inst.start+10]
62 sub = s[max(0, inst.start-10):inst.start+10]
63 raise Abort("decoding near '%s': %s!" % (sub, inst))
63 raise Abort("decoding near '%s': %s!" % (sub, inst))
64 except LookupError, k:
64 except LookupError, k:
65 raise Abort(_("%s, please check your locale settings") % k)
65 raise Abort(_("%s, please check your locale settings") % k)
66
66
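A round-trip sketch for the two helpers above, assuming HGENCODING resolved to ISO-8859-1 (the byte values shown only hold under that assumption):

    local = 'caf\xe9'              # 'café' in the local latin-1 encoding
    internal = fromlocal(local)    # internal UTF-8 form: 'caf\xc3\xa9'
    assert tolocal(internal) == local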
67 def locallen(s):
67 def locallen(s):
68 """Find the length in characters of a local string"""
68 """Find the length in characters of a local string"""
69 return len(s.decode(_encoding, "replace"))
69 return len(s.decode(_encoding, "replace"))
70
70
71 def localsub(s, a, b=None):
71 def localsub(s, a, b=None):
72 try:
72 try:
73 u = s.decode(_encoding, _encodingmode)
73 u = s.decode(_encoding, _encodingmode)
74 if b is not None:
74 if b is not None:
75 u = u[a:b]
75 u = u[a:b]
76 else:
76 else:
77 u = u[:a]
77 u = u[:a]
78 return u.encode(_encoding, _encodingmode)
78 return u.encode(_encoding, _encodingmode)
79 except UnicodeDecodeError, inst:
79 except UnicodeDecodeError, inst:
80 sub = s[max(0, inst.start-10):inst.start+10]
80 sub = s[max(0, inst.start-10):inst.start+10]
81 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
81 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%b %d %H:%M:%S %Y',
96 '%b %d %H:%M:%S %Y',
97 '%b %d %I:%M:%S%p %Y',
97 '%b %d %I:%M:%S%p %Y',
98 '%b %d %H:%M:%S',
98 '%b %d %H:%M:%S',
99 '%b %d %I:%M:%S%p',
99 '%b %d %I:%M:%S%p',
100 '%b %d %H:%M',
100 '%b %d %H:%M',
101 '%b %d %I:%M%p',
101 '%b %d %I:%M%p',
102 '%b %d %Y',
102 '%b %d %Y',
103 '%b %d',
103 '%b %d',
104 '%H:%M:%S',
104 '%H:%M:%S',
105 '%I:%M:%S%p',
105 '%I:%M:%S%p',
106 '%H:%M',
106 '%H:%M',
107 '%I:%M%p',
107 '%I:%M%p',
108 )
108 )
109
109
110 extendeddateformats = defaultdateformats + (
110 extendeddateformats = defaultdateformats + (
111 "%Y",
111 "%Y",
112 "%Y-%m",
112 "%Y-%m",
113 "%b",
113 "%b",
114 "%b %Y",
114 "%b %Y",
115 )
115 )
116
116
117 class SignalInterrupt(Exception):
117 class SignalInterrupt(Exception):
118 """Exception raised on SIGTERM and SIGHUP."""
118 """Exception raised on SIGTERM and SIGHUP."""
119
119
120 # like SafeConfigParser but with case-sensitive keys
120 # like SafeConfigParser but with case-sensitive keys
121 class configparser(ConfigParser.SafeConfigParser):
121 class configparser(ConfigParser.SafeConfigParser):
122 def optionxform(self, optionstr):
122 def optionxform(self, optionstr):
123 return optionstr
123 return optionstr
124
124
125 def cachefunc(func):
125 def cachefunc(func):
126 '''cache the result of function calls'''
126 '''cache the result of function calls'''
127 # XXX doesn't handle keywords args
127 # XXX doesn't handle keywords args
128 cache = {}
128 cache = {}
129 if func.func_code.co_argcount == 1:
129 if func.func_code.co_argcount == 1:
130 # we gain a small amount of time because
130 # we gain a small amount of time because
131 # we don't need to pack/unpack the list
131 # we don't need to pack/unpack the list
132 def f(arg):
132 def f(arg):
133 if arg not in cache:
133 if arg not in cache:
134 cache[arg] = func(arg)
134 cache[arg] = func(arg)
135 return cache[arg]
135 return cache[arg]
136 else:
136 else:
137 def f(*args):
137 def f(*args):
138 if args not in cache:
138 if args not in cache:
139 cache[args] = func(*args)
139 cache[args] = func(*args)
140 return cache[args]
140 return cache[args]
141
141
142 return f
142 return f
143
143
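A small memoization example for cachefunc (positional arguments only, as the XXX above notes):

    calls = []
    def square(x):
        calls.append(x)        # count how often the real function runs
        return x * x
    square = cachefunc(square)
    square(4); square(4)
    assert calls == [4]        # the second call was served from the cache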
144 def pipefilter(s, cmd):
144 def pipefilter(s, cmd):
145 '''filter string S through command CMD, returning its output'''
145 '''filter string S through command CMD, returning its output'''
146 (pout, pin) = popen2.popen2(cmd, -1, 'b')
146 (pout, pin) = popen2.popen2(cmd, -1, 'b')
147 def writer():
147 def writer():
148 try:
148 try:
149 pin.write(s)
149 pin.write(s)
150 pin.close()
150 pin.close()
151 except IOError, inst:
151 except IOError, inst:
152 if inst.errno != errno.EPIPE:
152 if inst.errno != errno.EPIPE:
153 raise
153 raise
154
154
155 # we should use select instead on UNIX, but this will work on most
155 # we should use select instead on UNIX, but this will work on most
156 # systems, including Windows
156 # systems, including Windows
157 w = threading.Thread(target=writer)
157 w = threading.Thread(target=writer)
158 w.start()
158 w.start()
159 f = pout.read()
159 f = pout.read()
160 pout.close()
160 pout.close()
161 w.join()
161 w.join()
162 return f
162 return f
163
163
164 def tempfilter(s, cmd):
164 def tempfilter(s, cmd):
165 '''filter string S through a pair of temporary files with CMD.
165 '''filter string S through a pair of temporary files with CMD.
166 CMD is used as a template to create the real command to be run,
166 CMD is used as a template to create the real command to be run,
167 with the strings INFILE and OUTFILE replaced by the real names of
167 with the strings INFILE and OUTFILE replaced by the real names of
168 the temporary files generated.'''
168 the temporary files generated.'''
169 inname, outname = None, None
169 inname, outname = None, None
170 try:
170 try:
171 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
171 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
172 fp = os.fdopen(infd, 'wb')
172 fp = os.fdopen(infd, 'wb')
173 fp.write(s)
173 fp.write(s)
174 fp.close()
174 fp.close()
175 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
175 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
176 os.close(outfd)
176 os.close(outfd)
177 cmd = cmd.replace('INFILE', inname)
177 cmd = cmd.replace('INFILE', inname)
178 cmd = cmd.replace('OUTFILE', outname)
178 cmd = cmd.replace('OUTFILE', outname)
179 code = os.system(cmd)
179 code = os.system(cmd)
180 if code: raise Abort(_("command '%s' failed: %s") %
180 if code: raise Abort(_("command '%s' failed: %s") %
181 (cmd, explain_exit(code)))
181 (cmd, explain_exit(code)))
182 return open(outname, 'rb').read()
182 return open(outname, 'rb').read()
183 finally:
183 finally:
184 try:
184 try:
185 if inname: os.unlink(inname)
185 if inname: os.unlink(inname)
186 except: pass
186 except: pass
187 try:
187 try:
188 if outname: os.unlink(outname)
188 if outname: os.unlink(outname)
189 except: pass
189 except: pass
190
190
191 filtertable = {
191 filtertable = {
192 'tempfile:': tempfilter,
192 'tempfile:': tempfilter,
193 'pipe:': pipefilter,
193 'pipe:': pipefilter,
194 }
194 }
195
195
196 def filter(s, cmd):
196 def filter(s, cmd):
197 "filter a string through a command that transforms its input to its output"
197 "filter a string through a command that transforms its input to its output"
198 for name, fn in filtertable.iteritems():
198 for name, fn in filtertable.iteritems():
199 if cmd.startswith(name):
199 if cmd.startswith(name):
200 return fn(s, cmd[len(name):].lstrip())
200 return fn(s, cmd[len(name):].lstrip())
201 return pipefilter(s, cmd)
201 return pipefilter(s, cmd)
202
202
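Illustrative calls into the dispatch above; the shell commands assume ordinary POSIX tools and are not part of the diff:

    filter('hello\n', 'pipe: tr a-z A-Z')                # -> 'HELLO\n'
    filter('b\na\n', 'tempfile: sort INFILE > OUTFILE')  # -> 'a\nb\n'
    filter('hello\n', 'rev')                             # no prefix: falls through to pipefilter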
203 def find_in_path(name, path, default=None):
203 def find_in_path(name, path, default=None):
204 '''find name in search path. path can be string (will be split
204 '''find name in search path. path can be string (will be split
205 with os.pathsep), or iterable thing that returns strings. if name
205 with os.pathsep), or iterable thing that returns strings. if name
206 found, return path to name. else return default.'''
206 found, return path to name. else return default.'''
207 if isinstance(path, str):
207 if isinstance(path, str):
208 path = path.split(os.pathsep)
208 path = path.split(os.pathsep)
209 for p in path:
209 for p in path:
210 p_name = os.path.join(p, name)
210 p_name = os.path.join(p, name)
211 if os.path.exists(p_name):
211 if os.path.exists(p_name):
212 return p_name
212 return p_name
213 return default
213 return default
214
214
215 def binary(s):
215 def binary(s):
216 """return true if a string is binary data using diff's heuristic"""
216 """return true if a string is binary data using diff's heuristic"""
217 if s and '\0' in s[:4096]:
217 if s and '\0' in s[:4096]:
218 return True
218 return True
219 return False
219 return False
220
220
221 def unique(g):
221 def unique(g):
222 """return the uniq elements of iterable g"""
222 """return the uniq elements of iterable g"""
223 seen = {}
223 seen = {}
224 l = []
224 l = []
225 for f in g:
225 for f in g:
226 if f not in seen:
226 if f not in seen:
227 seen[f] = 1
227 seen[f] = 1
228 l.append(f)
228 l.append(f)
229 return l
229 return l
230
230
231 class Abort(Exception):
231 class Abort(Exception):
232 """Raised if a command needs to print an error and exit."""
232 """Raised if a command needs to print an error and exit."""
233
233
234 class UnexpectedOutput(Abort):
234 class UnexpectedOutput(Abort):
235 """Raised to print an error with part of output and exit."""
235 """Raised to print an error with part of output and exit."""
236
236
237 def always(fn): return True
237 def always(fn): return True
238 def never(fn): return False
238 def never(fn): return False
239
239
240 def expand_glob(pats):
240 def expand_glob(pats):
241 '''On Windows, expand the implicit globs in a list of patterns'''
241 '''On Windows, expand the implicit globs in a list of patterns'''
242 if os.name != 'nt':
242 if os.name != 'nt':
243 return list(pats)
243 return list(pats)
244 ret = []
244 ret = []
245 for p in pats:
245 for p in pats:
246 kind, name = patkind(p, None)
246 kind, name = patkind(p, None)
247 if kind is None:
247 if kind is None:
248 globbed = glob.glob(name)
248 globbed = glob.glob(name)
249 if globbed:
249 if globbed:
250 ret.extend(globbed)
250 ret.extend(globbed)
251 continue
251 continue
252 # if we couldn't expand the glob, just keep it around
252 # if we couldn't expand the glob, just keep it around
253 ret.append(p)
253 ret.append(p)
254 return ret
254 return ret
255
255
256 def patkind(name, dflt_pat='glob'):
256 def patkind(name, dflt_pat='glob'):
257 """Split a string into an optional pattern kind prefix and the
257 """Split a string into an optional pattern kind prefix and the
258 actual pattern."""
258 actual pattern."""
259 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
259 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
260 if name.startswith(prefix + ':'): return name.split(':', 1)
260 if name.startswith(prefix + ':'): return name.split(':', 1)
261 return dflt_pat, name
261 return dflt_pat, name
262
262
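A few example splits for patkind; note that a prefixed pattern comes back as the two-element list produced by split(), while the default case returns a tuple:

    patkind('re:.*\\.py$')           # -> ['re', '.*\\.py$']
    patkind('src/*.c')               # -> ('glob', 'src/*.c'), the default kind
    patkind('src/*.c', 'relglob')    # -> ('relglob', 'src/*.c')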
263 def globre(pat, head='^', tail='$'):
263 def globre(pat, head='^', tail='$'):
264 "convert a glob pattern into a regexp"
264 "convert a glob pattern into a regexp"
265 i, n = 0, len(pat)
265 i, n = 0, len(pat)
266 res = ''
266 res = ''
267 group = False
267 group = False
268 def peek(): return i < n and pat[i]
268 def peek(): return i < n and pat[i]
269 while i < n:
269 while i < n:
270 c = pat[i]
270 c = pat[i]
271 i = i+1
271 i = i+1
272 if c == '*':
272 if c == '*':
273 if peek() == '*':
273 if peek() == '*':
274 i += 1
274 i += 1
275 res += '.*'
275 res += '.*'
276 else:
276 else:
277 res += '[^/]*'
277 res += '[^/]*'
278 elif c == '?':
278 elif c == '?':
279 res += '.'
279 res += '.'
280 elif c == '[':
280 elif c == '[':
281 j = i
281 j = i
282 if j < n and pat[j] in '!]':
282 if j < n and pat[j] in '!]':
283 j += 1
283 j += 1
284 while j < n and pat[j] != ']':
284 while j < n and pat[j] != ']':
285 j += 1
285 j += 1
286 if j >= n:
286 if j >= n:
287 res += '\\['
287 res += '\\['
288 else:
288 else:
289 stuff = pat[i:j].replace('\\','\\\\')
289 stuff = pat[i:j].replace('\\','\\\\')
290 i = j + 1
290 i = j + 1
291 if stuff[0] == '!':
291 if stuff[0] == '!':
292 stuff = '^' + stuff[1:]
292 stuff = '^' + stuff[1:]
293 elif stuff[0] == '^':
293 elif stuff[0] == '^':
294 stuff = '\\' + stuff
294 stuff = '\\' + stuff
295 res = '%s[%s]' % (res, stuff)
295 res = '%s[%s]' % (res, stuff)
296 elif c == '{':
296 elif c == '{':
297 group = True
297 group = True
298 res += '(?:'
298 res += '(?:'
299 elif c == '}' and group:
299 elif c == '}' and group:
300 res += ')'
300 res += ')'
301 group = False
301 group = False
302 elif c == ',' and group:
302 elif c == ',' and group:
303 res += '|'
303 res += '|'
304 elif c == '\\':
304 elif c == '\\':
305 p = peek()
305 p = peek()
306 if p:
306 if p:
307 i += 1
307 i += 1
308 res += re.escape(p)
308 res += re.escape(p)
309 else:
309 else:
310 res += re.escape(c)
310 res += re.escape(c)
311 else:
311 else:
312 res += re.escape(c)
312 res += re.escape(c)
313 return head + res + tail
313 return head + res + tail
314
314
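A quick sketch of what globre produces, using the module-level re import; '*' stays within one path component while '**' crosses directory boundaries:

    pat = re.compile(globre('src/**/*.py'))   # default head='^', tail='$'
    pat.match('src/hg/util.py')               # matches
    pat.match('doc/hg/util.py')               # no match (None)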
315 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
315 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
316
316
317 def pathto(n1, n2):
317 def pathto(n1, n2):
318 '''return the relative path from one place to another.
318 '''return the relative path from one place to another.
319 n1 should use os.sep to separate directories
319 n1 should use os.sep to separate directories
320 n2 should use "/" to separate directories
320 n2 should use "/" to separate directories
321 returns an os.sep-separated path.
321 returns an os.sep-separated path.
322 '''
322 '''
323 if not n1: return localpath(n2)
323 if not n1: return localpath(n2)
324 a, b = n1.split(os.sep), n2.split('/')
324 a, b = n1.split(os.sep), n2.split('/')
325 a.reverse()
325 a.reverse()
326 b.reverse()
326 b.reverse()
327 while a and b and a[-1] == b[-1]:
327 while a and b and a[-1] == b[-1]:
328 a.pop()
328 a.pop()
329 b.pop()
329 b.pop()
330 b.reverse()
330 b.reverse()
331 return os.sep.join((['..'] * len(a)) + b)
331 return os.sep.join((['..'] * len(a)) + b)
332
332
333 def canonpath(root, cwd, myname):
333 def canonpath(root, cwd, myname):
334 """return the canonical path of myname, given cwd and root"""
334 """return the canonical path of myname, given cwd and root"""
335 if root == os.sep:
335 if root == os.sep:
336 rootsep = os.sep
336 rootsep = os.sep
337 elif root.endswith(os.sep):
337 elif root.endswith(os.sep):
338 rootsep = root
338 rootsep = root
339 else:
339 else:
340 rootsep = root + os.sep
340 rootsep = root + os.sep
341 name = myname
341 name = myname
342 if not os.path.isabs(name):
342 if not os.path.isabs(name):
343 name = os.path.join(root, cwd, name)
343 name = os.path.join(root, cwd, name)
344 name = os.path.normpath(name)
344 name = os.path.normpath(name)
345 if name != rootsep and name.startswith(rootsep):
345 if name != rootsep and name.startswith(rootsep):
346 name = name[len(rootsep):]
346 name = name[len(rootsep):]
347 audit_path(name)
347 audit_path(name)
348 return pconvert(name)
348 return pconvert(name)
349 elif name == root:
349 elif name == root:
350 return ''
350 return ''
351 else:
351 else:
352 # Determine whether `name' is in the hierarchy at or beneath `root',
352 # Determine whether `name' is in the hierarchy at or beneath `root',
353 # by iterating name=dirname(name) until that causes no change (can't
353 # by iterating name=dirname(name) until that causes no change (can't
354 # check name == '/', because that doesn't work on windows). For each
354 # check name == '/', because that doesn't work on windows). For each
355 # `name', compare dev/inode numbers. If they match, the list `rel'
355 # `name', compare dev/inode numbers. If they match, the list `rel'
356 # holds the reversed list of components making up the relative file
356 # holds the reversed list of components making up the relative file
357 # name we want.
357 # name we want.
358 root_st = os.stat(root)
358 root_st = os.stat(root)
359 rel = []
359 rel = []
360 while True:
360 while True:
361 try:
361 try:
362 name_st = os.stat(name)
362 name_st = os.stat(name)
363 except OSError:
363 except OSError:
364 break
364 break
365 if samestat(name_st, root_st):
365 if samestat(name_st, root_st):
366 if not rel:
366 if not rel:
367 # name was actually the same as root (maybe a symlink)
367 # name was actually the same as root (maybe a symlink)
368 return ''
368 return ''
369 rel.reverse()
369 rel.reverse()
370 name = os.path.join(*rel)
370 name = os.path.join(*rel)
371 audit_path(name)
371 audit_path(name)
372 return pconvert(name)
372 return pconvert(name)
373 dirname, basename = os.path.split(name)
373 dirname, basename = os.path.split(name)
374 rel.append(basename)
374 rel.append(basename)
375 if dirname == name:
375 if dirname == name:
376 break
376 break
377 name = dirname
377 name = dirname
378
378
379 raise Abort('%s not under root' % myname)
379 raise Abort('%s not under root' % myname)
380
380
381 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], head='', src=None):
381 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], head='', src=None):
382 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
382 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
383
383
384 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], head='',
384 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], head='',
385 src=None, globbed=False):
385 src=None, globbed=False, default=None):
386 if not globbed:
386 default = default or 'relpath'
387 if default == 'relpath' and not globbed:
387 names = expand_glob(names)
388 names = expand_glob(names)
388 return _matcher(canonroot, cwd, names, inc, exc, head, 'relpath', src)
389 return _matcher(canonroot, cwd, names, inc, exc, head, default, src)
389
390
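The new default argument is the point of this changeset: callers may now choose the fallback pattern kind, and the implicit Windows glob expansion only applies when that fallback stays 'relpath'. A hypothetical caller asking for relglob semantics (the surrounding variables are illustrative, not taken from this diff):

    roots, match, anypats = cmdmatcher(repo.root, cwd, pats,
                                       opts.get('include'), opts.get('exclude'),
                                       default='relglob')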
390 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
391 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
391 """build a function to match a set of file patterns
392 """build a function to match a set of file patterns
392
393
393 arguments:
394 arguments:
394 canonroot - the canonical root of the tree you're matching against
395 canonroot - the canonical root of the tree you're matching against
395 cwd - the current working directory, if relevant
396 cwd - the current working directory, if relevant
396 names - patterns to find
397 names - patterns to find
397 inc - patterns to include
398 inc - patterns to include
398 exc - patterns to exclude
399 exc - patterns to exclude
399 head - a regex to prepend to patterns to control whether a match is rooted
400 head - a regex to prepend to patterns to control whether a match is rooted
400 dflt_pat - if a pattern in names has no explicit type, assume this one
401 dflt_pat - if a pattern in names has no explicit type, assume this one
401 src - where these patterns came from (e.g. .hgignore)
402 src - where these patterns came from (e.g. .hgignore)
402
403
403 a pattern is one of:
404 a pattern is one of:
404 'glob:<glob>' - a glob relative to cwd
405 'glob:<glob>' - a glob relative to cwd
405 're:<regexp>' - a regular expression
406 're:<regexp>' - a regular expression
406 'path:<path>' - a path relative to canonroot
407 'path:<path>' - a path relative to canonroot
407 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
408 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
408 'relpath:<path>' - a path relative to cwd
409 'relpath:<path>' - a path relative to cwd
409 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
410 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
410 '<something>' - one of the cases above, selected by the dflt_pat argument
411 '<something>' - one of the cases above, selected by the dflt_pat argument
411
412
412 returns:
413 returns:
413 a 3-tuple containing
414 a 3-tuple containing
414 - list of roots (places where one should start a recursive walk of the fs);
415 - list of roots (places where one should start a recursive walk of the fs);
415 this often matches the explicit non-pattern names passed in, but also
416 this often matches the explicit non-pattern names passed in, but also
416 includes the initial part of glob: patterns that has no glob characters
417 includes the initial part of glob: patterns that has no glob characters
417 - a bool match(filename) function
418 - a bool match(filename) function
418 - a bool indicating if any patterns were passed in
419 - a bool indicating if any patterns were passed in
419
420
420 todo:
421 todo:
421 make head regex a rooted bool
422 make head regex a rooted bool
422 """
423 """
423
424
424 def contains_glob(name):
425 def contains_glob(name):
425 for c in name:
426 for c in name:
426 if c in _globchars: return True
427 if c in _globchars: return True
427 return False
428 return False
428
429
429 def regex(kind, name, tail):
430 def regex(kind, name, tail):
430 '''convert a pattern into a regular expression'''
431 '''convert a pattern into a regular expression'''
431 if not name:
432 if not name:
432 return ''
433 return ''
433 if kind == 're':
434 if kind == 're':
434 return name
435 return name
435 elif kind == 'path':
436 elif kind == 'path':
436 return '^' + re.escape(name) + '(?:/|$)'
437 return '^' + re.escape(name) + '(?:/|$)'
437 elif kind == 'relglob':
438 elif kind == 'relglob':
438 return head + globre(name, '(?:|.*/)', '(?:/|$)')
439 return head + globre(name, '(?:|.*/)', '(?:/|$)')
439 elif kind == 'relpath':
440 elif kind == 'relpath':
440 return head + re.escape(name) + '(?:/|$)'
441 return head + re.escape(name) + '(?:/|$)'
441 elif kind == 'relre':
442 elif kind == 'relre':
442 if name.startswith('^'):
443 if name.startswith('^'):
443 return name
444 return name
444 return '.*' + name
445 return '.*' + name
445 return head + globre(name, '', tail)
446 return head + globre(name, '', tail)
446
447
447 def matchfn(pats, tail):
448 def matchfn(pats, tail):
448 """build a matching function from a set of patterns"""
449 """build a matching function from a set of patterns"""
449 if not pats:
450 if not pats:
450 return
451 return
451 matches = []
452 matches = []
452 for k, p in pats:
453 for k, p in pats:
453 try:
454 try:
454 pat = '(?:%s)' % regex(k, p, tail)
455 pat = '(?:%s)' % regex(k, p, tail)
455 matches.append(re.compile(pat).match)
456 matches.append(re.compile(pat).match)
456 except re.error:
457 except re.error:
457 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
458 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
458 else: raise Abort("invalid pattern (%s): %s" % (k, p))
459 else: raise Abort("invalid pattern (%s): %s" % (k, p))
459
460
460 def buildfn(text):
461 def buildfn(text):
461 for m in matches:
462 for m in matches:
462 r = m(text)
463 r = m(text)
463 if r:
464 if r:
464 return r
465 return r
465
466
466 return buildfn
467 return buildfn
467
468
468 def globprefix(pat):
469 def globprefix(pat):
469 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
470 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
470 root = []
471 root = []
471 for p in pat.split('/'):
472 for p in pat.split('/'):
472 if contains_glob(p): break
473 if contains_glob(p): break
473 root.append(p)
474 root.append(p)
474 return '/'.join(root) or '.'
475 return '/'.join(root) or '.'
475
476
476 def normalizepats(names, default):
477 def normalizepats(names, default):
477 pats = []
478 pats = []
478 files = []
479 files = []
479 roots = []
480 roots = []
480 anypats = False
481 anypats = False
481 for kind, name in [patkind(p, default) for p in names]:
482 for kind, name in [patkind(p, default) for p in names]:
482 if kind in ('glob', 'relpath'):
483 if kind in ('glob', 'relpath'):
483 name = canonpath(canonroot, cwd, name)
484 name = canonpath(canonroot, cwd, name)
484 elif kind in ('relglob', 'path'):
485 elif kind in ('relglob', 'path'):
485 name = normpath(name)
486 name = normpath(name)
486 if kind in ('glob', 're', 'relglob', 'relre'):
487 if kind in ('glob', 're', 'relglob', 'relre'):
487 pats.append((kind, name))
488 pats.append((kind, name))
488 anypats = True
489 anypats = True
489 if kind == 'glob':
490 if kind == 'glob':
490 root = globprefix(name)
491 root = globprefix(name)
491 roots.append(root)
492 roots.append(root)
492 elif kind in ('relpath', 'path'):
493 elif kind in ('relpath', 'path'):
493 files.append((kind, name))
494 files.append((kind, name))
494 roots.append(name)
495 roots.append(name)
495 elif kind == 'relglob':
496 elif kind == 'relglob':
496 roots.append('.')
497 roots.append('.')
497 return roots, pats + files, anypats
498 return roots, pats + files, anypats
498
499
499 roots, pats, anypats = normalizepats(names, dflt_pat)
500 roots, pats, anypats = normalizepats(names, dflt_pat)
500
501
501 patmatch = matchfn(pats, '$') or always
502 patmatch = matchfn(pats, '$') or always
502 incmatch = always
503 incmatch = always
503 if inc:
504 if inc:
504 dummy, inckinds, dummy = normalizepats(inc, 'glob')
505 dummy, inckinds, dummy = normalizepats(inc, 'glob')
505 incmatch = matchfn(inckinds, '(?:/|$)')
506 incmatch = matchfn(inckinds, '(?:/|$)')
506 excmatch = lambda fn: False
507 excmatch = lambda fn: False
507 if exc:
508 if exc:
508 dummy, exckinds, dummy = normalizepats(exc, 'glob')
509 dummy, exckinds, dummy = normalizepats(exc, 'glob')
509 excmatch = matchfn(exckinds, '(?:/|$)')
510 excmatch = matchfn(exckinds, '(?:/|$)')
510
511
511 return (roots,
512 return (roots,
512 lambda fn: (incmatch(fn) and not excmatch(fn) and patmatch(fn)),
513 lambda fn: (incmatch(fn) and not excmatch(fn) and patmatch(fn)),
513 (inc or exc or anypats) and True)
514 (inc or exc or anypats) and True)
514
515
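To make the documented 3-tuple concrete, a minimal sketch on a POSIX-style layout (the paths are made up):

    roots, match, anypats = matcher('/repo', names=['glob:src/*.c'])
    roots                  # ['src'] -- where a recursive walk should start
    match('src/main.c')    # true: inside the glob
    match('doc/main.c')    # false
    anypats                # True: a real pattern was given, not a literal name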
515 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
516 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
516 '''enhanced shell command execution.
517 '''enhanced shell command execution.
517 run with environment maybe modified, maybe in different dir.
518 run with environment maybe modified, maybe in different dir.
518
519
519 if command fails and onerr is None, return status. if ui object,
520 if command fails and onerr is None, return status. if ui object,
520 print error message and return status, else raise onerr object as
521 print error message and return status, else raise onerr object as
521 exception.'''
522 exception.'''
522 def py2shell(val):
523 def py2shell(val):
523 'convert python object into string that is useful to shell'
524 'convert python object into string that is useful to shell'
524 if val in (None, False):
525 if val in (None, False):
525 return '0'
526 return '0'
526 if val == True:
527 if val == True:
527 return '1'
528 return '1'
528 return str(val)
529 return str(val)
529 oldenv = {}
530 oldenv = {}
530 for k in environ:
531 for k in environ:
531 oldenv[k] = os.environ.get(k)
532 oldenv[k] = os.environ.get(k)
532 if cwd is not None:
533 if cwd is not None:
533 oldcwd = os.getcwd()
534 oldcwd = os.getcwd()
534 origcmd = cmd
535 origcmd = cmd
535 if os.name == 'nt':
536 if os.name == 'nt':
536 cmd = '"%s"' % cmd
537 cmd = '"%s"' % cmd
537 try:
538 try:
538 for k, v in environ.iteritems():
539 for k, v in environ.iteritems():
539 os.environ[k] = py2shell(v)
540 os.environ[k] = py2shell(v)
540 if cwd is not None and oldcwd != cwd:
541 if cwd is not None and oldcwd != cwd:
541 os.chdir(cwd)
542 os.chdir(cwd)
542 rc = os.system(cmd)
543 rc = os.system(cmd)
543 if rc and onerr:
544 if rc and onerr:
544 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
545 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
545 explain_exit(rc)[0])
546 explain_exit(rc)[0])
546 if errprefix:
547 if errprefix:
547 errmsg = '%s: %s' % (errprefix, errmsg)
548 errmsg = '%s: %s' % (errprefix, errmsg)
548 try:
549 try:
549 onerr.warn(errmsg + '\n')
550 onerr.warn(errmsg + '\n')
550 except AttributeError:
551 except AttributeError:
551 raise onerr(errmsg)
552 raise onerr(errmsg)
552 return rc
553 return rc
553 finally:
554 finally:
554 for k, v in oldenv.iteritems():
555 for k, v in oldenv.iteritems():
555 if v is None:
556 if v is None:
556 del os.environ[k]
557 del os.environ[k]
557 else:
558 else:
558 os.environ[k] = v
559 os.environ[k] = v
559 if cwd is not None and oldcwd != cwd:
560 if cwd is not None and oldcwd != cwd:
560 os.chdir(oldcwd)
561 os.chdir(oldcwd)
561
562
562 def rename(src, dst):
563 def rename(src, dst):
563 """forcibly rename a file"""
564 """forcibly rename a file"""
564 try:
565 try:
565 os.rename(src, dst)
566 os.rename(src, dst)
566 except OSError, err:
567 except OSError, err:
567 # on windows, rename to existing file is not allowed, so we
568 # on windows, rename to existing file is not allowed, so we
568 # must delete destination first. but if file is open, unlink
569 # must delete destination first. but if file is open, unlink
569 # schedules it for delete but does not delete it. rename
570 # schedules it for delete but does not delete it. rename
570 # happens immediately even for open files, so we create
571 # happens immediately even for open files, so we create
571 # temporary file, delete it, rename destination to that name,
572 # temporary file, delete it, rename destination to that name,
572 # then delete that. then rename is safe to do.
573 # then delete that. then rename is safe to do.
573 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
574 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
574 os.close(fd)
575 os.close(fd)
575 os.unlink(temp)
576 os.unlink(temp)
576 os.rename(dst, temp)
577 os.rename(dst, temp)
577 os.unlink(temp)
578 os.unlink(temp)
578 os.rename(src, dst)
579 os.rename(src, dst)
579
580
580 def unlink(f):
581 def unlink(f):
581 """unlink and remove the directory if it is empty"""
582 """unlink and remove the directory if it is empty"""
582 os.unlink(f)
583 os.unlink(f)
583 # try removing directories that might now be empty
584 # try removing directories that might now be empty
584 try:
585 try:
585 os.removedirs(os.path.dirname(f))
586 os.removedirs(os.path.dirname(f))
586 except OSError:
587 except OSError:
587 pass
588 pass
588
589
589 def copyfile(src, dest):
590 def copyfile(src, dest):
590 "copy a file, preserving mode"
591 "copy a file, preserving mode"
591 try:
592 try:
592 shutil.copyfile(src, dest)
593 shutil.copyfile(src, dest)
593 shutil.copymode(src, dest)
594 shutil.copymode(src, dest)
594 except shutil.Error, inst:
595 except shutil.Error, inst:
595 raise Abort(str(inst))
596 raise Abort(str(inst))
596
597
597 def copyfiles(src, dst, hardlink=None):
598 def copyfiles(src, dst, hardlink=None):
598 """Copy a directory tree using hardlinks if possible"""
599 """Copy a directory tree using hardlinks if possible"""
599
600
600 if hardlink is None:
601 if hardlink is None:
601 hardlink = (os.stat(src).st_dev ==
602 hardlink = (os.stat(src).st_dev ==
602 os.stat(os.path.dirname(dst)).st_dev)
603 os.stat(os.path.dirname(dst)).st_dev)
603
604
604 if os.path.isdir(src):
605 if os.path.isdir(src):
605 os.mkdir(dst)
606 os.mkdir(dst)
606 for name in os.listdir(src):
607 for name in os.listdir(src):
607 srcname = os.path.join(src, name)
608 srcname = os.path.join(src, name)
608 dstname = os.path.join(dst, name)
609 dstname = os.path.join(dst, name)
609 copyfiles(srcname, dstname, hardlink)
610 copyfiles(srcname, dstname, hardlink)
610 else:
611 else:
611 if hardlink:
612 if hardlink:
612 try:
613 try:
613 os_link(src, dst)
614 os_link(src, dst)
614 except (IOError, OSError):
615 except (IOError, OSError):
615 hardlink = False
616 hardlink = False
616 shutil.copy(src, dst)
617 shutil.copy(src, dst)
617 else:
618 else:
618 shutil.copy(src, dst)
619 shutil.copy(src, dst)
619
620
620 def audit_path(path):
621 def audit_path(path):
621 """Abort if path contains dangerous components"""
622 """Abort if path contains dangerous components"""
622 parts = os.path.normcase(path).split(os.sep)
623 parts = os.path.normcase(path).split(os.sep)
623 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
624 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
624 or os.pardir in parts):
625 or os.pardir in parts):
625 raise Abort(_("path contains illegal component: %s\n") % path)
626 raise Abort(_("path contains illegal component: %s\n") % path)
626
627
627 def _makelock_file(info, pathname):
628 def _makelock_file(info, pathname):
628 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
629 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
629 os.write(ld, info)
630 os.write(ld, info)
630 os.close(ld)
631 os.close(ld)
631
632
632 def _readlock_file(pathname):
633 def _readlock_file(pathname):
633 return posixfile(pathname).read()
634 return posixfile(pathname).read()
634
635
635 def nlinks(pathname):
636 def nlinks(pathname):
636 """Return number of hardlinks for the given file."""
637 """Return number of hardlinks for the given file."""
637 return os.lstat(pathname).st_nlink
638 return os.lstat(pathname).st_nlink
638
639
639 if hasattr(os, 'link'):
640 if hasattr(os, 'link'):
640 os_link = os.link
641 os_link = os.link
641 else:
642 else:
642 def os_link(src, dst):
643 def os_link(src, dst):
643 raise OSError(0, _("Hardlinks not supported"))
644 raise OSError(0, _("Hardlinks not supported"))
644
645
645 def fstat(fp):
646 def fstat(fp):
646 '''stat file object that may not have fileno method.'''
647 '''stat file object that may not have fileno method.'''
647 try:
648 try:
648 return os.fstat(fp.fileno())
649 return os.fstat(fp.fileno())
649 except AttributeError:
650 except AttributeError:
650 return os.stat(fp.name)
651 return os.stat(fp.name)
651
652
652 posixfile = file
653 posixfile = file
653
654
654 def is_win_9x():
655 def is_win_9x():
655 '''return true if run on windows 95, 98 or me.'''
656 '''return true if run on windows 95, 98 or me.'''
656 try:
657 try:
657 return sys.getwindowsversion()[3] == 1
658 return sys.getwindowsversion()[3] == 1
658 except AttributeError:
659 except AttributeError:
659 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
660 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
660
661
661 getuser_fallback = None
662 getuser_fallback = None
662
663
663 def getuser():
664 def getuser():
664 '''return name of current user'''
665 '''return name of current user'''
665 try:
666 try:
666 return getpass.getuser()
667 return getpass.getuser()
667 except ImportError:
668 except ImportError:
668 # import of pwd will fail on windows - try fallback
669 # import of pwd will fail on windows - try fallback
669 if getuser_fallback:
670 if getuser_fallback:
670 return getuser_fallback()
671 return getuser_fallback()
671 # raised if win32api not available
672 # raised if win32api not available
672 raise Abort(_('user name not available - set USERNAME '
673 raise Abort(_('user name not available - set USERNAME '
673 'environment variable'))
674 'environment variable'))
674
675
675 def username(uid=None):
676 def username(uid=None):
676 """Return the name of the user with the given uid.
677 """Return the name of the user with the given uid.
677
678
678 If uid is None, return the name of the current user."""
679 If uid is None, return the name of the current user."""
679 try:
680 try:
680 import pwd
681 import pwd
681 if uid is None:
682 if uid is None:
682 uid = os.getuid()
683 uid = os.getuid()
683 try:
684 try:
684 return pwd.getpwuid(uid)[0]
685 return pwd.getpwuid(uid)[0]
685 except KeyError:
686 except KeyError:
686 return str(uid)
687 return str(uid)
687 except ImportError:
688 except ImportError:
688 return None
689 return None
689
690
690 def groupname(gid=None):
691 def groupname(gid=None):
691 """Return the name of the group with the given gid.
692 """Return the name of the group with the given gid.
692
693
693 If gid is None, return the name of the current group."""
694 If gid is None, return the name of the current group."""
694 try:
695 try:
695 import grp
696 import grp
696 if gid is None:
697 if gid is None:
697 gid = os.getgid()
698 gid = os.getgid()
698 try:
699 try:
699 return grp.getgrgid(gid)[0]
700 return grp.getgrgid(gid)[0]
700 except KeyError:
701 except KeyError:
701 return str(gid)
702 return str(gid)
702 except ImportError:
703 except ImportError:
703 return None
704 return None
704
705
705 # File system features
706 # File system features
706
707
707 def checkfolding(path):
708 def checkfolding(path):
708 """
709 """
709 Check whether the given path is on a case-sensitive filesystem
710 Check whether the given path is on a case-sensitive filesystem
710
711
711 Requires a path (like /foo/.hg) ending with a foldable final
712 Requires a path (like /foo/.hg) ending with a foldable final
712 directory component.
713 directory component.
713 """
714 """
714 s1 = os.stat(path)
715 s1 = os.stat(path)
715 d, b = os.path.split(path)
716 d, b = os.path.split(path)
716 p2 = os.path.join(d, b.upper())
717 p2 = os.path.join(d, b.upper())
717 if path == p2:
718 if path == p2:
718 p2 = os.path.join(d, b.lower())
719 p2 = os.path.join(d, b.lower())
719 try:
720 try:
720 s2 = os.stat(p2)
721 s2 = os.stat(p2)
721 if s2 == s1:
722 if s2 == s1:
722 return False
723 return False
723 return True
724 return True
724 except:
725 except:
725 return True
726 return True
726
727
727 # Platform specific variants
728 # Platform specific variants
728 if os.name == 'nt':
729 if os.name == 'nt':
729 demandload(globals(), "msvcrt")
730 demandload(globals(), "msvcrt")
730 nulldev = 'NUL:'
731 nulldev = 'NUL:'
731
732
732 class winstdout:
733 class winstdout:
733 '''stdout on windows misbehaves if sent through a pipe'''
734 '''stdout on windows misbehaves if sent through a pipe'''
734
735
735 def __init__(self, fp):
736 def __init__(self, fp):
736 self.fp = fp
737 self.fp = fp
737
738
738 def __getattr__(self, key):
739 def __getattr__(self, key):
739 return getattr(self.fp, key)
740 return getattr(self.fp, key)
740
741
741 def close(self):
742 def close(self):
742 try:
743 try:
743 self.fp.close()
744 self.fp.close()
744 except: pass
745 except: pass
745
746
746 def write(self, s):
747 def write(self, s):
747 try:
748 try:
748 return self.fp.write(s)
749 return self.fp.write(s)
749 except IOError, inst:
750 except IOError, inst:
750 if inst.errno != 0: raise
751 if inst.errno != 0: raise
751 self.close()
752 self.close()
752 raise IOError(errno.EPIPE, 'Broken pipe')
753 raise IOError(errno.EPIPE, 'Broken pipe')
753
754
754 def flush(self):
755 def flush(self):
755 try:
756 try:
756 return self.fp.flush()
757 return self.fp.flush()
757 except IOError, inst:
758 except IOError, inst:
758 if inst.errno != errno.EINVAL: raise
759 if inst.errno != errno.EINVAL: raise
759 self.close()
760 self.close()
760 raise IOError(errno.EPIPE, 'Broken pipe')
761 raise IOError(errno.EPIPE, 'Broken pipe')
761
762
762 sys.stdout = winstdout(sys.stdout)
763 sys.stdout = winstdout(sys.stdout)
763
764
764 def system_rcpath():
765 def system_rcpath():
765 try:
766 try:
766 return system_rcpath_win32()
767 return system_rcpath_win32()
767 except:
768 except:
768 return [r'c:\mercurial\mercurial.ini']
769 return [r'c:\mercurial\mercurial.ini']
769
770
770 def os_rcpath():
771 def os_rcpath():
771 '''return default os-specific hgrc search path'''
772 '''return default os-specific hgrc search path'''
772 path = system_rcpath()
773 path = system_rcpath()
773 path.append(user_rcpath())
774 path.append(user_rcpath())
774 userprofile = os.environ.get('USERPROFILE')
775 userprofile = os.environ.get('USERPROFILE')
775 if userprofile:
776 if userprofile:
776 path.append(os.path.join(userprofile, 'mercurial.ini'))
777 path.append(os.path.join(userprofile, 'mercurial.ini'))
777 return path
778 return path
778
779
779 def user_rcpath():
780 def user_rcpath():
780 '''return os-specific hgrc search path to the user dir'''
781 '''return os-specific hgrc search path to the user dir'''
781 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
782 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
782
783
783 def parse_patch_output(output_line):
784 def parse_patch_output(output_line):
784 """parses the output produced by patch and returns the file name"""
785 """parses the output produced by patch and returns the file name"""
785 pf = output_line[14:]
786 pf = output_line[14:]
786 if pf[0] == '`':
787 if pf[0] == '`':
787 pf = pf[1:-1] # Remove the quotes
788 pf = pf[1:-1] # Remove the quotes
788 return pf
789 return pf
789
790
790 def testpid(pid):
791 def testpid(pid):
791 '''return False if pid dead, True if running or not known'''
792 '''return False if pid dead, True if running or not known'''
792 return True
793 return True
793
794
794 def is_exec(f, last):
795 def is_exec(f, last):
795 return last
796 return last
796
797
797 def set_exec(f, mode):
798 def set_exec(f, mode):
798 pass
799 pass
799
800
800 def set_binary(fd):
801 def set_binary(fd):
801 msvcrt.setmode(fd.fileno(), os.O_BINARY)
802 msvcrt.setmode(fd.fileno(), os.O_BINARY)
802
803
803 def pconvert(path):
804 def pconvert(path):
804 return path.replace("\\", "/")
805 return path.replace("\\", "/")
805
806
806 def localpath(path):
807 def localpath(path):
807 return path.replace('/', '\\')
808 return path.replace('/', '\\')
808
809
809 def normpath(path):
810 def normpath(path):
810 return pconvert(os.path.normpath(path))
811 return pconvert(os.path.normpath(path))
811
812
812 makelock = _makelock_file
813 makelock = _makelock_file
813 readlock = _readlock_file
814 readlock = _readlock_file
814
815
815 def samestat(s1, s2):
816 def samestat(s1, s2):
816 return False
817 return False
817
818
818 # A sequence of backslashes is special iff it precedes a double quote:
819 # A sequence of backslashes is special iff it precedes a double quote:
819 # - if there's an even number of backslashes, the double quote is not
820 # - if there's an even number of backslashes, the double quote is not
820 # quoted (i.e. it ends the quoted region)
821 # quoted (i.e. it ends the quoted region)
821 # - if there's an odd number of backslashes, the double quote is quoted
822 # - if there's an odd number of backslashes, the double quote is quoted
822 # - in both cases, every pair of backslashes is unquoted into a single
823 # - in both cases, every pair of backslashes is unquoted into a single
823 # backslash
824 # backslash
824 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
825 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
825 # So, to quote a string, we must surround it in double quotes, double
826 # So, to quote a string, we must surround it in double quotes, double
826 # the number of backslashes that precede double quotes and add another
827 # the number of backslashes that precede double quotes and add another
827 # backslash before every double quote (being careful with the double
828 # backslash before every double quote (being careful with the double
828 # quote we've appended to the end)
829 # quote we've appended to the end)
829 _quotere = None
830 _quotere = None
830 def shellquote(s):
831 def shellquote(s):
831 global _quotere
832 global _quotere
832 if _quotere is None:
833 if _quotere is None:
833 _quotere = re.compile(r'(\\*)("|\\$)')
834 _quotere = re.compile(r'(\\*)("|\\$)')
834 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
835 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
835
836
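A quick illustration of the quoting rule spelled out above, with the results written as the text the shell would receive:

    shellquote(r'C:\tmp')     # -> "C:\tmp"      backslash not before a quote: untouched
    shellquote('say "hi"')    # -> "say \"hi\""  embedded double quotes get escaped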
836 def explain_exit(code):
837 def explain_exit(code):
837 return _("exited with status %d") % code, code
838 return _("exited with status %d") % code, code
838
839
839 # if you change this stub into a real check, please try to implement the
840 # if you change this stub into a real check, please try to implement the
840 # username and groupname functions above, too.
841 # username and groupname functions above, too.
841 def isowner(fp, st=None):
842 def isowner(fp, st=None):
842 return True
843 return True
843
844
844 try:
845 try:
845 # override functions with win32 versions if possible
846 # override functions with win32 versions if possible
846 from util_win32 import *
847 from util_win32 import *
847 if not is_win_9x():
848 if not is_win_9x():
848 posixfile = posixfile_nt
849 posixfile = posixfile_nt
849 except ImportError:
850 except ImportError:
850 pass
851 pass
851
852
852 else:
853 else:
853 nulldev = '/dev/null'
854 nulldev = '/dev/null'
854
855
855 def rcfiles(path):
856 def rcfiles(path):
856 rcs = [os.path.join(path, 'hgrc')]
857 rcs = [os.path.join(path, 'hgrc')]
857 rcdir = os.path.join(path, 'hgrc.d')
858 rcdir = os.path.join(path, 'hgrc.d')
858 try:
859 try:
859 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
860 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
860 if f.endswith(".rc")])
861 if f.endswith(".rc")])
861 except OSError:
862 except OSError:
862 pass
863 pass
863 return rcs
864 return rcs
864
865
865 def os_rcpath():
866 def os_rcpath():
866 '''return default os-specific hgrc search path'''
867 '''return default os-specific hgrc search path'''
867 path = []
868 path = []
868 # old mod_python does not set sys.argv
869 # old mod_python does not set sys.argv
869 if len(getattr(sys, 'argv', [])) > 0:
870 if len(getattr(sys, 'argv', [])) > 0:
870 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
871 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
871 '/../etc/mercurial'))
872 '/../etc/mercurial'))
872 path.extend(rcfiles('/etc/mercurial'))
873 path.extend(rcfiles('/etc/mercurial'))
873 path.append(os.path.expanduser('~/.hgrc'))
874 path.append(os.path.expanduser('~/.hgrc'))
874 path = [os.path.normpath(f) for f in path]
875 path = [os.path.normpath(f) for f in path]
875 return path
876 return path
876
877
877 def parse_patch_output(output_line):
878 def parse_patch_output(output_line):
878 """parses the output produced by patch and returns the file name"""
879 """parses the output produced by patch and returns the file name"""
879 pf = output_line[14:]
880 pf = output_line[14:]
880 if pf.startswith("'") and pf.endswith("'") and " " in pf:
881 if pf.startswith("'") and pf.endswith("'") and " " in pf:
881 pf = pf[1:-1] # Remove the quotes
882 pf = pf[1:-1] # Remove the quotes
882 return pf
883 return pf
883
884
884 def is_exec(f, last):
885 def is_exec(f, last):
885 """check whether a file is executable"""
886 """check whether a file is executable"""
886 return (os.lstat(f).st_mode & 0100 != 0)
887 return (os.lstat(f).st_mode & 0100 != 0)
887
888
888 def set_exec(f, mode):
889 def set_exec(f, mode):
889 s = os.lstat(f).st_mode
890 s = os.lstat(f).st_mode
890 if (s & 0100 != 0) == mode:
891 if (s & 0100 != 0) == mode:
891 return
892 return
892 if mode:
893 if mode:
893 # Turn on +x for every +r bit when making a file executable
894 # Turn on +x for every +r bit when making a file executable
894 # and obey umask.
895 # and obey umask.
895 umask = os.umask(0)
896 umask = os.umask(0)
896 os.umask(umask)
897 os.umask(umask)
897 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
898 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
898 else:
899 else:
899 os.chmod(f, s & 0666)
900 os.chmod(f, s & 0666)
900
901
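As a worked example of the chmod expression above, take a 0644 file under umask 022:

    # (0644 & 0444) >> 2  -> 0111 : one execute bit per read bit
    # 0111 & ~022         -> 0111 : this umask clears no execute bits
    # 0644 | 0111         -> 0755 : resulting mode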
901 def set_binary(fd):
902 def set_binary(fd):
902 pass
903 pass
903
904
904 def pconvert(path):
905 def pconvert(path):
905 return path
906 return path
906
907
907 def localpath(path):
908 def localpath(path):
908 return path
909 return path
909
910
910 normpath = os.path.normpath
911 normpath = os.path.normpath
911 samestat = os.path.samestat
912 samestat = os.path.samestat
912
913
913 def makelock(info, pathname):
914 def makelock(info, pathname):
914 try:
915 try:
915 os.symlink(info, pathname)
916 os.symlink(info, pathname)
916 except OSError, why:
917 except OSError, why:
917 if why.errno == errno.EEXIST:
918 if why.errno == errno.EEXIST:
918 raise
919 raise
919 else:
920 else:
920 _makelock_file(info, pathname)
921 _makelock_file(info, pathname)
921
922
922 def readlock(pathname):
923 def readlock(pathname):
923 try:
924 try:
924 return os.readlink(pathname)
925 return os.readlink(pathname)
925 except OSError, why:
926 except OSError, why:
926 if why.errno == errno.EINVAL:
927 if why.errno == errno.EINVAL:
927 return _readlock_file(pathname)
928 return _readlock_file(pathname)
928 else:
929 else:
929 raise
930 raise
930
931
931 def shellquote(s):
932 def shellquote(s):
932 return "'%s'" % s.replace("'", "'\\''")
933 return "'%s'" % s.replace("'", "'\\''")
933
934
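Usage sketch (editor's note): shellquote() wraps a string in single quotes and escapes embedded single quotes with the usual '\'' trick, which is sufficient for POSIX shells.

shellquote("/tmp/my repo")   # -> "'/tmp/my repo'"
shellquote("it's")           # -> the shell sees: 'it'\''s'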
934 def testpid(pid):
935 def testpid(pid):
935 '''return False if pid dead, True if running or not sure'''
936 '''return False if pid dead, True if running or not sure'''
936 try:
937 try:
937 os.kill(pid, 0)
938 os.kill(pid, 0)
938 return True
939 return True
939 except OSError, inst:
940 except OSError, inst:
940 return inst.errno != errno.ESRCH
941 return inst.errno != errno.ESRCH
941
942
942 def explain_exit(code):
943 def explain_exit(code):
943 """return a 2-tuple (desc, code) describing a process's status"""
944 """return a 2-tuple (desc, code) describing a process's status"""
944 if os.WIFEXITED(code):
945 if os.WIFEXITED(code):
945 val = os.WEXITSTATUS(code)
946 val = os.WEXITSTATUS(code)
946 return _("exited with status %d") % val, val
947 return _("exited with status %d") % val, val
947 elif os.WIFSIGNALED(code):
948 elif os.WIFSIGNALED(code):
948 val = os.WTERMSIG(code)
949 val = os.WTERMSIG(code)
949 return _("killed by signal %d") % val, val
950 return _("killed by signal %d") % val, val
950 elif os.WIFSTOPPED(code):
951 elif os.WIFSTOPPED(code):
951 val = os.WSTOPSIG(code)
952 val = os.WSTOPSIG(code)
952 return _("stopped by signal %d") % val, val
953 return _("stopped by signal %d") % val, val
953 raise ValueError(_("invalid exit code"))
954 raise ValueError(_("invalid exit code"))
954
955
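Example (editor's sketch): explain_exit() decodes wait()-style status words, such as the value os.system() returns on POSIX.

explain_exit(0)      # -> ('exited with status 0', 0)
explain_exit(256)    # -> ('exited with status 1', 1)
explain_exit(9)      # -> ('killed by signal 9', 9)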
955 def isowner(fp, st=None):
956 def isowner(fp, st=None):
956 """Return True if the file object f belongs to the current user.
957 """Return True if the file object f belongs to the current user.
957
958
958 The return value of a util.fstat(f) may be passed as the st argument.
959 The return value of a util.fstat(f) may be passed as the st argument.
959 """
960 """
960 if st is None:
961 if st is None:
961 st = fstat(fp)
962 st = fstat(fp)
962 return st.st_uid == os.getuid()
963 return st.st_uid == os.getuid()
963
964
964 def _buildencodefun():
965 def _buildencodefun():
965 e = '_'
966 e = '_'
966 win_reserved = [ord(x) for x in '\\:*?"<>|']
967 win_reserved = [ord(x) for x in '\\:*?"<>|']
967 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
968 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
968 for x in (range(32) + range(126, 256) + win_reserved):
969 for x in (range(32) + range(126, 256) + win_reserved):
969 cmap[chr(x)] = "~%02x" % x
970 cmap[chr(x)] = "~%02x" % x
970 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
971 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
971 cmap[chr(x)] = e + chr(x).lower()
972 cmap[chr(x)] = e + chr(x).lower()
972 dmap = {}
973 dmap = {}
973 for k, v in cmap.iteritems():
974 for k, v in cmap.iteritems():
974 dmap[v] = k
975 dmap[v] = k
975 def decode(s):
976 def decode(s):
976 i = 0
977 i = 0
977 while i < len(s):
978 while i < len(s):
978 for l in xrange(1, 4):
979 for l in xrange(1, 4):
979 try:
980 try:
980 yield dmap[s[i:i+l]]
981 yield dmap[s[i:i+l]]
981 i += l
982 i += l
982 break
983 break
983 except KeyError:
984 except KeyError:
984 pass
985 pass
985 else:
986 else:
986 raise KeyError
987 raise KeyError
987 return (lambda s: "".join([cmap[c] for c in s]),
988 return (lambda s: "".join([cmap[c] for c in s]),
988 lambda s: "".join(list(decode(s))))
989 lambda s: "".join(list(decode(s))))
989
990
990 encodefilename, decodefilename = _buildencodefun()
991 encodefilename, decodefilename = _buildencodefun()
991
992
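Example (editor's sketch): the encoding built by _buildencodefun() keeps store filenames safe on case-insensitive and Windows filesystems -- uppercase letters become '_' plus the lowercase letter, '_' itself is doubled, and control/reserved bytes become '~XX'. decodefilename() is its exact inverse.

encodefilename('data/FOO:bar.i')       # -> 'data/_f_o_o~3abar.i'
decodefilename('data/_f_o_o~3abar.i')  # -> 'data/FOO:bar.i'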
992 def encodedopener(openerfn, fn):
993 def encodedopener(openerfn, fn):
993 def o(path, *args, **kw):
994 def o(path, *args, **kw):
994 return openerfn(fn(path), *args, **kw)
995 return openerfn(fn(path), *args, **kw)
995 return o
996 return o
996
997
997 def opener(base, audit=True):
998 def opener(base, audit=True):
998 """
999 """
999 return a function that opens files relative to base
1000 return a function that opens files relative to base
1000
1001
1001 this function is used to hide the details of COW semantics and
1002 this function is used to hide the details of COW semantics and
1002 remote file access from higher level code.
1003 remote file access from higher level code.
1003 """
1004 """
1004 p = base
1005 p = base
1005 audit_p = audit
1006 audit_p = audit
1006
1007
1007 def mktempcopy(name):
1008 def mktempcopy(name):
1008 d, fn = os.path.split(name)
1009 d, fn = os.path.split(name)
1009 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1010 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1010 os.close(fd)
1011 os.close(fd)
1011 ofp = posixfile(temp, "wb")
1012 ofp = posixfile(temp, "wb")
1012 try:
1013 try:
1013 try:
1014 try:
1014 ifp = posixfile(name, "rb")
1015 ifp = posixfile(name, "rb")
1015 except IOError, inst:
1016 except IOError, inst:
1016 if not getattr(inst, 'filename', None):
1017 if not getattr(inst, 'filename', None):
1017 inst.filename = name
1018 inst.filename = name
1018 raise
1019 raise
1019 for chunk in filechunkiter(ifp):
1020 for chunk in filechunkiter(ifp):
1020 ofp.write(chunk)
1021 ofp.write(chunk)
1021 ifp.close()
1022 ifp.close()
1022 ofp.close()
1023 ofp.close()
1023 except:
1024 except:
1024 try: os.unlink(temp)
1025 try: os.unlink(temp)
1025 except: pass
1026 except: pass
1026 raise
1027 raise
1027 st = os.lstat(name)
1028 st = os.lstat(name)
1028 os.chmod(temp, st.st_mode)
1029 os.chmod(temp, st.st_mode)
1029 return temp
1030 return temp
1030
1031
1031 class atomictempfile(posixfile):
1032 class atomictempfile(posixfile):
1032 """the file will only be copied when rename is called"""
1033 """the file will only be copied when rename is called"""
1033 def __init__(self, name, mode):
1034 def __init__(self, name, mode):
1034 self.__name = name
1035 self.__name = name
1035 self.temp = mktempcopy(name)
1036 self.temp = mktempcopy(name)
1036 posixfile.__init__(self, self.temp, mode)
1037 posixfile.__init__(self, self.temp, mode)
1037 def rename(self):
1038 def rename(self):
1038 if not self.closed:
1039 if not self.closed:
1039 posixfile.close(self)
1040 posixfile.close(self)
1040 rename(self.temp, localpath(self.__name))
1041 rename(self.temp, localpath(self.__name))
1041 def __del__(self):
1042 def __del__(self):
1042 if not self.closed:
1043 if not self.closed:
1043 try:
1044 try:
1044 os.unlink(self.temp)
1045 os.unlink(self.temp)
1045 except: pass
1046 except: pass
1046 posixfile.close(self)
1047 posixfile.close(self)
1047
1048
1048 class atomicfile(atomictempfile):
1049 class atomicfile(atomictempfile):
1049 """the file will only be copied on close"""
1050 """the file will only be copied on close"""
1050 def __init__(self, name, mode):
1051 def __init__(self, name, mode):
1051 atomictempfile.__init__(self, name, mode)
1052 atomictempfile.__init__(self, name, mode)
1052 def close(self):
1053 def close(self):
1053 self.rename()
1054 self.rename()
1054 def __del__(self):
1055 def __del__(self):
1055 self.rename()
1056 self.rename()
1056
1057
1057 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1058 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1058 if audit_p:
1059 if audit_p:
1059 audit_path(path)
1060 audit_path(path)
1060 f = os.path.join(p, path)
1061 f = os.path.join(p, path)
1061
1062
1062 if not text:
1063 if not text:
1063 mode += "b" # for that other OS
1064 mode += "b" # for that other OS
1064
1065
1065 if mode[0] != "r":
1066 if mode[0] != "r":
1066 try:
1067 try:
1067 nlink = nlinks(f)
1068 nlink = nlinks(f)
1068 except OSError:
1069 except OSError:
1069 d = os.path.dirname(f)
1070 d = os.path.dirname(f)
1070 if not os.path.isdir(d):
1071 if not os.path.isdir(d):
1071 os.makedirs(d)
1072 os.makedirs(d)
1072 else:
1073 else:
1073 if atomic:
1074 if atomic:
1074 return atomicfile(f, mode)
1075 return atomicfile(f, mode)
1075 elif atomictemp:
1076 elif atomictemp:
1076 return atomictempfile(f, mode)
1077 return atomictempfile(f, mode)
1077 if nlink > 1:
1078 if nlink > 1:
1078 rename(mktempcopy(f), f)
1079 rename(mktempcopy(f), f)
1079 return posixfile(f, mode)
1080 return posixfile(f, mode)
1080
1081
1081 return o
1082 return o
1082
1083
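Usage sketch (editor's note, paths hypothetical): the function returned by opener() hides path auditing, missing directories and copy-before-write from callers, which only supply a path relative to the base.

op = opener('/tmp/somerepo/.hg')
f = op('store/journal', 'w')        # creates missing directories, opens in binary mode
f.write('some data')
f.close()
data = op('store/journal').read()   # read back, still relative to the base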
1083 class chunkbuffer(object):
1084 class chunkbuffer(object):
1084 """Allow arbitrary sized chunks of data to be efficiently read from an
1085 """Allow arbitrary sized chunks of data to be efficiently read from an
1085 iterator over chunks of arbitrary size."""
1086 iterator over chunks of arbitrary size."""
1086
1087
1087 def __init__(self, in_iter, targetsize = 2**16):
1088 def __init__(self, in_iter, targetsize = 2**16):
1088 """in_iter is the iterator that's iterating over the input chunks.
1089 """in_iter is the iterator that's iterating over the input chunks.
1089 targetsize is how big a buffer to try to maintain."""
1090 targetsize is how big a buffer to try to maintain."""
1090 self.in_iter = iter(in_iter)
1091 self.in_iter = iter(in_iter)
1091 self.buf = ''
1092 self.buf = ''
1092 self.targetsize = int(targetsize)
1093 self.targetsize = int(targetsize)
1093 if self.targetsize <= 0:
1094 if self.targetsize <= 0:
1094 raise ValueError(_("targetsize must be greater than 0, was %d") %
1095 raise ValueError(_("targetsize must be greater than 0, was %d") %
1095 targetsize)
1096 targetsize)
1096 self.iterempty = False
1097 self.iterempty = False
1097
1098
1098 def fillbuf(self):
1099 def fillbuf(self):
1099 """Ignore target size; read every chunk from iterator until empty."""
1100 """Ignore target size; read every chunk from iterator until empty."""
1100 if not self.iterempty:
1101 if not self.iterempty:
1101 collector = cStringIO.StringIO()
1102 collector = cStringIO.StringIO()
1102 collector.write(self.buf)
1103 collector.write(self.buf)
1103 for ch in self.in_iter:
1104 for ch in self.in_iter:
1104 collector.write(ch)
1105 collector.write(ch)
1105 self.buf = collector.getvalue()
1106 self.buf = collector.getvalue()
1106 self.iterempty = True
1107 self.iterempty = True
1107
1108
1108 def read(self, l):
1109 def read(self, l):
1109 """Read L bytes of data from the iterator of chunks of data.
1110 """Read L bytes of data from the iterator of chunks of data.
1110 Returns less than L bytes if the iterator runs dry."""
1111 Returns less than L bytes if the iterator runs dry."""
1111 if l > len(self.buf) and not self.iterempty:
1112 if l > len(self.buf) and not self.iterempty:
1112 # Clamp to a multiple of self.targetsize
1113 # Clamp to a multiple of self.targetsize
1113 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1114 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1114 collector = cStringIO.StringIO()
1115 collector = cStringIO.StringIO()
1115 collector.write(self.buf)
1116 collector.write(self.buf)
1116 collected = len(self.buf)
1117 collected = len(self.buf)
1117 for chunk in self.in_iter:
1118 for chunk in self.in_iter:
1118 collector.write(chunk)
1119 collector.write(chunk)
1119 collected += len(chunk)
1120 collected += len(chunk)
1120 if collected >= targetsize:
1121 if collected >= targetsize:
1121 break
1122 break
1122 if collected < targetsize:
1123 if collected < targetsize:
1123 self.iterempty = True
1124 self.iterempty = True
1124 self.buf = collector.getvalue()
1125 self.buf = collector.getvalue()
1125 s, self.buf = self.buf[:l], buffer(self.buf, l)
1126 s, self.buf = self.buf[:l], buffer(self.buf, l)
1126 return s
1127 return s
1127
1128
1128 def filechunkiter(f, size=65536, limit=None):
1129 def filechunkiter(f, size=65536, limit=None):
1129 """Create a generator that produces the data in the file size
1130 """Create a generator that produces the data in the file size
1130 (default 65536) bytes at a time, up to optional limit (default is
1131 (default 65536) bytes at a time, up to optional limit (default is
1131 to read all data). Chunks may be less than size bytes if the
1132 to read all data). Chunks may be less than size bytes if the
1132 chunk is the last chunk in the file, or the file is a socket or
1133 chunk is the last chunk in the file, or the file is a socket or
1133 some other type of file that sometimes reads less data than is
1134 some other type of file that sometimes reads less data than is
1134 requested."""
1135 requested."""
1135 assert size >= 0
1136 assert size >= 0
1136 assert limit is None or limit >= 0
1137 assert limit is None or limit >= 0
1137 while True:
1138 while True:
1138 if limit is None: nbytes = size
1139 if limit is None: nbytes = size
1139 else: nbytes = min(limit, size)
1140 else: nbytes = min(limit, size)
1140 s = nbytes and f.read(nbytes)
1141 s = nbytes and f.read(nbytes)
1141 if not s: break
1142 if not s: break
1142 if limit: limit -= len(s)
1143 if limit: limit -= len(s)
1143 yield s
1144 yield s
1144
1145
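Example (editor's sketch): chunkbuffer re-slices an iterator of arbitrarily sized chunks into reads of the requested size, and filechunkiter is the usual way to produce such chunks from a file object.

buf = chunkbuffer(iter(['abc', 'defgh', 'ij']))
buf.read(4)     # -> 'abcd'
buf.read(10)    # -> 'efghij' (iterator ran dry, short read)
# typical producer: chunkbuffer(filechunkiter(open('somefile', 'rb'), size=8192))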
1145 def makedate():
1146 def makedate():
1146 lt = time.localtime()
1147 lt = time.localtime()
1147 if lt[8] == 1 and time.daylight:
1148 if lt[8] == 1 and time.daylight:
1148 tz = time.altzone
1149 tz = time.altzone
1149 else:
1150 else:
1150 tz = time.timezone
1151 tz = time.timezone
1151 return time.mktime(lt), tz
1152 return time.mktime(lt), tz
1152
1153
1153 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1154 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1154 """represent a (unixtime, offset) tuple as a localized time.
1155 """represent a (unixtime, offset) tuple as a localized time.
1155 unixtime is seconds since the epoch, and offset is the time zone's
1156 unixtime is seconds since the epoch, and offset is the time zone's
1156 number of seconds away from UTC. if timezone is false, do not
1157 number of seconds away from UTC. if timezone is false, do not
1157 append time zone to string."""
1158 append time zone to string."""
1158 t, tz = date or makedate()
1159 t, tz = date or makedate()
1159 s = time.strftime(format, time.gmtime(float(t) - tz))
1160 s = time.strftime(format, time.gmtime(float(t) - tz))
1160 if timezone:
1161 if timezone:
1161 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1162 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1162 return s
1163 return s
1163
1164
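Example (editor's sketch): datestr() renders the (unixtime, offset) pairs Mercurial stores in changesets.

datestr((0, 0))       # -> 'Thu Jan 01 00:00:00 1970 +0000'
datestr((0, -3600))   # -> 'Thu Jan 01 01:00:00 1970 +0100'
datestr((0, 0), format='%Y-%m-%d', timezone=False)   # -> '1970-01-01'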
1164 def strdate(string, format, defaults):
1165 def strdate(string, format, defaults):
1165 """parse a localized time string and return a (unixtime, offset) tuple.
1166 """parse a localized time string and return a (unixtime, offset) tuple.
1166 if the string cannot be parsed, ValueError is raised."""
1167 if the string cannot be parsed, ValueError is raised."""
1167 def timezone(string):
1168 def timezone(string):
1168 tz = string.split()[-1]
1169 tz = string.split()[-1]
1169 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1170 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1170 tz = int(tz)
1171 tz = int(tz)
1171 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1172 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1172 return offset
1173 return offset
1173 if tz == "GMT" or tz == "UTC":
1174 if tz == "GMT" or tz == "UTC":
1174 return 0
1175 return 0
1175 return None
1176 return None
1176
1177
1177 # NOTE: unixtime = localunixtime + offset
1178 # NOTE: unixtime = localunixtime + offset
1178 offset, date = timezone(string), string
1179 offset, date = timezone(string), string
1179 if offset != None:
1180 if offset != None:
1180 date = " ".join(string.split()[:-1])
1181 date = " ".join(string.split()[:-1])
1181
1182
1182 # add missing elements from defaults
1183 # add missing elements from defaults
1183 for part in defaults:
1184 for part in defaults:
1184 found = [True for p in part if ("%"+p) in format]
1185 found = [True for p in part if ("%"+p) in format]
1185 if not found:
1186 if not found:
1186 date += "@" + defaults[part]
1187 date += "@" + defaults[part]
1187 format += "@%" + part[0]
1188 format += "@%" + part[0]
1188
1189
1189 timetuple = time.strptime(date, format)
1190 timetuple = time.strptime(date, format)
1190 localunixtime = int(calendar.timegm(timetuple))
1191 localunixtime = int(calendar.timegm(timetuple))
1191 if offset is None:
1192 if offset is None:
1192 # local timezone
1193 # local timezone
1193 unixtime = int(time.mktime(timetuple))
1194 unixtime = int(time.mktime(timetuple))
1194 offset = unixtime - localunixtime
1195 offset = unixtime - localunixtime
1195 else:
1196 else:
1196 unixtime = localunixtime + offset
1197 unixtime = localunixtime + offset
1197 return unixtime, offset
1198 return unixtime, offset
1198
1199
1199 def parsedate(string, formats=None, defaults=None):
1200 def parsedate(string, formats=None, defaults=None):
1200 """parse a localized time string and return a (unixtime, offset) tuple.
1201 """parse a localized time string and return a (unixtime, offset) tuple.
1201 The date may be a "unixtime offset" string or in one of the specified
1202 The date may be a "unixtime offset" string or in one of the specified
1202 formats."""
1203 formats."""
1203 if not string:
1204 if not string:
1204 return 0, 0
1205 return 0, 0
1205 if not formats:
1206 if not formats:
1206 formats = defaultdateformats
1207 formats = defaultdateformats
1207 string = string.strip()
1208 string = string.strip()
1208 try:
1209 try:
1209 when, offset = map(int, string.split(' '))
1210 when, offset = map(int, string.split(' '))
1210 except ValueError:
1211 except ValueError:
1211 # fill out defaults
1212 # fill out defaults
1212 if not defaults:
1213 if not defaults:
1213 defaults = {}
1214 defaults = {}
1214 now = makedate()
1215 now = makedate()
1215 for part in "d mb yY HI M S".split():
1216 for part in "d mb yY HI M S".split():
1216 if part not in defaults:
1217 if part not in defaults:
1217 if part[0] in "HMS":
1218 if part[0] in "HMS":
1218 defaults[part] = "00"
1219 defaults[part] = "00"
1219 elif part[0] in "dm":
1220 elif part[0] in "dm":
1220 defaults[part] = "1"
1221 defaults[part] = "1"
1221 else:
1222 else:
1222 defaults[part] = datestr(now, "%" + part[0], False)
1223 defaults[part] = datestr(now, "%" + part[0], False)
1223
1224
1224 for format in formats:
1225 for format in formats:
1225 try:
1226 try:
1226 when, offset = strdate(string, format, defaults)
1227 when, offset = strdate(string, format, defaults)
1227 except ValueError:
1228 except ValueError:
1228 pass
1229 pass
1229 else:
1230 else:
1230 break
1231 break
1231 else:
1232 else:
1232 raise Abort(_('invalid date: %r ') % string)
1233 raise Abort(_('invalid date: %r ') % string)
1233 # validate explicit (probably user-specified) date and
1234 # validate explicit (probably user-specified) date and
1234 # time zone offset. values must fit in signed 32 bits for
1235 # time zone offset. values must fit in signed 32 bits for
1235 # current 32-bit linux runtimes. timezones go from UTC-12
1236 # current 32-bit linux runtimes. timezones go from UTC-12
1236 # to UTC+14
1237 # to UTC+14
1237 if abs(when) > 0x7fffffff:
1238 if abs(when) > 0x7fffffff:
1238 raise Abort(_('date exceeds 32 bits: %d') % when)
1239 raise Abort(_('date exceeds 32 bits: %d') % when)
1239 if offset < -50400 or offset > 43200:
1240 if offset < -50400 or offset > 43200:
1240 raise Abort(_('impossible time zone offset: %d') % offset)
1241 raise Abort(_('impossible time zone offset: %d') % offset)
1241 return when, offset
1242 return when, offset
1242
1243
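Example (editor's sketch): the internal "unixtime offset" form is parsed directly; any other string is tried against the supplied formats (defaultdateformats when none are given) and picks up the local zone offset via strdate().

parsedate('1166000000 -3600')   # -> (1166000000, -3600)
parsedate('')                   # -> (0, 0)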
1243 def matchdate(date):
1244 def matchdate(date):
1244 """Return a function that matches a given date match specifier
1245 """Return a function that matches a given date match specifier
1245
1246
1246 Formats include:
1247 Formats include:
1247
1248
1248 '{date}' match a given date to the accuracy provided
1249 '{date}' match a given date to the accuracy provided
1249
1250
1250 '<{date}' on or before a given date
1251 '<{date}' on or before a given date
1251
1252
1252 '>{date}' on or after a given date
1253 '>{date}' on or after a given date
1253
1254
1254 """
1255 """
1255
1256
1256 def lower(date):
1257 def lower(date):
1257 return parsedate(date, extendeddateformats)[0]
1258 return parsedate(date, extendeddateformats)[0]
1258
1259
1259 def upper(date):
1260 def upper(date):
1260 d = dict(mb="12", HI="23", M="59", S="59")
1261 d = dict(mb="12", HI="23", M="59", S="59")
1261 for days in "31 30 29".split():
1262 for days in "31 30 29".split():
1262 try:
1263 try:
1263 d["d"] = days
1264 d["d"] = days
1264 return parsedate(date, extendeddateformats, d)[0]
1265 return parsedate(date, extendeddateformats, d)[0]
1265 except:
1266 except:
1266 pass
1267 pass
1267 d["d"] = "28"
1268 d["d"] = "28"
1268 return parsedate(date, extendeddateformats, d)[0]
1269 return parsedate(date, extendeddateformats, d)[0]
1269
1270
1270 if date[0] == "<":
1271 if date[0] == "<":
1271 when = upper(date[1:])
1272 when = upper(date[1:])
1272 return lambda x: x <= when
1273 return lambda x: x <= when
1273 elif date[0] == ">":
1274 elif date[0] == ">":
1274 when = lower(date[1:])
1275 when = lower(date[1:])
1275 return lambda x: x >= when
1276 return lambda x: x >= when
1276 elif date[0] == "-":
1277 elif date[0] == "-":
1277 try:
1278 try:
1278 days = int(date[1:])
1279 days = int(date[1:])
1279 except ValueError:
1280 except ValueError:
1280 raise Abort(_("invalid day spec: %s") % date[1:])
1281 raise Abort(_("invalid day spec: %s") % date[1:])
1281 when = makedate()[0] - days * 3600 * 24
1282 when = makedate()[0] - days * 3600 * 24
1282 return lambda x: x >= when
1283 return lambda x: x >= when
1283 elif " to " in date:
1284 elif " to " in date:
1284 a, b = date.split(" to ")
1285 a, b = date.split(" to ")
1285 start, stop = lower(a), upper(b)
1286 start, stop = lower(a), upper(b)
1286 return lambda x: x >= start and x <= stop
1287 return lambda x: x >= start and x <= stop
1287 else:
1288 else:
1288 start, stop = lower(date), upper(date)
1289 start, stop = lower(date), upper(date)
1289 return lambda x: x >= start and x <= stop
1290 return lambda x: x >= start and x <= stop
1290
1291
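Example (editor's sketch): matchdate() compiles a date spec, such as the argument of `hg log --date`, into a predicate over unix timestamps.

m = matchdate('>2006-01-01')
m(1149120000)                    # -> True (2006-06-01 00:00 UTC)
matchdate('-7')(makedate()[0])   # -> True: "now" is within the last 7 days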
1291 def shortuser(user):
1292 def shortuser(user):
1292 """Return a short representation of a user name or email address."""
1293 """Return a short representation of a user name or email address."""
1293 f = user.find('@')
1294 f = user.find('@')
1294 if f >= 0:
1295 if f >= 0:
1295 user = user[:f]
1296 user = user[:f]
1296 f = user.find('<')
1297 f = user.find('<')
1297 if f >= 0:
1298 if f >= 0:
1298 user = user[f+1:]
1299 user = user[f+1:]
1299 f = user.find(' ')
1300 f = user.find(' ')
1300 if f >= 0:
1301 if f >= 0:
1301 user = user[:f]
1302 user = user[:f]
1302 f = user.find('.')
1303 f = user.find('.')
1303 if f >= 0:
1304 if f >= 0:
1304 user = user[:f]
1305 user = user[:f]
1305 return user
1306 return user
1306
1307
1307 def ellipsis(text, maxlength=400):
1308 def ellipsis(text, maxlength=400):
1308 """Trim string to at most maxlength (default: 400) characters."""
1309 """Trim string to at most maxlength (default: 400) characters."""
1309 if len(text) <= maxlength:
1310 if len(text) <= maxlength:
1310 return text
1311 return text
1311 else:
1312 else:
1312 return "%s..." % (text[:maxlength-3])
1313 return "%s..." % (text[:maxlength-3])
1313
1314
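Example (editor's sketch): two small helpers used when formatting log output.

shortuser('Foo Bar <foo.bar@example.com>')   # -> 'foo'
shortuser('someone@example.com')             # -> 'someone'
ellipsis('a' * 500, maxlength=10)            # -> 'aaaaaaa...'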
1314 def walkrepos(path):
1315 def walkrepos(path):
1315 '''yield every hg repository under path, recursively.'''
1316 '''yield every hg repository under path, recursively.'''
1316 def errhandler(err):
1317 def errhandler(err):
1317 if err.filename == path:
1318 if err.filename == path:
1318 raise err
1319 raise err
1319
1320
1320 for root, dirs, files in os.walk(path, onerror=errhandler):
1321 for root, dirs, files in os.walk(path, onerror=errhandler):
1321 for d in dirs:
1322 for d in dirs:
1322 if d == '.hg':
1323 if d == '.hg':
1323 yield root
1324 yield root
1324 dirs[:] = []
1325 dirs[:] = []
1325 break
1326 break
1326
1327
1327 _rcpath = None
1328 _rcpath = None
1328
1329
1329 def rcpath():
1330 def rcpath():
1330 '''return hgrc search path. if env var HGRCPATH is set, use it.
1331 '''return hgrc search path. if env var HGRCPATH is set, use it.
1331 for each item in path, if directory, use files ending in .rc,
1332 for each item in path, if directory, use files ending in .rc,
1332 else use item.
1333 else use item.
1333 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1334 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1334 if no HGRCPATH, use default os-specific path.'''
1335 if no HGRCPATH, use default os-specific path.'''
1335 global _rcpath
1336 global _rcpath
1336 if _rcpath is None:
1337 if _rcpath is None:
1337 if 'HGRCPATH' in os.environ:
1338 if 'HGRCPATH' in os.environ:
1338 _rcpath = []
1339 _rcpath = []
1339 for p in os.environ['HGRCPATH'].split(os.pathsep):
1340 for p in os.environ['HGRCPATH'].split(os.pathsep):
1340 if not p: continue
1341 if not p: continue
1341 if os.path.isdir(p):
1342 if os.path.isdir(p):
1342 for f in os.listdir(p):
1343 for f in os.listdir(p):
1343 if f.endswith('.rc'):
1344 if f.endswith('.rc'):
1344 _rcpath.append(os.path.join(p, f))
1345 _rcpath.append(os.path.join(p, f))
1345 else:
1346 else:
1346 _rcpath.append(p)
1347 _rcpath.append(p)
1347 else:
1348 else:
1348 _rcpath = os_rcpath()
1349 _rcpath = os_rcpath()
1349 return _rcpath
1350 return _rcpath
1350
1351
1351 def bytecount(nbytes):
1352 def bytecount(nbytes):
1352 '''return byte count formatted as readable string, with units'''
1353 '''return byte count formatted as readable string, with units'''
1353
1354
1354 units = (
1355 units = (
1355 (100, 1<<30, _('%.0f GB')),
1356 (100, 1<<30, _('%.0f GB')),
1356 (10, 1<<30, _('%.1f GB')),
1357 (10, 1<<30, _('%.1f GB')),
1357 (1, 1<<30, _('%.2f GB')),
1358 (1, 1<<30, _('%.2f GB')),
1358 (100, 1<<20, _('%.0f MB')),
1359 (100, 1<<20, _('%.0f MB')),
1359 (10, 1<<20, _('%.1f MB')),
1360 (10, 1<<20, _('%.1f MB')),
1360 (1, 1<<20, _('%.2f MB')),
1361 (1, 1<<20, _('%.2f MB')),
1361 (100, 1<<10, _('%.0f KB')),
1362 (100, 1<<10, _('%.0f KB')),
1362 (10, 1<<10, _('%.1f KB')),
1363 (10, 1<<10, _('%.1f KB')),
1363 (1, 1<<10, _('%.2f KB')),
1364 (1, 1<<10, _('%.2f KB')),
1364 (1, 1, _('%.0f bytes')),
1365 (1, 1, _('%.0f bytes')),
1365 )
1366 )
1366
1367
1367 for multiplier, divisor, format in units:
1368 for multiplier, divisor, format in units:
1368 if nbytes >= divisor * multiplier:
1369 if nbytes >= divisor * multiplier:
1369 return format % (nbytes / float(divisor))
1370 return format % (nbytes / float(divisor))
1370 return units[-1][2] % nbytes
1371 return units[-1][2] % nbytes
1371
1372
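Example (editor's sketch): the first (multiplier, divisor) pair that the value reaches selects the precision, so results keep roughly three significant digits.

bytecount(1048576)      # -> '1.00 MB'
bytecount(190 * 1024)   # -> '190 KB'
bytecount(42)           # -> '42 bytes'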
1372 def drop_scheme(scheme, path):
1373 def drop_scheme(scheme, path):
1373 sc = scheme + ':'
1374 sc = scheme + ':'
1374 if path.startswith(sc):
1375 if path.startswith(sc):
1375 path = path[len(sc):]
1376 path = path[len(sc):]
1376 if path.startswith('//'):
1377 if path.startswith('//'):
1377 path = path[2:]
1378 path = path[2:]
1378 return path
1379 return path
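Example (editor's sketch): drop_scheme() strips a URL scheme prefix (and any '//' authority marker) so the remainder can be treated as a local path.

drop_scheme('file', 'file:///tmp/repo')   # -> '/tmp/repo'
drop_scheme('file', '/already/local')     # -> '/already/local'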
@@ -1,51 +1,50 b''
1 # basic operation
1 # basic operation
2 adding a
2 adding a
3 reverting a
3 reverting a
4 changeset 2:b38a34ddfd9f backs out changeset 1:a820f4f40a57
4 changeset 2:b38a34ddfd9f backs out changeset 1:a820f4f40a57
5 a
5 a
6 # file that was removed is recreated
6 # file that was removed is recreated
7 adding a
7 adding a
8 adding a
8 adding a
9 changeset 2:44cd84c7349a backs out changeset 1:76862dcce372
9 changeset 2:44cd84c7349a backs out changeset 1:76862dcce372
10 content
10 content
11 # backout of backout is as if nothing happened
11 # backout of backout is as if nothing happened
12 removing a
12 removing a
13 changeset 3:0dd8a0ed5e99 backs out changeset 2:44cd84c7349a
13 changeset 3:0dd8a0ed5e99 backs out changeset 2:44cd84c7349a
14 cat: a: No such file or directory
14 cat: a: No such file or directory
15 # backout with merge
15 # backout with merge
16 adding a
16 adding a
17 reverting a
17 reverting a
18 changeset 3:6c77ecc28460 backs out changeset 1:314f55b1bf23
18 changeset 3:6c77ecc28460 backs out changeset 1:314f55b1bf23
19 merging with changeset 2:b66ea5b77abb
19 merging with changeset 2:b66ea5b77abb
20 merging a
20 merging a
21 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
21 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
22 (branch merge, don't forget to commit)
22 (branch merge, don't forget to commit)
23 line 1
23 line 1
24 # backout should not back out subsequent changesets
24 # backout should not back out subsequent changesets
25 adding a
25 adding a
26 adding b
26 adding b
27 reverting a
27 reverting a
28 changeset 3:4cbb1e70196a backs out changeset 1:22bca4c721e5
28 changeset 3:4cbb1e70196a backs out changeset 1:22bca4c721e5
29 the backout changeset is a new head - do not forget to merge
29 the backout changeset is a new head - do not forget to merge
30 (use "backout --merge" if you want to auto-merge)
30 (use "backout --merge" if you want to auto-merge)
31 b: No such file or directory
32 adding a
31 adding a
33 adding b
32 adding b
34 adding c
33 adding c
35 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
34 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
36 adding d
35 adding d
37 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
36 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
38 (branch merge, don't forget to commit)
37 (branch merge, don't forget to commit)
39 # backout of merge should fail
38 # backout of merge should fail
40 abort: cannot back out a merge changeset without --parent
39 abort: cannot back out a merge changeset without --parent
41 # backout of merge with bad parent should fail
40 # backout of merge with bad parent should fail
42 abort: cb9a9f314b8b is not a parent of b2f3bb92043e
41 abort: cb9a9f314b8b is not a parent of b2f3bb92043e
43 # backout of non-merge with parent should fail
42 # backout of non-merge with parent should fail
44 abort: cannot use --parent on non-merge changeset
43 abort: cannot use --parent on non-merge changeset
45 # backout with valid parent should be ok
44 # backout with valid parent should be ok
46 removing d
45 removing d
47 changeset 5:11fbd9be634c backs out changeset 4:b2f3bb92043e
46 changeset 5:11fbd9be634c backs out changeset 4:b2f3bb92043e
48 rolling back last transaction
47 rolling back last transaction
49 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
48 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 removing c
49 removing c
51 changeset 5:1a5f1a63bf2c backs out changeset 4:b2f3bb92043e
50 changeset 5:1a5f1a63bf2c backs out changeset 4:b2f3bb92043e
@@ -1,64 +1,62 b''
1 % new file
1 % new file
2 applying patch from stdin
2 applying patch from stdin
3 % new empty file
3 % new empty file
4 applying patch from stdin
4 applying patch from stdin
5 empty
5 empty
6 % chmod +x
6 % chmod +x
7 applying patch from stdin
7 applying patch from stdin
8 % copy
8 % copy
9 applying patch from stdin
9 applying patch from stdin
10 a
10 a
11 a
11 a
12 % rename
12 % rename
13 applying patch from stdin
13 applying patch from stdin
14 copyx
14 copyx
15 empty
15 empty
16 new
16 new
17 rename
17 rename
18 % delete
18 % delete
19 applying patch from stdin
19 applying patch from stdin
20 empty
20 empty
21 new
21 new
22 rename
22 rename
23 % regular diff
23 % regular diff
24 applying patch from stdin
24 applying patch from stdin
25 % copy and modify
25 % copy and modify
26 applying patch from stdin
26 applying patch from stdin
27 a
27 a
28 a
28 a
29 b
29 b
30 a
30 a
31 a
31 a
32 % rename and modify
32 % rename and modify
33 applying patch from stdin
33 applying patch from stdin
34 copy2: No such file or directory
35 a
34 a
36 a
35 a
37 b
36 b
38 c
37 c
39 a
38 a
40 % one file renamed multiple times
39 % one file renamed multiple times
41 applying patch from stdin
40 applying patch from stdin
42 9 rename2 rename3 rename3-2 / rename3 (rename2)rename3-2 (rename2)
41 9 rename2 rename3 rename3-2 / rename3 (rename2)rename3-2 (rename2)
43 rename2: No such file or directory
44 rename3
42 rename3
45 rename3-2
43 rename3-2
46 a
44 a
47 a
45 a
48 b
46 b
49 c
47 c
50 a
48 a
51
49
52 a
50 a
53 a
51 a
54 b
52 b
55 c
53 c
56 a
54 a
57 % binary files and regular patch hunks
55 % binary files and regular patch hunks
58 applying patch from stdin
56 applying patch from stdin
59 foo
57 foo
60 045c85ba38952325e126c70962cc0f9d9077bc67 644 binary
58 045c85ba38952325e126c70962cc0f9d9077bc67 644 binary
61 % many binary files
59 % many binary files
62 applying patch from stdin
60 applying patch from stdin
63 045c85ba38952325e126c70962cc0f9d9077bc67 644 mbinary1
61 045c85ba38952325e126c70962cc0f9d9077bc67 644 mbinary1
64 a874b471193996e7cb034bb301cac7bdaf3e3f46 644 mbinary2
62 a874b471193996e7cb034bb301cac7bdaf3e3f46 644 mbinary2
@@ -1,27 +1,23 b''
1 adding a
1 adding a
2 adding b
2 adding b
3 adding t.h
3 adding t.h
4 adding t/x
4 adding t/x
5 a
5 a
6 NONEXISTENT: No such file or directory
7 a
6 a
8 b
7 b
9 t.h
8 t.h
10 t/x
9 t/x
11 a: No such file or directory
12 NONEXISTENT: No such file or directory
13 b
10 b
14 t.h
11 t.h
15 t/x
12 t/x
16 a
13 a
17 NONEXISTENT: No such file in rev ce18e5bc5cd3
18 a
14 a
19 b
15 b
20 t.h
16 t.h
21 t/x
17 t/x
22 % -I/-X with relative path should work
18 % -I/-X with relative path should work
23 b
19 b
24 t.h
20 t.h
25 t/x
21 t/x
26 t/x
22 t/x
27 t/x
23 t/x