move most of tag code to localrepository class.
Vadim Gelfer - r2601:00fc88b0 default
@@ -1,3534 +1,3507 @@
# commands.py - command processing for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb")
demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
demandload(globals(), "fnmatch mdiff random signal tempfile time")
demandload(globals(), "traceback errno socket version struct atexit sets bz2")
demandload(globals(), "archival cStringIO changegroup email.Parser")
demandload(globals(), "hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted, unknown = repo.changes()
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def filterfiles(filters, files):
    l = [x for x in files if x in filters]

    for t in filters:
        if t and t[-1] != "/":
            t += "/"
        l += [x for x in files if x.startswith(t)]
    return l

def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def matchpats(repo, pats=[], opts={}, head=''):
    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)

def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    exact = dict(zip(files, files))
    def walk():
        for src, fn in repo.walk(node=node, files=files, match=matchfn,
                                 badmatch=badmatch):
            yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
    return files, matchfn, walk()

def walk(repo, pats, opts, node=None, head='', badmatch=None):
    files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
    for r in results:
        yield r

def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    they are interested in.  Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple.  The
    getchange function returns the changelog entry for a numeric
    revision.  The iterator yields 3-tuples.  They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


    files, matchfn, anypats = matchpats(repo, pats, opts)

    if repo.changelog.count() == 0:
        return [], False, matchfn

    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}
    slowpath = anypats
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog):
            for i, window in increasing_windows(filelog.count()-1, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file_ in files:
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn

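# Illustrative sketch (not part of the original file): one way a log-style
# command might consume the (iterator, getchange, matchfn) tuple returned by
# walkchangerevs.  'ui', 'repo', 'pats' and 'opts' stand for whatever the
# calling command already has in scope.
#
#   changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
#   for st, rev, fns in changeiter:
#       if st == 'window':
#           pass                      # reset per-window display state
#       elif st == 'add':
#           pass                      # gather data for rev, limited to fns
#       elif st == 'iter':
#           ui.write("%d\n" % rev)    # display in the requested order
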
revrangesep = ':'

def revfix(repo, val, defval):
    '''turn user-level id of changeset into rev number.
    user-level id can be tag, changeset, rev number, or negative rev
    number relative to number of revs (-1 is tip, etc).'''
    if not val:
        return defval
    try:
        num = int(val)
        if str(num) != val:
            raise ValueError
        if num < 0:
            num += repo.changelog.count()
        if num < 0:
            num = 0
        elif num >= repo.changelog.count():
            raise ValueError
    except ValueError:
        try:
            num = repo.changelog.rev(repo.lookup(val))
        except KeyError:
            raise util.Abort(_('invalid revision identifier %s'), val)
    return num

def revpair(ui, repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''
    if not revs:
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        start = revs[0]
        if revrangesep in start:
            start, end = start.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, start, None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    if end is not None: end = repo.lookup(str(end))
    return repo.lookup(str(start)), end

def revrange(ui, repo, revs):
    """Yield revisions as strings from a list of revision specifications."""
    seen = {}
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)

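# Illustrative note (assumption, not in the original file): revrange expands
# user-level specs into individual revision strings, honoring the direction
# of the range and filtering duplicates via 'seen'.  For a repository with at
# least three revisions, something like
#
#   list(revrange(ui, repo, ['2:0']))
#
# would yield ['2', '1', '0'].
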
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(repo.changelog.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])

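# Illustrative note (assumption, not in the original file): given a node and
# both seqno and total, a pattern such as '%b-r%R-%n-of-%N.patch' passed to
# make_filename would expand %b to the repository basename, %R to the
# changelog revision of node, %n to the zero-padded sequence number
# (e.g. '03') and %N to the total (e.g. '12').
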
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)

def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists"), filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        if compress:
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
            fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)

def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    if not node1:
        node1 = repo.dirstate.parents()[0]
    # reading the data for node1 early allows it to play nicely
    # with repo.changes and the revlog cache.
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        _date2 = util.datestr(change[2])
        def date2(f):
            return _date2
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        tz = util.makedate()[1]
        _date2 = util.datestr()
        def date2(f):
            try:
                return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
            except OSError, err:
                if err.errno != errno.ENOENT: raise
                return _date2
        def read(f):
            return repo.wread(f)

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    ignorewsamount = opts.get('ignore_space_change') or \
                     diffopts['ignorewsamount']
    ignoreblanklines = opts.get('ignore_blank_lines') or \
                       diffopts['ignoreblanklines']
    for f in modified:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))
    for f in added:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))
    for f in removed:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))

def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])

        parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
                   for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        if self.ui.verbose:
            self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
        else:
            self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.changes(log.parents(changenode)[0], changenode)
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")

def show_changeset(ui, repo, opts):
    '''show one changeset. uses template or regular display. caller
    can pass in 'style' and 'template' options in opts.'''

    tmpl = opts.get('template')
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl: tmpl = templater.parsestring(tmpl)
    mapfile = opts.get('style') or ui.config('ui', 'style')
    if tmpl or mapfile:
        if mapfile:
            if not os.path.isfile(mapfile):
                mapname = templater.templatepath('map-cmdline.' + mapfile)
                if not mapname: mapname = templater.templatepath(mapfile)
                if mapname: mapfile = mapname
        try:
            t = templater.changeset_templater(ui, repo, mapfile)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo)

def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def helpcmd(name):
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = findcmd(name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helpext(name):
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        try:
            helpcmd(name)
        except UnknownCommand:
            helpext(name)

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files (DEPRECATED)

    (DEPRECATED)
    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    This command is now deprecated and will be removed in a future
    release. Please use add and remove --after instead.
    """
    ui.warn(_('(the addremove command is deprecated; use add and remove '
              '--after instead)\n'))
    return addremove_lock(ui, repo, pats, opts)

def addremove_lock(ui, repo, pats, opts, wlock=None):
    add, remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            add.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            remove.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not opts.get('dry_run'):
        repo.add(add, wlock=wlock)
        repo.remove(remove, wlock=wlock)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        try:
            return ucache[rev]
        except:
            u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
            ucache[rev] = u
            return u

    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])

    for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate()
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        node, p2 = repo.dirstate.parents()
        if p2 != nullid:
            raise util.Abort(_('uncommitted merge - please provide a '
                               'specific revision'))

    dest = make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)

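# Illustrative usage (assumption, not in the original file): based on the
# options documented above, a gzipped tarball of a given revision with a
# custom prefix could be produced with something like
#
#   hg archive -t tgz -r 1.0 -p '%b-%h' ../release-%h.tar.gz
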
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    parent, p2 = repo.changelog.parents(node)
    if parent == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        raise util.Abort(_('cannot back out a merge'))
    repo.update(node, force=True, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            doupdate(ui, repo, hex(op1), **opts)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout -m" if you want to auto-merge)\n'))

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    cg = repo.changegroup(o, 'bundle')
    write_bundle(cg, fname)

def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
930 """
930 """
931 ctx = repo.changectx(opts['rev'] or -1)
931 ctx = repo.changectx(opts['rev'] or -1)
932 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
932 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
933 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
933 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
934 fp.write(ctx.filectx(abs).data())
934 fp.write(ctx.filectx(abs).data())
935
935
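The %s/%d/%p additions described in the cat docstring are resolved by make_file (defined earlier in this file, not shown here). As a rough, standalone sketch of what that expansion amounts to, using a hypothetical expand_output_name helper:

import os

def expand_output_name(fmt, path):
    # illustration only: expand %s/%d/%p for a root-relative path
    subs = {'s': os.path.basename(path),
            'd': os.path.dirname(path) or '.',
            'p': path}
    out, i = [], 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt):
            out.append(subs.get(fmt[i + 1], '%' + fmt[i + 1]))
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)

print expand_output_name('%d/%s.orig', 'mercurial/commands.py')
# -> mercurial/commands.py.orig
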
936 def clone(ui, source, dest=None, **opts):
936 def clone(ui, source, dest=None, **opts):
937 """make a copy of an existing repository
937 """make a copy of an existing repository
938
938
939 Create a copy of an existing repository in a new directory.
939 Create a copy of an existing repository in a new directory.
940
940
941 If no destination directory name is specified, it defaults to the
941 If no destination directory name is specified, it defaults to the
942 basename of the source.
942 basename of the source.
943
943
944 The location of the source is added to the new repository's
944 The location of the source is added to the new repository's
945 .hg/hgrc file, as the default to be used for future pulls.
945 .hg/hgrc file, as the default to be used for future pulls.
946
946
947 For efficiency, hardlinks are used for cloning whenever the source
947 For efficiency, hardlinks are used for cloning whenever the source
948 and destination are on the same filesystem. Some filesystems,
948 and destination are on the same filesystem. Some filesystems,
949 such as AFS, implement hardlinking incorrectly, but do not report
949 such as AFS, implement hardlinking incorrectly, but do not report
950 errors. In these cases, use the --pull option to avoid
950 errors. In these cases, use the --pull option to avoid
951 hardlinking.
951 hardlinking.
952
952
953 See pull for valid source format details.
953 See pull for valid source format details.
954
954
955 It is possible to specify an ssh:// URL as the destination, but no
955 It is possible to specify an ssh:// URL as the destination, but no
956 .hg/hgrc will be created on the remote side. Look at the help text
956 .hg/hgrc will be created on the remote side. Look at the help text
957 for the pull command for important details about ssh:// URLs.
957 for the pull command for important details about ssh:// URLs.
958 """
958 """
959 ui.setconfig_remoteopts(**opts)
959 ui.setconfig_remoteopts(**opts)
960 hg.clone(ui, ui.expandpath(source), dest,
960 hg.clone(ui, ui.expandpath(source), dest,
961 pull=opts['pull'],
961 pull=opts['pull'],
962 rev=opts['rev'],
962 rev=opts['rev'],
963 update=not opts['noupdate'])
963 update=not opts['noupdate'])
964
964
965 def commit(ui, repo, *pats, **opts):
965 def commit(ui, repo, *pats, **opts):
966 """commit the specified files or all outstanding changes
966 """commit the specified files or all outstanding changes
967
967
968 Commit changes to the given files into the repository.
968 Commit changes to the given files into the repository.
969
969
970 If a list of files is omitted, all changes reported by "hg status"
970 If a list of files is omitted, all changes reported by "hg status"
971 will be committed.
971 will be committed.
972
972
973 If no commit message is specified, the editor configured in your hgrc
973 If no commit message is specified, the editor configured in your hgrc
974 or in the EDITOR environment variable is started to enter a message.
974 or in the EDITOR environment variable is started to enter a message.
975 """
975 """
976 message = opts['message']
976 message = opts['message']
977 logfile = opts['logfile']
977 logfile = opts['logfile']
978
978
979 if message and logfile:
979 if message and logfile:
980 raise util.Abort(_('options --message and --logfile are mutually '
980 raise util.Abort(_('options --message and --logfile are mutually '
981 'exclusive'))
981 'exclusive'))
982 if not message and logfile:
982 if not message and logfile:
983 try:
983 try:
984 if logfile == '-':
984 if logfile == '-':
985 message = sys.stdin.read()
985 message = sys.stdin.read()
986 else:
986 else:
987 message = open(logfile).read()
987 message = open(logfile).read()
988 except IOError, inst:
988 except IOError, inst:
989 raise util.Abort(_("can't read commit message '%s': %s") %
989 raise util.Abort(_("can't read commit message '%s': %s") %
990 (logfile, inst.strerror))
990 (logfile, inst.strerror))
991
991
992 if opts['addremove']:
992 if opts['addremove']:
993 addremove_lock(ui, repo, pats, opts)
993 addremove_lock(ui, repo, pats, opts)
994 fns, match, anypats = matchpats(repo, pats, opts)
994 fns, match, anypats = matchpats(repo, pats, opts)
995 if pats:
995 if pats:
996 modified, added, removed, deleted, unknown = (
996 modified, added, removed, deleted, unknown = (
997 repo.changes(files=fns, match=match))
997 repo.changes(files=fns, match=match))
998 files = modified + added + removed
998 files = modified + added + removed
999 else:
999 else:
1000 files = []
1000 files = []
1001 try:
1001 try:
1002 repo.commit(files, message, opts['user'], opts['date'], match,
1002 repo.commit(files, message, opts['user'], opts['date'], match,
1003 force_editor=opts.get('force_editor'))
1003 force_editor=opts.get('force_editor'))
1004 except ValueError, inst:
1004 except ValueError, inst:
1005 raise util.Abort(str(inst))
1005 raise util.Abort(str(inst))
1006
1006
1007 def docopy(ui, repo, pats, opts, wlock):
1007 def docopy(ui, repo, pats, opts, wlock):
1008 # called with the repo lock held
1008 # called with the repo lock held
1009 cwd = repo.getcwd()
1009 cwd = repo.getcwd()
1010 errors = 0
1010 errors = 0
1011 copied = []
1011 copied = []
1012 targets = {}
1012 targets = {}
1013
1013
1014 def okaytocopy(abs, rel, exact):
1014 def okaytocopy(abs, rel, exact):
1015 reasons = {'?': _('is not managed'),
1015 reasons = {'?': _('is not managed'),
1016 'a': _('has been marked for add'),
1016 'a': _('has been marked for add'),
1017 'r': _('has been marked for remove')}
1017 'r': _('has been marked for remove')}
1018 state = repo.dirstate.state(abs)
1018 state = repo.dirstate.state(abs)
1019 reason = reasons.get(state)
1019 reason = reasons.get(state)
1020 if reason:
1020 if reason:
1021 if state == 'a':
1021 if state == 'a':
1022 origsrc = repo.dirstate.copied(abs)
1022 origsrc = repo.dirstate.copied(abs)
1023 if origsrc is not None:
1023 if origsrc is not None:
1024 return origsrc
1024 return origsrc
1025 if exact:
1025 if exact:
1026 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1026 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1027 else:
1027 else:
1028 return abs
1028 return abs
1029
1029
1030 def copy(origsrc, abssrc, relsrc, target, exact):
1030 def copy(origsrc, abssrc, relsrc, target, exact):
1031 abstarget = util.canonpath(repo.root, cwd, target)
1031 abstarget = util.canonpath(repo.root, cwd, target)
1032 reltarget = util.pathto(cwd, abstarget)
1032 reltarget = util.pathto(cwd, abstarget)
1033 prevsrc = targets.get(abstarget)
1033 prevsrc = targets.get(abstarget)
1034 if prevsrc is not None:
1034 if prevsrc is not None:
1035 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1035 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1036 (reltarget, abssrc, prevsrc))
1036 (reltarget, abssrc, prevsrc))
1037 return
1037 return
1038 if (not opts['after'] and os.path.exists(reltarget) or
1038 if (not opts['after'] and os.path.exists(reltarget) or
1039 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1039 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1040 if not opts['force']:
1040 if not opts['force']:
1041 ui.warn(_('%s: not overwriting - file exists\n') %
1041 ui.warn(_('%s: not overwriting - file exists\n') %
1042 reltarget)
1042 reltarget)
1043 return
1043 return
1044 if not opts['after'] and not opts.get('dry_run'):
1044 if not opts['after'] and not opts.get('dry_run'):
1045 os.unlink(reltarget)
1045 os.unlink(reltarget)
1046 if opts['after']:
1046 if opts['after']:
1047 if not os.path.exists(reltarget):
1047 if not os.path.exists(reltarget):
1048 return
1048 return
1049 else:
1049 else:
1050 targetdir = os.path.dirname(reltarget) or '.'
1050 targetdir = os.path.dirname(reltarget) or '.'
1051 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1051 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1052 os.makedirs(targetdir)
1052 os.makedirs(targetdir)
1053 try:
1053 try:
1054 restore = repo.dirstate.state(abstarget) == 'r'
1054 restore = repo.dirstate.state(abstarget) == 'r'
1055 if restore and not opts.get('dry_run'):
1055 if restore and not opts.get('dry_run'):
1056 repo.undelete([abstarget], wlock)
1056 repo.undelete([abstarget], wlock)
1057 try:
1057 try:
1058 if not opts.get('dry_run'):
1058 if not opts.get('dry_run'):
1059 shutil.copyfile(relsrc, reltarget)
1059 shutil.copyfile(relsrc, reltarget)
1060 shutil.copymode(relsrc, reltarget)
1060 shutil.copymode(relsrc, reltarget)
1061 restore = False
1061 restore = False
1062 finally:
1062 finally:
1063 if restore:
1063 if restore:
1064 repo.remove([abstarget], wlock)
1064 repo.remove([abstarget], wlock)
1065 except shutil.Error, inst:
1065 except shutil.Error, inst:
1066 raise util.Abort(str(inst))
1066 raise util.Abort(str(inst))
1067 except IOError, inst:
1067 except IOError, inst:
1068 if inst.errno == errno.ENOENT:
1068 if inst.errno == errno.ENOENT:
1069 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1069 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1070 else:
1070 else:
1071 ui.warn(_('%s: cannot copy - %s\n') %
1071 ui.warn(_('%s: cannot copy - %s\n') %
1072 (relsrc, inst.strerror))
1072 (relsrc, inst.strerror))
1073 errors += 1
1073 errors += 1
1074 return
1074 return
1075 if ui.verbose or not exact:
1075 if ui.verbose or not exact:
1076 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1076 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1077 targets[abstarget] = abssrc
1077 targets[abstarget] = abssrc
1078 if abstarget != origsrc and not opts.get('dry_run'):
1078 if abstarget != origsrc and not opts.get('dry_run'):
1079 repo.copy(origsrc, abstarget, wlock)
1079 repo.copy(origsrc, abstarget, wlock)
1080 copied.append((abssrc, relsrc, exact))
1080 copied.append((abssrc, relsrc, exact))
1081
1081
1082 def targetpathfn(pat, dest, srcs):
1082 def targetpathfn(pat, dest, srcs):
1083 if os.path.isdir(pat):
1083 if os.path.isdir(pat):
1084 abspfx = util.canonpath(repo.root, cwd, pat)
1084 abspfx = util.canonpath(repo.root, cwd, pat)
1085 if destdirexists:
1085 if destdirexists:
1086 striplen = len(os.path.split(abspfx)[0])
1086 striplen = len(os.path.split(abspfx)[0])
1087 else:
1087 else:
1088 striplen = len(abspfx)
1088 striplen = len(abspfx)
1089 if striplen:
1089 if striplen:
1090 striplen += len(os.sep)
1090 striplen += len(os.sep)
1091 res = lambda p: os.path.join(dest, p[striplen:])
1091 res = lambda p: os.path.join(dest, p[striplen:])
1092 elif destdirexists:
1092 elif destdirexists:
1093 res = lambda p: os.path.join(dest, os.path.basename(p))
1093 res = lambda p: os.path.join(dest, os.path.basename(p))
1094 else:
1094 else:
1095 res = lambda p: dest
1095 res = lambda p: dest
1096 return res
1096 return res
1097
1097
1098 def targetpathafterfn(pat, dest, srcs):
1098 def targetpathafterfn(pat, dest, srcs):
1099 if util.patkind(pat, None)[0]:
1099 if util.patkind(pat, None)[0]:
1100 # a mercurial pattern
1100 # a mercurial pattern
1101 res = lambda p: os.path.join(dest, os.path.basename(p))
1101 res = lambda p: os.path.join(dest, os.path.basename(p))
1102 else:
1102 else:
1103 abspfx = util.canonpath(repo.root, cwd, pat)
1103 abspfx = util.canonpath(repo.root, cwd, pat)
1104 if len(abspfx) < len(srcs[0][0]):
1104 if len(abspfx) < len(srcs[0][0]):
1105 # A directory. Either the target path contains the last
1105 # A directory. Either the target path contains the last
1106 # component of the source path or it does not.
1106 # component of the source path or it does not.
1107 def evalpath(striplen):
1107 def evalpath(striplen):
1108 score = 0
1108 score = 0
1109 for s in srcs:
1109 for s in srcs:
1110 t = os.path.join(dest, s[0][striplen:])
1110 t = os.path.join(dest, s[0][striplen:])
1111 if os.path.exists(t):
1111 if os.path.exists(t):
1112 score += 1
1112 score += 1
1113 return score
1113 return score
1114
1114
1115 striplen = len(abspfx)
1115 striplen = len(abspfx)
1116 if striplen:
1116 if striplen:
1117 striplen += len(os.sep)
1117 striplen += len(os.sep)
1118 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1118 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1119 score = evalpath(striplen)
1119 score = evalpath(striplen)
1120 striplen1 = len(os.path.split(abspfx)[0])
1120 striplen1 = len(os.path.split(abspfx)[0])
1121 if striplen1:
1121 if striplen1:
1122 striplen1 += len(os.sep)
1122 striplen1 += len(os.sep)
1123 if evalpath(striplen1) > score:
1123 if evalpath(striplen1) > score:
1124 striplen = striplen1
1124 striplen = striplen1
1125 res = lambda p: os.path.join(dest, p[striplen:])
1125 res = lambda p: os.path.join(dest, p[striplen:])
1126 else:
1126 else:
1127 # a file
1127 # a file
1128 if destdirexists:
1128 if destdirexists:
1129 res = lambda p: os.path.join(dest, os.path.basename(p))
1129 res = lambda p: os.path.join(dest, os.path.basename(p))
1130 else:
1130 else:
1131 res = lambda p: dest
1131 res = lambda p: dest
1132 return res
1132 return res
1133
1133
1134
1134
1135 pats = list(pats)
1135 pats = list(pats)
1136 if not pats:
1136 if not pats:
1137 raise util.Abort(_('no source or destination specified'))
1137 raise util.Abort(_('no source or destination specified'))
1138 if len(pats) == 1:
1138 if len(pats) == 1:
1139 raise util.Abort(_('no destination specified'))
1139 raise util.Abort(_('no destination specified'))
1140 dest = pats.pop()
1140 dest = pats.pop()
1141 destdirexists = os.path.isdir(dest)
1141 destdirexists = os.path.isdir(dest)
1142 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1142 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1143 raise util.Abort(_('with multiple sources, destination must be an '
1143 raise util.Abort(_('with multiple sources, destination must be an '
1144 'existing directory'))
1144 'existing directory'))
1145 if opts['after']:
1145 if opts['after']:
1146 tfn = targetpathafterfn
1146 tfn = targetpathafterfn
1147 else:
1147 else:
1148 tfn = targetpathfn
1148 tfn = targetpathfn
1149 copylist = []
1149 copylist = []
1150 for pat in pats:
1150 for pat in pats:
1151 srcs = []
1151 srcs = []
1152 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1152 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1153 origsrc = okaytocopy(abssrc, relsrc, exact)
1153 origsrc = okaytocopy(abssrc, relsrc, exact)
1154 if origsrc:
1154 if origsrc:
1155 srcs.append((origsrc, abssrc, relsrc, exact))
1155 srcs.append((origsrc, abssrc, relsrc, exact))
1156 if not srcs:
1156 if not srcs:
1157 continue
1157 continue
1158 copylist.append((tfn(pat, dest, srcs), srcs))
1158 copylist.append((tfn(pat, dest, srcs), srcs))
1159 if not copylist:
1159 if not copylist:
1160 raise util.Abort(_('no files to copy'))
1160 raise util.Abort(_('no files to copy'))
1161
1161
1162 for targetpath, srcs in copylist:
1162 for targetpath, srcs in copylist:
1163 for origsrc, abssrc, relsrc, exact in srcs:
1163 for origsrc, abssrc, relsrc, exact in srcs:
1164 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1164 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1165
1165
1166 if errors:
1166 if errors:
1167 ui.warn(_('(consider using --after)\n'))
1167 ui.warn(_('(consider using --after)\n'))
1168 return errors, copied
1168 return errors, copied
1169
1169
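The striplen arithmetic in targetpathfn/targetpathafterfn above decides how much of the source path to drop before joining the remainder onto the destination. A worked example with made-up paths (assuming a POSIX os.sep):

import os

abspfx = 'a'            # canonicalized source pattern (a directory)
src = 'a/b/c.txt'       # one file walked under that pattern
sep = '/'               # assumed POSIX separator for this illustration

# destination directory already exists: keep the last source component
striplen = len(os.path.split(abspfx)[0])
if striplen:
    striplen += len(sep)
print os.path.join('dest', src[striplen:])    # dest/a/b/c.txt

# destination does not exist yet: strip the pattern itself
striplen = len(abspfx) + len(sep)
print os.path.join('dest', src[striplen:])    # dest/b/c.txt
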
1170 def copy(ui, repo, *pats, **opts):
1170 def copy(ui, repo, *pats, **opts):
1171 """mark files as copied for the next commit
1171 """mark files as copied for the next commit
1172
1172
1173 Mark dest as having copies of source files. If dest is a
1173 Mark dest as having copies of source files. If dest is a
1174 directory, copies are put in that directory. If dest is a file,
1174 directory, copies are put in that directory. If dest is a file,
1175 there can only be one source.
1175 there can only be one source.
1176
1176
1177 By default, this command copies the contents of files as they
1177 By default, this command copies the contents of files as they
1178 stand in the working directory. If invoked with --after, the
1178 stand in the working directory. If invoked with --after, the
1179 operation is recorded, but no copying is performed.
1179 operation is recorded, but no copying is performed.
1180
1180
1181 This command takes effect in the next commit.
1181 This command takes effect in the next commit.
1182
1182
1183 NOTE: This command should be treated as experimental. While it
1183 NOTE: This command should be treated as experimental. While it
1184 should properly record copied files, this information is not yet
1184 should properly record copied files, this information is not yet
1185 fully used by merge, nor fully reported by log.
1185 fully used by merge, nor fully reported by log.
1186 """
1186 """
1187 wlock = repo.wlock(0)
1187 wlock = repo.wlock(0)
1188 errs, copied = docopy(ui, repo, pats, opts, wlock)
1188 errs, copied = docopy(ui, repo, pats, opts, wlock)
1189 return errs
1189 return errs
1190
1190
1191 def debugancestor(ui, index, rev1, rev2):
1191 def debugancestor(ui, index, rev1, rev2):
1192 """find the ancestor revision of two revisions in a given index"""
1192 """find the ancestor revision of two revisions in a given index"""
1193 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1193 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1194 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1194 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1195 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1195 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1196
1196
1197 def debugcomplete(ui, cmd='', **opts):
1197 def debugcomplete(ui, cmd='', **opts):
1198 """returns the completion list associated with the given command"""
1198 """returns the completion list associated with the given command"""
1199
1199
1200 if opts['options']:
1200 if opts['options']:
1201 options = []
1201 options = []
1202 otables = [globalopts]
1202 otables = [globalopts]
1203 if cmd:
1203 if cmd:
1204 aliases, entry = findcmd(cmd)
1204 aliases, entry = findcmd(cmd)
1205 otables.append(entry[1])
1205 otables.append(entry[1])
1206 for t in otables:
1206 for t in otables:
1207 for o in t:
1207 for o in t:
1208 if o[0]:
1208 if o[0]:
1209 options.append('-%s' % o[0])
1209 options.append('-%s' % o[0])
1210 options.append('--%s' % o[1])
1210 options.append('--%s' % o[1])
1211 ui.write("%s\n" % "\n".join(options))
1211 ui.write("%s\n" % "\n".join(options))
1212 return
1212 return
1213
1213
1214 clist = findpossible(cmd).keys()
1214 clist = findpossible(cmd).keys()
1215 clist.sort()
1215 clist.sort()
1216 ui.write("%s\n" % "\n".join(clist))
1216 ui.write("%s\n" % "\n".join(clist))
1217
1217
1218 def debugrebuildstate(ui, repo, rev=None):
1218 def debugrebuildstate(ui, repo, rev=None):
1219 """rebuild the dirstate as it would look like for the given revision"""
1219 """rebuild the dirstate as it would look like for the given revision"""
1220 if not rev:
1220 if not rev:
1221 rev = repo.changelog.tip()
1221 rev = repo.changelog.tip()
1222 else:
1222 else:
1223 rev = repo.lookup(rev)
1223 rev = repo.lookup(rev)
1224 change = repo.changelog.read(rev)
1224 change = repo.changelog.read(rev)
1225 n = change[0]
1225 n = change[0]
1226 files = repo.manifest.readflags(n)
1226 files = repo.manifest.readflags(n)
1227 wlock = repo.wlock()
1227 wlock = repo.wlock()
1228 repo.dirstate.rebuild(rev, files.iteritems())
1228 repo.dirstate.rebuild(rev, files.iteritems())
1229
1229
1230 def debugcheckstate(ui, repo):
1230 def debugcheckstate(ui, repo):
1231 """validate the correctness of the current dirstate"""
1231 """validate the correctness of the current dirstate"""
1232 parent1, parent2 = repo.dirstate.parents()
1232 parent1, parent2 = repo.dirstate.parents()
1233 repo.dirstate.read()
1233 repo.dirstate.read()
1234 dc = repo.dirstate.map
1234 dc = repo.dirstate.map
1235 keys = dc.keys()
1235 keys = dc.keys()
1236 keys.sort()
1236 keys.sort()
1237 m1n = repo.changelog.read(parent1)[0]
1237 m1n = repo.changelog.read(parent1)[0]
1238 m2n = repo.changelog.read(parent2)[0]
1238 m2n = repo.changelog.read(parent2)[0]
1239 m1 = repo.manifest.read(m1n)
1239 m1 = repo.manifest.read(m1n)
1240 m2 = repo.manifest.read(m2n)
1240 m2 = repo.manifest.read(m2n)
1241 errors = 0
1241 errors = 0
1242 for f in dc:
1242 for f in dc:
1243 state = repo.dirstate.state(f)
1243 state = repo.dirstate.state(f)
1244 if state in "nr" and f not in m1:
1244 if state in "nr" and f not in m1:
1245 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1245 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1246 errors += 1
1246 errors += 1
1247 if state in "a" and f in m1:
1247 if state in "a" and f in m1:
1248 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1248 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1249 errors += 1
1249 errors += 1
1250 if state in "m" and f not in m1 and f not in m2:
1250 if state in "m" and f not in m1 and f not in m2:
1251 ui.warn(_("%s in state %s, but not in either manifest\n") %
1251 ui.warn(_("%s in state %s, but not in either manifest\n") %
1252 (f, state))
1252 (f, state))
1253 errors += 1
1253 errors += 1
1254 for f in m1:
1254 for f in m1:
1255 state = repo.dirstate.state(f)
1255 state = repo.dirstate.state(f)
1256 if state not in "nrm":
1256 if state not in "nrm":
1257 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1257 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1258 errors += 1
1258 errors += 1
1259 if errors:
1259 if errors:
1260 error = _(".hg/dirstate inconsistent with current parent's manifest")
1260 error = _(".hg/dirstate inconsistent with current parent's manifest")
1261 raise util.Abort(error)
1261 raise util.Abort(error)
1262
1262
1263 def debugconfig(ui, repo, *values):
1263 def debugconfig(ui, repo, *values):
1264 """show combined config settings from all hgrc files
1264 """show combined config settings from all hgrc files
1265
1265
1266 With no args, print names and values of all config items.
1266 With no args, print names and values of all config items.
1267
1267
1268 With one arg of the form section.name, print just the value of
1268 With one arg of the form section.name, print just the value of
1269 that config item.
1269 that config item.
1270
1270
1271 With multiple args, print names and values of all config items
1271 With multiple args, print names and values of all config items
1272 with matching section names."""
1272 with matching section names."""
1273
1273
1274 if values:
1274 if values:
1275 if len([v for v in values if '.' in v]) > 1:
1275 if len([v for v in values if '.' in v]) > 1:
1276 raise util.Abort(_('only one config item permitted'))
1276 raise util.Abort(_('only one config item permitted'))
1277 for section, name, value in ui.walkconfig():
1277 for section, name, value in ui.walkconfig():
1278 sectname = section + '.' + name
1278 sectname = section + '.' + name
1279 if values:
1279 if values:
1280 for v in values:
1280 for v in values:
1281 if v == section:
1281 if v == section:
1282 ui.write('%s=%s\n' % (sectname, value))
1282 ui.write('%s=%s\n' % (sectname, value))
1283 elif v == sectname:
1283 elif v == sectname:
1284 ui.write(value, '\n')
1284 ui.write(value, '\n')
1285 else:
1285 else:
1286 ui.write('%s=%s\n' % (sectname, value))
1286 ui.write('%s=%s\n' % (sectname, value))
1287
1287
1288 def debugsetparents(ui, repo, rev1, rev2=None):
1288 def debugsetparents(ui, repo, rev1, rev2=None):
1289 """manually set the parents of the current working directory
1289 """manually set the parents of the current working directory
1290
1290
1291 This is useful for writing repository conversion tools, but should
1291 This is useful for writing repository conversion tools, but should
1292 be used with care.
1292 be used with care.
1293 """
1293 """
1294
1294
1295 if not rev2:
1295 if not rev2:
1296 rev2 = hex(nullid)
1296 rev2 = hex(nullid)
1297
1297
1298 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1298 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1299
1299
1300 def debugstate(ui, repo):
1300 def debugstate(ui, repo):
1301 """show the contents of the current dirstate"""
1301 """show the contents of the current dirstate"""
1302 repo.dirstate.read()
1302 repo.dirstate.read()
1303 dc = repo.dirstate.map
1303 dc = repo.dirstate.map
1304 keys = dc.keys()
1304 keys = dc.keys()
1305 keys.sort()
1305 keys.sort()
1306 for file_ in keys:
1306 for file_ in keys:
1307 ui.write("%c %3o %10d %s %s\n"
1307 ui.write("%c %3o %10d %s %s\n"
1308 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1308 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1309 time.strftime("%x %X",
1309 time.strftime("%x %X",
1310 time.localtime(dc[file_][3])), file_))
1310 time.localtime(dc[file_][3])), file_))
1311 for f in repo.dirstate.copies:
1311 for f in repo.dirstate.copies:
1312 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1312 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1313
1313
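Each dirstate entry above is printed as state, octal mode, size and mtime. The "%3o" conversion is what turns the stored mode bits into the familiar octal form, for example:

print '%c %3o' % ('n', 0644)    # prints "n 644"
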
1314 def debugdata(ui, file_, rev):
1314 def debugdata(ui, file_, rev):
1315 """dump the contents of an data file revision"""
1315 """dump the contents of an data file revision"""
1316 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1316 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1317 file_[:-2] + ".i", file_, 0)
1317 file_[:-2] + ".i", file_, 0)
1318 try:
1318 try:
1319 ui.write(r.revision(r.lookup(rev)))
1319 ui.write(r.revision(r.lookup(rev)))
1320 except KeyError:
1320 except KeyError:
1321 raise util.Abort(_('invalid revision identifier %s') % rev)
1321 raise util.Abort(_('invalid revision identifier %s') % rev)
1322
1322
1323 def debugindex(ui, file_):
1323 def debugindex(ui, file_):
1324 """dump the contents of an index file"""
1324 """dump the contents of an index file"""
1325 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1325 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1326 ui.write(" rev offset length base linkrev" +
1326 ui.write(" rev offset length base linkrev" +
1327 " nodeid p1 p2\n")
1327 " nodeid p1 p2\n")
1328 for i in range(r.count()):
1328 for i in range(r.count()):
1329 node = r.node(i)
1329 node = r.node(i)
1330 pp = r.parents(node)
1330 pp = r.parents(node)
1331 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1331 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1332 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1332 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1333 short(node), short(pp[0]), short(pp[1])))
1333 short(node), short(pp[0]), short(pp[1])))
1334
1334
1335 def debugindexdot(ui, file_):
1335 def debugindexdot(ui, file_):
1336 """dump an index DAG as a .dot file"""
1336 """dump an index DAG as a .dot file"""
1337 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1337 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1338 ui.write("digraph G {\n")
1338 ui.write("digraph G {\n")
1339 for i in range(r.count()):
1339 for i in range(r.count()):
1340 node = r.node(i)
1340 node = r.node(i)
1341 pp = r.parents(node)
1341 pp = r.parents(node)
1342 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1342 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1343 if pp[1] != nullid:
1343 if pp[1] != nullid:
1344 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1344 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1345 ui.write("}\n")
1345 ui.write("}\n")
1346
1346
1347 def debugrename(ui, repo, file, rev=None):
1347 def debugrename(ui, repo, file, rev=None):
1348 """dump rename information"""
1348 """dump rename information"""
1349 r = repo.file(relpath(repo, [file])[0])
1349 r = repo.file(relpath(repo, [file])[0])
1350 if rev:
1350 if rev:
1351 try:
1351 try:
1352 # assume all revision numbers are for changesets
1352 # assume all revision numbers are for changesets
1353 n = repo.lookup(rev)
1353 n = repo.lookup(rev)
1354 change = repo.changelog.read(n)
1354 change = repo.changelog.read(n)
1355 m = repo.manifest.read(change[0])
1355 m = repo.manifest.read(change[0])
1356 n = m[relpath(repo, [file])[0]]
1356 n = m[relpath(repo, [file])[0]]
1357 except (hg.RepoError, KeyError):
1357 except (hg.RepoError, KeyError):
1358 n = r.lookup(rev)
1358 n = r.lookup(rev)
1359 else:
1359 else:
1360 n = r.tip()
1360 n = r.tip()
1361 m = r.renamed(n)
1361 m = r.renamed(n)
1362 if m:
1362 if m:
1363 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1363 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1364 else:
1364 else:
1365 ui.write(_("not renamed\n"))
1365 ui.write(_("not renamed\n"))
1366
1366
1367 def debugwalk(ui, repo, *pats, **opts):
1367 def debugwalk(ui, repo, *pats, **opts):
1368 """show how files match on given patterns"""
1368 """show how files match on given patterns"""
1369 items = list(walk(repo, pats, opts))
1369 items = list(walk(repo, pats, opts))
1370 if not items:
1370 if not items:
1371 return
1371 return
1372 fmt = '%%s %%-%ds %%-%ds %%s' % (
1372 fmt = '%%s %%-%ds %%-%ds %%s' % (
1373 max([len(abs) for (src, abs, rel, exact) in items]),
1373 max([len(abs) for (src, abs, rel, exact) in items]),
1374 max([len(rel) for (src, abs, rel, exact) in items]))
1374 max([len(rel) for (src, abs, rel, exact) in items]))
1375 for src, abs, rel, exact in items:
1375 for src, abs, rel, exact in items:
1376 line = fmt % (src, abs, rel, exact and 'exact' or '')
1376 line = fmt % (src, abs, rel, exact and 'exact' or '')
1377 ui.write("%s\n" % line.rstrip())
1377 ui.write("%s\n" % line.rstrip())
1378
1378
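The doubled %% in debugwalk's fmt is easy to misread: the first substitution only fills in the two column widths, producing a second format string that is then applied to every walked file. With hypothetical widths:

fmt = '%%s %%-%ds %%-%ds %%s' % (14, 10)
print fmt                                          # %s %-14s %-10s %s
print fmt % ('f', 'sub/file.txt', 'file.txt', 'exact')
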
1379 def diff(ui, repo, *pats, **opts):
1379 def diff(ui, repo, *pats, **opts):
1380 """diff repository (or selected files)
1380 """diff repository (or selected files)
1381
1381
1382 Show differences between revisions for the specified files.
1382 Show differences between revisions for the specified files.
1383
1383
1384 Differences between files are shown using the unified diff format.
1384 Differences between files are shown using the unified diff format.
1385
1385
1386 When two revision arguments are given, then changes are shown
1386 When two revision arguments are given, then changes are shown
1387 between those revisions. If only one revision is specified then
1387 between those revisions. If only one revision is specified then
1388 that revision is compared to the working directory, and, when no
1388 that revision is compared to the working directory, and, when no
1389 revisions are specified, the working directory files are compared
1389 revisions are specified, the working directory files are compared
1390 to its parent.
1390 to its parent.
1391
1391
1392 Without the -a option, diff will avoid generating diffs of files
1392 Without the -a option, diff will avoid generating diffs of files
1393 it detects as binary. With -a, diff will generate a diff anyway,
1393 it detects as binary. With -a, diff will generate a diff anyway,
1394 probably with undesirable results.
1394 probably with undesirable results.
1395 """
1395 """
1396 node1, node2 = revpair(ui, repo, opts['rev'])
1396 node1, node2 = revpair(ui, repo, opts['rev'])
1397
1397
1398 fns, matchfn, anypats = matchpats(repo, pats, opts)
1398 fns, matchfn, anypats = matchpats(repo, pats, opts)
1399
1399
1400 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1400 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1401 text=opts['text'], opts=opts)
1401 text=opts['text'], opts=opts)
1402
1402
1403 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1403 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1404 node = repo.lookup(changeset)
1404 node = repo.lookup(changeset)
1405 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1405 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1406 if opts['switch_parent']:
1406 if opts['switch_parent']:
1407 parents.reverse()
1407 parents.reverse()
1408 prev = (parents and parents[0]) or nullid
1408 prev = (parents and parents[0]) or nullid
1409 change = repo.changelog.read(node)
1409 change = repo.changelog.read(node)
1410
1410
1411 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1411 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1412 revwidth=revwidth)
1412 revwidth=revwidth)
1413 if fp != sys.stdout:
1413 if fp != sys.stdout:
1414 ui.note("%s\n" % fp.name)
1414 ui.note("%s\n" % fp.name)
1415
1415
1416 fp.write("# HG changeset patch\n")
1416 fp.write("# HG changeset patch\n")
1417 fp.write("# User %s\n" % change[1])
1417 fp.write("# User %s\n" % change[1])
1418 fp.write("# Date %d %d\n" % change[2])
1418 fp.write("# Date %d %d\n" % change[2])
1419 fp.write("# Node ID %s\n" % hex(node))
1419 fp.write("# Node ID %s\n" % hex(node))
1420 fp.write("# Parent %s\n" % hex(prev))
1420 fp.write("# Parent %s\n" % hex(prev))
1421 if len(parents) > 1:
1421 if len(parents) > 1:
1422 fp.write("# Parent %s\n" % hex(parents[1]))
1422 fp.write("# Parent %s\n" % hex(parents[1]))
1423 fp.write(change[4].rstrip())
1423 fp.write(change[4].rstrip())
1424 fp.write("\n\n")
1424 fp.write("\n\n")
1425
1425
1426 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1426 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1427 if fp != sys.stdout:
1427 if fp != sys.stdout:
1428 fp.close()
1428 fp.close()
1429
1429
1430 def export(ui, repo, *changesets, **opts):
1430 def export(ui, repo, *changesets, **opts):
1431 """dump the header and diffs for one or more changesets
1431 """dump the header and diffs for one or more changesets
1432
1432
1433 Print the changeset header and diffs for one or more revisions.
1433 Print the changeset header and diffs for one or more revisions.
1434
1434
1435 The information shown in the changeset header is: author,
1435 The information shown in the changeset header is: author,
1436 changeset hash, parent and commit comment.
1436 changeset hash, parent and commit comment.
1437
1437
1438 Output may be to a file, in which case the name of the file is
1438 Output may be to a file, in which case the name of the file is
1439 given using a format string. The formatting rules are as follows:
1439 given using a format string. The formatting rules are as follows:
1440
1440
1441 %% literal "%" character
1441 %% literal "%" character
1442 %H changeset hash (40 bytes of hexadecimal)
1442 %H changeset hash (40 bytes of hexadecimal)
1443 %N number of patches being generated
1443 %N number of patches being generated
1444 %R changeset revision number
1444 %R changeset revision number
1445 %b basename of the exporting repository
1445 %b basename of the exporting repository
1446 %h short-form changeset hash (12 bytes of hexadecimal)
1446 %h short-form changeset hash (12 bytes of hexadecimal)
1447 %n zero-padded sequence number, starting at 1
1447 %n zero-padded sequence number, starting at 1
1448 %r zero-padded changeset revision number
1448 %r zero-padded changeset revision number
1449
1449
1450 Without the -a option, export will avoid generating diffs of files
1450 Without the -a option, export will avoid generating diffs of files
1451 it detects as binary. With -a, export will generate a diff anyway,
1451 it detects as binary. With -a, export will generate a diff anyway,
1452 probably with undesirable results.
1452 probably with undesirable results.
1453
1453
1454 With the --switch-parent option, the diff will be against the second
1454 With the --switch-parent option, the diff will be against the second
1455 parent. This can be useful for reviewing a merge.
1455 parent. This can be useful for reviewing a merge.
1456 """
1456 """
1457 if not changesets:
1457 if not changesets:
1458 raise util.Abort(_("export requires at least one changeset"))
1458 raise util.Abort(_("export requires at least one changeset"))
1459 seqno = 0
1459 seqno = 0
1460 revs = list(revrange(ui, repo, changesets))
1460 revs = list(revrange(ui, repo, changesets))
1461 total = len(revs)
1461 total = len(revs)
1462 revwidth = max(map(len, revs))
1462 revwidth = max(map(len, revs))
1463 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1463 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1464 ui.note(msg)
1464 ui.note(msg)
1465 for cset in revs:
1465 for cset in revs:
1466 seqno += 1
1466 seqno += 1
1467 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1467 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1468
1468
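make_file (defined earlier in this file) receives total, seqno and revwidth, where revwidth is the width of the longest requested revision number. One plausible way to produce the zero-padded %n/%r fields described in the docstring is Python's "*" width specifier; this is an illustration, not the actual make_file code:

revs = ['3', '12', '250']
revwidth = max(map(len, revs))      # 3
print '%0*d' % (revwidth, 3)        # 003
print '%0*d' % (revwidth, 250)      # 250
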
1469 def forget(ui, repo, *pats, **opts):
1469 def forget(ui, repo, *pats, **opts):
1470 """don't add the specified files on the next commit (DEPRECATED)
1470 """don't add the specified files on the next commit (DEPRECATED)
1471
1471
1472 (DEPRECATED)
1472 (DEPRECATED)
1473 Undo an 'hg add' scheduled for the next commit.
1473 Undo an 'hg add' scheduled for the next commit.
1474
1474
1475 This command is now deprecated and will be removed in a future
1475 This command is now deprecated and will be removed in a future
1476 release. Please use revert instead.
1476 release. Please use revert instead.
1477 """
1477 """
1478 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1478 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1479 forget = []
1479 forget = []
1480 for src, abs, rel, exact in walk(repo, pats, opts):
1480 for src, abs, rel, exact in walk(repo, pats, opts):
1481 if repo.dirstate.state(abs) == 'a':
1481 if repo.dirstate.state(abs) == 'a':
1482 forget.append(abs)
1482 forget.append(abs)
1483 if ui.verbose or not exact:
1483 if ui.verbose or not exact:
1484 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1484 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1485 repo.forget(forget)
1485 repo.forget(forget)
1486
1486
1487 def grep(ui, repo, pattern, *pats, **opts):
1487 def grep(ui, repo, pattern, *pats, **opts):
1488 """search for a pattern in specified files and revisions
1488 """search for a pattern in specified files and revisions
1489
1489
1490 Search revisions of files for a regular expression.
1490 Search revisions of files for a regular expression.
1491
1491
1492 This command behaves differently than Unix grep. It only accepts
1492 This command behaves differently than Unix grep. It only accepts
1493 Python/Perl regexps. It searches repository history, not the
1493 Python/Perl regexps. It searches repository history, not the
1494 working directory. It always prints the revision number in which
1494 working directory. It always prints the revision number in which
1495 a match appears.
1495 a match appears.
1496
1496
1497 By default, grep only prints output for the first revision of a
1497 By default, grep only prints output for the first revision of a
1498 file in which it finds a match. To get it to print every revision
1498 file in which it finds a match. To get it to print every revision
1499 that contains a change in match status ("-" for a match that
1499 that contains a change in match status ("-" for a match that
1500 becomes a non-match, or "+" for a non-match that becomes a match),
1500 becomes a non-match, or "+" for a non-match that becomes a match),
1501 use the --all flag.
1501 use the --all flag.
1502 """
1502 """
1503 reflags = 0
1503 reflags = 0
1504 if opts['ignore_case']:
1504 if opts['ignore_case']:
1505 reflags |= re.I
1505 reflags |= re.I
1506 regexp = re.compile(pattern, reflags)
1506 regexp = re.compile(pattern, reflags)
1507 sep, eol = ':', '\n'
1507 sep, eol = ':', '\n'
1508 if opts['print0']:
1508 if opts['print0']:
1509 sep = eol = '\0'
1509 sep = eol = '\0'
1510
1510
1511 fcache = {}
1511 fcache = {}
1512 def getfile(fn):
1512 def getfile(fn):
1513 if fn not in fcache:
1513 if fn not in fcache:
1514 fcache[fn] = repo.file(fn)
1514 fcache[fn] = repo.file(fn)
1515 return fcache[fn]
1515 return fcache[fn]
1516
1516
1517 def matchlines(body):
1517 def matchlines(body):
1518 begin = 0
1518 begin = 0
1519 linenum = 0
1519 linenum = 0
1520 while True:
1520 while True:
1521 match = regexp.search(body, begin)
1521 match = regexp.search(body, begin)
1522 if not match:
1522 if not match:
1523 break
1523 break
1524 mstart, mend = match.span()
1524 mstart, mend = match.span()
1525 linenum += body.count('\n', begin, mstart) + 1
1525 linenum += body.count('\n', begin, mstart) + 1
1526 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1526 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1527 lend = body.find('\n', mend)
1527 lend = body.find('\n', mend)
1528 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1528 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1529 begin = lend + 1
1529 begin = lend + 1
1530
1530
1531 class linestate(object):
1531 class linestate(object):
1532 def __init__(self, line, linenum, colstart, colend):
1532 def __init__(self, line, linenum, colstart, colend):
1533 self.line = line
1533 self.line = line
1534 self.linenum = linenum
1534 self.linenum = linenum
1535 self.colstart = colstart
1535 self.colstart = colstart
1536 self.colend = colend
1536 self.colend = colend
1537 def __eq__(self, other):
1537 def __eq__(self, other):
1538 return self.line == other.line
1538 return self.line == other.line
1539 def __hash__(self):
1539 def __hash__(self):
1540 return hash(self.line)
1540 return hash(self.line)
1541
1541
1542 matches = {}
1542 matches = {}
1543 def grepbody(fn, rev, body):
1543 def grepbody(fn, rev, body):
1544 matches[rev].setdefault(fn, {})
1544 matches[rev].setdefault(fn, {})
1545 m = matches[rev][fn]
1545 m = matches[rev][fn]
1546 for lnum, cstart, cend, line in matchlines(body):
1546 for lnum, cstart, cend, line in matchlines(body):
1547 s = linestate(line, lnum, cstart, cend)
1547 s = linestate(line, lnum, cstart, cend)
1548 m[s] = s
1548 m[s] = s
1549
1549
1550 # FIXME: prev isn't used, why ?
1550 # FIXME: prev isn't used, why ?
1551 prev = {}
1551 prev = {}
1552 ucache = {}
1552 ucache = {}
1553 def display(fn, rev, states, prevstates):
1553 def display(fn, rev, states, prevstates):
1554 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1554 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1555 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1555 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1556 counts = {'-': 0, '+': 0}
1556 counts = {'-': 0, '+': 0}
1557 filerevmatches = {}
1557 filerevmatches = {}
1558 for l in diff:
1558 for l in diff:
1559 if incrementing or not opts['all']:
1559 if incrementing or not opts['all']:
1560 change = ((l in prevstates) and '-') or '+'
1560 change = ((l in prevstates) and '-') or '+'
1561 r = rev
1561 r = rev
1562 else:
1562 else:
1563 change = ((l in states) and '-') or '+'
1563 change = ((l in states) and '-') or '+'
1564 r = prev[fn]
1564 r = prev[fn]
1565 cols = [fn, str(rev)]
1565 cols = [fn, str(rev)]
1566 if opts['line_number']:
1566 if opts['line_number']:
1567 cols.append(str(l.linenum))
1567 cols.append(str(l.linenum))
1568 if opts['all']:
1568 if opts['all']:
1569 cols.append(change)
1569 cols.append(change)
1570 if opts['user']:
1570 if opts['user']:
1571 cols.append(trimuser(ui, getchange(rev)[1], rev,
1571 cols.append(trimuser(ui, getchange(rev)[1], rev,
1572 ucache))
1572 ucache))
1573 if opts['files_with_matches']:
1573 if opts['files_with_matches']:
1574 c = (fn, rev)
1574 c = (fn, rev)
1575 if c in filerevmatches:
1575 if c in filerevmatches:
1576 continue
1576 continue
1577 filerevmatches[c] = 1
1577 filerevmatches[c] = 1
1578 else:
1578 else:
1579 cols.append(l.line)
1579 cols.append(l.line)
1580 ui.write(sep.join(cols), eol)
1580 ui.write(sep.join(cols), eol)
1581 counts[change] += 1
1581 counts[change] += 1
1582 return counts['+'], counts['-']
1582 return counts['+'], counts['-']
1583
1583
1584 fstate = {}
1584 fstate = {}
1585 skip = {}
1585 skip = {}
1586 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1586 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1587 count = 0
1587 count = 0
1588 incrementing = False
1588 incrementing = False
1589 for st, rev, fns in changeiter:
1589 for st, rev, fns in changeiter:
1590 if st == 'window':
1590 if st == 'window':
1591 incrementing = rev
1591 incrementing = rev
1592 matches.clear()
1592 matches.clear()
1593 elif st == 'add':
1593 elif st == 'add':
1594 change = repo.changelog.read(repo.lookup(str(rev)))
1594 change = repo.changelog.read(repo.lookup(str(rev)))
1595 mf = repo.manifest.read(change[0])
1595 mf = repo.manifest.read(change[0])
1596 matches[rev] = {}
1596 matches[rev] = {}
1597 for fn in fns:
1597 for fn in fns:
1598 if fn in skip:
1598 if fn in skip:
1599 continue
1599 continue
1600 fstate.setdefault(fn, {})
1600 fstate.setdefault(fn, {})
1601 try:
1601 try:
1602 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1602 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1603 except KeyError:
1603 except KeyError:
1604 pass
1604 pass
1605 elif st == 'iter':
1605 elif st == 'iter':
1606 states = matches[rev].items()
1606 states = matches[rev].items()
1607 states.sort()
1607 states.sort()
1608 for fn, m in states:
1608 for fn, m in states:
1609 if fn in skip:
1609 if fn in skip:
1610 continue
1610 continue
1611 if incrementing or not opts['all'] or fstate[fn]:
1611 if incrementing or not opts['all'] or fstate[fn]:
1612 pos, neg = display(fn, rev, m, fstate[fn])
1612 pos, neg = display(fn, rev, m, fstate[fn])
1613 count += pos + neg
1613 count += pos + neg
1614 if pos and not opts['all']:
1614 if pos and not opts['all']:
1615 skip[fn] = True
1615 skip[fn] = True
1616 fstate[fn] = m
1616 fstate[fn] = m
1617 prev[fn] = rev
1617 prev[fn] = rev
1618
1618
1619 if not incrementing:
1619 if not incrementing:
1620 fstate = fstate.items()
1620 fstate = fstate.items()
1621 fstate.sort()
1621 fstate.sort()
1622 for fn, state in fstate:
1622 for fn, state in fstate:
1623 if fn in skip:
1623 if fn in skip:
1624 continue
1624 continue
1625 display(fn, rev, {}, state)
1625 display(fn, rev, {}, state)
1626 return (count == 0 and 1) or 0
1626 return (count == 0 and 1) or 0
1627
1627
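The display helper above drives its "+"/"-" markers off the symmetric difference between the previous and current match sets: lines present in only one of the two sets changed status, and membership in the old set means the match went away. A minimal standalone illustration with made-up line sets:

import sets    # same pre-2.4 sets module the code above imports

prevstates = sets.Set(['spam()', 'eggs()'])    # matches in the earlier rev
states = sets.Set(['eggs()', 'ham()'])         # matches in this rev

for line in prevstates.symmetric_difference(states):
    change = ((line in prevstates) and '-') or '+'
    print '%s %s' % (change, line)
# prints "- spam()" and "+ ham()" (in no particular order)
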
1628 def heads(ui, repo, **opts):
1628 def heads(ui, repo, **opts):
1629 """show current repository heads
1629 """show current repository heads
1630
1630
1631 Show all repository head changesets.
1631 Show all repository head changesets.
1632
1632
1633 Repository "heads" are changesets that don't have children
1633 Repository "heads" are changesets that don't have children
1634 changesets. They are where development generally takes place and
1634 changesets. They are where development generally takes place and
1635 are the usual targets for update and merge operations.
1635 are the usual targets for update and merge operations.
1636 """
1636 """
1637 if opts['rev']:
1637 if opts['rev']:
1638 heads = repo.heads(repo.lookup(opts['rev']))
1638 heads = repo.heads(repo.lookup(opts['rev']))
1639 else:
1639 else:
1640 heads = repo.heads()
1640 heads = repo.heads()
1641 br = None
1641 br = None
1642 if opts['branches']:
1642 if opts['branches']:
1643 br = repo.branchlookup(heads)
1643 br = repo.branchlookup(heads)
1644 displayer = show_changeset(ui, repo, opts)
1644 displayer = show_changeset(ui, repo, opts)
1645 for n in heads:
1645 for n in heads:
1646 displayer.show(changenode=n, brinfo=br)
1646 displayer.show(changenode=n, brinfo=br)
1647
1647
1648 def identify(ui, repo):
1648 def identify(ui, repo):
1649 """print information about the working copy
1649 """print information about the working copy
1650
1650
1651 Print a short summary of the current state of the repo.
1651 Print a short summary of the current state of the repo.
1652
1652
1653 This summary identifies the repository state using one or two parent
1653 This summary identifies the repository state using one or two parent
1654 hash identifiers, followed by a "+" if there are uncommitted changes
1654 hash identifiers, followed by a "+" if there are uncommitted changes
1655 in the working directory, followed by a list of tags for this revision.
1655 in the working directory, followed by a list of tags for this revision.
1656 """
1656 """
1657 parents = [p for p in repo.dirstate.parents() if p != nullid]
1657 parents = [p for p in repo.dirstate.parents() if p != nullid]
1658 if not parents:
1658 if not parents:
1659 ui.write(_("unknown\n"))
1659 ui.write(_("unknown\n"))
1660 return
1660 return
1661
1661
1662 hexfunc = ui.verbose and hex or short
1662 hexfunc = ui.verbose and hex or short
1663 modified, added, removed, deleted, unknown = repo.changes()
1663 modified, added, removed, deleted, unknown = repo.changes()
1664 output = ["%s%s" %
1664 output = ["%s%s" %
1665 ('+'.join([hexfunc(parent) for parent in parents]),
1665 ('+'.join([hexfunc(parent) for parent in parents]),
1666 (modified or added or removed or deleted) and "+" or "")]
1666 (modified or added or removed or deleted) and "+" or "")]
1667
1667
1668 if not ui.quiet:
1668 if not ui.quiet:
1669 # multiple tags for a single parent separated by '/'
1669 # multiple tags for a single parent separated by '/'
1670 parenttags = ['/'.join(tags)
1670 parenttags = ['/'.join(tags)
1671 for tags in map(repo.nodetags, parents) if tags]
1671 for tags in map(repo.nodetags, parents) if tags]
1672 # tags for multiple parents separated by ' + '
1672 # tags for multiple parents separated by ' + '
1673 if parenttags:
1673 if parenttags:
1674 output.append(' + '.join(parenttags))
1674 output.append(' + '.join(parenttags))
1675
1675
1676 ui.write("%s\n" % ' '.join(output))
1676 ui.write("%s\n" % ' '.join(output))
1677
1677
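The tag joining in identify uses two different separators: the tags of a single parent are joined with '/', and (for an uncommitted merge) the per-parent strings are joined with ' + '. With made-up tag lists:

parents_tags = [['tip', 'stable'], ['v0.9']]    # hypothetical example data
parenttags = ['/'.join(tags) for tags in parents_tags if tags]
print ' + '.join(parenttags)                    # tip/stable + v0.9
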
1678 def import_(ui, repo, patch1, *patches, **opts):
1678 def import_(ui, repo, patch1, *patches, **opts):
1679 """import an ordered set of patches
1679 """import an ordered set of patches
1680
1680
1681 Import a list of patches and commit them individually.
1681 Import a list of patches and commit them individually.
1682
1682
1683 If there are outstanding changes in the working directory, import
1683 If there are outstanding changes in the working directory, import
1684 will abort unless given the -f flag.
1684 will abort unless given the -f flag.
1685
1685
1686 You can import a patch straight from a mail message. Even patches
1686 You can import a patch straight from a mail message. Even patches
1687 as attachments work (the body part must be of type text/plain or
1687 as attachments work (the body part must be of type text/plain or
1688 text/x-patch to be used). The From and Subject headers of the email
1688 text/x-patch to be used). The From and Subject headers of the email
1689 message are used as the default committer and commit message. All
1689 message are used as the default committer and commit message. All
1690 text/plain body parts before the first diff are added to the commit
1690 text/plain body parts before the first diff are added to the commit
1691 message.
1691 message.
1692
1692
1693 If the imported patch was generated by hg export, the user and description
1693 If the imported patch was generated by hg export, the user and description
1694 from the patch override the values from the message headers and body. Values
1694 from the patch override the values from the message headers and body. Values
1695 given on the command line with -m and -u override these.
1695 given on the command line with -m and -u override these.
1696
1696
1697 To read a patch from standard input, use patch name "-".
1697 To read a patch from standard input, use patch name "-".
1698 """
1698 """
1699 patches = (patch1,) + patches
1699 patches = (patch1,) + patches
1700
1700
1701 if not opts['force']:
1701 if not opts['force']:
1702 bail_if_changed(repo)
1702 bail_if_changed(repo)
1703
1703
1704 d = opts["base"]
1704 d = opts["base"]
1705 strip = opts["strip"]
1705 strip = opts["strip"]
1706
1706
1707 mailre = re.compile(r'(?:From |[\w-]+:)')
1707 mailre = re.compile(r'(?:From |[\w-]+:)')
1708
1708
1709 # attempt to detect the start of a patch
1709 # attempt to detect the start of a patch
1710 # (this heuristic is borrowed from quilt)
1710 # (this heuristic is borrowed from quilt)
1711 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1711 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1712 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1712 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1713 '(---|\*\*\*)[ \t])', re.MULTILINE)
1713 '(---|\*\*\*)[ \t])', re.MULTILINE)
1714
1714
1715 for patch in patches:
1715 for patch in patches:
1716 pf = os.path.join(d, patch)
1716 pf = os.path.join(d, patch)
1717
1717
1718 message = None
1718 message = None
1719 user = None
1719 user = None
1720 date = None
1720 date = None
1721 hgpatch = False
1721 hgpatch = False
1722
1722
1723 p = email.Parser.Parser()
1723 p = email.Parser.Parser()
1724 if pf == '-':
1724 if pf == '-':
1725 msg = p.parse(sys.stdin)
1725 msg = p.parse(sys.stdin)
1726 ui.status(_("applying patch from stdin\n"))
1726 ui.status(_("applying patch from stdin\n"))
1727 else:
1727 else:
1728 msg = p.parse(file(pf))
1728 msg = p.parse(file(pf))
1729 ui.status(_("applying %s\n") % patch)
1729 ui.status(_("applying %s\n") % patch)
1730
1730
1731 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1731 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1732 tmpfp = os.fdopen(fd, 'w')
1732 tmpfp = os.fdopen(fd, 'w')
1733 try:
1733 try:
1734 message = msg['Subject']
1734 message = msg['Subject']
1735 if message:
1735 if message:
1736 message = message.replace('\n\t', ' ')
1736 message = message.replace('\n\t', ' ')
1737 ui.debug('Subject: %s\n' % message)
1737 ui.debug('Subject: %s\n' % message)
1738 user = msg['From']
1738 user = msg['From']
1739 if user:
1739 if user:
1740 ui.debug('From: %s\n' % user)
1740 ui.debug('From: %s\n' % user)
1741 diffs_seen = 0
1741 diffs_seen = 0
1742 ok_types = ('text/plain', 'text/x-patch')
1742 ok_types = ('text/plain', 'text/x-patch')
1743 for part in msg.walk():
1743 for part in msg.walk():
1744 content_type = part.get_content_type()
1744 content_type = part.get_content_type()
1745 ui.debug('Content-Type: %s\n' % content_type)
1745 ui.debug('Content-Type: %s\n' % content_type)
1746 if content_type not in ok_types:
1746 if content_type not in ok_types:
1747 continue
1747 continue
1748 payload = part.get_payload(decode=True)
1748 payload = part.get_payload(decode=True)
1749 m = diffre.search(payload)
1749 m = diffre.search(payload)
1750 if m:
1750 if m:
1751 ui.debug(_('found patch at byte %d\n') % m.start(0))
1751 ui.debug(_('found patch at byte %d\n') % m.start(0))
1752 diffs_seen += 1
1752 diffs_seen += 1
1753 hgpatch = False
1753 hgpatch = False
1754 fp = cStringIO.StringIO()
1754 fp = cStringIO.StringIO()
1755 if message:
1755 if message:
1756 fp.write(message)
1756 fp.write(message)
1757 fp.write('\n')
1757 fp.write('\n')
1758 for line in payload[:m.start(0)].splitlines():
1758 for line in payload[:m.start(0)].splitlines():
1759 if line.startswith('# HG changeset patch'):
1759 if line.startswith('# HG changeset patch'):
1760 ui.debug(_('patch generated by hg export\n'))
1760 ui.debug(_('patch generated by hg export\n'))
1761 hgpatch = True
1761 hgpatch = True
1762 # drop earlier commit message content
1762 # drop earlier commit message content
1763 fp.seek(0)
1763 fp.seek(0)
1764 fp.truncate()
1764 fp.truncate()
1765 elif hgpatch:
1765 elif hgpatch:
1766 if line.startswith('# User '):
1766 if line.startswith('# User '):
1767 user = line[7:]
1767 user = line[7:]
1768 ui.debug('From: %s\n' % user)
1768 ui.debug('From: %s\n' % user)
1769 elif line.startswith("# Date "):
1769 elif line.startswith("# Date "):
1770 date = line[7:]
1770 date = line[7:]
1771 if not line.startswith('# '):
1771 if not line.startswith('# '):
1772 fp.write(line)
1772 fp.write(line)
1773 fp.write('\n')
1773 fp.write('\n')
1774 message = fp.getvalue()
1774 message = fp.getvalue()
1775 if tmpfp:
1775 if tmpfp:
1776 tmpfp.write(payload)
1776 tmpfp.write(payload)
1777 if not payload.endswith('\n'):
1777 if not payload.endswith('\n'):
1778 tmpfp.write('\n')
1778 tmpfp.write('\n')
1779 elif not diffs_seen and message and content_type == 'text/plain':
1779 elif not diffs_seen and message and content_type == 'text/plain':
1780 message += '\n' + payload
1780 message += '\n' + payload
1781
1781
1782 if opts['message']:
1782 if opts['message']:
1783 # pickup the cmdline msg
1783 # pickup the cmdline msg
1784 message = opts['message']
1784 message = opts['message']
1785 elif message:
1785 elif message:
1786 # pickup the patch msg
1786 # pickup the patch msg
1787 message = message.strip()
1787 message = message.strip()
1788 else:
1788 else:
1789 # launch the editor
1789 # launch the editor
1790 message = None
1790 message = None
1791 ui.debug(_('message:\n%s\n') % message)
1791 ui.debug(_('message:\n%s\n') % message)
1792
1792
1793 tmpfp.close()
1793 tmpfp.close()
1794 if not diffs_seen:
1794 if not diffs_seen:
1795 raise util.Abort(_('no diffs found'))
1795 raise util.Abort(_('no diffs found'))
1796
1796
1797 files = util.patch(strip, tmpname, ui)
1797 files = util.patch(strip, tmpname, ui)
1798 if len(files) > 0:
1798 if len(files) > 0:
1799 addremove_lock(ui, repo, files, {})
1799 addremove_lock(ui, repo, files, {})
1800 repo.commit(files, message, user, date)
1800 repo.commit(files, message, user, date)
1801 finally:
1801 finally:
1802 os.unlink(tmpname)
1802 os.unlink(tmpname)
1803
1803
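# Illustrative sketch, not part of the original commands.py: the quilt-style
# heuristic used by import_() above, applied on its own.  Everything before
# the first line that looks like the start of a diff is treated as commit
# message text; the rest is the patch.  The sample payload is invented.
def _example_find_patch_start():
    import re
    example_diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                                'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                                '(---|\*\*\*)[ \t])', re.MULTILINE)
    payload = ("Fix a corner case in the directory walker.\n"
               "\n"
               "diff -r 1234abcd commands.py\n"
               "--- a/commands.py\n"
               "+++ b/commands.py\n")
    m = example_diffre.search(payload)
    # text before the match would become part of the commit message
    return payload[:m.start(0)], payload[m.start(0):]
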
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For a remote repository, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, source)
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        ui.status(_("no changes found\n"))
        return

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            fname = cleanup = write_bundle(cg, fname, compress=other.local())
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        revs = None
        if opts['rev']:
            revs = [other.lookup(rev) for rev in opts['rev']]
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(ui, ui, other, prev, n)
                ui.write("\n")
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)

def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory.  If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    ui.setconfig_remoteopts(**opts)
    hg.repository(ui, dest, create=1)

def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories.  To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     head='(?:.*/|)'):
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)

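# Illustrative sketch, not part of the original source: why the locate
# docstring above recommends pairing "-0" with xargs.  Null-separated output
# splits unambiguously even when file names contain spaces; the names below
# are invented.
def _example_null_separated_names():
    names = ['plain.txt', 'name with spaces.txt']
    stream = '\0'.join(names) + '\0'   # what "hg locate -0" emits
    parsed = [n for n in stream.split('\0') if n]
    assert parsed == names
    return parsed
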
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.
    """
    class dui(object):
        # Implement and delegate some ui protocol.  Save hunks of
        # output for later display in the desired order.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}
            self.header = {}
        def bump(self, rev):
            self.rev = rev
            self.hunk[rev] = []
            self.header[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].append(args)
        def write_header(self, *args):
            self.header[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            return getattr(self.ui, key)

    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    displayer = show_changeset(ui, repo, opts)
    for st, rev, fns in changeiter:
        if st == 'window':
            du = dui(ui)
            displayer.ui = du
        elif st == 'add':
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parents(changenode)
                       if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if opts['keyword']:
                changes = getchange(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            br = None
            if opts['branches']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            displayer.show(rev, brinfo=br)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(du, du, repo, prev, changenode, match=matchfn)
                du.write("\n\n")
        elif st == 'iter':
            if count == limit: break
            if du.header[rev]:
                for args in du.header[rev]:
                    ui.write_header(*args)
            if du.hunk[rev]:
                count += 1
                for args in du.hunk[rev]:
                    ui.write(*args)

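# Illustrative sketch, not part of the original file: a stripped-down version
# of the dui buffering idea above -- collect output per revision and replay
# it later in whatever order the caller wants (here, newest first).
def _example_buffered_output():
    hunks = {}
    def write(rev, text):
        hunks.setdefault(rev, []).append(text)
    write(0, "changeset 0\n")
    write(1, "changeset 1\n")
    revs = hunks.keys()
    revs.sort()
    revs.reverse()
    # replay the buffered hunks once the whole window has been processed
    return ''.join([''.join(hunks[r]) for r in revs])
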
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the tip is used.
    """
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            n = change[0]
        except hg.RepoError:
            n = repo.manifest.lookup(rev)
    else:
        n = repo.manifest.tip()
    m = repo.manifest.read(n)
    mf = repo.manifest.readflags(n)
    files = m.keys()
    files.sort()

    for f in files:
        ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))

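# Illustrative sketch, not part of the original file: the "x and a or b"
# idiom used by manifest() above (and elsewhere in this module) predates
# Python 2.5's conditional expression.  It only works because "755" is
# always true; a falsy middle value would silently fall through to "644".
def _example_mode_string(is_exec):
    return is_exec and "755" or "644"
# e.g. _example_mode_string(1) -> "755", _example_mode_string(0) -> "644"
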
def merge(ui, repo, node=None, **opts):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.
    """
    return doupdate(ui, repo, node=node, merge=True, **opts)

def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    ui.setconfig_remoteopts(**opts)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]

    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)
        if opts['patch']:
            prev = (parents and parents[0]) or nullid
            dodiff(ui, ui, repo, prev, n)
            ui.write("\n")

def parents(ui, repo, rev=None, branches=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    if rev:
        p = repo.changelog.parents(repo.lookup(rev))
    else:
        p = repo.dirstate.parents()

    br = None
    if branches is not None:
        br = repo.branchlookup(p)
    displayer = show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n, brinfo=br)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))

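# Illustrative sketch, not part of the original source: what a [paths]
# section like the one described above could look like.  The alias names and
# locations are invented; stdlib ConfigParser is used here purely for
# demonstration (Mercurial has its own config reader).
def _example_paths_section():
    import ConfigParser, cStringIO
    hgrc = ("[paths]\n"
            "default = http://www.selenic.com/hg\n"
            "backup = /mnt/backup/repo\n")
    cp = ConfigParser.ConfigParser()
    cp.readfp(cStringIO.StringIO(hgrc))
    return cp.items('paths')
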
def postincoming(ui, repo, modheads, optupdate):
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return doupdate(ui, repo)
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

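# Illustrative sketch, not part of the original file: the decision table that
# postincoming() above implements -- update the working directory only when
# exactly one head was added, otherwise print a hint.
def _example_postincoming_hint(modheads, optupdate):
    if modheads == 0:
        return 'nothing new'
    if optupdate and modheads == 1:
        return 'update working directory'
    if modheads > 1:
        return "suggest 'hg heads' and 'hg merge'"
    return "suggest 'hg update'"
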
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path, or one specified with remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/ssh_config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression off
        Host *
          Compression on
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev'] and not other.local():
        raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
    elif opts['rev']:
        revs = [other.lookup(rev) for rev in opts['rev']]
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])

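# Illustrative sketch, not part of the original source: why the pull
# docstring above uses a double slash for absolute ssh paths.  The first
# slash only separates host from path; a second one makes the remote path
# absolute.  This toy parser ignores user@ and :port.
def _example_ssh_path(url):
    # 'ssh://example.com/repo'      -> ('example.com', 'repo')      relative
    # 'ssh://example.com//tmp/repo' -> ('example.com', '/tmp/repo') absolute
    rest = url[len('ssh://'):]
    parts = rest.split('/', 1)
    host = parts[0]
    path = len(parts) > 1 and parts[1] or ''
    return host, path
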
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is possible, too, if this
    feature is enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0

def rawcommit(ui, repo, *flist, **rc):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Low-level commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release; please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = rc['message']
    if not message and rc['logfile']:
        try:
            message = open(rc['logfile']).read()
        except IOError:
            pass
    if not message and not rc['logfile']:
        raise util.Abort(_("missing commit message"))

    files = relpath(repo, list(flist))
    if rc['files']:
        files += open(rc['files']).read().splitlines()

    rc['parent'] = map(repo.lookup, rc['parent'])

    try:
        repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return repo.verify()
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion.  If
    dest is a directory, copies are put in that directory.  If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory.  If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record renamed files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs

def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state.  If the working directory has two parents, you must
    explicitly specify the revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to
    their contents as of a specific revision. This can be helpful to
    "roll back" some or all of a change that should not have been
    committed.

    Revert modifies the working directory.  It does not commit any
    changes, or change the parent of the working directory.  If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated.  If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    parent, p2 = repo.dirstate.parents()
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    elif p2 != nullid:
        raise util.Abort(_('working dir has two parents; '
                           'you must specify the revision to revert to'))
    else:
        node = parent
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.changes(match=names.has_key, wlock=wlock)
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    shutil.copyfile(rel, bakname)
                    shutil.copymode(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n' % rel))
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n' % rel))
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
                        show_stats=False)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r

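# Illustrative sketch, not part of the original source: the dispatch-table
# plus for/else pattern that revert() uses above, reduced to a standalone
# example with invented file-state sets.  The else branch of the for loop
# only runs when no break fired, i.e. the file was in none of the tables.
def _example_dispatch_table(abs, modified, added):
    actions = []
    disptable = (
        # (file state set, action name)
        (modified, 'revert'),
        (added, 'forget'),
        )
    for table, action in disptable:
        if abs not in table:
            continue
        actions.append(action)
        break
    else:
        actions.append('unchanged')
    return actions
# e.g. _example_dispatch_table('f', {'f': 1}, {}) -> ['revert']
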
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr.  Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_('no repo found'))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_('no repo found'))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = hgweb.server.create_server(ui, repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

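# Illustrative sketch, not part of the original file: the pipe handshake used
# by serve() above.  The parent blocks on os.read(rfd, 1) until the daemonized
# child writes a byte to say the server is up.  Reduced to a single process
# here so the example stays side-effect free.
def _example_daemon_handshake():
    import os
    rfd, wfd = os.pipe()
    os.write(wfd, 'y')         # child: signal "server is ready"
    os.close(wfd)
    ready = os.read(rfd, 1)    # parent: returns once the byte arrives
    os.close(rfd)
    return ready == 'y'
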
2583 def status(ui, repo, *pats, **opts):
2583 def status(ui, repo, *pats, **opts):
2584 """show changed files in the working directory
2584 """show changed files in the working directory
2585
2585
2586 Show changed files in the repository. If names are
2586 Show changed files in the repository. If names are
2587 given, only files that match are shown.
2587 given, only files that match are shown.
2588
2588
2589 The codes used to show the status of files are:
2589 The codes used to show the status of files are:
2590 M = modified
2590 M = modified
2591 A = added
2591 A = added
2592 R = removed
2592 R = removed
2593 ! = deleted, but still tracked
2593 ! = deleted, but still tracked
2594 ? = not tracked
2594 ? = not tracked
2595 I = ignored (not shown by default)
2595 I = ignored (not shown by default)
2596 """
2596 """
2597
2597
2598 show_ignored = opts['ignored'] and True or False
2598 show_ignored = opts['ignored'] and True or False
2599 files, matchfn, anypats = matchpats(repo, pats, opts)
2599 files, matchfn, anypats = matchpats(repo, pats, opts)
2600 cwd = (pats and repo.getcwd()) or ''
2600 cwd = (pats and repo.getcwd()) or ''
2601 modified, added, removed, deleted, unknown, ignored = [
2601 modified, added, removed, deleted, unknown, ignored = [
2602 [util.pathto(cwd, x) for x in n]
2602 [util.pathto(cwd, x) for x in n]
2603 for n in repo.changes(files=files, match=matchfn,
2603 for n in repo.changes(files=files, match=matchfn,
2604 show_ignored=show_ignored)]
2604 show_ignored=show_ignored)]
2605
2605
2606 changetypes = [('modified', 'M', modified),
2606 changetypes = [('modified', 'M', modified),
2607 ('added', 'A', added),
2607 ('added', 'A', added),
2608 ('removed', 'R', removed),
2608 ('removed', 'R', removed),
2609 ('deleted', '!', deleted),
2609 ('deleted', '!', deleted),
2610 ('unknown', '?', unknown),
2610 ('unknown', '?', unknown),
2611 ('ignored', 'I', ignored)]
2611 ('ignored', 'I', ignored)]
2612
2612
2613 end = opts['print0'] and '\0' or '\n'
2613 end = opts['print0'] and '\0' or '\n'
2614
2614
2615 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2615 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2616 or changetypes):
2616 or changetypes):
2617 if opts['no_status']:
2617 if opts['no_status']:
2618 format = "%%s%s" % end
2618 format = "%%s%s" % end
2619 else:
2619 else:
2620 format = "%s %%s%s" % (char, end)
2620 format = "%s %%s%s" % (char, end)
2621
2621
2622 for f in changes:
2622 for f in changes:
2623 ui.write(format % f)
2623 ui.write(format % f)
2624
2624
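The loop above builds one output line per file from the status character and the chosen terminator; the doubled percent sign escapes the placeholder that later receives the filename. A small illustration of how the two format shapes expand (the filename here is invented):

end = '\0'                                # with --print0; '\n' otherwise
char = 'M'
with_status = "%s %%s%s" % (char, end)    # -> 'M %s\x00'
no_status = "%%s%s" % end                 # -> '%s\x00'
print repr(with_status % "some/file.py")  # 'M some/file.py\x00'
print repr(no_status % "some/file.py")    # 'some/file.py\x00'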
2625 def tag(ui, repo, name, rev_=None, **opts):
2625 def tag(ui, repo, name, rev_=None, **opts):
2626 """add a tag for the current tip or a given revision
2626 """add a tag for the current tip or a given revision
2627
2627
2628 Name a particular revision using <name>.
2628 Name a particular revision using <name>.
2629
2629
2630 Tags are used to name particular revisions of the repository and are
2630 Tags are used to name particular revisions of the repository and are
2631 very useful for comparing different revisions, for going back to significant
2631 very useful for comparing different revisions, for going back to significant
2632 earlier versions, or for marking branch points as releases, etc.
2632 earlier versions, or for marking branch points as releases, etc.
2633
2633
2634 If no revision is given, the tip is used.
2634 If no revision is given, the tip is used.
2635
2635
2636 To facilitate version control, distribution, and merging of tags,
2636 To facilitate version control, distribution, and merging of tags,
2637 they are stored as a file named ".hgtags" which is managed
2637 they are stored as a file named ".hgtags" which is managed
2638 similarly to other project files and can be hand-edited if
2638 similarly to other project files and can be hand-edited if
2639 necessary. The file '.hg/localtags' is used for local tags (not
2639 necessary. The file '.hg/localtags' is used for local tags (not
2640 shared among repositories).
2640 shared among repositories).
2641 """
2641 """
2642 if name == "tip":
2642 if name == "tip":
2643 raise util.Abort(_("the name 'tip' is reserved"))
2643 raise util.Abort(_("the name 'tip' is reserved"))
2644 if rev_ is not None:
2644 if rev_ is not None:
2645 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2645 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2646 "please use 'hg tag [-r REV] NAME' instead\n"))
2646 "please use 'hg tag [-r REV] NAME' instead\n"))
2647 if opts['rev']:
2647 if opts['rev']:
2648 raise util.Abort(_("use only one form to specify the revision"))
2648 raise util.Abort(_("use only one form to specify the revision"))
2649 if opts['rev']:
2649 if opts['rev']:
2650 rev_ = opts['rev']
2650 rev_ = opts['rev']
2651 if rev_:
2651 if rev_:
2652 r = hex(repo.lookup(rev_))
2652 r = hex(repo.lookup(rev_))
2653 else:
2653 else:
2654 r = hex(repo.changelog.tip())
2654 r = hex(repo.changelog.tip())
2655
2655
2656 -    disallowed = (revrangesep, '\r', '\n')
2657 -    for c in disallowed:
2658 -        if c in name:
2659 -            raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2660 -
2661 -    repo.hook('pretag', throw=True, node=r, tag=name,
2662 -              local=int(not not opts['local']))
2663 -
2664 -    if opts['local']:
2665 -        repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2666 -        repo.hook('tag', node=r, tag=name, local=1)
2667 -        return
2668 -
2669 -    for x in repo.changes():
2670 -        if ".hgtags" in x:
2671 -            raise util.Abort(_("working copy of .hgtags is changed "
2672 -                               "(please commit .hgtags manually)"))
2673 -
2674 -    repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2675 -    if repo.dirstate.state(".hgtags") == '?':
2676 -        repo.add([".hgtags"])
2677 -
2678 -    message = (opts['message'] or
2679 -               _("Added tag %s for changeset %s") % (name, r))
2680 -    try:
2681 -        repo.commit([".hgtags"], message, opts['user'], opts['date'])
2682 -        repo.hook('tag', node=r, tag=name, local=0)
2683 -    except ValueError, inst:
2684 -        raise util.Abort(str(inst))
2685 -
2656 +    repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2657 +             opts['date'])
2658
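This is the heart of the change: the character checks, hook calls, localtags/.hgtags writes and the tag commit shown in the removed block are now reached through a single repo.tag() call. A rough reconstruction of what a localrepository.tag method matching that call signature could look like, pieced together from the removed code; the real method in localrepo.py may differ in detail (for instance, the tag-name character check and the ValueError handling could sit on either side of the boundary), and it assumes localrepo.py's existing util and _ imports:

def tag(self, name, node, local, message, user, date):
    # hypothetical sketch, not the verbatim localrepository implementation
    self.hook('pretag', throw=True, node=node, tag=name,
              local=int(not not local))
    if local:
        self.opener("localtags", "a").write("%s %s\n" % (node, name))
        self.hook('tag', node=node, tag=name, local=1)
        return
    for x in self.changes():
        if ".hgtags" in x:
            raise util.Abort(_("working copy of .hgtags is changed "
                               "(please commit .hgtags manually)"))
    self.wfile(".hgtags", "ab").write("%s %s\n" % (node, name))
    if self.dirstate.state(".hgtags") == '?':
        self.add([".hgtags"])
    self.commit([".hgtags"],
                message or _("Added tag %s for changeset %s") % (name, node),
                user, date)
    self.hook('tag', node=node, tag=name, local=0)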
2686 def tags(ui, repo):
2659 def tags(ui, repo):
2687 """list repository tags
2660 """list repository tags
2688
2661
2689 List the repository tags.
2662 List the repository tags.
2690
2663
2691 This lists both regular and local tags.
2664 This lists both regular and local tags.
2692 """
2665 """
2693
2666
2694 l = repo.tagslist()
2667 l = repo.tagslist()
2695 l.reverse()
2668 l.reverse()
2696 for t, n in l:
2669 for t, n in l:
2697 try:
2670 try:
2698 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2671 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2699 except KeyError:
2672 except KeyError:
2700 r = " ?:?"
2673 r = " ?:?"
2701 if ui.quiet:
2674 if ui.quiet:
2702 ui.write("%s\n" % t)
2675 ui.write("%s\n" % t)
2703 else:
2676 else:
2704 ui.write("%-30s %s\n" % (t, r))
2677 ui.write("%-30s %s\n" % (t, r))
2705
2678
2706 def tip(ui, repo, **opts):
2679 def tip(ui, repo, **opts):
2707 """show the tip revision
2680 """show the tip revision
2708
2681
2709 Show the tip revision.
2682 Show the tip revision.
2710 """
2683 """
2711 n = repo.changelog.tip()
2684 n = repo.changelog.tip()
2712 br = None
2685 br = None
2713 if opts['branches']:
2686 if opts['branches']:
2714 br = repo.branchlookup([n])
2687 br = repo.branchlookup([n])
2715 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2688 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2716 if opts['patch']:
2689 if opts['patch']:
2717 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2690 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2718
2691
2719 def unbundle(ui, repo, fname, **opts):
2692 def unbundle(ui, repo, fname, **opts):
2720 """apply a changegroup file
2693 """apply a changegroup file
2721
2694
2722 Apply a compressed changegroup file generated by the bundle
2695 Apply a compressed changegroup file generated by the bundle
2723 command.
2696 command.
2724 """
2697 """
2725 f = urllib.urlopen(fname)
2698 f = urllib.urlopen(fname)
2726
2699
2727 header = f.read(6)
2700 header = f.read(6)
2728 if not header.startswith("HG"):
2701 if not header.startswith("HG"):
2729 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2702 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2730 elif not header.startswith("HG10"):
2703 elif not header.startswith("HG10"):
2731 raise util.Abort(_("%s: unknown bundle version") % fname)
2704 raise util.Abort(_("%s: unknown bundle version") % fname)
2732 elif header == "HG10BZ":
2705 elif header == "HG10BZ":
2733 def generator(f):
2706 def generator(f):
2734 zd = bz2.BZ2Decompressor()
2707 zd = bz2.BZ2Decompressor()
2735 zd.decompress("BZ")
2708 zd.decompress("BZ")
2736 for chunk in f:
2709 for chunk in f:
2737 yield zd.decompress(chunk)
2710 yield zd.decompress(chunk)
2738 elif header == "HG10UN":
2711 elif header == "HG10UN":
2739 def generator(f):
2712 def generator(f):
2740 for chunk in f:
2713 for chunk in f:
2741 yield chunk
2714 yield chunk
2742 else:
2715 else:
2743 raise util.Abort(_("%s: unknown bundle compression type")
2716 raise util.Abort(_("%s: unknown bundle compression type")
2744 % fname)
2717 % fname)
2745 gen = generator(util.filechunkiter(f, 4096))
2718 gen = generator(util.filechunkiter(f, 4096))
2746 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
2719 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
2747 return postincoming(ui, repo, modheads, opts['update'])
2720 return postincoming(ui, repo, modheads, opts['update'])
2748
2721
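The "HG10BZ" branch above implies a small overlap in the bundle format: after the four-byte "HG10" tag the raw bz2 stream begins, and its own "BZ" magic doubles as the compression marker in the six-byte header. Reading six bytes therefore eats the start of the compressed stream, which is why the decompressor is primed with a literal "BZ" before the rest of the file is fed in. A self-contained sketch of the same round trip (the payload string is invented, not real changegroup data):

import bz2

data = "changegroup data"                 # stand-in payload
bundle = "HG10" + bz2.compress(data)      # bz2 output starts with 'BZ'...
assert bundle[:6] == "HG10BZ"             # ...so the 6-byte header reads 'HG10BZ'

zd = bz2.BZ2Decompressor()
zd.decompress("BZ")                       # re-insert the magic eaten by the header read
print zd.decompress(bundle[6:])           # -> 'changegroup data'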
2749 def undo(ui, repo):
2722 def undo(ui, repo):
2750 """undo the last commit or pull (DEPRECATED)
2723 """undo the last commit or pull (DEPRECATED)
2751
2724
2752 (DEPRECATED)
2725 (DEPRECATED)
2753 This command is now deprecated and will be removed in a future
2726 This command is now deprecated and will be removed in a future
2754 release. Please use the rollback command instead. For usage
2727 release. Please use the rollback command instead. For usage
2755 instructions, see the rollback command.
2728 instructions, see the rollback command.
2756 """
2729 """
2757 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2730 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2758 repo.rollback()
2731 repo.rollback()
2759
2732
2760 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2733 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2761 branch=None, **opts):
2734 branch=None, **opts):
2762 """update or merge working directory
2735 """update or merge working directory
2763
2736
2764 Update the working directory to the specified revision.
2737 Update the working directory to the specified revision.
2765
2738
2766 If there are no outstanding changes in the working directory and
2739 If there are no outstanding changes in the working directory and
2767 there is a linear relationship between the current version and the
2740 there is a linear relationship between the current version and the
2768 requested version, the result is the requested version.
2741 requested version, the result is the requested version.
2769
2742
2770 To merge the working directory with another revision, use the
2743 To merge the working directory with another revision, use the
2771 merge command.
2744 merge command.
2772
2745
2773 By default, update will refuse to run if doing so would require
2746 By default, update will refuse to run if doing so would require
2774 merging or discarding local changes.
2747 merging or discarding local changes.
2775 """
2748 """
2776 if merge:
2749 if merge:
2777 ui.warn(_('(the -m/--merge option is deprecated; '
2750 ui.warn(_('(the -m/--merge option is deprecated; '
2778 'use the merge command instead)\n'))
2751 'use the merge command instead)\n'))
2779 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2752 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2780
2753
2781 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2754 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2782 branch=None, **opts):
2755 branch=None, **opts):
2783 if branch:
2756 if branch:
2784 br = repo.branchlookup(branch=branch)
2757 br = repo.branchlookup(branch=branch)
2785 found = []
2758 found = []
2786 for x in br:
2759 for x in br:
2787 if branch in br[x]:
2760 if branch in br[x]:
2788 found.append(x)
2761 found.append(x)
2789 if len(found) > 1:
2762 if len(found) > 1:
2790 ui.warn(_("Found multiple heads for %s\n") % branch)
2763 ui.warn(_("Found multiple heads for %s\n") % branch)
2791 for x in found:
2764 for x in found:
2792 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2765 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2793 return 1
2766 return 1
2794 if len(found) == 1:
2767 if len(found) == 1:
2795 node = found[0]
2768 node = found[0]
2796 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2769 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2797 else:
2770 else:
2798 ui.warn(_("branch %s not found\n") % (branch))
2771 ui.warn(_("branch %s not found\n") % (branch))
2799 return 1
2772 return 1
2800 else:
2773 else:
2801 node = node and repo.lookup(node) or repo.changelog.tip()
2774 node = node and repo.lookup(node) or repo.changelog.tip()
2802 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2775 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2803
2776
2804 def verify(ui, repo):
2777 def verify(ui, repo):
2805 """verify the integrity of the repository
2778 """verify the integrity of the repository
2806
2779
2807 Verify the integrity of the current repository.
2780 Verify the integrity of the current repository.
2808
2781
2809 This will perform an extensive check of the repository's
2782 This will perform an extensive check of the repository's
2810 integrity, validating the hashes and checksums of each entry in
2783 integrity, validating the hashes and checksums of each entry in
2811 the changelog, manifest, and tracked files, as well as the
2784 the changelog, manifest, and tracked files, as well as the
2812 integrity of their crosslinks and indices.
2785 integrity of their crosslinks and indices.
2813 """
2786 """
2814 return repo.verify()
2787 return repo.verify()
2815
2788
2816 # Command options and aliases are listed here, alphabetically
2789 # Command options and aliases are listed here, alphabetically
2817
2790
2818 table = {
2791 table = {
2819 "^add":
2792 "^add":
2820 (add,
2793 (add,
2821 [('I', 'include', [], _('include names matching the given patterns')),
2794 [('I', 'include', [], _('include names matching the given patterns')),
2822 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2795 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2823 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2796 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2824 _('hg add [OPTION]... [FILE]...')),
2797 _('hg add [OPTION]... [FILE]...')),
2825 "debugaddremove|addremove":
2798 "debugaddremove|addremove":
2826 (addremove,
2799 (addremove,
2827 [('I', 'include', [], _('include names matching the given patterns')),
2800 [('I', 'include', [], _('include names matching the given patterns')),
2828 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2801 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2829 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2802 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2830 _('hg addremove [OPTION]... [FILE]...')),
2803 _('hg addremove [OPTION]... [FILE]...')),
2831 "^annotate":
2804 "^annotate":
2832 (annotate,
2805 (annotate,
2833 [('r', 'rev', '', _('annotate the specified revision')),
2806 [('r', 'rev', '', _('annotate the specified revision')),
2834 ('a', 'text', None, _('treat all files as text')),
2807 ('a', 'text', None, _('treat all files as text')),
2835 ('u', 'user', None, _('list the author')),
2808 ('u', 'user', None, _('list the author')),
2836 ('d', 'date', None, _('list the date')),
2809 ('d', 'date', None, _('list the date')),
2837 ('n', 'number', None, _('list the revision number (default)')),
2810 ('n', 'number', None, _('list the revision number (default)')),
2838 ('c', 'changeset', None, _('list the changeset')),
2811 ('c', 'changeset', None, _('list the changeset')),
2839 ('I', 'include', [], _('include names matching the given patterns')),
2812 ('I', 'include', [], _('include names matching the given patterns')),
2840 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2813 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2841 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2814 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2842 "archive":
2815 "archive":
2843 (archive,
2816 (archive,
2844 [('', 'no-decode', None, _('do not pass files through decoders')),
2817 [('', 'no-decode', None, _('do not pass files through decoders')),
2845 ('p', 'prefix', '', _('directory prefix for files in archive')),
2818 ('p', 'prefix', '', _('directory prefix for files in archive')),
2846 ('r', 'rev', '', _('revision to distribute')),
2819 ('r', 'rev', '', _('revision to distribute')),
2847 ('t', 'type', '', _('type of distribution to create')),
2820 ('t', 'type', '', _('type of distribution to create')),
2848 ('I', 'include', [], _('include names matching the given patterns')),
2821 ('I', 'include', [], _('include names matching the given patterns')),
2849 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2822 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2850 _('hg archive [OPTION]... DEST')),
2823 _('hg archive [OPTION]... DEST')),
2851 "backout":
2824 "backout":
2852 (backout,
2825 (backout,
2853 [('', 'merge', None,
2826 [('', 'merge', None,
2854 _('merge with old dirstate parent after backout')),
2827 _('merge with old dirstate parent after backout')),
2855 ('m', 'message', '', _('use <text> as commit message')),
2828 ('m', 'message', '', _('use <text> as commit message')),
2856 ('l', 'logfile', '', _('read commit message from <file>')),
2829 ('l', 'logfile', '', _('read commit message from <file>')),
2857 ('d', 'date', '', _('record datecode as commit date')),
2830 ('d', 'date', '', _('record datecode as commit date')),
2858 ('u', 'user', '', _('record user as committer')),
2831 ('u', 'user', '', _('record user as committer')),
2859 ('I', 'include', [], _('include names matching the given patterns')),
2832 ('I', 'include', [], _('include names matching the given patterns')),
2860 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2833 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2861 _('hg backout [OPTION]... REV')),
2834 _('hg backout [OPTION]... REV')),
2862 "bundle":
2835 "bundle":
2863 (bundle,
2836 (bundle,
2864 [('f', 'force', None,
2837 [('f', 'force', None,
2865 _('run even when remote repository is unrelated'))],
2838 _('run even when remote repository is unrelated'))],
2866 _('hg bundle FILE DEST')),
2839 _('hg bundle FILE DEST')),
2867 "cat":
2840 "cat":
2868 (cat,
2841 (cat,
2869 [('o', 'output', '', _('print output to file with formatted name')),
2842 [('o', 'output', '', _('print output to file with formatted name')),
2870 ('r', 'rev', '', _('print the given revision')),
2843 ('r', 'rev', '', _('print the given revision')),
2871 ('I', 'include', [], _('include names matching the given patterns')),
2844 ('I', 'include', [], _('include names matching the given patterns')),
2872 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2845 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2873 _('hg cat [OPTION]... FILE...')),
2846 _('hg cat [OPTION]... FILE...')),
2874 "^clone":
2847 "^clone":
2875 (clone,
2848 (clone,
2876 [('U', 'noupdate', None, _('do not update the new working directory')),
2849 [('U', 'noupdate', None, _('do not update the new working directory')),
2877 ('r', 'rev', [],
2850 ('r', 'rev', [],
2878 _('a changeset you would like to have after cloning')),
2851 _('a changeset you would like to have after cloning')),
2879 ('', 'pull', None, _('use pull protocol to copy metadata')),
2852 ('', 'pull', None, _('use pull protocol to copy metadata')),
2880 ('e', 'ssh', '', _('specify ssh command to use')),
2853 ('e', 'ssh', '', _('specify ssh command to use')),
2881 ('', 'remotecmd', '',
2854 ('', 'remotecmd', '',
2882 _('specify hg command to run on the remote side'))],
2855 _('specify hg command to run on the remote side'))],
2883 _('hg clone [OPTION]... SOURCE [DEST]')),
2856 _('hg clone [OPTION]... SOURCE [DEST]')),
2884 "^commit|ci":
2857 "^commit|ci":
2885 (commit,
2858 (commit,
2886 [('A', 'addremove', None,
2859 [('A', 'addremove', None,
2887 _('mark new/missing files as added/removed before committing')),
2860 _('mark new/missing files as added/removed before committing')),
2888 ('m', 'message', '', _('use <text> as commit message')),
2861 ('m', 'message', '', _('use <text> as commit message')),
2889 ('l', 'logfile', '', _('read the commit message from <file>')),
2862 ('l', 'logfile', '', _('read the commit message from <file>')),
2890 ('d', 'date', '', _('record datecode as commit date')),
2863 ('d', 'date', '', _('record datecode as commit date')),
2891 ('u', 'user', '', _('record user as committer')),
2864 ('u', 'user', '', _('record user as committer')),
2892 ('I', 'include', [], _('include names matching the given patterns')),
2865 ('I', 'include', [], _('include names matching the given patterns')),
2893 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2866 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2894 _('hg commit [OPTION]... [FILE]...')),
2867 _('hg commit [OPTION]... [FILE]...')),
2895 "copy|cp":
2868 "copy|cp":
2896 (copy,
2869 (copy,
2897 [('A', 'after', None, _('record a copy that has already occurred')),
2870 [('A', 'after', None, _('record a copy that has already occurred')),
2898 ('f', 'force', None,
2871 ('f', 'force', None,
2899 _('forcibly copy over an existing managed file')),
2872 _('forcibly copy over an existing managed file')),
2900 ('I', 'include', [], _('include names matching the given patterns')),
2873 ('I', 'include', [], _('include names matching the given patterns')),
2901 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2874 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2902 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2875 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2903 _('hg copy [OPTION]... [SOURCE]... DEST')),
2876 _('hg copy [OPTION]... [SOURCE]... DEST')),
2904 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2877 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2905 "debugcomplete":
2878 "debugcomplete":
2906 (debugcomplete,
2879 (debugcomplete,
2907 [('o', 'options', None, _('show the command options'))],
2880 [('o', 'options', None, _('show the command options'))],
2908 _('debugcomplete [-o] CMD')),
2881 _('debugcomplete [-o] CMD')),
2909 "debugrebuildstate":
2882 "debugrebuildstate":
2910 (debugrebuildstate,
2883 (debugrebuildstate,
2911 [('r', 'rev', '', _('revision to rebuild to'))],
2884 [('r', 'rev', '', _('revision to rebuild to'))],
2912 _('debugrebuildstate [-r REV] [REV]')),
2885 _('debugrebuildstate [-r REV] [REV]')),
2913 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2886 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2914 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2887 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2915 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2888 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2916 "debugstate": (debugstate, [], _('debugstate')),
2889 "debugstate": (debugstate, [], _('debugstate')),
2917 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2890 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2918 "debugindex": (debugindex, [], _('debugindex FILE')),
2891 "debugindex": (debugindex, [], _('debugindex FILE')),
2919 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2892 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2920 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2893 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2921 "debugwalk":
2894 "debugwalk":
2922 (debugwalk,
2895 (debugwalk,
2923 [('I', 'include', [], _('include names matching the given patterns')),
2896 [('I', 'include', [], _('include names matching the given patterns')),
2924 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2897 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2925 _('debugwalk [OPTION]... [FILE]...')),
2898 _('debugwalk [OPTION]... [FILE]...')),
2926 "^diff":
2899 "^diff":
2927 (diff,
2900 (diff,
2928 [('r', 'rev', [], _('revision')),
2901 [('r', 'rev', [], _('revision')),
2929 ('a', 'text', None, _('treat all files as text')),
2902 ('a', 'text', None, _('treat all files as text')),
2930 ('p', 'show-function', None,
2903 ('p', 'show-function', None,
2931 _('show which function each change is in')),
2904 _('show which function each change is in')),
2932 ('w', 'ignore-all-space', None,
2905 ('w', 'ignore-all-space', None,
2933 _('ignore white space when comparing lines')),
2906 _('ignore white space when comparing lines')),
2934 ('b', 'ignore-space-change', None,
2907 ('b', 'ignore-space-change', None,
2935 _('ignore changes in the amount of white space')),
2908 _('ignore changes in the amount of white space')),
2936 ('B', 'ignore-blank-lines', None,
2909 ('B', 'ignore-blank-lines', None,
2937 _('ignore changes whose lines are all blank')),
2910 _('ignore changes whose lines are all blank')),
2938 ('I', 'include', [], _('include names matching the given patterns')),
2911 ('I', 'include', [], _('include names matching the given patterns')),
2939 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2912 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2940 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2913 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2941 "^export":
2914 "^export":
2942 (export,
2915 (export,
2943 [('o', 'output', '', _('print output to file with formatted name')),
2916 [('o', 'output', '', _('print output to file with formatted name')),
2944 ('a', 'text', None, _('treat all files as text')),
2917 ('a', 'text', None, _('treat all files as text')),
2945 ('', 'switch-parent', None, _('diff against the second parent'))],
2918 ('', 'switch-parent', None, _('diff against the second parent'))],
2946 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2919 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2947 "debugforget|forget":
2920 "debugforget|forget":
2948 (forget,
2921 (forget,
2949 [('I', 'include', [], _('include names matching the given patterns')),
2922 [('I', 'include', [], _('include names matching the given patterns')),
2950 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2923 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2951 _('hg forget [OPTION]... FILE...')),
2924 _('hg forget [OPTION]... FILE...')),
2952 "grep":
2925 "grep":
2953 (grep,
2926 (grep,
2954 [('0', 'print0', None, _('end fields with NUL')),
2927 [('0', 'print0', None, _('end fields with NUL')),
2955 ('', 'all', None, _('print all revisions that match')),
2928 ('', 'all', None, _('print all revisions that match')),
2956 ('i', 'ignore-case', None, _('ignore case when matching')),
2929 ('i', 'ignore-case', None, _('ignore case when matching')),
2957 ('l', 'files-with-matches', None,
2930 ('l', 'files-with-matches', None,
2958 _('print only filenames and revs that match')),
2931 _('print only filenames and revs that match')),
2959 ('n', 'line-number', None, _('print matching line numbers')),
2932 ('n', 'line-number', None, _('print matching line numbers')),
2960 ('r', 'rev', [], _('search in given revision range')),
2933 ('r', 'rev', [], _('search in given revision range')),
2961 ('u', 'user', None, _('print user who committed change')),
2934 ('u', 'user', None, _('print user who committed change')),
2962 ('I', 'include', [], _('include names matching the given patterns')),
2935 ('I', 'include', [], _('include names matching the given patterns')),
2963 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2936 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2964 _('hg grep [OPTION]... PATTERN [FILE]...')),
2937 _('hg grep [OPTION]... PATTERN [FILE]...')),
2965 "heads":
2938 "heads":
2966 (heads,
2939 (heads,
2967 [('b', 'branches', None, _('show branches')),
2940 [('b', 'branches', None, _('show branches')),
2968 ('', 'style', '', _('display using template map file')),
2941 ('', 'style', '', _('display using template map file')),
2969 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2942 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2970 ('', 'template', '', _('display with template'))],
2943 ('', 'template', '', _('display with template'))],
2971 _('hg heads [-b] [-r <rev>]')),
2944 _('hg heads [-b] [-r <rev>]')),
2972 "help": (help_, [], _('hg help [COMMAND]')),
2945 "help": (help_, [], _('hg help [COMMAND]')),
2973 "identify|id": (identify, [], _('hg identify')),
2946 "identify|id": (identify, [], _('hg identify')),
2974 "import|patch":
2947 "import|patch":
2975 (import_,
2948 (import_,
2976 [('p', 'strip', 1,
2949 [('p', 'strip', 1,
2977 _('directory strip option for patch. This has the same\n'
2950 _('directory strip option for patch. This has the same\n'
2978 'meaning as the corresponding patch option')),
2951 'meaning as the corresponding patch option')),
2979 ('m', 'message', '', _('use <text> as commit message')),
2952 ('m', 'message', '', _('use <text> as commit message')),
2980 ('b', 'base', '', _('base path')),
2953 ('b', 'base', '', _('base path')),
2981 ('f', 'force', None,
2954 ('f', 'force', None,
2982 _('skip check for outstanding uncommitted changes'))],
2955 _('skip check for outstanding uncommitted changes'))],
2983 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2956 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2984 "incoming|in": (incoming,
2957 "incoming|in": (incoming,
2985 [('M', 'no-merges', None, _('do not show merges')),
2958 [('M', 'no-merges', None, _('do not show merges')),
2986 ('f', 'force', None,
2959 ('f', 'force', None,
2987 _('run even when remote repository is unrelated')),
2960 _('run even when remote repository is unrelated')),
2988 ('', 'style', '', _('display using template map file')),
2961 ('', 'style', '', _('display using template map file')),
2989 ('n', 'newest-first', None, _('show newest record first')),
2962 ('n', 'newest-first', None, _('show newest record first')),
2990 ('', 'bundle', '', _('file to store the bundles into')),
2963 ('', 'bundle', '', _('file to store the bundles into')),
2991 ('p', 'patch', None, _('show patch')),
2964 ('p', 'patch', None, _('show patch')),
2992 ('r', 'rev', [], _('a specific revision you would like to pull')),
2965 ('r', 'rev', [], _('a specific revision you would like to pull')),
2993 ('', 'template', '', _('display with template')),
2966 ('', 'template', '', _('display with template')),
2994 ('e', 'ssh', '', _('specify ssh command to use')),
2967 ('e', 'ssh', '', _('specify ssh command to use')),
2995 ('', 'remotecmd', '',
2968 ('', 'remotecmd', '',
2996 _('specify hg command to run on the remote side'))],
2969 _('specify hg command to run on the remote side'))],
2997 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2970 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2998 ' [--bundle FILENAME] [SOURCE]')),
2971 ' [--bundle FILENAME] [SOURCE]')),
2999 "^init":
2972 "^init":
3000 (init,
2973 (init,
3001 [('e', 'ssh', '', _('specify ssh command to use')),
2974 [('e', 'ssh', '', _('specify ssh command to use')),
3002 ('', 'remotecmd', '',
2975 ('', 'remotecmd', '',
3003 _('specify hg command to run on the remote side'))],
2976 _('specify hg command to run on the remote side'))],
3004 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2977 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3005 "locate":
2978 "locate":
3006 (locate,
2979 (locate,
3007 [('r', 'rev', '', _('search the repository as it stood at rev')),
2980 [('r', 'rev', '', _('search the repository as it stood at rev')),
3008 ('0', 'print0', None,
2981 ('0', 'print0', None,
3009 _('end filenames with NUL, for use with xargs')),
2982 _('end filenames with NUL, for use with xargs')),
3010 ('f', 'fullpath', None,
2983 ('f', 'fullpath', None,
3011 _('print complete paths from the filesystem root')),
2984 _('print complete paths from the filesystem root')),
3012 ('I', 'include', [], _('include names matching the given patterns')),
2985 ('I', 'include', [], _('include names matching the given patterns')),
3013 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2986 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3014 _('hg locate [OPTION]... [PATTERN]...')),
2987 _('hg locate [OPTION]... [PATTERN]...')),
3015 "^log|history":
2988 "^log|history":
3016 (log,
2989 (log,
3017 [('b', 'branches', None, _('show branches')),
2990 [('b', 'branches', None, _('show branches')),
3018 ('k', 'keyword', [], _('search for a keyword')),
2991 ('k', 'keyword', [], _('search for a keyword')),
3019 ('l', 'limit', '', _('limit number of changes displayed')),
2992 ('l', 'limit', '', _('limit number of changes displayed')),
3020 ('r', 'rev', [], _('show the specified revision or range')),
2993 ('r', 'rev', [], _('show the specified revision or range')),
3021 ('M', 'no-merges', None, _('do not show merges')),
2994 ('M', 'no-merges', None, _('do not show merges')),
3022 ('', 'style', '', _('display using template map file')),
2995 ('', 'style', '', _('display using template map file')),
3023 ('m', 'only-merges', None, _('show only merges')),
2996 ('m', 'only-merges', None, _('show only merges')),
3024 ('p', 'patch', None, _('show patch')),
2997 ('p', 'patch', None, _('show patch')),
3025 ('', 'template', '', _('display with template')),
2998 ('', 'template', '', _('display with template')),
3026 ('I', 'include', [], _('include names matching the given patterns')),
2999 ('I', 'include', [], _('include names matching the given patterns')),
3027 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3000 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3028 _('hg log [OPTION]... [FILE]')),
3001 _('hg log [OPTION]... [FILE]')),
3029 "manifest": (manifest, [], _('hg manifest [REV]')),
3002 "manifest": (manifest, [], _('hg manifest [REV]')),
3030 "merge":
3003 "merge":
3031 (merge,
3004 (merge,
3032 [('b', 'branch', '', _('merge with head of a specific branch')),
3005 [('b', 'branch', '', _('merge with head of a specific branch')),
3033 ('f', 'force', None, _('force a merge with outstanding changes'))],
3006 ('f', 'force', None, _('force a merge with outstanding changes'))],
3034 _('hg merge [-b TAG] [-f] [REV]')),
3007 _('hg merge [-b TAG] [-f] [REV]')),
3035 "outgoing|out": (outgoing,
3008 "outgoing|out": (outgoing,
3036 [('M', 'no-merges', None, _('do not show merges')),
3009 [('M', 'no-merges', None, _('do not show merges')),
3037 ('f', 'force', None,
3010 ('f', 'force', None,
3038 _('run even when remote repository is unrelated')),
3011 _('run even when remote repository is unrelated')),
3039 ('p', 'patch', None, _('show patch')),
3012 ('p', 'patch', None, _('show patch')),
3040 ('', 'style', '', _('display using template map file')),
3013 ('', 'style', '', _('display using template map file')),
3041 ('r', 'rev', [], _('a specific revision you would like to push')),
3014 ('r', 'rev', [], _('a specific revision you would like to push')),
3042 ('n', 'newest-first', None, _('show newest record first')),
3015 ('n', 'newest-first', None, _('show newest record first')),
3043 ('', 'template', '', _('display with template')),
3016 ('', 'template', '', _('display with template')),
3044 ('e', 'ssh', '', _('specify ssh command to use')),
3017 ('e', 'ssh', '', _('specify ssh command to use')),
3045 ('', 'remotecmd', '',
3018 ('', 'remotecmd', '',
3046 _('specify hg command to run on the remote side'))],
3019 _('specify hg command to run on the remote side'))],
3047 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3020 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3048 "^parents":
3021 "^parents":
3049 (parents,
3022 (parents,
3050 [('b', 'branches', None, _('show branches')),
3023 [('b', 'branches', None, _('show branches')),
3051 ('', 'style', '', _('display using template map file')),
3024 ('', 'style', '', _('display using template map file')),
3052 ('', 'template', '', _('display with template'))],
3025 ('', 'template', '', _('display with template'))],
3053 _('hg parents [-b] [REV]')),
3026 _('hg parents [-b] [REV]')),
3054 "paths": (paths, [], _('hg paths [NAME]')),
3027 "paths": (paths, [], _('hg paths [NAME]')),
3055 "^pull":
3028 "^pull":
3056 (pull,
3029 (pull,
3057 [('u', 'update', None,
3030 [('u', 'update', None,
3058 _('update the working directory to tip after pull')),
3031 _('update the working directory to tip after pull')),
3059 ('e', 'ssh', '', _('specify ssh command to use')),
3032 ('e', 'ssh', '', _('specify ssh command to use')),
3060 ('f', 'force', None,
3033 ('f', 'force', None,
3061 _('run even when remote repository is unrelated')),
3034 _('run even when remote repository is unrelated')),
3062 ('r', 'rev', [], _('a specific revision you would like to pull')),
3035 ('r', 'rev', [], _('a specific revision you would like to pull')),
3063 ('', 'remotecmd', '',
3036 ('', 'remotecmd', '',
3064 _('specify hg command to run on the remote side'))],
3037 _('specify hg command to run on the remote side'))],
3065 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3038 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3066 "^push":
3039 "^push":
3067 (push,
3040 (push,
3068 [('f', 'force', None, _('force push')),
3041 [('f', 'force', None, _('force push')),
3069 ('e', 'ssh', '', _('specify ssh command to use')),
3042 ('e', 'ssh', '', _('specify ssh command to use')),
3070 ('r', 'rev', [], _('a specific revision you would like to push')),
3043 ('r', 'rev', [], _('a specific revision you would like to push')),
3071 ('', 'remotecmd', '',
3044 ('', 'remotecmd', '',
3072 _('specify hg command to run on the remote side'))],
3045 _('specify hg command to run on the remote side'))],
3073 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3046 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3074 "debugrawcommit|rawcommit":
3047 "debugrawcommit|rawcommit":
3075 (rawcommit,
3048 (rawcommit,
3076 [('p', 'parent', [], _('parent')),
3049 [('p', 'parent', [], _('parent')),
3077 ('d', 'date', '', _('date code')),
3050 ('d', 'date', '', _('date code')),
3078 ('u', 'user', '', _('user')),
3051 ('u', 'user', '', _('user')),
3079 ('F', 'files', '', _('file list')),
3052 ('F', 'files', '', _('file list')),
3080 ('m', 'message', '', _('commit message')),
3053 ('m', 'message', '', _('commit message')),
3081 ('l', 'logfile', '', _('commit message file'))],
3054 ('l', 'logfile', '', _('commit message file'))],
3082 _('hg debugrawcommit [OPTION]... [FILE]...')),
3055 _('hg debugrawcommit [OPTION]... [FILE]...')),
3083 "recover": (recover, [], _('hg recover')),
3056 "recover": (recover, [], _('hg recover')),
3084 "^remove|rm":
3057 "^remove|rm":
3085 (remove,
3058 (remove,
3086 [('A', 'after', None, _('record remove that has already occurred')),
3059 [('A', 'after', None, _('record remove that has already occurred')),
3087 ('f', 'force', None, _('remove file even if modified')),
3060 ('f', 'force', None, _('remove file even if modified')),
3088 ('I', 'include', [], _('include names matching the given patterns')),
3061 ('I', 'include', [], _('include names matching the given patterns')),
3089 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3062 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3090 _('hg remove [OPTION]... FILE...')),
3063 _('hg remove [OPTION]... FILE...')),
3091 "rename|mv":
3064 "rename|mv":
3092 (rename,
3065 (rename,
3093 [('A', 'after', None, _('record a rename that has already occurred')),
3066 [('A', 'after', None, _('record a rename that has already occurred')),
3094 ('f', 'force', None,
3067 ('f', 'force', None,
3095 _('forcibly copy over an existing managed file')),
3068 _('forcibly copy over an existing managed file')),
3096 ('I', 'include', [], _('include names matching the given patterns')),
3069 ('I', 'include', [], _('include names matching the given patterns')),
3097 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3070 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3098 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3071 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3099 _('hg rename [OPTION]... SOURCE... DEST')),
3072 _('hg rename [OPTION]... SOURCE... DEST')),
3100 "^revert":
3073 "^revert":
3101 (revert,
3074 (revert,
3102 [('r', 'rev', '', _('revision to revert to')),
3075 [('r', 'rev', '', _('revision to revert to')),
3103 ('', 'no-backup', None, _('do not save backup copies of files')),
3076 ('', 'no-backup', None, _('do not save backup copies of files')),
3104 ('I', 'include', [], _('include names matching given patterns')),
3077 ('I', 'include', [], _('include names matching given patterns')),
3105 ('X', 'exclude', [], _('exclude names matching given patterns')),
3078 ('X', 'exclude', [], _('exclude names matching given patterns')),
3106 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3079 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3107 _('hg revert [-r REV] [NAME]...')),
3080 _('hg revert [-r REV] [NAME]...')),
3108 "rollback": (rollback, [], _('hg rollback')),
3081 "rollback": (rollback, [], _('hg rollback')),
3109 "root": (root, [], _('hg root')),
3082 "root": (root, [], _('hg root')),
3110 "^serve":
3083 "^serve":
3111 (serve,
3084 (serve,
3112 [('A', 'accesslog', '', _('name of access log file to write to')),
3085 [('A', 'accesslog', '', _('name of access log file to write to')),
3113 ('d', 'daemon', None, _('run server in background')),
3086 ('d', 'daemon', None, _('run server in background')),
3114 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3087 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3115 ('E', 'errorlog', '', _('name of error log file to write to')),
3088 ('E', 'errorlog', '', _('name of error log file to write to')),
3116 ('p', 'port', 0, _('port to use (default: 8000)')),
3089 ('p', 'port', 0, _('port to use (default: 8000)')),
3117 ('a', 'address', '', _('address to use')),
3090 ('a', 'address', '', _('address to use')),
3118 ('n', 'name', '',
3091 ('n', 'name', '',
3119 _('name to show in web pages (default: working dir)')),
3092 _('name to show in web pages (default: working dir)')),
3120 ('', 'webdir-conf', '', _('name of the webdir config file'
3093 ('', 'webdir-conf', '', _('name of the webdir config file'
3121 ' (serve more than one repo)')),
3094 ' (serve more than one repo)')),
3122 ('', 'pid-file', '', _('name of file to write process ID to')),
3095 ('', 'pid-file', '', _('name of file to write process ID to')),
3123 ('', 'stdio', None, _('for remote clients')),
3096 ('', 'stdio', None, _('for remote clients')),
3124 ('t', 'templates', '', _('web templates to use')),
3097 ('t', 'templates', '', _('web templates to use')),
3125 ('', 'style', '', _('template style to use')),
3098 ('', 'style', '', _('template style to use')),
3126 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3099 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3127 _('hg serve [OPTION]...')),
3100 _('hg serve [OPTION]...')),
3128 "^status|st":
3101 "^status|st":
3129 (status,
3102 (status,
3130 [('m', 'modified', None, _('show only modified files')),
3103 [('m', 'modified', None, _('show only modified files')),
3131 ('a', 'added', None, _('show only added files')),
3104 ('a', 'added', None, _('show only added files')),
3132 ('r', 'removed', None, _('show only removed files')),
3105 ('r', 'removed', None, _('show only removed files')),
3133 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3106 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3134 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3107 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3135 ('i', 'ignored', None, _('show ignored files')),
3108 ('i', 'ignored', None, _('show ignored files')),
3136 ('n', 'no-status', None, _('hide status prefix')),
3109 ('n', 'no-status', None, _('hide status prefix')),
3137 ('0', 'print0', None,
3110 ('0', 'print0', None,
3138 _('end filenames with NUL, for use with xargs')),
3111 _('end filenames with NUL, for use with xargs')),
3139 ('I', 'include', [], _('include names matching the given patterns')),
3112 ('I', 'include', [], _('include names matching the given patterns')),
3140 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3113 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3141 _('hg status [OPTION]... [FILE]...')),
3114 _('hg status [OPTION]... [FILE]...')),
3142 "tag":
3115 "tag":
3143 (tag,
3116 (tag,
3144 [('l', 'local', None, _('make the tag local')),
3117 [('l', 'local', None, _('make the tag local')),
3145 ('m', 'message', '', _('message for tag commit log entry')),
3118 ('m', 'message', '', _('message for tag commit log entry')),
3146 ('d', 'date', '', _('record datecode as commit date')),
3119 ('d', 'date', '', _('record datecode as commit date')),
3147 ('u', 'user', '', _('record user as committer')),
3120 ('u', 'user', '', _('record user as committer')),
3148 ('r', 'rev', '', _('revision to tag'))],
3121 ('r', 'rev', '', _('revision to tag'))],
3149 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3122 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3150 "tags": (tags, [], _('hg tags')),
3123 "tags": (tags, [], _('hg tags')),
3151 "tip":
3124 "tip":
3152 (tip,
3125 (tip,
3153 [('b', 'branches', None, _('show branches')),
3126 [('b', 'branches', None, _('show branches')),
3154 ('', 'style', '', _('display using template map file')),
3127 ('', 'style', '', _('display using template map file')),
3155 ('p', 'patch', None, _('show patch')),
3128 ('p', 'patch', None, _('show patch')),
3156 ('', 'template', '', _('display with template'))],
3129 ('', 'template', '', _('display with template'))],
3157 _('hg tip [-b] [-p]')),
3130 _('hg tip [-b] [-p]')),
3158 "unbundle":
3131 "unbundle":
3159 (unbundle,
3132 (unbundle,
3160 [('u', 'update', None,
3133 [('u', 'update', None,
3161 _('update the working directory to tip after unbundle'))],
3134 _('update the working directory to tip after unbundle'))],
3162 _('hg unbundle [-u] FILE')),
3135 _('hg unbundle [-u] FILE')),
3163 "debugundo|undo": (undo, [], _('hg undo')),
3136 "debugundo|undo": (undo, [], _('hg undo')),
3164 "^update|up|checkout|co":
3137 "^update|up|checkout|co":
3165 (update,
3138 (update,
3166 [('b', 'branch', '', _('checkout the head of a specific branch')),
3139 [('b', 'branch', '', _('checkout the head of a specific branch')),
3167 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3140 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3168 ('C', 'clean', None, _('overwrite locally modified files')),
3141 ('C', 'clean', None, _('overwrite locally modified files')),
3169 ('f', 'force', None, _('force a merge with outstanding changes'))],
3142 ('f', 'force', None, _('force a merge with outstanding changes'))],
3170 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3143 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3171 "verify": (verify, [], _('hg verify')),
3144 "verify": (verify, [], _('hg verify')),
3172 "version": (show_version, [], _('hg version')),
3145 "version": (show_version, [], _('hg version')),
3173 }
3146 }
3174
3147
3175 globalopts = [
3148 globalopts = [
3176 ('R', 'repository', '',
3149 ('R', 'repository', '',
3177 _('repository root directory or symbolic path name')),
3150 _('repository root directory or symbolic path name')),
3178 ('', 'cwd', '', _('change working directory')),
3151 ('', 'cwd', '', _('change working directory')),
3179 ('y', 'noninteractive', None,
3152 ('y', 'noninteractive', None,
3180 _('do not prompt, assume \'yes\' for any required answers')),
3153 _('do not prompt, assume \'yes\' for any required answers')),
3181 ('q', 'quiet', None, _('suppress output')),
3154 ('q', 'quiet', None, _('suppress output')),
3182 ('v', 'verbose', None, _('enable additional output')),
3155 ('v', 'verbose', None, _('enable additional output')),
3183 ('', 'config', [], _('set/override config option')),
3156 ('', 'config', [], _('set/override config option')),
3184 ('', 'debug', None, _('enable debugging output')),
3157 ('', 'debug', None, _('enable debugging output')),
3185 ('', 'debugger', None, _('start debugger')),
3158 ('', 'debugger', None, _('start debugger')),
3186 ('', 'lsprof', None, _('print improved command execution profile')),
3159 ('', 'lsprof', None, _('print improved command execution profile')),
3187 ('', 'traceback', None, _('print traceback on exception')),
3160 ('', 'traceback', None, _('print traceback on exception')),
3188 ('', 'time', None, _('time how long the command takes')),
3161 ('', 'time', None, _('time how long the command takes')),
3189 ('', 'profile', None, _('print command execution profile')),
3162 ('', 'profile', None, _('print command execution profile')),
3190 ('', 'version', None, _('output version information and exit')),
3163 ('', 'version', None, _('output version information and exit')),
3191 ('h', 'help', None, _('display help and exit')),
3164 ('h', 'help', None, _('display help and exit')),
3192 ]
3165 ]
3193
3166
3194 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3167 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3195 " debugindex debugindexdot")
3168 " debugindex debugindexdot")
3196 optionalrepo = ("paths serve debugconfig")
3169 optionalrepo = ("paths serve debugconfig")
3197
3170
3198 def findpossible(cmd):
3171 def findpossible(cmd):
3199 """
3172 """
3200 Return cmd -> (aliases, command table entry)
3173 Return cmd -> (aliases, command table entry)
3201 for each matching command.
3174 for each matching command.
3202 Return debug commands (or their aliases) only if no normal command matches.
3175 Return debug commands (or their aliases) only if no normal command matches.
3203 """
3176 """
3204 choice = {}
3177 choice = {}
3205 debugchoice = {}
3178 debugchoice = {}
3206 for e in table.keys():
3179 for e in table.keys():
3207 aliases = e.lstrip("^").split("|")
3180 aliases = e.lstrip("^").split("|")
3208 found = None
3181 found = None
3209 if cmd in aliases:
3182 if cmd in aliases:
3210 found = cmd
3183 found = cmd
3211 else:
3184 else:
3212 for a in aliases:
3185 for a in aliases:
3213 if a.startswith(cmd):
3186 if a.startswith(cmd):
3214 found = a
3187 found = a
3215 break
3188 break
3216 if found is not None:
3189 if found is not None:
3217 if aliases[0].startswith("debug"):
3190 if aliases[0].startswith("debug"):
3218 debugchoice[found] = (aliases, table[e])
3191 debugchoice[found] = (aliases, table[e])
3219 else:
3192 else:
3220 choice[found] = (aliases, table[e])
3193 choice[found] = (aliases, table[e])
3221
3194
3222 if not choice and debugchoice:
3195 if not choice and debugchoice:
3223 choice = debugchoice
3196 choice = debugchoice
3224
3197
3225 return choice
3198 return choice
3226
3199
3227 def findcmd(cmd):
3200 def findcmd(cmd):
3228 """Return (aliases, command table entry) for command string."""
3201 """Return (aliases, command table entry) for command string."""
3229 choice = findpossible(cmd)
3202 choice = findpossible(cmd)
3230
3203
3231 if choice.has_key(cmd):
3204 if choice.has_key(cmd):
3232 return choice[cmd]
3205 return choice[cmd]
3233
3206
3234 if len(choice) > 1:
3207 if len(choice) > 1:
3235 clist = choice.keys()
3208 clist = choice.keys()
3236 clist.sort()
3209 clist.sort()
3237 raise AmbiguousCommand(cmd, clist)
3210 raise AmbiguousCommand(cmd, clist)
3238
3211
3239 if choice:
3212 if choice:
3240 return choice.values()[0]
3213 return choice.values()[0]
3241
3214
3242 raise UnknownCommand(cmd)
3215 raise UnknownCommand(cmd)
3243
3216
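findpossible() and findcmd() implement command abbreviation: exact aliases win, any unambiguous alias prefix is accepted, and debug commands are only considered when nothing else matches. A short usage sketch against the table above:

findcmd("st")        # exact alias of "status" -> its (aliases, entry) pair
findcmd("vers")      # unambiguous prefix -> the "version" entry
try:
    findcmd("re")    # recover, remove, rename and revert all match
except AmbiguousCommand, inst:
    print inst.args  # ('re', ['recover', 'remove', 'rename', 'revert'])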
3244 def catchterm(*args):
3217 def catchterm(*args):
3245 raise util.SignalInterrupt
3218 raise util.SignalInterrupt
3246
3219
3247 def run():
3220 def run():
3248 sys.exit(dispatch(sys.argv[1:]))
3221 sys.exit(dispatch(sys.argv[1:]))
3249
3222
3250 class ParseError(Exception):
3223 class ParseError(Exception):
3251 """Exception raised on errors in parsing the command line."""
3224 """Exception raised on errors in parsing the command line."""
3252
3225
3253 def parse(ui, args):
3226 def parse(ui, args):
3254 options = {}
3227 options = {}
3255 cmdoptions = {}
3228 cmdoptions = {}
3256
3229
3257 try:
3230 try:
3258 args = fancyopts.fancyopts(args, globalopts, options)
3231 args = fancyopts.fancyopts(args, globalopts, options)
3259 except fancyopts.getopt.GetoptError, inst:
3232 except fancyopts.getopt.GetoptError, inst:
3260 raise ParseError(None, inst)
3233 raise ParseError(None, inst)
3261
3234
3262 if args:
3235 if args:
3263 cmd, args = args[0], args[1:]
3236 cmd, args = args[0], args[1:]
3264 aliases, i = findcmd(cmd)
3237 aliases, i = findcmd(cmd)
3265 cmd = aliases[0]
3238 cmd = aliases[0]
3266 defaults = ui.config("defaults", cmd)
3239 defaults = ui.config("defaults", cmd)
3267 if defaults:
3240 if defaults:
3268 args = defaults.split() + args
3241 args = defaults.split() + args
3269 c = list(i[1])
3242 c = list(i[1])
3270 else:
3243 else:
3271 cmd = None
3244 cmd = None
3272 c = []
3245 c = []
3273
3246
3274 # combine global options into local
3247 # combine global options into local
3275 for o in globalopts:
3248 for o in globalopts:
3276 c.append((o[0], o[1], options[o[1]], o[3]))
3249 c.append((o[0], o[1], options[o[1]], o[3]))
3277
3250
3278 try:
3251 try:
3279 args = fancyopts.fancyopts(args, c, cmdoptions)
3252 args = fancyopts.fancyopts(args, c, cmdoptions)
3280 except fancyopts.getopt.GetoptError, inst:
3253 except fancyopts.getopt.GetoptError, inst:
3281 raise ParseError(cmd, inst)
3254 raise ParseError(cmd, inst)
3282
3255
3283 # separate global options back out
3256 # separate global options back out
3284 for o in globalopts:
3257 for o in globalopts:
3285 n = o[1]
3258 n = o[1]
3286 options[n] = cmdoptions[n]
3259 options[n] = cmdoptions[n]
3287 del cmdoptions[n]
3260 del cmdoptions[n]
3288
3261
3289 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3262 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3290
3263
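Note how parse() splices per-command defaults from the [defaults] hgrc section in front of the arguments before option parsing, so configured flags behave as if they had been typed first on the command line. For instance, with a hypothetical entry "log = -v --limit 5", the effect on "hg log myfile" is:

# effect of a hypothetical "[defaults] log = -v --limit 5" hgrc entry
defaults = "-v --limit 5"
args = defaults.split() + ["myfile"]
# fancyopts then sees ['-v', '--limit', '5', 'myfile']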
3291 external = {}
3264 external = {}
3292
3265
3293 def findext(name):
3266 def findext(name):
3294 '''return module with given extension name'''
3267 '''return module with given extension name'''
3295 try:
3268 try:
3296 return sys.modules[external[name]]
3269 return sys.modules[external[name]]
3297 except KeyError:
3270 except KeyError:
3298 dotname = '.' + name
3271 dotname = '.' + name
3299 for k, v in external.iteritems():
3272 for k, v in external.iteritems():
3300 if k.endswith('.' + name) or v == name:
3273 if k.endswith('.' + name) or v == name:
3301 return sys.modules[v]
3274 return sys.modules[v]
3302 raise KeyError(name)
3275 raise KeyError(name)
3303
3276
3304 def dispatch(args):
3277 def dispatch(args):
3305 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3278 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3306 num = getattr(signal, name, None)
3279 num = getattr(signal, name, None)
3307 if num: signal.signal(num, catchterm)
3280 if num: signal.signal(num, catchterm)
3308
3281
3309 try:
3282 try:
3310 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3283 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3311 except util.Abort, inst:
3284 except util.Abort, inst:
3312 sys.stderr.write(_("abort: %s\n") % inst)
3285 sys.stderr.write(_("abort: %s\n") % inst)
3313 return -1
3286 return -1
3314
3287
3315 for ext_name, load_from_name in u.extensions():
3288 for ext_name, load_from_name in u.extensions():
3316 try:
3289 try:
3317 if load_from_name:
3290 if load_from_name:
3318 # the module will be loaded in sys.modules
3291 # the module will be loaded in sys.modules
3319                 # choose a unique name so that it doesn't
3292                 # choose a unique name so that it doesn't
3320                 # conflict with other modules
3293                 # conflict with other modules
3321 module_name = "hgext_%s" % ext_name.replace('.', '_')
3294 module_name = "hgext_%s" % ext_name.replace('.', '_')
3322 mod = imp.load_source(module_name, load_from_name)
3295 mod = imp.load_source(module_name, load_from_name)
3323 else:
3296 else:
3324 def importh(name):
3297 def importh(name):
3325 mod = __import__(name)
3298 mod = __import__(name)
3326 components = name.split('.')
3299 components = name.split('.')
3327 for comp in components[1:]:
3300 for comp in components[1:]:
3328 mod = getattr(mod, comp)
3301 mod = getattr(mod, comp)
3329 return mod
3302 return mod
3330 try:
3303 try:
3331 mod = importh("hgext.%s" % ext_name)
3304 mod = importh("hgext.%s" % ext_name)
3332 except ImportError:
3305 except ImportError:
3333 mod = importh(ext_name)
3306 mod = importh(ext_name)
3334 external[ext_name] = mod.__name__
3307 external[ext_name] = mod.__name__
3335 except (util.SignalInterrupt, KeyboardInterrupt):
3308 except (util.SignalInterrupt, KeyboardInterrupt):
3336 raise
3309 raise
3337 except Exception, inst:
3310 except Exception, inst:
3338             u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3311             u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3339 if u.print_exc():
3312 if u.print_exc():
3340 return 1
3313 return 1
3341
3314
3342 for name in external.itervalues():
3315 for name in external.itervalues():
3343 mod = sys.modules[name]
3316 mod = sys.modules[name]
3344 uisetup = getattr(mod, 'uisetup', None)
3317 uisetup = getattr(mod, 'uisetup', None)
3345 if uisetup:
3318 if uisetup:
3346 uisetup(u)
3319 uisetup(u)
3347 cmdtable = getattr(mod, 'cmdtable', {})
3320 cmdtable = getattr(mod, 'cmdtable', {})
3348 for t in cmdtable:
3321 for t in cmdtable:
3349 if t in table:
3322 if t in table:
3350 u.warn(_("module %s overrides %s\n") % (name, t))
3323 u.warn(_("module %s overrides %s\n") % (name, t))
3351 table.update(cmdtable)
3324 table.update(cmdtable)
3352
3325
3353 try:
3326 try:
3354 cmd, func, args, options, cmdoptions = parse(u, args)
3327 cmd, func, args, options, cmdoptions = parse(u, args)
3355 if options["time"]:
3328 if options["time"]:
3356 def get_times():
3329 def get_times():
3357 t = os.times()
3330 t = os.times()
3358 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3331 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3359 t = (t[0], t[1], t[2], t[3], time.clock())
3332 t = (t[0], t[1], t[2], t[3], time.clock())
3360 return t
3333 return t
3361 s = get_times()
3334 s = get_times()
3362 def print_time():
3335 def print_time():
3363 t = get_times()
3336 t = get_times()
3364 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3337 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3365 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3338 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3366 atexit.register(print_time)
3339 atexit.register(print_time)
3367
3340
3368 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3341 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3369 not options["noninteractive"], options["traceback"],
3342 not options["noninteractive"], options["traceback"],
3370 options["config"])
3343 options["config"])
3371
3344
3372 # enter the debugger before command execution
3345 # enter the debugger before command execution
3373 if options['debugger']:
3346 if options['debugger']:
3374 pdb.set_trace()
3347 pdb.set_trace()
3375
3348
3376 try:
3349 try:
3377 if options['cwd']:
3350 if options['cwd']:
3378 try:
3351 try:
3379 os.chdir(options['cwd'])
3352 os.chdir(options['cwd'])
3380 except OSError, inst:
3353 except OSError, inst:
3381 raise util.Abort('%s: %s' %
3354 raise util.Abort('%s: %s' %
3382 (options['cwd'], inst.strerror))
3355 (options['cwd'], inst.strerror))
3383
3356
3384 path = u.expandpath(options["repository"]) or ""
3357 path = u.expandpath(options["repository"]) or ""
3385 repo = path and hg.repository(u, path=path) or None
3358 repo = path and hg.repository(u, path=path) or None
3386
3359
3387 if options['help']:
3360 if options['help']:
3388 return help_(u, cmd, options['version'])
3361 return help_(u, cmd, options['version'])
3389 elif options['version']:
3362 elif options['version']:
3390 return show_version(u)
3363 return show_version(u)
3391 elif not cmd:
3364 elif not cmd:
3392 return help_(u, 'shortlist')
3365 return help_(u, 'shortlist')
3393
3366
3394 if cmd not in norepo.split():
3367 if cmd not in norepo.split():
3395 try:
3368 try:
3396 if not repo:
3369 if not repo:
3397 repo = hg.repository(u, path=path)
3370 repo = hg.repository(u, path=path)
3398 u = repo.ui
3371 u = repo.ui
3399 for name in external.itervalues():
3372 for name in external.itervalues():
3400 mod = sys.modules[name]
3373 mod = sys.modules[name]
3401 if hasattr(mod, 'reposetup'):
3374 if hasattr(mod, 'reposetup'):
3402 mod.reposetup(u, repo)
3375 mod.reposetup(u, repo)
3403 except hg.RepoError:
3376 except hg.RepoError:
3404 if cmd not in optionalrepo.split():
3377 if cmd not in optionalrepo.split():
3405 raise
3378 raise
3406 d = lambda: func(u, repo, *args, **cmdoptions)
3379 d = lambda: func(u, repo, *args, **cmdoptions)
3407 else:
3380 else:
3408 d = lambda: func(u, *args, **cmdoptions)
3381 d = lambda: func(u, *args, **cmdoptions)
3409
3382
3410 try:
3383 try:
3411 if options['profile']:
3384 if options['profile']:
3412 import hotshot, hotshot.stats
3385 import hotshot, hotshot.stats
3413 prof = hotshot.Profile("hg.prof")
3386 prof = hotshot.Profile("hg.prof")
3414 try:
3387 try:
3415 try:
3388 try:
3416 return prof.runcall(d)
3389 return prof.runcall(d)
3417 except:
3390 except:
3418 try:
3391 try:
3419 u.warn(_('exception raised - generating '
3392 u.warn(_('exception raised - generating '
3420 'profile anyway\n'))
3393 'profile anyway\n'))
3421 except:
3394 except:
3422 pass
3395 pass
3423 raise
3396 raise
3424 finally:
3397 finally:
3425 prof.close()
3398 prof.close()
3426 stats = hotshot.stats.load("hg.prof")
3399 stats = hotshot.stats.load("hg.prof")
3427 stats.strip_dirs()
3400 stats.strip_dirs()
3428 stats.sort_stats('time', 'calls')
3401 stats.sort_stats('time', 'calls')
3429 stats.print_stats(40)
3402 stats.print_stats(40)
3430 elif options['lsprof']:
3403 elif options['lsprof']:
3431 try:
3404 try:
3432 from mercurial import lsprof
3405 from mercurial import lsprof
3433 except ImportError:
3406 except ImportError:
3434 raise util.Abort(_(
3407 raise util.Abort(_(
3435 'lsprof not available - install from '
3408 'lsprof not available - install from '
3436 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3409 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3437 p = lsprof.Profiler()
3410 p = lsprof.Profiler()
3438 p.enable(subcalls=True)
3411 p.enable(subcalls=True)
3439 try:
3412 try:
3440 return d()
3413 return d()
3441 finally:
3414 finally:
3442 p.disable()
3415 p.disable()
3443 stats = lsprof.Stats(p.getstats())
3416 stats = lsprof.Stats(p.getstats())
3444 stats.sort()
3417 stats.sort()
3445 stats.pprint(top=10, file=sys.stderr, climit=5)
3418 stats.pprint(top=10, file=sys.stderr, climit=5)
3446 else:
3419 else:
3447 return d()
3420 return d()
3448 finally:
3421 finally:
3449 u.flush()
3422 u.flush()
3450 except:
3423 except:
3451 # enter the debugger when we hit an exception
3424 # enter the debugger when we hit an exception
3452 if options['debugger']:
3425 if options['debugger']:
3453 pdb.post_mortem(sys.exc_info()[2])
3426 pdb.post_mortem(sys.exc_info()[2])
3454 u.print_exc()
3427 u.print_exc()
3455 raise
3428 raise
3456 except ParseError, inst:
3429 except ParseError, inst:
3457 if inst.args[0]:
3430 if inst.args[0]:
3458 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3431 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3459 help_(u, inst.args[0])
3432 help_(u, inst.args[0])
3460 else:
3433 else:
3461 u.warn(_("hg: %s\n") % inst.args[1])
3434 u.warn(_("hg: %s\n") % inst.args[1])
3462 help_(u, 'shortlist')
3435 help_(u, 'shortlist')
3463 except AmbiguousCommand, inst:
3436 except AmbiguousCommand, inst:
3464 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3437 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3465 (inst.args[0], " ".join(inst.args[1])))
3438 (inst.args[0], " ".join(inst.args[1])))
3466 except UnknownCommand, inst:
3439 except UnknownCommand, inst:
3467 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3440 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3468 help_(u, 'shortlist')
3441 help_(u, 'shortlist')
3469 except hg.RepoError, inst:
3442 except hg.RepoError, inst:
3470 u.warn(_("abort: %s!\n") % inst)
3443 u.warn(_("abort: %s!\n") % inst)
3471 except lock.LockHeld, inst:
3444 except lock.LockHeld, inst:
3472 if inst.errno == errno.ETIMEDOUT:
3445 if inst.errno == errno.ETIMEDOUT:
3473 reason = _('timed out waiting for lock held by %s') % inst.locker
3446 reason = _('timed out waiting for lock held by %s') % inst.locker
3474 else:
3447 else:
3475 reason = _('lock held by %s') % inst.locker
3448 reason = _('lock held by %s') % inst.locker
3476 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3449 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3477 except lock.LockUnavailable, inst:
3450 except lock.LockUnavailable, inst:
3478 u.warn(_("abort: could not lock %s: %s\n") %
3451 u.warn(_("abort: could not lock %s: %s\n") %
3479 (inst.desc or inst.filename, inst.strerror))
3452 (inst.desc or inst.filename, inst.strerror))
3480 except revlog.RevlogError, inst:
3453 except revlog.RevlogError, inst:
3481 u.warn(_("abort: "), inst, "!\n")
3454 u.warn(_("abort: "), inst, "!\n")
3482 except util.SignalInterrupt:
3455 except util.SignalInterrupt:
3483 u.warn(_("killed!\n"))
3456 u.warn(_("killed!\n"))
3484 except KeyboardInterrupt:
3457 except KeyboardInterrupt:
3485 try:
3458 try:
3486 u.warn(_("interrupted!\n"))
3459 u.warn(_("interrupted!\n"))
3487 except IOError, inst:
3460 except IOError, inst:
3488 if inst.errno == errno.EPIPE:
3461 if inst.errno == errno.EPIPE:
3489 if u.debugflag:
3462 if u.debugflag:
3490 u.warn(_("\nbroken pipe\n"))
3463 u.warn(_("\nbroken pipe\n"))
3491 else:
3464 else:
3492 raise
3465 raise
3493 except IOError, inst:
3466 except IOError, inst:
3494 if hasattr(inst, "code"):
3467 if hasattr(inst, "code"):
3495 u.warn(_("abort: %s\n") % inst)
3468 u.warn(_("abort: %s\n") % inst)
3496 elif hasattr(inst, "reason"):
3469 elif hasattr(inst, "reason"):
3497 u.warn(_("abort: error: %s\n") % inst.reason[1])
3470 u.warn(_("abort: error: %s\n") % inst.reason[1])
3498 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3471 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3499 if u.debugflag:
3472 if u.debugflag:
3500 u.warn(_("broken pipe\n"))
3473 u.warn(_("broken pipe\n"))
3501 elif getattr(inst, "strerror", None):
3474 elif getattr(inst, "strerror", None):
3502 if getattr(inst, "filename", None):
3475 if getattr(inst, "filename", None):
3503 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3476 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3504 else:
3477 else:
3505 u.warn(_("abort: %s\n") % inst.strerror)
3478 u.warn(_("abort: %s\n") % inst.strerror)
3506 else:
3479 else:
3507 raise
3480 raise
3508 except OSError, inst:
3481 except OSError, inst:
3509 if hasattr(inst, "filename"):
3482 if hasattr(inst, "filename"):
3510 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3483 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3511 else:
3484 else:
3512 u.warn(_("abort: %s\n") % inst.strerror)
3485 u.warn(_("abort: %s\n") % inst.strerror)
3513 except util.Abort, inst:
3486 except util.Abort, inst:
3514 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3487 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3515 except TypeError, inst:
3488 except TypeError, inst:
3516 # was this an argument error?
3489 # was this an argument error?
3517 tb = traceback.extract_tb(sys.exc_info()[2])
3490 tb = traceback.extract_tb(sys.exc_info()[2])
3518 if len(tb) > 2: # no
3491 if len(tb) > 2: # no
3519 raise
3492 raise
3520 u.debug(inst, "\n")
3493 u.debug(inst, "\n")
3521 u.warn(_("%s: invalid arguments\n") % cmd)
3494 u.warn(_("%s: invalid arguments\n") % cmd)
3522 help_(u, cmd)
3495 help_(u, cmd)
3523 except SystemExit, inst:
3496 except SystemExit, inst:
3524 # Commands shouldn't sys.exit directly, but give a return code.
3497 # Commands shouldn't sys.exit directly, but give a return code.
3525         # Just in case, catch this and pass the exit code to the caller.
3498         # Just in case, catch this and pass the exit code to the caller.
3526 return inst.code
3499 return inst.code
3527 except:
3500 except:
3528 u.warn(_("** unknown exception encountered, details follow\n"))
3501 u.warn(_("** unknown exception encountered, details follow\n"))
3529 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3502 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3530 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3503 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3531 % version.get_version())
3504 % version.get_version())
3532 raise
3505 raise
3533
3506
3534 return -1
3507 return -1
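
To tie the extension hooks used by dispatch() together, a minimal sketch of the module shape it expects (every name here is hypothetical, not a shipped hgext module): an optional uisetup(ui) run before any repository is opened, an optional reposetup(ui, repo) run once the repository exists, and a cmdtable dict merged into the command table.

    # hgext_example.py -- hypothetical extension module
    def uisetup(ui):
        ui.note("example extension loaded\n")

    def reposetup(ui, repo):
        ui.note("example extension sees %s\n" % repo.root)

    def hello(ui, repo, **opts):
        """print a greeting from the current repository"""
        ui.write("hello from %s\n" % repo.root)

    cmdtable = {
        "hello": (hello, [], "hg hello"),
    }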
@@ -1,2166 +1,2212 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "appendfile changegroup")
11 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "changelog dirstate filelog manifest repo context")
12 demandload(globals(), "changelog dirstate filelog manifest repo context")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "os revlog util")
14 demandload(globals(), "os revlog util")
15
15
16 class localrepository(object):
16 class localrepository(object):
17 capabilities = ()
17 capabilities = ()
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 if not path:
22 if not path:
23 p = os.getcwd()
23 p = os.getcwd()
24 while not os.path.isdir(os.path.join(p, ".hg")):
24 while not os.path.isdir(os.path.join(p, ".hg")):
25 oldp = p
25 oldp = p
26 p = os.path.dirname(p)
26 p = os.path.dirname(p)
27 if p == oldp:
27 if p == oldp:
28 raise repo.RepoError(_("no repo found"))
28 raise repo.RepoError(_("no repo found"))
29 path = p
29 path = p
30 self.path = os.path.join(path, ".hg")
30 self.path = os.path.join(path, ".hg")
31
31
32 if not create and not os.path.isdir(self.path):
32 if not create and not os.path.isdir(self.path):
33 raise repo.RepoError(_("repository %s not found") % path)
33 raise repo.RepoError(_("repository %s not found") % path)
34
34
35 self.root = os.path.abspath(path)
35 self.root = os.path.abspath(path)
36 self.origroot = path
36 self.origroot = path
37 self.ui = ui.ui(parentui=parentui)
37 self.ui = ui.ui(parentui=parentui)
38 self.opener = util.opener(self.path)
38 self.opener = util.opener(self.path)
39 self.wopener = util.opener(self.root)
39 self.wopener = util.opener(self.root)
40
40
41 try:
41 try:
42 self.ui.readconfig(self.join("hgrc"), self.root)
42 self.ui.readconfig(self.join("hgrc"), self.root)
43 except IOError:
43 except IOError:
44 pass
44 pass
45
45
46 v = self.ui.revlogopts
46 v = self.ui.revlogopts
47 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
47 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
48 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
49 fl = v.get('flags', None)
49 fl = v.get('flags', None)
50 flags = 0
50 flags = 0
51 if fl != None:
51 if fl != None:
52 for x in fl.split():
52 for x in fl.split():
53 flags |= revlog.flagstr(x)
53 flags |= revlog.flagstr(x)
54 elif self.revlogv1:
54 elif self.revlogv1:
55 flags = revlog.REVLOG_DEFAULT_FLAGS
55 flags = revlog.REVLOG_DEFAULT_FLAGS
56
56
57 v = self.revlogversion | flags
57 v = self.revlogversion | flags
58 self.manifest = manifest.manifest(self.opener, v)
58 self.manifest = manifest.manifest(self.opener, v)
59 self.changelog = changelog.changelog(self.opener, v)
59 self.changelog = changelog.changelog(self.opener, v)
60
60
61 # the changelog might not have the inline index flag
61 # the changelog might not have the inline index flag
62 # on. If the format of the changelog is the same as found in
62 # on. If the format of the changelog is the same as found in
63 # .hgrc, apply any flags found in the .hgrc as well.
63 # .hgrc, apply any flags found in the .hgrc as well.
64         # Otherwise, just use the version from the changelog
64         # Otherwise, just use the version from the changelog
65 v = self.changelog.version
65 v = self.changelog.version
66 if v == self.revlogversion:
66 if v == self.revlogversion:
67 v |= flags
67 v |= flags
68 self.revlogversion = v
68 self.revlogversion = v
69
69
70 self.tagscache = None
70 self.tagscache = None
71 self.nodetagscache = None
71 self.nodetagscache = None
72 self.encodepats = None
72 self.encodepats = None
73 self.decodepats = None
73 self.decodepats = None
74 self.transhandle = None
74 self.transhandle = None
75
75
76 if create:
76 if create:
77 if not os.path.exists(path):
77 if not os.path.exists(path):
78 os.mkdir(path)
78 os.mkdir(path)
79 os.mkdir(self.path)
79 os.mkdir(self.path)
80 os.mkdir(self.join("data"))
80 os.mkdir(self.join("data"))
81
81
82 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
82 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
83
83
84 def hook(self, name, throw=False, **args):
84 def hook(self, name, throw=False, **args):
85 def callhook(hname, funcname):
85 def callhook(hname, funcname):
86 '''call python hook. hook is callable object, looked up as
86 '''call python hook. hook is callable object, looked up as
87 name in python module. if callable returns "true", hook
87 name in python module. if callable returns "true", hook
88 fails, else passes. if hook raises exception, treated as
88 fails, else passes. if hook raises exception, treated as
89 hook failure. exception propagates if throw is "true".
89 hook failure. exception propagates if throw is "true".
90
90
91 reason for "true" meaning "hook failed" is so that
91 reason for "true" meaning "hook failed" is so that
92 unmodified commands (e.g. mercurial.commands.update) can
92 unmodified commands (e.g. mercurial.commands.update) can
93 be run as hooks without wrappers to convert return values.'''
93 be run as hooks without wrappers to convert return values.'''
94
94
95 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
95 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
96 d = funcname.rfind('.')
96 d = funcname.rfind('.')
97 if d == -1:
97 if d == -1:
98 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
98 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
99 % (hname, funcname))
99 % (hname, funcname))
100 modname = funcname[:d]
100 modname = funcname[:d]
101 try:
101 try:
102 obj = __import__(modname)
102 obj = __import__(modname)
103 except ImportError:
103 except ImportError:
104 try:
104 try:
105 # extensions are loaded with hgext_ prefix
105 # extensions are loaded with hgext_ prefix
106 obj = __import__("hgext_%s" % modname)
106 obj = __import__("hgext_%s" % modname)
107 except ImportError:
107 except ImportError:
108 raise util.Abort(_('%s hook is invalid '
108 raise util.Abort(_('%s hook is invalid '
109 '(import of "%s" failed)') %
109 '(import of "%s" failed)') %
110 (hname, modname))
110 (hname, modname))
111 try:
111 try:
112 for p in funcname.split('.')[1:]:
112 for p in funcname.split('.')[1:]:
113 obj = getattr(obj, p)
113 obj = getattr(obj, p)
114 except AttributeError, err:
114 except AttributeError, err:
115 raise util.Abort(_('%s hook is invalid '
115 raise util.Abort(_('%s hook is invalid '
116 '("%s" is not defined)') %
116 '("%s" is not defined)') %
117 (hname, funcname))
117 (hname, funcname))
118 if not callable(obj):
118 if not callable(obj):
119 raise util.Abort(_('%s hook is invalid '
119 raise util.Abort(_('%s hook is invalid '
120 '("%s" is not callable)') %
120 '("%s" is not callable)') %
121 (hname, funcname))
121 (hname, funcname))
122 try:
122 try:
123 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
123 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
124 except (KeyboardInterrupt, util.SignalInterrupt):
124 except (KeyboardInterrupt, util.SignalInterrupt):
125 raise
125 raise
126 except Exception, exc:
126 except Exception, exc:
127 if isinstance(exc, util.Abort):
127 if isinstance(exc, util.Abort):
128 self.ui.warn(_('error: %s hook failed: %s\n') %
128 self.ui.warn(_('error: %s hook failed: %s\n') %
129 (hname, exc.args[0] % exc.args[1:]))
129 (hname, exc.args[0] % exc.args[1:]))
130 else:
130 else:
131 self.ui.warn(_('error: %s hook raised an exception: '
131 self.ui.warn(_('error: %s hook raised an exception: '
132 '%s\n') % (hname, exc))
132 '%s\n') % (hname, exc))
133 if throw:
133 if throw:
134 raise
134 raise
135 self.ui.print_exc()
135 self.ui.print_exc()
136 return True
136 return True
137 if r:
137 if r:
138 if throw:
138 if throw:
139 raise util.Abort(_('%s hook failed') % hname)
139 raise util.Abort(_('%s hook failed') % hname)
140 self.ui.warn(_('warning: %s hook failed\n') % hname)
140 self.ui.warn(_('warning: %s hook failed\n') % hname)
141 return r
141 return r
142
142
143 def runhook(name, cmd):
143 def runhook(name, cmd):
144 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
144 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
145 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
145 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
146 r = util.system(cmd, environ=env, cwd=self.root)
146 r = util.system(cmd, environ=env, cwd=self.root)
147 if r:
147 if r:
148 desc, r = util.explain_exit(r)
148 desc, r = util.explain_exit(r)
149 if throw:
149 if throw:
150 raise util.Abort(_('%s hook %s') % (name, desc))
150 raise util.Abort(_('%s hook %s') % (name, desc))
151 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
151 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
152 return r
152 return r
153
153
154 r = False
154 r = False
155 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
155 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
156 if hname.split(".", 1)[0] == name and cmd]
156 if hname.split(".", 1)[0] == name and cmd]
157 hooks.sort()
157 hooks.sort()
158 for hname, cmd in hooks:
158 for hname, cmd in hooks:
159 if cmd.startswith('python:'):
159 if cmd.startswith('python:'):
160 r = callhook(hname, cmd[7:].strip()) or r
160 r = callhook(hname, cmd[7:].strip()) or r
161 else:
161 else:
162 r = runhook(hname, cmd) or r
162 r = runhook(hname, cmd) or r
163 return r
163 return r
164
164
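
A minimal sketch of a python hook as callhook() above invokes it (the module and hgrc entry are hypothetical): the callable receives ui, repo and hooktype keyword arguments plus whatever the caller passed, and a true return value marks the hook as failed.

    # myhooks.py -- hypothetical module, enabled via hgrc:
    #   [hooks]
    #   pretag.nospaces = python:myhooks.nospaces
    def nospaces(ui, repo, hooktype, **kwargs):
        tag = kwargs.get('tag', '')
        if ' ' in tag:
            ui.warn("tag names must not contain spaces\n")
            return True    # truthy return means the hook failed (aborts when throw=True)
        return False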
165 tag_disallowed = ':\r\n'
166
167 def tag(self, name, node, local=False, message=None, user=None, date=None):
168 '''tag a revision with a symbolic name.
169
170 if local is True, the tag is stored in a per-repository file.
171 otherwise, it is stored in the .hgtags file, and a new
172 changeset is committed with the change.
173
174 keyword arguments:
175
176 local: whether to store tag in non-version-controlled file
177 (default False)
178
179 message: commit message to use if committing
180
181 user: name of user to use if committing
182
183 date: date tuple to use if committing'''
184
185 for c in self.tag_disallowed:
186 if c in name:
187 raise util.Abort(_('%r cannot be used in a tag name') % c)
188
189 self.hook('pretag', throw=True, node=node, tag=name, local=local)
190
191 if local:
192 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
193 self.hook('tag', node=node, tag=name, local=local)
194 return
195
196 for x in self.changes():
197 if '.hgtags' in x:
198 raise util.Abort(_('working copy of .hgtags is changed '
199 '(please commit .hgtags manually)'))
200
201 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
202 if self.dirstate.state('.hgtags') == '?':
203 self.add(['.hgtags'])
204
205 if not message:
206 message = _('Added tag %s for changeset %s') % (name, node)
207
208 self.commit(['.hgtags'], message, user, date)
209 self.hook('tag', node=node, tag=name, local=local)
210
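
For reference, a small sketch of what tag() above writes (the node value is hypothetical): both .hg/localtags and .hgtags get one '<node> <name>' line appended per tag; the only difference is whether the file is version controlled and committed.

    node = "a1" * 20                  # hypothetical 40-character hex changeset id
    name = "release-1.0"
    entry = '%s %s\n' % (node, name)  # the same format tag() appends
    assert entry.endswith(" release-1.0\n") and len(entry.split(" ", 1)[0]) == 40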
165 def tags(self):
211 def tags(self):
166 '''return a mapping of tag to node'''
212 '''return a mapping of tag to node'''
167 if not self.tagscache:
213 if not self.tagscache:
168 self.tagscache = {}
214 self.tagscache = {}
169
215
170 def parsetag(line, context):
216 def parsetag(line, context):
171 if not line:
217 if not line:
172 return
218 return
173                 s = line.split(" ", 1)
219                 s = line.split(" ", 1)
174 if len(s) != 2:
220 if len(s) != 2:
175 self.ui.warn(_("%s: cannot parse entry\n") % context)
221 self.ui.warn(_("%s: cannot parse entry\n") % context)
176 return
222 return
177 node, key = s
223 node, key = s
178 key = key.strip()
224 key = key.strip()
179 try:
225 try:
180 bin_n = bin(node)
226 bin_n = bin(node)
181 except TypeError:
227 except TypeError:
182 self.ui.warn(_("%s: node '%s' is not well formed\n") %
228 self.ui.warn(_("%s: node '%s' is not well formed\n") %
183 (context, node))
229 (context, node))
184 return
230 return
185 if bin_n not in self.changelog.nodemap:
231 if bin_n not in self.changelog.nodemap:
186 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
232 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
187 (context, key))
233 (context, key))
188 return
234 return
189 self.tagscache[key] = bin_n
235 self.tagscache[key] = bin_n
190
236
191 # read the tags file from each head, ending with the tip,
237 # read the tags file from each head, ending with the tip,
192 # and add each tag found to the map, with "newer" ones
238 # and add each tag found to the map, with "newer" ones
193 # taking precedence
239 # taking precedence
194 heads = self.heads()
240 heads = self.heads()
195 heads.reverse()
241 heads.reverse()
196 fl = self.file(".hgtags")
242 fl = self.file(".hgtags")
197 for node in heads:
243 for node in heads:
198 change = self.changelog.read(node)
244 change = self.changelog.read(node)
199 rev = self.changelog.rev(node)
245 rev = self.changelog.rev(node)
200 fn, ff = self.manifest.find(change[0], '.hgtags')
246 fn, ff = self.manifest.find(change[0], '.hgtags')
201 if fn is None: continue
247 if fn is None: continue
202 count = 0
248 count = 0
203 for l in fl.read(fn).splitlines():
249 for l in fl.read(fn).splitlines():
204 count += 1
250 count += 1
205 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
251 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
206 (rev, short(node), count))
252 (rev, short(node), count))
207 try:
253 try:
208 f = self.opener("localtags")
254 f = self.opener("localtags")
209 count = 0
255 count = 0
210 for l in f:
256 for l in f:
211 count += 1
257 count += 1
212 parsetag(l, _("localtags, line %d") % count)
258 parsetag(l, _("localtags, line %d") % count)
213 except IOError:
259 except IOError:
214 pass
260 pass
215
261
216 self.tagscache['tip'] = self.changelog.tip()
262 self.tagscache['tip'] = self.changelog.tip()
217
263
218 return self.tagscache
264 return self.tagscache
219
265
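
A minimal sketch of the per-line parsing done by parsetag() and of the precedence rule in tags() above (node values hypothetical): later entries overwrite earlier ones in the cache, so definitions read from newer heads and from localtags win.

    tagscache = {}
    for line in ["1111111111111111111111111111111111111111 stable\n",   # older head
                 "2222222222222222222222222222222222222222 stable\n"]:  # newer head
        node, key = line.split(" ", 1)
        tagscache[key.strip()] = node            # bin()/validity checks omitted here
    assert tagscache["stable"].startswith("2")   # the newer definition wins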
220 def tagslist(self):
266 def tagslist(self):
221 '''return a list of tags ordered by revision'''
267 '''return a list of tags ordered by revision'''
222 l = []
268 l = []
223 for t, n in self.tags().items():
269 for t, n in self.tags().items():
224 try:
270 try:
225 r = self.changelog.rev(n)
271 r = self.changelog.rev(n)
226 except:
272 except:
227 r = -2 # sort to the beginning of the list if unknown
273 r = -2 # sort to the beginning of the list if unknown
228 l.append((r, t, n))
274 l.append((r, t, n))
229 l.sort()
275 l.sort()
230 return [(t, n) for r, t, n in l]
276 return [(t, n) for r, t, n in l]
231
277
232 def nodetags(self, node):
278 def nodetags(self, node):
233 '''return the tags associated with a node'''
279 '''return the tags associated with a node'''
234 if not self.nodetagscache:
280 if not self.nodetagscache:
235 self.nodetagscache = {}
281 self.nodetagscache = {}
236 for t, n in self.tags().items():
282 for t, n in self.tags().items():
237 self.nodetagscache.setdefault(n, []).append(t)
283 self.nodetagscache.setdefault(n, []).append(t)
238 return self.nodetagscache.get(node, [])
284 return self.nodetagscache.get(node, [])
239
285
240 def lookup(self, key):
286 def lookup(self, key):
241 try:
287 try:
242 return self.tags()[key]
288 return self.tags()[key]
243 except KeyError:
289 except KeyError:
244 try:
290 try:
245 return self.changelog.lookup(key)
291 return self.changelog.lookup(key)
246 except:
292 except:
247 raise repo.RepoError(_("unknown revision '%s'") % key)
293 raise repo.RepoError(_("unknown revision '%s'") % key)
248
294
249 def dev(self):
295 def dev(self):
250 return os.lstat(self.path).st_dev
296 return os.lstat(self.path).st_dev
251
297
252 def local(self):
298 def local(self):
253 return True
299 return True
254
300
255 def join(self, f):
301 def join(self, f):
256 return os.path.join(self.path, f)
302 return os.path.join(self.path, f)
257
303
258 def wjoin(self, f):
304 def wjoin(self, f):
259 return os.path.join(self.root, f)
305 return os.path.join(self.root, f)
260
306
261 def file(self, f):
307 def file(self, f):
262 if f[0] == '/':
308 if f[0] == '/':
263 f = f[1:]
309 f = f[1:]
264 return filelog.filelog(self.opener, f, self.revlogversion)
310 return filelog.filelog(self.opener, f, self.revlogversion)
265
311
266 def changectx(self, changeid):
312 def changectx(self, changeid):
267 return context.changectx(self, changeid)
313 return context.changectx(self, changeid)
268
314
269 def filectx(self, path, changeid=None, fileid=None):
315 def filectx(self, path, changeid=None, fileid=None):
270 """changeid can be a changeset revision, node, or tag.
316 """changeid can be a changeset revision, node, or tag.
271 fileid can be a file revision or node."""
317 fileid can be a file revision or node."""
272 return context.filectx(self, path, changeid, fileid)
318 return context.filectx(self, path, changeid, fileid)
273
319
274 def getcwd(self):
320 def getcwd(self):
275 return self.dirstate.getcwd()
321 return self.dirstate.getcwd()
276
322
277 def wfile(self, f, mode='r'):
323 def wfile(self, f, mode='r'):
278 return self.wopener(f, mode)
324 return self.wopener(f, mode)
279
325
280 def wread(self, filename):
326 def wread(self, filename):
281 if self.encodepats == None:
327 if self.encodepats == None:
282 l = []
328 l = []
283 for pat, cmd in self.ui.configitems("encode"):
329 for pat, cmd in self.ui.configitems("encode"):
284 mf = util.matcher(self.root, "", [pat], [], [])[1]
330 mf = util.matcher(self.root, "", [pat], [], [])[1]
285 l.append((mf, cmd))
331 l.append((mf, cmd))
286 self.encodepats = l
332 self.encodepats = l
287
333
288 data = self.wopener(filename, 'r').read()
334 data = self.wopener(filename, 'r').read()
289
335
290 for mf, cmd in self.encodepats:
336 for mf, cmd in self.encodepats:
291 if mf(filename):
337 if mf(filename):
292 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
338 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
293 data = util.filter(data, cmd)
339 data = util.filter(data, cmd)
294 break
340 break
295
341
296 return data
342 return data
297
343
298 def wwrite(self, filename, data, fd=None):
344 def wwrite(self, filename, data, fd=None):
299 if self.decodepats == None:
345 if self.decodepats == None:
300 l = []
346 l = []
301 for pat, cmd in self.ui.configitems("decode"):
347 for pat, cmd in self.ui.configitems("decode"):
302 mf = util.matcher(self.root, "", [pat], [], [])[1]
348 mf = util.matcher(self.root, "", [pat], [], [])[1]
303 l.append((mf, cmd))
349 l.append((mf, cmd))
304 self.decodepats = l
350 self.decodepats = l
305
351
306 for mf, cmd in self.decodepats:
352 for mf, cmd in self.decodepats:
307 if mf(filename):
353 if mf(filename):
308 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
354 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
309 data = util.filter(data, cmd)
355 data = util.filter(data, cmd)
310 break
356 break
311
357
312 if fd:
358 if fd:
313 return fd.write(data)
359 return fd.write(data)
314 return self.wopener(filename, 'w').write(data)
360 return self.wopener(filename, 'w').write(data)
315
361
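
A hedged illustration of the filter lists built by wread() and wwrite() above (the pattern and commands are hypothetical): each [encode]/[decode] entry pairs a file pattern with a shell command, and the first matching pattern decides what util.filter() pipes the data through.

    # hypothetical hgrc:
    #   [encode]
    #   **.txt = dos2unix
    #   [decode]
    #   **.txt = unix2dos
    encodepats = [(lambda fn: fn.endswith('.txt'), 'dos2unix')]  # stand-in for util.matcher
    cmd = None
    for mf, c in encodepats:
        if mf('notes.txt'):
            cmd = c                  # wread() would now run util.filter(data, cmd)
            break
    assert cmd == 'dos2unix'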
316 def transaction(self):
362 def transaction(self):
317 tr = self.transhandle
363 tr = self.transhandle
318 if tr != None and tr.running():
364 if tr != None and tr.running():
319 return tr.nest()
365 return tr.nest()
320
366
321 # save dirstate for rollback
367 # save dirstate for rollback
322 try:
368 try:
323 ds = self.opener("dirstate").read()
369 ds = self.opener("dirstate").read()
324 except IOError:
370 except IOError:
325 ds = ""
371 ds = ""
326 self.opener("journal.dirstate", "w").write(ds)
372 self.opener("journal.dirstate", "w").write(ds)
327
373
328 tr = transaction.transaction(self.ui.warn, self.opener,
374 tr = transaction.transaction(self.ui.warn, self.opener,
329 self.join("journal"),
375 self.join("journal"),
330 aftertrans(self.path))
376 aftertrans(self.path))
331 self.transhandle = tr
377 self.transhandle = tr
332 return tr
378 return tr
333
379
334 def recover(self):
380 def recover(self):
335 l = self.lock()
381 l = self.lock()
336 if os.path.exists(self.join("journal")):
382 if os.path.exists(self.join("journal")):
337 self.ui.status(_("rolling back interrupted transaction\n"))
383 self.ui.status(_("rolling back interrupted transaction\n"))
338 transaction.rollback(self.opener, self.join("journal"))
384 transaction.rollback(self.opener, self.join("journal"))
339 self.reload()
385 self.reload()
340 return True
386 return True
341 else:
387 else:
342 self.ui.warn(_("no interrupted transaction available\n"))
388 self.ui.warn(_("no interrupted transaction available\n"))
343 return False
389 return False
344
390
345 def rollback(self, wlock=None):
391 def rollback(self, wlock=None):
346 if not wlock:
392 if not wlock:
347 wlock = self.wlock()
393 wlock = self.wlock()
348 l = self.lock()
394 l = self.lock()
349 if os.path.exists(self.join("undo")):
395 if os.path.exists(self.join("undo")):
350 self.ui.status(_("rolling back last transaction\n"))
396 self.ui.status(_("rolling back last transaction\n"))
351 transaction.rollback(self.opener, self.join("undo"))
397 transaction.rollback(self.opener, self.join("undo"))
352 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
398 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
353 self.reload()
399 self.reload()
354 self.wreload()
400 self.wreload()
355 else:
401 else:
356 self.ui.warn(_("no rollback information available\n"))
402 self.ui.warn(_("no rollback information available\n"))
357
403
358 def wreload(self):
404 def wreload(self):
359 self.dirstate.read()
405 self.dirstate.read()
360
406
361 def reload(self):
407 def reload(self):
362 self.changelog.load()
408 self.changelog.load()
363 self.manifest.load()
409 self.manifest.load()
364 self.tagscache = None
410 self.tagscache = None
365 self.nodetagscache = None
411 self.nodetagscache = None
366
412
367 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
413 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
368 desc=None):
414 desc=None):
369 try:
415 try:
370 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
416 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
371 except lock.LockHeld, inst:
417 except lock.LockHeld, inst:
372 if not wait:
418 if not wait:
373 raise
419 raise
374 self.ui.warn(_("waiting for lock on %s held by %s\n") %
420 self.ui.warn(_("waiting for lock on %s held by %s\n") %
375 (desc, inst.args[0]))
421 (desc, inst.args[0]))
376 # default to 600 seconds timeout
422 # default to 600 seconds timeout
377 l = lock.lock(self.join(lockname),
423 l = lock.lock(self.join(lockname),
378 int(self.ui.config("ui", "timeout") or 600),
424 int(self.ui.config("ui", "timeout") or 600),
379 releasefn, desc=desc)
425 releasefn, desc=desc)
380 if acquirefn:
426 if acquirefn:
381 acquirefn()
427 acquirefn()
382 return l
428 return l
383
429
384 def lock(self, wait=1):
430 def lock(self, wait=1):
385 return self.do_lock("lock", wait, acquirefn=self.reload,
431 return self.do_lock("lock", wait, acquirefn=self.reload,
386 desc=_('repository %s') % self.origroot)
432 desc=_('repository %s') % self.origroot)
387
433
388 def wlock(self, wait=1):
434 def wlock(self, wait=1):
389 return self.do_lock("wlock", wait, self.dirstate.write,
435 return self.do_lock("wlock", wait, self.dirstate.write,
390 self.wreload,
436 self.wreload,
391 desc=_('working directory of %s') % self.origroot)
437 desc=_('working directory of %s') % self.origroot)
392
438
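
A small sketch of the timeout used by do_lock() above (the hgrc value is hypothetical): the first attempt never blocks, and on LockHeld the retry waits for the [ui] timeout setting, defaulting to 600 seconds.

    # hypothetical hgrc:
    #   [ui]
    #   timeout = 30
    configured = "30"                 # what self.ui.config("ui", "timeout") would return
    timeout = int(configured or 600)  # same fallback as do_lock(): 600s when unset
    assert timeout == 30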
393 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
439 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
394 "determine whether a new filenode is needed"
440 "determine whether a new filenode is needed"
395 fp1 = manifest1.get(filename, nullid)
441 fp1 = manifest1.get(filename, nullid)
396 fp2 = manifest2.get(filename, nullid)
442 fp2 = manifest2.get(filename, nullid)
397
443
398 if fp2 != nullid:
444 if fp2 != nullid:
399 # is one parent an ancestor of the other?
445 # is one parent an ancestor of the other?
400 fpa = filelog.ancestor(fp1, fp2)
446 fpa = filelog.ancestor(fp1, fp2)
401 if fpa == fp1:
447 if fpa == fp1:
402 fp1, fp2 = fp2, nullid
448 fp1, fp2 = fp2, nullid
403 elif fpa == fp2:
449 elif fpa == fp2:
404 fp2 = nullid
450 fp2 = nullid
405
451
406 # is the file unmodified from the parent? report existing entry
452 # is the file unmodified from the parent? report existing entry
407 if fp2 == nullid and text == filelog.read(fp1):
453 if fp2 == nullid and text == filelog.read(fp1):
408 return (fp1, None, None)
454 return (fp1, None, None)
409
455
410 return (None, fp1, fp2)
456 return (None, fp1, fp2)
411
457
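
To make the parent handling in checkfilemerge() above concrete, a short summary of its cases (fp1/fp2 are the filenodes found in the two parent manifests, nullid meaning absent):

    # 1. fp2 != nullid and filelog.ancestor(fp1, fp2) == fp1:
    #        fp1 is the ancestor, so it is dropped and fp2 becomes the sole parent.
    # 2. fp2 != nullid and filelog.ancestor(fp1, fp2) == fp2:
    #        fp2 is the ancestor, so it is dropped and fp1 stays the sole parent.
    # 3. after that, fp2 == nullid and text == filelog.read(fp1):
    #        nothing changed -> (fp1, None, None), the existing filenode is reused.
    # 4. otherwise -> (None, fp1, fp2): a new file revision with the remaining parents.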
412 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
458 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
413 orig_parent = self.dirstate.parents()[0] or nullid
459 orig_parent = self.dirstate.parents()[0] or nullid
414 p1 = p1 or self.dirstate.parents()[0] or nullid
460 p1 = p1 or self.dirstate.parents()[0] or nullid
415 p2 = p2 or self.dirstate.parents()[1] or nullid
461 p2 = p2 or self.dirstate.parents()[1] or nullid
416 c1 = self.changelog.read(p1)
462 c1 = self.changelog.read(p1)
417 c2 = self.changelog.read(p2)
463 c2 = self.changelog.read(p2)
418 m1 = self.manifest.read(c1[0])
464 m1 = self.manifest.read(c1[0])
419 mf1 = self.manifest.readflags(c1[0])
465 mf1 = self.manifest.readflags(c1[0])
420 m2 = self.manifest.read(c2[0])
466 m2 = self.manifest.read(c2[0])
421 changed = []
467 changed = []
422
468
423 if orig_parent == p1:
469 if orig_parent == p1:
424 update_dirstate = 1
470 update_dirstate = 1
425 else:
471 else:
426 update_dirstate = 0
472 update_dirstate = 0
427
473
428 if not wlock:
474 if not wlock:
429 wlock = self.wlock()
475 wlock = self.wlock()
430 l = self.lock()
476 l = self.lock()
431 tr = self.transaction()
477 tr = self.transaction()
432 mm = m1.copy()
478 mm = m1.copy()
433 mfm = mf1.copy()
479 mfm = mf1.copy()
434 linkrev = self.changelog.count()
480 linkrev = self.changelog.count()
435 for f in files:
481 for f in files:
436 try:
482 try:
437 t = self.wread(f)
483 t = self.wread(f)
438 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
484 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
439 r = self.file(f)
485 r = self.file(f)
440 mfm[f] = tm
486 mfm[f] = tm
441
487
442 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
488 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
443 if entry:
489 if entry:
444 mm[f] = entry
490 mm[f] = entry
445 continue
491 continue
446
492
447 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
493 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
448 changed.append(f)
494 changed.append(f)
449 if update_dirstate:
495 if update_dirstate:
450 self.dirstate.update([f], "n")
496 self.dirstate.update([f], "n")
451 except IOError:
497 except IOError:
452 try:
498 try:
453 del mm[f]
499 del mm[f]
454 del mfm[f]
500 del mfm[f]
455 if update_dirstate:
501 if update_dirstate:
456 self.dirstate.forget([f])
502 self.dirstate.forget([f])
457 except:
503 except:
458 # deleted from p2?
504 # deleted from p2?
459 pass
505 pass
460
506
461 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
507 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
462 user = user or self.ui.username()
508 user = user or self.ui.username()
463 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
509 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
464 tr.close()
510 tr.close()
465 if update_dirstate:
511 if update_dirstate:
466 self.dirstate.setparents(n, nullid)
512 self.dirstate.setparents(n, nullid)
467
513
468 def commit(self, files=None, text="", user=None, date=None,
514 def commit(self, files=None, text="", user=None, date=None,
469 match=util.always, force=False, lock=None, wlock=None,
515 match=util.always, force=False, lock=None, wlock=None,
470 force_editor=False):
516 force_editor=False):
471 commit = []
517 commit = []
472 remove = []
518 remove = []
473 changed = []
519 changed = []
474
520
475 if files:
521 if files:
476 for f in files:
522 for f in files:
477 s = self.dirstate.state(f)
523 s = self.dirstate.state(f)
478 if s in 'nmai':
524 if s in 'nmai':
479 commit.append(f)
525 commit.append(f)
480 elif s == 'r':
526 elif s == 'r':
481 remove.append(f)
527 remove.append(f)
482 else:
528 else:
483 self.ui.warn(_("%s not tracked!\n") % f)
529 self.ui.warn(_("%s not tracked!\n") % f)
484 else:
530 else:
485 modified, added, removed, deleted, unknown = self.changes(match=match)
531 modified, added, removed, deleted, unknown = self.changes(match=match)
486 commit = modified + added
532 commit = modified + added
487 remove = removed
533 remove = removed
488
534
489 p1, p2 = self.dirstate.parents()
535 p1, p2 = self.dirstate.parents()
490 c1 = self.changelog.read(p1)
536 c1 = self.changelog.read(p1)
491 c2 = self.changelog.read(p2)
537 c2 = self.changelog.read(p2)
492 m1 = self.manifest.read(c1[0])
538 m1 = self.manifest.read(c1[0])
493 mf1 = self.manifest.readflags(c1[0])
539 mf1 = self.manifest.readflags(c1[0])
494 m2 = self.manifest.read(c2[0])
540 m2 = self.manifest.read(c2[0])
495
541
496 if not commit and not remove and not force and p2 == nullid:
542 if not commit and not remove and not force and p2 == nullid:
497 self.ui.status(_("nothing changed\n"))
543 self.ui.status(_("nothing changed\n"))
498 return None
544 return None
499
545
500 xp1 = hex(p1)
546 xp1 = hex(p1)
501 if p2 == nullid: xp2 = ''
547 if p2 == nullid: xp2 = ''
502 else: xp2 = hex(p2)
548 else: xp2 = hex(p2)
503
549
504 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
550 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
505
551
506 if not wlock:
552 if not wlock:
507 wlock = self.wlock()
553 wlock = self.wlock()
508 if not lock:
554 if not lock:
509 lock = self.lock()
555 lock = self.lock()
510 tr = self.transaction()
556 tr = self.transaction()
511
557
512 # check in files
558 # check in files
513 new = {}
559 new = {}
514 linkrev = self.changelog.count()
560 linkrev = self.changelog.count()
515 commit.sort()
561 commit.sort()
516 for f in commit:
562 for f in commit:
517 self.ui.note(f + "\n")
563 self.ui.note(f + "\n")
518 try:
564 try:
519 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
565 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
520 t = self.wread(f)
566 t = self.wread(f)
521 except IOError:
567 except IOError:
522 self.ui.warn(_("trouble committing %s!\n") % f)
568 self.ui.warn(_("trouble committing %s!\n") % f)
523 raise
569 raise
524
570
525 r = self.file(f)
571 r = self.file(f)
526
572
527 meta = {}
573 meta = {}
528 cp = self.dirstate.copied(f)
574 cp = self.dirstate.copied(f)
529 if cp:
575 if cp:
530 meta["copy"] = cp
576 meta["copy"] = cp
531 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
577 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
532 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
578 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
533 fp1, fp2 = nullid, nullid
579 fp1, fp2 = nullid, nullid
534 else:
580 else:
535 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
581 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
536 if entry:
582 if entry:
537 new[f] = entry
583 new[f] = entry
538 continue
584 continue
539
585
540 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
586 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
541 # remember what we've added so that we can later calculate
587 # remember what we've added so that we can later calculate
542 # the files to pull from a set of changesets
588 # the files to pull from a set of changesets
543 changed.append(f)
589 changed.append(f)
544
590
545 # update manifest
591 # update manifest
546 m1 = m1.copy()
592 m1 = m1.copy()
547 m1.update(new)
593 m1.update(new)
548 for f in remove:
594 for f in remove:
549 if f in m1:
595 if f in m1:
550 del m1[f]
596 del m1[f]
551 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
597 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
552 (new, remove))
598 (new, remove))
553
599
554 # add changeset
600 # add changeset
555 new = new.keys()
601 new = new.keys()
556 new.sort()
602 new.sort()
557
603
558 user = user or self.ui.username()
604 user = user or self.ui.username()
559 if not text or force_editor:
605 if not text or force_editor:
560 edittext = []
606 edittext = []
561 if text:
607 if text:
562 edittext.append(text)
608 edittext.append(text)
563 edittext.append("")
609 edittext.append("")
564 if p2 != nullid:
610 if p2 != nullid:
565 edittext.append("HG: branch merge")
611 edittext.append("HG: branch merge")
566 edittext.extend(["HG: changed %s" % f for f in changed])
612 edittext.extend(["HG: changed %s" % f for f in changed])
567 edittext.extend(["HG: removed %s" % f for f in remove])
613 edittext.extend(["HG: removed %s" % f for f in remove])
568 if not changed and not remove:
614 if not changed and not remove:
569 edittext.append("HG: no files changed")
615 edittext.append("HG: no files changed")
570 edittext.append("")
616 edittext.append("")
571 # run editor in the repository root
617 # run editor in the repository root
572 olddir = os.getcwd()
618 olddir = os.getcwd()
573 os.chdir(self.root)
619 os.chdir(self.root)
574 text = self.ui.edit("\n".join(edittext), user)
620 text = self.ui.edit("\n".join(edittext), user)
575 os.chdir(olddir)
621 os.chdir(olddir)
576
622
577 lines = [line.rstrip() for line in text.rstrip().splitlines()]
623 lines = [line.rstrip() for line in text.rstrip().splitlines()]
578 while lines and not lines[0]:
624 while lines and not lines[0]:
579 del lines[0]
625 del lines[0]
580 if not lines:
626 if not lines:
581 return None
627 return None
582 text = '\n'.join(lines)
628 text = '\n'.join(lines)
583 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
629 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
584 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
630 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
585 parent2=xp2)
631 parent2=xp2)
586 tr.close()
632 tr.close()
587
633
588 self.dirstate.setparents(n)
634 self.dirstate.setparents(n)
589 self.dirstate.update(new, "n")
635 self.dirstate.update(new, "n")
590 self.dirstate.forget(remove)
636 self.dirstate.forget(remove)
591
637
592 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
638 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
593 return n
639 return n
594
640
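
For illustration, a minimal sketch of the editor template commit() assembles when no message is given (file names hypothetical); a merge would additionally carry an "HG: branch merge" line, and a commit touching nothing gets "HG: no files changed".

    template = "\n".join([
        "",                          # blank line left for the commit message itself
        "HG: changed a.txt",
        "HG: removed old.txt",
        "",
    ])                               # this text is handed to ui.edit()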
595 def walk(self, node=None, files=[], match=util.always, badmatch=None):
641 def walk(self, node=None, files=[], match=util.always, badmatch=None):
596 if node:
642 if node:
597 fdict = dict.fromkeys(files)
643 fdict = dict.fromkeys(files)
598 for fn in self.manifest.read(self.changelog.read(node)[0]):
644 for fn in self.manifest.read(self.changelog.read(node)[0]):
599 fdict.pop(fn, None)
645 fdict.pop(fn, None)
600 if match(fn):
646 if match(fn):
601 yield 'm', fn
647 yield 'm', fn
602 for fn in fdict:
648 for fn in fdict:
603 if badmatch and badmatch(fn):
649 if badmatch and badmatch(fn):
604 if match(fn):
650 if match(fn):
605 yield 'b', fn
651 yield 'b', fn
606 else:
652 else:
607 self.ui.warn(_('%s: No such file in rev %s\n') % (
653 self.ui.warn(_('%s: No such file in rev %s\n') % (
608 util.pathto(self.getcwd(), fn), short(node)))
654 util.pathto(self.getcwd(), fn), short(node)))
609 else:
655 else:
610 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
656 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
611 yield src, fn
657 yield src, fn
612
658
613 def changes(self, node1=None, node2=None, files=[], match=util.always,
659 def changes(self, node1=None, node2=None, files=[], match=util.always,
614 wlock=None, show_ignored=None):
660 wlock=None, show_ignored=None):
615 """return changes between two nodes or node and working directory
661 """return changes between two nodes or node and working directory
616
662
617 If node1 is None, use the first dirstate parent instead.
663 If node1 is None, use the first dirstate parent instead.
618 If node2 is None, compare node1 with working directory.
664 If node2 is None, compare node1 with working directory.
619 """
665 """
620
666
621 def fcmp(fn, mf):
667 def fcmp(fn, mf):
622 t1 = self.wread(fn)
668 t1 = self.wread(fn)
623 t2 = self.file(fn).read(mf.get(fn, nullid))
669 t2 = self.file(fn).read(mf.get(fn, nullid))
624 return cmp(t1, t2)
670 return cmp(t1, t2)
625
671
626 def mfmatches(node):
672 def mfmatches(node):
627 change = self.changelog.read(node)
673 change = self.changelog.read(node)
628 mf = dict(self.manifest.read(change[0]))
674 mf = dict(self.manifest.read(change[0]))
629 for fn in mf.keys():
675 for fn in mf.keys():
630 if not match(fn):
676 if not match(fn):
631 del mf[fn]
677 del mf[fn]
632 return mf
678 return mf
633
679
634 modified, added, removed, deleted, unknown, ignored = [],[],[],[],[],[]
680 modified, added, removed, deleted, unknown, ignored = [],[],[],[],[],[]
635 compareworking = False
681 compareworking = False
636 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
682 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
637 compareworking = True
683 compareworking = True
638
684
639 if not compareworking:
685 if not compareworking:
640 # read the manifest from node1 before the manifest from node2,
686 # read the manifest from node1 before the manifest from node2,
641 # so that we'll hit the manifest cache if we're going through
687 # so that we'll hit the manifest cache if we're going through
642 # all the revisions in parent->child order.
688 # all the revisions in parent->child order.
643 mf1 = mfmatches(node1)
689 mf1 = mfmatches(node1)
644
690
645 # are we comparing the working directory?
691 # are we comparing the working directory?
646 if not node2:
692 if not node2:
647 if not wlock:
693 if not wlock:
648 try:
694 try:
649 wlock = self.wlock(wait=0)
695 wlock = self.wlock(wait=0)
650 except lock.LockException:
696 except lock.LockException:
651 wlock = None
697 wlock = None
652 lookup, modified, added, removed, deleted, unknown, ignored = (
698 lookup, modified, added, removed, deleted, unknown, ignored = (
653 self.dirstate.changes(files, match, show_ignored))
699 self.dirstate.changes(files, match, show_ignored))
654
700
655 # are we comparing working dir against its parent?
701 # are we comparing working dir against its parent?
656 if compareworking:
702 if compareworking:
657 if lookup:
703 if lookup:
658 # do a full compare of any files that might have changed
704 # do a full compare of any files that might have changed
659 mf2 = mfmatches(self.dirstate.parents()[0])
705 mf2 = mfmatches(self.dirstate.parents()[0])
660 for f in lookup:
706 for f in lookup:
661 if fcmp(f, mf2):
707 if fcmp(f, mf2):
662 modified.append(f)
708 modified.append(f)
663 elif wlock is not None:
709 elif wlock is not None:
664 self.dirstate.update([f], "n")
710 self.dirstate.update([f], "n")
665 else:
711 else:
666 # we are comparing working dir against non-parent
712 # we are comparing working dir against non-parent
667 # generate a pseudo-manifest for the working dir
713 # generate a pseudo-manifest for the working dir
668 mf2 = mfmatches(self.dirstate.parents()[0])
714 mf2 = mfmatches(self.dirstate.parents()[0])
669 for f in lookup + modified + added:
715 for f in lookup + modified + added:
670 mf2[f] = ""
716 mf2[f] = ""
671 for f in removed:
717 for f in removed:
672 if f in mf2:
718 if f in mf2:
673 del mf2[f]
719 del mf2[f]
674 else:
720 else:
675 # we are comparing two revisions
721 # we are comparing two revisions
676 deleted, unknown, ignored = [], [], []
722 deleted, unknown, ignored = [], [], []
677 mf2 = mfmatches(node2)
723 mf2 = mfmatches(node2)
678
724
679 if not compareworking:
725 if not compareworking:
680 # flush lists from dirstate before comparing manifests
726 # flush lists from dirstate before comparing manifests
681 modified, added = [], []
727 modified, added = [], []
682
728
683 # make sure to sort the files so we talk to the disk in a
729 # make sure to sort the files so we talk to the disk in a
684 # reasonable order
730 # reasonable order
685 mf2keys = mf2.keys()
731 mf2keys = mf2.keys()
686 mf2keys.sort()
732 mf2keys.sort()
687 for fn in mf2keys:
733 for fn in mf2keys:
688 if mf1.has_key(fn):
734 if mf1.has_key(fn):
689 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
735 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
690 modified.append(fn)
736 modified.append(fn)
691 del mf1[fn]
737 del mf1[fn]
692 else:
738 else:
693 added.append(fn)
739 added.append(fn)
694
740
695 removed = mf1.keys()
741 removed = mf1.keys()
696
742
697 # sort and return results:
743 # sort and return results:
698 for l in modified, added, removed, deleted, unknown, ignored:
744 for l in modified, added, removed, deleted, unknown, ignored:
699 l.sort()
745 l.sort()
700 if show_ignored is None:
746 if show_ignored is None:
701 return (modified, added, removed, deleted, unknown)
747 return (modified, added, removed, deleted, unknown)
702 else:
748 else:
703 return (modified, added, removed, deleted, unknown, ignored)
749 return (modified, added, removed, deleted, unknown, ignored)
704
750
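
A minimal usage sketch of changes() above (repo is assumed to be an open localrepository): called with no arguments it compares the working directory against its first parent and returns five sorted lists, or six when show_ignored is set.

    modified, added, removed, deleted, unknown = repo.changes()
    repo.ui.status("%d modified, %d added, %d removed\n"
                   % (len(modified), len(added), len(removed)))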
705 def add(self, list, wlock=None):
751 def add(self, list, wlock=None):
706 if not wlock:
752 if not wlock:
707 wlock = self.wlock()
753 wlock = self.wlock()
708 for f in list:
754 for f in list:
709 p = self.wjoin(f)
755 p = self.wjoin(f)
710 if not os.path.exists(p):
756 if not os.path.exists(p):
711 self.ui.warn(_("%s does not exist!\n") % f)
757 self.ui.warn(_("%s does not exist!\n") % f)
712 elif not os.path.isfile(p):
758 elif not os.path.isfile(p):
713 self.ui.warn(_("%s not added: only files supported currently\n")
759 self.ui.warn(_("%s not added: only files supported currently\n")
714 % f)
760 % f)
715 elif self.dirstate.state(f) in 'an':
761 elif self.dirstate.state(f) in 'an':
716 self.ui.warn(_("%s already tracked!\n") % f)
762 self.ui.warn(_("%s already tracked!\n") % f)
717 else:
763 else:
718 self.dirstate.update([f], "a")
764 self.dirstate.update([f], "a")
719
765
720 def forget(self, list, wlock=None):
766 def forget(self, list, wlock=None):
721 if not wlock:
767 if not wlock:
722 wlock = self.wlock()
768 wlock = self.wlock()
723 for f in list:
769 for f in list:
724 if self.dirstate.state(f) not in 'ai':
770 if self.dirstate.state(f) not in 'ai':
725 self.ui.warn(_("%s not added!\n") % f)
771 self.ui.warn(_("%s not added!\n") % f)
726 else:
772 else:
727 self.dirstate.forget([f])
773 self.dirstate.forget([f])
728
774
729 def remove(self, list, unlink=False, wlock=None):
775 def remove(self, list, unlink=False, wlock=None):
730 if unlink:
776 if unlink:
731 for f in list:
777 for f in list:
732 try:
778 try:
733 util.unlink(self.wjoin(f))
779 util.unlink(self.wjoin(f))
734 except OSError, inst:
780 except OSError, inst:
735 if inst.errno != errno.ENOENT:
781 if inst.errno != errno.ENOENT:
736 raise
782 raise
737 if not wlock:
783 if not wlock:
738 wlock = self.wlock()
784 wlock = self.wlock()
739 for f in list:
785 for f in list:
740 p = self.wjoin(f)
786 p = self.wjoin(f)
741 if os.path.exists(p):
787 if os.path.exists(p):
742 self.ui.warn(_("%s still exists!\n") % f)
788 self.ui.warn(_("%s still exists!\n") % f)
743 elif self.dirstate.state(f) == 'a':
789 elif self.dirstate.state(f) == 'a':
744 self.dirstate.forget([f])
790 self.dirstate.forget([f])
745 elif f not in self.dirstate:
791 elif f not in self.dirstate:
746 self.ui.warn(_("%s not tracked!\n") % f)
792 self.ui.warn(_("%s not tracked!\n") % f)
747 else:
793 else:
748 self.dirstate.update([f], "r")
794 self.dirstate.update([f], "r")
749
795
750 def undelete(self, list, wlock=None):
796 def undelete(self, list, wlock=None):
751 p = self.dirstate.parents()[0]
797 p = self.dirstate.parents()[0]
752 mn = self.changelog.read(p)[0]
798 mn = self.changelog.read(p)[0]
753 mf = self.manifest.readflags(mn)
799 mf = self.manifest.readflags(mn)
754 m = self.manifest.read(mn)
800 m = self.manifest.read(mn)
755 if not wlock:
801 if not wlock:
756 wlock = self.wlock()
802 wlock = self.wlock()
757 for f in list:
803 for f in list:
758 if self.dirstate.state(f) not in "r":
804 if self.dirstate.state(f) not in "r":
759 self.ui.warn("%s not removed!\n" % f)
805 self.ui.warn("%s not removed!\n" % f)
760 else:
806 else:
761 t = self.file(f).read(m[f])
807 t = self.file(f).read(m[f])
762 self.wwrite(f, t)
808 self.wwrite(f, t)
763 util.set_exec(self.wjoin(f), mf[f])
809 util.set_exec(self.wjoin(f), mf[f])
764 self.dirstate.update([f], "n")
810 self.dirstate.update([f], "n")
765
811
766 def copy(self, source, dest, wlock=None):
812 def copy(self, source, dest, wlock=None):
767 p = self.wjoin(dest)
813 p = self.wjoin(dest)
768 if not os.path.exists(p):
814 if not os.path.exists(p):
769 self.ui.warn(_("%s does not exist!\n") % dest)
815 self.ui.warn(_("%s does not exist!\n") % dest)
770 elif not os.path.isfile(p):
816 elif not os.path.isfile(p):
771 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
817 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
772 else:
818 else:
773 if not wlock:
819 if not wlock:
774 wlock = self.wlock()
820 wlock = self.wlock()
775 if self.dirstate.state(dest) == '?':
821 if self.dirstate.state(dest) == '?':
776 self.dirstate.update([dest], "a")
822 self.dirstate.update([dest], "a")
777 self.dirstate.copy(source, dest)
823 self.dirstate.copy(source, dest)
778
824
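# The add/forget/remove/undelete/copy methods above drive the dirstate through
# a small set of one-letter states: 'n' (normal, tracked), 'a' (added),
# 'r' (removed), '?' (untracked).  A minimal toy model of those transitions,
# using a plain dict in place of the real dirstate object; this is an
# illustrative sketch only, not Mercurial's API:

def toy_add(states, f):
    # mirrors add(): refuse files that are already added or tracked
    if states.get(f, '?') in 'an':
        return 'already tracked'
    states[f] = 'a'

def toy_remove(states, f):
    # mirrors remove(): an added-but-uncommitted file is simply forgotten,
    # an untracked file is rejected, a tracked file is marked for removal
    state = states.get(f, '?')
    if state == 'a':
        del states[f]
    elif state == '?':
        return 'not tracked'
    else:
        states[f] = 'r'

# usage: states = {'a.txt': 'n'}; toy_add(states, 'b.txt') leaves states['b.txt'] == 'a'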
779 def heads(self, start=None):
825 def heads(self, start=None):
780 heads = self.changelog.heads(start)
826 heads = self.changelog.heads(start)
781 # sort the output in rev descending order
827 # sort the output in rev descending order
782 heads = [(-self.changelog.rev(h), h) for h in heads]
828 heads = [(-self.changelog.rev(h), h) for h in heads]
783 heads.sort()
829 heads.sort()
784 return [n for (r, n) in heads]
830 return [n for (r, n) in heads]
785
831
786 # branchlookup returns a dict giving a list of branches for
832 # branchlookup returns a dict giving a list of branches for
787 # each head. A branch is defined as the tag of a node or
833 # each head. A branch is defined as the tag of a node or
788 # the branch of the node's parents. If a node has multiple
834 # the branch of the node's parents. If a node has multiple
789 # branch tags, tags are eliminated if they are visible from other
835 # branch tags, tags are eliminated if they are visible from other
790 # branch tags.
836 # branch tags.
791 #
837 #
792 # So, for this graph: a->b->c->d->e
838 # So, for this graph: a->b->c->d->e
793 # \ /
839 # \ /
794 # aa -----/
840 # aa -----/
795 # a has tag 2.6.12
841 # a has tag 2.6.12
796 # d has tag 2.6.13
842 # d has tag 2.6.13
797 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
843 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
798 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
844 # for 2.6.12 can be reached from the node for 2.6.13, it is eliminated
799 # from the list.
845 # from the list.
800 #
846 #
801 # It is possible that more than one head will have the same branch tag.
847 # It is possible that more than one head will have the same branch tag.
802 # callers need to check the result for multiple heads under the same
848 # callers need to check the result for multiple heads under the same
803 # branch tag if that is a problem for them (ie checkout of a specific
849 # branch tag if that is a problem for them (ie checkout of a specific
804 # branch).
850 # branch).
805 #
851 #
806 # passing in a specific branch will limit the depth of the search
852 # passing in a specific branch will limit the depth of the search
807 # through the parents. It won't limit the branches returned in the
853 # through the parents. It won't limit the branches returned in the
808 # result though.
854 # result though.
809 def branchlookup(self, heads=None, branch=None):
855 def branchlookup(self, heads=None, branch=None):
810 if not heads:
856 if not heads:
811 heads = self.heads()
857 heads = self.heads()
812 headt = [ h for h in heads ]
858 headt = [ h for h in heads ]
813 chlog = self.changelog
859 chlog = self.changelog
814 branches = {}
860 branches = {}
815 merges = []
861 merges = []
816 seenmerge = {}
862 seenmerge = {}
817
863
818 # traverse the tree once for each head, recording in the branches
864 # traverse the tree once for each head, recording in the branches
819 # dict which tags are visible from this head. The branches
865 # dict which tags are visible from this head. The branches
820 # dict also records which tags are visible from each tag
866 # dict also records which tags are visible from each tag
821 # while we traverse.
867 # while we traverse.
822 while headt or merges:
868 while headt or merges:
823 if merges:
869 if merges:
824 n, found = merges.pop()
870 n, found = merges.pop()
825 visit = [n]
871 visit = [n]
826 else:
872 else:
827 h = headt.pop()
873 h = headt.pop()
828 visit = [h]
874 visit = [h]
829 found = [h]
875 found = [h]
830 seen = {}
876 seen = {}
831 while visit:
877 while visit:
832 n = visit.pop()
878 n = visit.pop()
833 if n in seen:
879 if n in seen:
834 continue
880 continue
835 pp = chlog.parents(n)
881 pp = chlog.parents(n)
836 tags = self.nodetags(n)
882 tags = self.nodetags(n)
837 if tags:
883 if tags:
838 for x in tags:
884 for x in tags:
839 if x == 'tip':
885 if x == 'tip':
840 continue
886 continue
841 for f in found:
887 for f in found:
842 branches.setdefault(f, {})[n] = 1
888 branches.setdefault(f, {})[n] = 1
843 branches.setdefault(n, {})[n] = 1
889 branches.setdefault(n, {})[n] = 1
844 break
890 break
845 if n not in found:
891 if n not in found:
846 found.append(n)
892 found.append(n)
847 if branch in tags:
893 if branch in tags:
848 continue
894 continue
849 seen[n] = 1
895 seen[n] = 1
850 if pp[1] != nullid and n not in seenmerge:
896 if pp[1] != nullid and n not in seenmerge:
851 merges.append((pp[1], [x for x in found]))
897 merges.append((pp[1], [x for x in found]))
852 seenmerge[n] = 1
898 seenmerge[n] = 1
853 if pp[0] != nullid:
899 if pp[0] != nullid:
854 visit.append(pp[0])
900 visit.append(pp[0])
855 # traverse the branches dict, eliminating branch tags from each
901 # traverse the branches dict, eliminating branch tags from each
856 # head that are visible from another branch tag for that head.
902 # head that are visible from another branch tag for that head.
857 out = {}
903 out = {}
858 viscache = {}
904 viscache = {}
859 for h in heads:
905 for h in heads:
860 def visible(node):
906 def visible(node):
861 if node in viscache:
907 if node in viscache:
862 return viscache[node]
908 return viscache[node]
863 ret = {}
909 ret = {}
864 visit = [node]
910 visit = [node]
865 while visit:
911 while visit:
866 x = visit.pop()
912 x = visit.pop()
867 if x in viscache:
913 if x in viscache:
868 ret.update(viscache[x])
914 ret.update(viscache[x])
869 elif x not in ret:
915 elif x not in ret:
870 ret[x] = 1
916 ret[x] = 1
871 if x in branches:
917 if x in branches:
872 visit[len(visit):] = branches[x].keys()
918 visit[len(visit):] = branches[x].keys()
873 viscache[node] = ret
919 viscache[node] = ret
874 return ret
920 return ret
875 if h not in branches:
921 if h not in branches:
876 continue
922 continue
877 # O(n^2), but somewhat limited. This only searches the
923 # O(n^2), but somewhat limited. This only searches the
878 # tags visible from a specific head, not all the tags in the
924 # tags visible from a specific head, not all the tags in the
879 # whole repo.
925 # whole repo.
880 for b in branches[h]:
926 for b in branches[h]:
881 vis = False
927 vis = False
882 for bb in branches[h].keys():
928 for bb in branches[h].keys():
883 if b != bb:
929 if b != bb:
884 if b in visible(bb):
930 if b in visible(bb):
885 vis = True
931 vis = True
886 break
932 break
887 if not vis:
933 if not vis:
888 l = out.setdefault(h, [])
934 l = out.setdefault(h, [])
889 l[len(l):] = self.nodetags(b)
935 l[len(l):] = self.nodetags(b)
890 return out
936 return out
891
937
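# The long comment above branchlookup() describes dropping any branch tag that
# is visible from (i.e. an ancestor of) another branch tag of the same head.
# A self-contained sketch of that pruning rule on a toy DAG, where `parents`
# maps node -> list of parents and `tagged` maps node -> tag name; these names
# are illustrative only and not part of the localrepository API:

def reachable(node, parents):
    """Return node plus all of its ancestors."""
    seen, stack = set(), [node]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents.get(n, []))
    return seen

def prune_branch_tags(head, tagged, parents):
    """Keep only the tags of tagged ancestors of head that are not visible
    from another tagged ancestor of the same head."""
    cands = [n for n in reachable(head, parents) if n in tagged]
    return [tagged[n] for n in cands
            if not any(n != m and n in reachable(m, parents) for m in cands)]

# The a->b->c->d->e chain from the comment (the aa merge is omitted here),
# with a tagged 2.6.12 and d tagged 2.6.13: only 2.6.13 survives for head e.
parents = {'e': ['d'], 'd': ['c'], 'c': ['b'], 'b': ['a'], 'a': []}
tagged = {'a': '2.6.12', 'd': '2.6.13'}
assert prune_branch_tags('e', tagged, parents) == ['2.6.13']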
892 def branches(self, nodes):
938 def branches(self, nodes):
893 if not nodes:
939 if not nodes:
894 nodes = [self.changelog.tip()]
940 nodes = [self.changelog.tip()]
895 b = []
941 b = []
896 for n in nodes:
942 for n in nodes:
897 t = n
943 t = n
898 while 1:
944 while 1:
899 p = self.changelog.parents(n)
945 p = self.changelog.parents(n)
900 if p[1] != nullid or p[0] == nullid:
946 if p[1] != nullid or p[0] == nullid:
901 b.append((t, n, p[0], p[1]))
947 b.append((t, n, p[0], p[1]))
902 break
948 break
903 n = p[0]
949 n = p[0]
904 return b
950 return b
905
951
906 def between(self, pairs):
952 def between(self, pairs):
907 r = []
953 r = []
908
954
909 for top, bottom in pairs:
955 for top, bottom in pairs:
910 n, l, i = top, [], 0
956 n, l, i = top, [], 0
911 f = 1
957 f = 1
912
958
913 while n != bottom:
959 while n != bottom:
914 p = self.changelog.parents(n)[0]
960 p = self.changelog.parents(n)[0]
915 if i == f:
961 if i == f:
916 l.append(n)
962 l.append(n)
917 f = f * 2
963 f = f * 2
918 n = p
964 n = p
919 i += 1
965 i += 1
920
966
921 r.append(l)
967 r.append(l)
922
968
923 return r
969 return r
924
970
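# between() walks the first-parent chain from each "top" node down to its
# known "bottom" and records the ancestors at exponentially growing distances
# (the 1st, 2nd, 4th, 8th, ... ancestor).  Those sparse samples are what lets
# the discovery code below narrow down where local and remote history diverge
# without shipping whole chains.  A standalone toy version over a plain
# parent function (illustrative only, not the wire protocol):

def sample_between(top, bottom, parent):
    l, n, i, f = [], top, 0, 1
    while n != bottom:
        if i == f:          # keep the 1st, 2nd, 4th, 8th, ... ancestor
            l.append(n)
            f *= 2
        n = parent(n)
        i += 1
    return l

# linear history 10 -> 9 -> ... -> 0, sampling from 10 down to 0
print(sample_between(10, 0, lambda n: n - 1))   # [9, 8, 6, 2]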
925 def findincoming(self, remote, base=None, heads=None, force=False):
971 def findincoming(self, remote, base=None, heads=None, force=False):
926 """Return list of roots of the subsets of missing nodes from remote
972 """Return list of roots of the subsets of missing nodes from remote
927
973
928 If base dict is specified, assume that these nodes and their parents
974 If base dict is specified, assume that these nodes and their parents
929 exist on the remote side and that no child of a node of base exists
975 exist on the remote side and that no child of a node of base exists
930 in both remote and self.
976 in both remote and self.
931         Furthermore base will be updated to include the nodes that exist
977         Furthermore base will be updated to include the nodes that exist
932         in both self and remote but none of whose children exist in both.
978         in both self and remote but none of whose children exist in both.
933 If a list of heads is specified, return only nodes which are heads
979 If a list of heads is specified, return only nodes which are heads
934 or ancestors of these heads.
980 or ancestors of these heads.
935
981
936 All the ancestors of base are in self and in remote.
982 All the ancestors of base are in self and in remote.
937 All the descendants of the list returned are missing in self.
983 All the descendants of the list returned are missing in self.
938 (and so we know that the rest of the nodes are missing in remote, see
984 (and so we know that the rest of the nodes are missing in remote, see
939 outgoing)
985 outgoing)
940 """
986 """
941 m = self.changelog.nodemap
987 m = self.changelog.nodemap
942 search = []
988 search = []
943 fetch = {}
989 fetch = {}
944 seen = {}
990 seen = {}
945 seenbranch = {}
991 seenbranch = {}
946 if base == None:
992 if base == None:
947 base = {}
993 base = {}
948
994
949 if not heads:
995 if not heads:
950 heads = remote.heads()
996 heads = remote.heads()
951
997
952 if self.changelog.tip() == nullid:
998 if self.changelog.tip() == nullid:
953 base[nullid] = 1
999 base[nullid] = 1
954 if heads != [nullid]:
1000 if heads != [nullid]:
955 return [nullid]
1001 return [nullid]
956 return []
1002 return []
957
1003
958 # assume we're closer to the tip than the root
1004 # assume we're closer to the tip than the root
959 # and start by examining the heads
1005 # and start by examining the heads
960 self.ui.status(_("searching for changes\n"))
1006 self.ui.status(_("searching for changes\n"))
961
1007
962 unknown = []
1008 unknown = []
963 for h in heads:
1009 for h in heads:
964 if h not in m:
1010 if h not in m:
965 unknown.append(h)
1011 unknown.append(h)
966 else:
1012 else:
967 base[h] = 1
1013 base[h] = 1
968
1014
969 if not unknown:
1015 if not unknown:
970 return []
1016 return []
971
1017
972 req = dict.fromkeys(unknown)
1018 req = dict.fromkeys(unknown)
973 reqcnt = 0
1019 reqcnt = 0
974
1020
975 # search through remote branches
1021 # search through remote branches
976 # a 'branch' here is a linear segment of history, with four parts:
1022 # a 'branch' here is a linear segment of history, with four parts:
977 # head, root, first parent, second parent
1023 # head, root, first parent, second parent
978 # (a branch always has two parents (or none) by definition)
1024 # (a branch always has two parents (or none) by definition)
979 unknown = remote.branches(unknown)
1025 unknown = remote.branches(unknown)
980 while unknown:
1026 while unknown:
981 r = []
1027 r = []
982 while unknown:
1028 while unknown:
983 n = unknown.pop(0)
1029 n = unknown.pop(0)
984 if n[0] in seen:
1030 if n[0] in seen:
985 continue
1031 continue
986
1032
987 self.ui.debug(_("examining %s:%s\n")
1033 self.ui.debug(_("examining %s:%s\n")
988 % (short(n[0]), short(n[1])))
1034 % (short(n[0]), short(n[1])))
989 if n[0] == nullid: # found the end of the branch
1035 if n[0] == nullid: # found the end of the branch
990 pass
1036 pass
991 elif n in seenbranch:
1037 elif n in seenbranch:
992 self.ui.debug(_("branch already found\n"))
1038 self.ui.debug(_("branch already found\n"))
993 continue
1039 continue
994 elif n[1] and n[1] in m: # do we know the base?
1040 elif n[1] and n[1] in m: # do we know the base?
995 self.ui.debug(_("found incomplete branch %s:%s\n")
1041 self.ui.debug(_("found incomplete branch %s:%s\n")
996 % (short(n[0]), short(n[1])))
1042 % (short(n[0]), short(n[1])))
997 search.append(n) # schedule branch range for scanning
1043 search.append(n) # schedule branch range for scanning
998 seenbranch[n] = 1
1044 seenbranch[n] = 1
999 else:
1045 else:
1000 if n[1] not in seen and n[1] not in fetch:
1046 if n[1] not in seen and n[1] not in fetch:
1001 if n[2] in m and n[3] in m:
1047 if n[2] in m and n[3] in m:
1002 self.ui.debug(_("found new changeset %s\n") %
1048 self.ui.debug(_("found new changeset %s\n") %
1003 short(n[1]))
1049 short(n[1]))
1004 fetch[n[1]] = 1 # earliest unknown
1050 fetch[n[1]] = 1 # earliest unknown
1005 for p in n[2:4]:
1051 for p in n[2:4]:
1006 if p in m:
1052 if p in m:
1007 base[p] = 1 # latest known
1053 base[p] = 1 # latest known
1008
1054
1009 for p in n[2:4]:
1055 for p in n[2:4]:
1010 if p not in req and p not in m:
1056 if p not in req and p not in m:
1011 r.append(p)
1057 r.append(p)
1012 req[p] = 1
1058 req[p] = 1
1013 seen[n[0]] = 1
1059 seen[n[0]] = 1
1014
1060
1015 if r:
1061 if r:
1016 reqcnt += 1
1062 reqcnt += 1
1017 self.ui.debug(_("request %d: %s\n") %
1063 self.ui.debug(_("request %d: %s\n") %
1018 (reqcnt, " ".join(map(short, r))))
1064 (reqcnt, " ".join(map(short, r))))
1019 for p in range(0, len(r), 10):
1065 for p in range(0, len(r), 10):
1020 for b in remote.branches(r[p:p+10]):
1066 for b in remote.branches(r[p:p+10]):
1021 self.ui.debug(_("received %s:%s\n") %
1067 self.ui.debug(_("received %s:%s\n") %
1022 (short(b[0]), short(b[1])))
1068 (short(b[0]), short(b[1])))
1023 unknown.append(b)
1069 unknown.append(b)
1024
1070
1025 # do binary search on the branches we found
1071 # do binary search on the branches we found
1026 while search:
1072 while search:
1027 n = search.pop(0)
1073 n = search.pop(0)
1028 reqcnt += 1
1074 reqcnt += 1
1029 l = remote.between([(n[0], n[1])])[0]
1075 l = remote.between([(n[0], n[1])])[0]
1030 l.append(n[1])
1076 l.append(n[1])
1031 p = n[0]
1077 p = n[0]
1032 f = 1
1078 f = 1
1033 for i in l:
1079 for i in l:
1034 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1080 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1035 if i in m:
1081 if i in m:
1036 if f <= 2:
1082 if f <= 2:
1037 self.ui.debug(_("found new branch changeset %s\n") %
1083 self.ui.debug(_("found new branch changeset %s\n") %
1038 short(p))
1084 short(p))
1039 fetch[p] = 1
1085 fetch[p] = 1
1040 base[i] = 1
1086 base[i] = 1
1041 else:
1087 else:
1042 self.ui.debug(_("narrowed branch search to %s:%s\n")
1088 self.ui.debug(_("narrowed branch search to %s:%s\n")
1043 % (short(p), short(i)))
1089 % (short(p), short(i)))
1044 search.append((p, i))
1090 search.append((p, i))
1045 break
1091 break
1046 p, f = i, f * 2
1092 p, f = i, f * 2
1047
1093
1048 # sanity check our fetch list
1094 # sanity check our fetch list
1049 for f in fetch.keys():
1095 for f in fetch.keys():
1050 if f in m:
1096 if f in m:
1051 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1097 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1052
1098
1053 if base.keys() == [nullid]:
1099 if base.keys() == [nullid]:
1054 if force:
1100 if force:
1055 self.ui.warn(_("warning: repository is unrelated\n"))
1101 self.ui.warn(_("warning: repository is unrelated\n"))
1056 else:
1102 else:
1057 raise util.Abort(_("repository is unrelated"))
1103 raise util.Abort(_("repository is unrelated"))
1058
1104
1059 self.ui.note(_("found new changesets starting at ") +
1105 self.ui.note(_("found new changesets starting at ") +
1060 " ".join([short(f) for f in fetch]) + "\n")
1106 " ".join([short(f) for f in fetch]) + "\n")
1061
1107
1062 self.ui.debug(_("%d total queries\n") % reqcnt)
1108 self.ui.debug(_("%d total queries\n") % reqcnt)
1063
1109
1064 return fetch.keys()
1110 return fetch.keys()
1065
1111
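# The "binary search" phase of findincoming() above repeatedly asks the remote
# for the exponentially spaced samples produced by between(), stops at the
# first sample that already exists locally, and recurses on the remaining gap
# until it is only one or two nodes wide.  On a purely linear segment the
# effect is a binary search for the oldest changeset missing locally; a rough
# standalone sketch of that idea under this simplification (hypothetical
# helper, not the real discovery protocol):

def first_missing(chain, known):
    """chain is ordered newest -> oldest and ends in a node we have;
    known(node) says whether the node already exists locally.  Returns the
    oldest node of the chain that is still missing, or None if none are."""
    lo, hi = 0, len(chain) - 1
    while lo < hi:
        mid = (lo + hi) // 2
        if known(chain[mid]):
            hi = mid            # the known/missing boundary is here or newer
        else:
            lo = mid + 1        # chain[mid] is still missing, look older
    return chain[lo - 1] if lo > 0 else None

# ids 20 (newest) .. 10 (oldest); locally we already have everything <= 14
chain = list(range(20, 9, -1))
print(first_missing(chain, lambda n: n <= 14))   # 15, the oldest missing id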
1066 def findoutgoing(self, remote, base=None, heads=None, force=False):
1112 def findoutgoing(self, remote, base=None, heads=None, force=False):
1067 """Return list of nodes that are roots of subsets not in remote
1113 """Return list of nodes that are roots of subsets not in remote
1068
1114
1069 If base dict is specified, assume that these nodes and their parents
1115 If base dict is specified, assume that these nodes and their parents
1070 exist on the remote side.
1116 exist on the remote side.
1071 If a list of heads is specified, return only nodes which are heads
1117 If a list of heads is specified, return only nodes which are heads
1072 or ancestors of these heads, and return a second element which
1118 or ancestors of these heads, and return a second element which
1073 contains all remote heads which get new children.
1119 contains all remote heads which get new children.
1074 """
1120 """
1075 if base == None:
1121 if base == None:
1076 base = {}
1122 base = {}
1077 self.findincoming(remote, base, heads, force=force)
1123 self.findincoming(remote, base, heads, force=force)
1078
1124
1079 self.ui.debug(_("common changesets up to ")
1125 self.ui.debug(_("common changesets up to ")
1080 + " ".join(map(short, base.keys())) + "\n")
1126 + " ".join(map(short, base.keys())) + "\n")
1081
1127
1082 remain = dict.fromkeys(self.changelog.nodemap)
1128 remain = dict.fromkeys(self.changelog.nodemap)
1083
1129
1084 # prune everything remote has from the tree
1130 # prune everything remote has from the tree
1085 del remain[nullid]
1131 del remain[nullid]
1086 remove = base.keys()
1132 remove = base.keys()
1087 while remove:
1133 while remove:
1088 n = remove.pop(0)
1134 n = remove.pop(0)
1089 if n in remain:
1135 if n in remain:
1090 del remain[n]
1136 del remain[n]
1091 for p in self.changelog.parents(n):
1137 for p in self.changelog.parents(n):
1092 remove.append(p)
1138 remove.append(p)
1093
1139
1094 # find every node whose parents have been pruned
1140 # find every node whose parents have been pruned
1095 subset = []
1141 subset = []
1096 # find every remote head that will get new children
1142 # find every remote head that will get new children
1097 updated_heads = {}
1143 updated_heads = {}
1098 for n in remain:
1144 for n in remain:
1099 p1, p2 = self.changelog.parents(n)
1145 p1, p2 = self.changelog.parents(n)
1100 if p1 not in remain and p2 not in remain:
1146 if p1 not in remain and p2 not in remain:
1101 subset.append(n)
1147 subset.append(n)
1102 if heads:
1148 if heads:
1103 if p1 in heads:
1149 if p1 in heads:
1104 updated_heads[p1] = True
1150 updated_heads[p1] = True
1105 if p2 in heads:
1151 if p2 in heads:
1106 updated_heads[p2] = True
1152 updated_heads[p2] = True
1107
1153
1108 # this is the set of all roots we have to push
1154 # this is the set of all roots we have to push
1109 if heads:
1155 if heads:
1110 return subset, updated_heads.keys()
1156 return subset, updated_heads.keys()
1111 else:
1157 else:
1112 return subset
1158 return subset
1113
1159
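# findoutgoing() prunes every common "base" node and all of its ancestors from
# the full changelog, then keeps the surviving nodes none of whose parents
# survived: those are the roots of the changesets that must be sent.  A toy
# rendering of that pruning on a plain parent map (illustrative names only,
# not the localrepository API):

def outgoing_roots(all_nodes, parents, base):
    remain = set(all_nodes)
    stack = list(base)
    while stack:                  # drop the base nodes and their ancestors
        n = stack.pop()
        if n in remain:
            remain.discard(n)
            stack.extend(parents.get(n, []))
    # a surviving node is a root if none of its parents survived
    return [n for n in sorted(remain)
            if not any(p in remain for p in parents.get(n, []))]

# linear history 1 <- 2 <- 3 <- 4 where the remote already has 1 and 2
parents = {2: [1], 3: [2], 4: [3]}
print(outgoing_roots([1, 2, 3, 4], parents, base=[2]))   # [3]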
1114 def pull(self, remote, heads=None, force=False):
1160 def pull(self, remote, heads=None, force=False):
1115 l = self.lock()
1161 l = self.lock()
1116
1162
1117 fetch = self.findincoming(remote, force=force)
1163 fetch = self.findincoming(remote, force=force)
1118 if fetch == [nullid]:
1164 if fetch == [nullid]:
1119 self.ui.status(_("requesting all changes\n"))
1165 self.ui.status(_("requesting all changes\n"))
1120
1166
1121 if not fetch:
1167 if not fetch:
1122 self.ui.status(_("no changes found\n"))
1168 self.ui.status(_("no changes found\n"))
1123 return 0
1169 return 0
1124
1170
1125 if heads is None:
1171 if heads is None:
1126 cg = remote.changegroup(fetch, 'pull')
1172 cg = remote.changegroup(fetch, 'pull')
1127 else:
1173 else:
1128 cg = remote.changegroupsubset(fetch, heads, 'pull')
1174 cg = remote.changegroupsubset(fetch, heads, 'pull')
1129 return self.addchangegroup(cg, 'pull')
1175 return self.addchangegroup(cg, 'pull')
1130
1176
1131 def push(self, remote, force=False, revs=None):
1177 def push(self, remote, force=False, revs=None):
1132 # there are two ways to push to remote repo:
1178 # there are two ways to push to remote repo:
1133 #
1179 #
1134 # addchangegroup assumes local user can lock remote
1180 # addchangegroup assumes local user can lock remote
1135 # repo (local filesystem, old ssh servers).
1181 # repo (local filesystem, old ssh servers).
1136 #
1182 #
1137 # unbundle assumes local user cannot lock remote repo (new ssh
1183 # unbundle assumes local user cannot lock remote repo (new ssh
1138 # servers, http servers).
1184 # servers, http servers).
1139
1185
1140 if 'unbundle' in remote.capabilities:
1186 if 'unbundle' in remote.capabilities:
1141 return self.push_unbundle(remote, force, revs)
1187 return self.push_unbundle(remote, force, revs)
1142 return self.push_addchangegroup(remote, force, revs)
1188 return self.push_addchangegroup(remote, force, revs)
1143
1189
1144 def prepush(self, remote, force, revs):
1190 def prepush(self, remote, force, revs):
1145 base = {}
1191 base = {}
1146 remote_heads = remote.heads()
1192 remote_heads = remote.heads()
1147 inc = self.findincoming(remote, base, remote_heads, force=force)
1193 inc = self.findincoming(remote, base, remote_heads, force=force)
1148 if not force and inc:
1194 if not force and inc:
1149 self.ui.warn(_("abort: unsynced remote changes!\n"))
1195 self.ui.warn(_("abort: unsynced remote changes!\n"))
1150 self.ui.status(_("(did you forget to sync?"
1196 self.ui.status(_("(did you forget to sync?"
1151 " use push -f to force)\n"))
1197 " use push -f to force)\n"))
1152 return None, 1
1198 return None, 1
1153
1199
1154 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1200 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1155 if revs is not None:
1201 if revs is not None:
1156 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1202 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1157 else:
1203 else:
1158 bases, heads = update, self.changelog.heads()
1204 bases, heads = update, self.changelog.heads()
1159
1205
1160 if not bases:
1206 if not bases:
1161 self.ui.status(_("no changes found\n"))
1207 self.ui.status(_("no changes found\n"))
1162 return None, 1
1208 return None, 1
1163 elif not force:
1209 elif not force:
1164 # FIXME we don't properly detect creation of new heads
1210 # FIXME we don't properly detect creation of new heads
1165 # in the push -r case, assume the user knows what he's doing
1211 # in the push -r case, assume the user knows what he's doing
1166 if not revs and len(remote_heads) < len(heads) \
1212 if not revs and len(remote_heads) < len(heads) \
1167 and remote_heads != [nullid]:
1213 and remote_heads != [nullid]:
1168 self.ui.warn(_("abort: push creates new remote branches!\n"))
1214 self.ui.warn(_("abort: push creates new remote branches!\n"))
1169 self.ui.status(_("(did you forget to merge?"
1215 self.ui.status(_("(did you forget to merge?"
1170 " use push -f to force)\n"))
1216 " use push -f to force)\n"))
1171 return None, 1
1217 return None, 1
1172
1218
1173 if revs is None:
1219 if revs is None:
1174 cg = self.changegroup(update, 'push')
1220 cg = self.changegroup(update, 'push')
1175 else:
1221 else:
1176 cg = self.changegroupsubset(update, revs, 'push')
1222 cg = self.changegroupsubset(update, revs, 'push')
1177 return cg, remote_heads
1223 return cg, remote_heads
1178
1224
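# prepush() refuses a plain push that would add heads on the remote side: when
# no specific revs were requested and the remote is not empty, it simply
# compares head counts (len(remote_heads) < len(heads)), which the FIXME above
# acknowledges is a heuristic rather than exact new-branch detection.  A
# hypothetical helper restating that rule outside the repository object
# (illustrative only):

def push_would_add_heads(local_heads, remote_heads, revs=None, empty_marker=None):
    if revs:
        return False              # push -r: the user is trusted to know
    if remote_heads == [empty_marker]:
        return False              # pushing into an empty repository
    return len(remote_heads) < len(local_heads)

print(push_would_add_heads(['h1', 'h2'], ['h1']))               # True: abort
print(push_would_add_heads(['h1', 'h2'], ['h1'], revs=['h2']))  # False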
1179 def push_addchangegroup(self, remote, force, revs):
1225 def push_addchangegroup(self, remote, force, revs):
1180 lock = remote.lock()
1226 lock = remote.lock()
1181
1227
1182 ret = self.prepush(remote, force, revs)
1228 ret = self.prepush(remote, force, revs)
1183 if ret[0] is not None:
1229 if ret[0] is not None:
1184 cg, remote_heads = ret
1230 cg, remote_heads = ret
1185 return remote.addchangegroup(cg, 'push')
1231 return remote.addchangegroup(cg, 'push')
1186 return ret[1]
1232 return ret[1]
1187
1233
1188 def push_unbundle(self, remote, force, revs):
1234 def push_unbundle(self, remote, force, revs):
1189 # local repo finds heads on server, finds out what revs it
1235 # local repo finds heads on server, finds out what revs it
1190 # must push. once revs transferred, if server finds it has
1236 # must push. once revs transferred, if server finds it has
1191 # different heads (someone else won commit/push race), server
1237 # different heads (someone else won commit/push race), server
1192 # aborts.
1238 # aborts.
1193
1239
1194 ret = self.prepush(remote, force, revs)
1240 ret = self.prepush(remote, force, revs)
1195 if ret[0] is not None:
1241 if ret[0] is not None:
1196 cg, remote_heads = ret
1242 cg, remote_heads = ret
1197 if force: remote_heads = ['force']
1243 if force: remote_heads = ['force']
1198 return remote.unbundle(cg, remote_heads, 'push')
1244 return remote.unbundle(cg, remote_heads, 'push')
1199 return ret[1]
1245 return ret[1]
1200
1246
1201 def changegroupsubset(self, bases, heads, source):
1247 def changegroupsubset(self, bases, heads, source):
1202 """This function generates a changegroup consisting of all the nodes
1248 """This function generates a changegroup consisting of all the nodes
1203 that are descendents of any of the bases, and ancestors of any of
1249 that are descendents of any of the bases, and ancestors of any of
1204 the heads.
1250 the heads.
1205
1251
1206 It is fairly complex as determining which filenodes and which
1252 It is fairly complex as determining which filenodes and which
1207 manifest nodes need to be included for the changeset to be complete
1253 manifest nodes need to be included for the changeset to be complete
1208 is non-trivial.
1254 is non-trivial.
1209
1255
1210 Another wrinkle is doing the reverse, figuring out which changeset in
1256 Another wrinkle is doing the reverse, figuring out which changeset in
1211 the changegroup a particular filenode or manifestnode belongs to."""
1257 the changegroup a particular filenode or manifestnode belongs to."""
1212
1258
1213 self.hook('preoutgoing', throw=True, source=source)
1259 self.hook('preoutgoing', throw=True, source=source)
1214
1260
1215 # Set up some initial variables
1261 # Set up some initial variables
1216 # Make it easy to refer to self.changelog
1262 # Make it easy to refer to self.changelog
1217 cl = self.changelog
1263 cl = self.changelog
1218 # msng is short for missing - compute the list of changesets in this
1264 # msng is short for missing - compute the list of changesets in this
1219 # changegroup.
1265 # changegroup.
1220 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1266 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1221 # Some bases may turn out to be superfluous, and some heads may be
1267 # Some bases may turn out to be superfluous, and some heads may be
1222 # too. nodesbetween will return the minimal set of bases and heads
1268 # too. nodesbetween will return the minimal set of bases and heads
1223 # necessary to re-create the changegroup.
1269 # necessary to re-create the changegroup.
1224
1270
1225 # Known heads are the list of heads that it is assumed the recipient
1271 # Known heads are the list of heads that it is assumed the recipient
1226 # of this changegroup will know about.
1272 # of this changegroup will know about.
1227 knownheads = {}
1273 knownheads = {}
1228 # We assume that all parents of bases are known heads.
1274 # We assume that all parents of bases are known heads.
1229 for n in bases:
1275 for n in bases:
1230 for p in cl.parents(n):
1276 for p in cl.parents(n):
1231 if p != nullid:
1277 if p != nullid:
1232 knownheads[p] = 1
1278 knownheads[p] = 1
1233 knownheads = knownheads.keys()
1279 knownheads = knownheads.keys()
1234 if knownheads:
1280 if knownheads:
1235 # Now that we know what heads are known, we can compute which
1281 # Now that we know what heads are known, we can compute which
1236 # changesets are known. The recipient must know about all
1282 # changesets are known. The recipient must know about all
1237 # changesets required to reach the known heads from the null
1283 # changesets required to reach the known heads from the null
1238 # changeset.
1284 # changeset.
1239 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1285 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1240 junk = None
1286 junk = None
1241 # Transform the list into an ersatz set.
1287 # Transform the list into an ersatz set.
1242 has_cl_set = dict.fromkeys(has_cl_set)
1288 has_cl_set = dict.fromkeys(has_cl_set)
1243 else:
1289 else:
1244 # If there were no known heads, the recipient cannot be assumed to
1290 # If there were no known heads, the recipient cannot be assumed to
1245 # know about any changesets.
1291 # know about any changesets.
1246 has_cl_set = {}
1292 has_cl_set = {}
1247
1293
1248 # Make it easy to refer to self.manifest
1294 # Make it easy to refer to self.manifest
1249 mnfst = self.manifest
1295 mnfst = self.manifest
1250 # We don't know which manifests are missing yet
1296 # We don't know which manifests are missing yet
1251 msng_mnfst_set = {}
1297 msng_mnfst_set = {}
1252 # Nor do we know which filenodes are missing.
1298 # Nor do we know which filenodes are missing.
1253 msng_filenode_set = {}
1299 msng_filenode_set = {}
1254
1300
1255 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1301 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1256 junk = None
1302 junk = None
1257
1303
1258 # A changeset always belongs to itself, so the changenode lookup
1304 # A changeset always belongs to itself, so the changenode lookup
1259 # function for a changenode is identity.
1305 # function for a changenode is identity.
1260 def identity(x):
1306 def identity(x):
1261 return x
1307 return x
1262
1308
1263 # A function generating function. Sets up an environment for the
1309 # A function generating function. Sets up an environment for the
1264 # inner function.
1310 # inner function.
1265 def cmp_by_rev_func(revlog):
1311 def cmp_by_rev_func(revlog):
1266 # Compare two nodes by their revision number in the environment's
1312 # Compare two nodes by their revision number in the environment's
1267 # revision history. Since the revision number both represents the
1313 # revision history. Since the revision number both represents the
1268 # most efficient order to read the nodes in, and represents a
1314 # most efficient order to read the nodes in, and represents a
1269 # topological sorting of the nodes, this function is often useful.
1315 # topological sorting of the nodes, this function is often useful.
1270 def cmp_by_rev(a, b):
1316 def cmp_by_rev(a, b):
1271 return cmp(revlog.rev(a), revlog.rev(b))
1317 return cmp(revlog.rev(a), revlog.rev(b))
1272 return cmp_by_rev
1318 return cmp_by_rev
1273
1319
1274 # If we determine that a particular file or manifest node must be a
1320 # If we determine that a particular file or manifest node must be a
1275 # node that the recipient of the changegroup will already have, we can
1321 # node that the recipient of the changegroup will already have, we can
1276 # also assume the recipient will have all the parents. This function
1322 # also assume the recipient will have all the parents. This function
1277 # prunes them from the set of missing nodes.
1323 # prunes them from the set of missing nodes.
1278 def prune_parents(revlog, hasset, msngset):
1324 def prune_parents(revlog, hasset, msngset):
1279 haslst = hasset.keys()
1325 haslst = hasset.keys()
1280 haslst.sort(cmp_by_rev_func(revlog))
1326 haslst.sort(cmp_by_rev_func(revlog))
1281 for node in haslst:
1327 for node in haslst:
1282 parentlst = [p for p in revlog.parents(node) if p != nullid]
1328 parentlst = [p for p in revlog.parents(node) if p != nullid]
1283 while parentlst:
1329 while parentlst:
1284 n = parentlst.pop()
1330 n = parentlst.pop()
1285 if n not in hasset:
1331 if n not in hasset:
1286 hasset[n] = 1
1332 hasset[n] = 1
1287 p = [p for p in revlog.parents(n) if p != nullid]
1333 p = [p for p in revlog.parents(n) if p != nullid]
1288 parentlst.extend(p)
1334 parentlst.extend(p)
1289 for n in hasset:
1335 for n in hasset:
1290 msngset.pop(n, None)
1336 msngset.pop(n, None)
1291
1337
1292 # This is a function generating function used to set up an environment
1338 # This is a function generating function used to set up an environment
1293 # for the inner function to execute in.
1339 # for the inner function to execute in.
1294 def manifest_and_file_collector(changedfileset):
1340 def manifest_and_file_collector(changedfileset):
1295 # This is an information gathering function that gathers
1341 # This is an information gathering function that gathers
1296 # information from each changeset node that goes out as part of
1342 # information from each changeset node that goes out as part of
1297 # the changegroup. The information gathered is a list of which
1343 # the changegroup. The information gathered is a list of which
1298 # manifest nodes are potentially required (the recipient may
1344 # manifest nodes are potentially required (the recipient may
1299 # already have them) and total list of all files which were
1345 # already have them) and total list of all files which were
1300 # changed in any changeset in the changegroup.
1346 # changed in any changeset in the changegroup.
1301 #
1347 #
1302         # We also remember the first changenode we saw each manifest
1348         # We also remember the first changenode we saw each manifest
1303         # referenced by, so we can later determine which changenode 'owns'
1349         # referenced by, so we can later determine which changenode 'owns'
1304 # the manifest.
1350 # the manifest.
1305 def collect_manifests_and_files(clnode):
1351 def collect_manifests_and_files(clnode):
1306 c = cl.read(clnode)
1352 c = cl.read(clnode)
1307 for f in c[3]:
1353 for f in c[3]:
1308 # This is to make sure we only have one instance of each
1354 # This is to make sure we only have one instance of each
1309 # filename string for each filename.
1355 # filename string for each filename.
1310 changedfileset.setdefault(f, f)
1356 changedfileset.setdefault(f, f)
1311 msng_mnfst_set.setdefault(c[0], clnode)
1357 msng_mnfst_set.setdefault(c[0], clnode)
1312 return collect_manifests_and_files
1358 return collect_manifests_and_files
1313
1359
1314 # Figure out which manifest nodes (of the ones we think might be part
1360 # Figure out which manifest nodes (of the ones we think might be part
1315 # of the changegroup) the recipient must know about and remove them
1361 # of the changegroup) the recipient must know about and remove them
1316 # from the changegroup.
1362 # from the changegroup.
1317 def prune_manifests():
1363 def prune_manifests():
1318 has_mnfst_set = {}
1364 has_mnfst_set = {}
1319 for n in msng_mnfst_set:
1365 for n in msng_mnfst_set:
1320 # If a 'missing' manifest thinks it belongs to a changenode
1366 # If a 'missing' manifest thinks it belongs to a changenode
1321 # the recipient is assumed to have, obviously the recipient
1367 # the recipient is assumed to have, obviously the recipient
1322 # must have that manifest.
1368 # must have that manifest.
1323 linknode = cl.node(mnfst.linkrev(n))
1369 linknode = cl.node(mnfst.linkrev(n))
1324 if linknode in has_cl_set:
1370 if linknode in has_cl_set:
1325 has_mnfst_set[n] = 1
1371 has_mnfst_set[n] = 1
1326 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1372 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1327
1373
1328 # Use the information collected in collect_manifests_and_files to say
1374 # Use the information collected in collect_manifests_and_files to say
1329 # which changenode any manifestnode belongs to.
1375 # which changenode any manifestnode belongs to.
1330 def lookup_manifest_link(mnfstnode):
1376 def lookup_manifest_link(mnfstnode):
1331 return msng_mnfst_set[mnfstnode]
1377 return msng_mnfst_set[mnfstnode]
1332
1378
1333 # A function generating function that sets up the initial environment
1379 # A function generating function that sets up the initial environment
1334         # for the inner function.
1380         # for the inner function.
1335 def filenode_collector(changedfiles):
1381 def filenode_collector(changedfiles):
1336 next_rev = [0]
1382 next_rev = [0]
1337 # This gathers information from each manifestnode included in the
1383 # This gathers information from each manifestnode included in the
1338 # changegroup about which filenodes the manifest node references
1384 # changegroup about which filenodes the manifest node references
1339 # so we can include those in the changegroup too.
1385 # so we can include those in the changegroup too.
1340 #
1386 #
1341 # It also remembers which changenode each filenode belongs to. It
1387 # It also remembers which changenode each filenode belongs to. It
1342             # does this by assuming that a filenode belongs to the changenode
1388             # does this by assuming that a filenode belongs to the changenode
1343             # that the first manifest referencing it belongs to.
1389             # that the first manifest referencing it belongs to.
1344 def collect_msng_filenodes(mnfstnode):
1390 def collect_msng_filenodes(mnfstnode):
1345 r = mnfst.rev(mnfstnode)
1391 r = mnfst.rev(mnfstnode)
1346 if r == next_rev[0]:
1392 if r == next_rev[0]:
1347 # If the last rev we looked at was the one just previous,
1393 # If the last rev we looked at was the one just previous,
1348 # we only need to see a diff.
1394 # we only need to see a diff.
1349 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1395 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1350 # For each line in the delta
1396 # For each line in the delta
1351 for dline in delta.splitlines():
1397 for dline in delta.splitlines():
1352 # get the filename and filenode for that line
1398 # get the filename and filenode for that line
1353 f, fnode = dline.split('\0')
1399 f, fnode = dline.split('\0')
1354 fnode = bin(fnode[:40])
1400 fnode = bin(fnode[:40])
1355 f = changedfiles.get(f, None)
1401 f = changedfiles.get(f, None)
1356 # And if the file is in the list of files we care
1402 # And if the file is in the list of files we care
1357 # about.
1403 # about.
1358 if f is not None:
1404 if f is not None:
1359 # Get the changenode this manifest belongs to
1405 # Get the changenode this manifest belongs to
1360 clnode = msng_mnfst_set[mnfstnode]
1406 clnode = msng_mnfst_set[mnfstnode]
1361 # Create the set of filenodes for the file if
1407 # Create the set of filenodes for the file if
1362 # there isn't one already.
1408 # there isn't one already.
1363 ndset = msng_filenode_set.setdefault(f, {})
1409 ndset = msng_filenode_set.setdefault(f, {})
1364 # And set the filenode's changelog node to the
1410 # And set the filenode's changelog node to the
1365 # manifest's if it hasn't been set already.
1411 # manifest's if it hasn't been set already.
1366 ndset.setdefault(fnode, clnode)
1412 ndset.setdefault(fnode, clnode)
1367 else:
1413 else:
1368 # Otherwise we need a full manifest.
1414 # Otherwise we need a full manifest.
1369 m = mnfst.read(mnfstnode)
1415 m = mnfst.read(mnfstnode)
1370                 # For every file we care about.
1416                 # For every file we care about.
1371 for f in changedfiles:
1417 for f in changedfiles:
1372 fnode = m.get(f, None)
1418 fnode = m.get(f, None)
1373 # If it's in the manifest
1419 # If it's in the manifest
1374 if fnode is not None:
1420 if fnode is not None:
1375 # See comments above.
1421 # See comments above.
1376 clnode = msng_mnfst_set[mnfstnode]
1422 clnode = msng_mnfst_set[mnfstnode]
1377 ndset = msng_filenode_set.setdefault(f, {})
1423 ndset = msng_filenode_set.setdefault(f, {})
1378 ndset.setdefault(fnode, clnode)
1424 ndset.setdefault(fnode, clnode)
1379 # Remember the revision we hope to see next.
1425 # Remember the revision we hope to see next.
1380 next_rev[0] = r + 1
1426 next_rev[0] = r + 1
1381 return collect_msng_filenodes
1427 return collect_msng_filenodes
1382
1428
1383         # We have a list of filenodes we think we need for a file, let's remove
1429         # We have a list of filenodes we think we need for a file, let's remove
1384         # all those we know the recipient must have.
1430         # all those we know the recipient must have.
1385 def prune_filenodes(f, filerevlog):
1431 def prune_filenodes(f, filerevlog):
1386 msngset = msng_filenode_set[f]
1432 msngset = msng_filenode_set[f]
1387 hasset = {}
1433 hasset = {}
1388 # If a 'missing' filenode thinks it belongs to a changenode we
1434 # If a 'missing' filenode thinks it belongs to a changenode we
1389 # assume the recipient must have, then the recipient must have
1435 # assume the recipient must have, then the recipient must have
1390 # that filenode.
1436 # that filenode.
1391 for n in msngset:
1437 for n in msngset:
1392 clnode = cl.node(filerevlog.linkrev(n))
1438 clnode = cl.node(filerevlog.linkrev(n))
1393 if clnode in has_cl_set:
1439 if clnode in has_cl_set:
1394 hasset[n] = 1
1440 hasset[n] = 1
1395 prune_parents(filerevlog, hasset, msngset)
1441 prune_parents(filerevlog, hasset, msngset)
1396
1442
1397         # A function generator function that sets up a context for the
1443         # A function generator function that sets up a context for the
1398 # inner function.
1444 # inner function.
1399 def lookup_filenode_link_func(fname):
1445 def lookup_filenode_link_func(fname):
1400 msngset = msng_filenode_set[fname]
1446 msngset = msng_filenode_set[fname]
1401 # Lookup the changenode the filenode belongs to.
1447 # Lookup the changenode the filenode belongs to.
1402 def lookup_filenode_link(fnode):
1448 def lookup_filenode_link(fnode):
1403 return msngset[fnode]
1449 return msngset[fnode]
1404 return lookup_filenode_link
1450 return lookup_filenode_link
1405
1451
1406         # Now that we have all these utility functions to help out and
1452         # Now that we have all these utility functions to help out and
1407 # logically divide up the task, generate the group.
1453 # logically divide up the task, generate the group.
1408 def gengroup():
1454 def gengroup():
1409 # The set of changed files starts empty.
1455 # The set of changed files starts empty.
1410 changedfiles = {}
1456 changedfiles = {}
1411 # Create a changenode group generator that will call our functions
1457 # Create a changenode group generator that will call our functions
1412 # back to lookup the owning changenode and collect information.
1458 # back to lookup the owning changenode and collect information.
1413 group = cl.group(msng_cl_lst, identity,
1459 group = cl.group(msng_cl_lst, identity,
1414 manifest_and_file_collector(changedfiles))
1460 manifest_and_file_collector(changedfiles))
1415 for chnk in group:
1461 for chnk in group:
1416 yield chnk
1462 yield chnk
1417
1463
1418 # The list of manifests has been collected by the generator
1464 # The list of manifests has been collected by the generator
1419 # calling our functions back.
1465 # calling our functions back.
1420 prune_manifests()
1466 prune_manifests()
1421 msng_mnfst_lst = msng_mnfst_set.keys()
1467 msng_mnfst_lst = msng_mnfst_set.keys()
1422 # Sort the manifestnodes by revision number.
1468 # Sort the manifestnodes by revision number.
1423 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1469 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1424 # Create a generator for the manifestnodes that calls our lookup
1470 # Create a generator for the manifestnodes that calls our lookup
1425 # and data collection functions back.
1471 # and data collection functions back.
1426 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1472 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1427 filenode_collector(changedfiles))
1473 filenode_collector(changedfiles))
1428 for chnk in group:
1474 for chnk in group:
1429 yield chnk
1475 yield chnk
1430
1476
1431 # These are no longer needed, dereference and toss the memory for
1477 # These are no longer needed, dereference and toss the memory for
1432 # them.
1478 # them.
1433 msng_mnfst_lst = None
1479 msng_mnfst_lst = None
1434 msng_mnfst_set.clear()
1480 msng_mnfst_set.clear()
1435
1481
1436 changedfiles = changedfiles.keys()
1482 changedfiles = changedfiles.keys()
1437 changedfiles.sort()
1483 changedfiles.sort()
1438 # Go through all our files in order sorted by name.
1484 # Go through all our files in order sorted by name.
1439 for fname in changedfiles:
1485 for fname in changedfiles:
1440 filerevlog = self.file(fname)
1486 filerevlog = self.file(fname)
1441 # Toss out the filenodes that the recipient isn't really
1487 # Toss out the filenodes that the recipient isn't really
1442 # missing.
1488 # missing.
1443 if msng_filenode_set.has_key(fname):
1489 if msng_filenode_set.has_key(fname):
1444 prune_filenodes(fname, filerevlog)
1490 prune_filenodes(fname, filerevlog)
1445 msng_filenode_lst = msng_filenode_set[fname].keys()
1491 msng_filenode_lst = msng_filenode_set[fname].keys()
1446 else:
1492 else:
1447 msng_filenode_lst = []
1493 msng_filenode_lst = []
1448 # If any filenodes are left, generate the group for them,
1494 # If any filenodes are left, generate the group for them,
1449 # otherwise don't bother.
1495 # otherwise don't bother.
1450 if len(msng_filenode_lst) > 0:
1496 if len(msng_filenode_lst) > 0:
1451 yield changegroup.genchunk(fname)
1497 yield changegroup.genchunk(fname)
1452 # Sort the filenodes by their revision #
1498 # Sort the filenodes by their revision #
1453 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1499 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1454 # Create a group generator and only pass in a changenode
1500 # Create a group generator and only pass in a changenode
1455 # lookup function as we need to collect no information
1501 # lookup function as we need to collect no information
1456 # from filenodes.
1502 # from filenodes.
1457 group = filerevlog.group(msng_filenode_lst,
1503 group = filerevlog.group(msng_filenode_lst,
1458 lookup_filenode_link_func(fname))
1504 lookup_filenode_link_func(fname))
1459 for chnk in group:
1505 for chnk in group:
1460 yield chnk
1506 yield chnk
1461 if msng_filenode_set.has_key(fname):
1507 if msng_filenode_set.has_key(fname):
1462 # Don't need this anymore, toss it to free memory.
1508 # Don't need this anymore, toss it to free memory.
1463 del msng_filenode_set[fname]
1509 del msng_filenode_set[fname]
1464 # Signal that no more groups are left.
1510 # Signal that no more groups are left.
1465 yield changegroup.closechunk()
1511 yield changegroup.closechunk()
1466
1512
1467 if msng_cl_lst:
1513 if msng_cl_lst:
1468 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1514 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1469
1515
1470 return util.chunkbuffer(gengroup())
1516 return util.chunkbuffer(gengroup())
1471
1517
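# The prune_parents() helper inside changegroupsubset() relies on one
# invariant: if the recipient is known to have a node, it necessarily has all
# of that node's ancestors, so every such ancestor can be dropped from the
# "missing" sets.  A standalone rendering of that idea on a parent map (toy
# names, not the revlog API):

def prune_known(parents, has, missing):
    """Remove from `missing` every node in `has` and every ancestor of one."""
    stack, seen = list(has), set()
    while stack:
        n = stack.pop()
        if n in seen:
            continue
        seen.add(n)
        missing.discard(n)
        stack.extend(parents.get(n, []))
    return missing

# a <- b <- c <- d; the recipient is known to have c, so only d stays missing
parents = {'b': ['a'], 'c': ['b'], 'd': ['c']}
print(sorted(prune_known(parents, has={'c'}, missing={'a', 'b', 'c', 'd'})))  # ['d']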
1472 def changegroup(self, basenodes, source):
1518 def changegroup(self, basenodes, source):
1473 """Generate a changegroup of all nodes that we have that a recipient
1519 """Generate a changegroup of all nodes that we have that a recipient
1474 doesn't.
1520 doesn't.
1475
1521
1476 This is much easier than the previous function as we can assume that
1522 This is much easier than the previous function as we can assume that
1477 the recipient has any changenode we aren't sending them."""
1523 the recipient has any changenode we aren't sending them."""
1478
1524
1479 self.hook('preoutgoing', throw=True, source=source)
1525 self.hook('preoutgoing', throw=True, source=source)
1480
1526
1481 cl = self.changelog
1527 cl = self.changelog
1482 nodes = cl.nodesbetween(basenodes, None)[0]
1528 nodes = cl.nodesbetween(basenodes, None)[0]
1483 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1529 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1484
1530
1485 def identity(x):
1531 def identity(x):
1486 return x
1532 return x
1487
1533
1488 def gennodelst(revlog):
1534 def gennodelst(revlog):
1489 for r in xrange(0, revlog.count()):
1535 for r in xrange(0, revlog.count()):
1490 n = revlog.node(r)
1536 n = revlog.node(r)
1491 if revlog.linkrev(n) in revset:
1537 if revlog.linkrev(n) in revset:
1492 yield n
1538 yield n
1493
1539
1494 def changed_file_collector(changedfileset):
1540 def changed_file_collector(changedfileset):
1495 def collect_changed_files(clnode):
1541 def collect_changed_files(clnode):
1496 c = cl.read(clnode)
1542 c = cl.read(clnode)
1497 for fname in c[3]:
1543 for fname in c[3]:
1498 changedfileset[fname] = 1
1544 changedfileset[fname] = 1
1499 return collect_changed_files
1545 return collect_changed_files
1500
1546
1501 def lookuprevlink_func(revlog):
1547 def lookuprevlink_func(revlog):
1502 def lookuprevlink(n):
1548 def lookuprevlink(n):
1503 return cl.node(revlog.linkrev(n))
1549 return cl.node(revlog.linkrev(n))
1504 return lookuprevlink
1550 return lookuprevlink
1505
1551
1506 def gengroup():
1552 def gengroup():
1507 # construct a list of all changed files
1553 # construct a list of all changed files
1508 changedfiles = {}
1554 changedfiles = {}
1509
1555
1510 for chnk in cl.group(nodes, identity,
1556 for chnk in cl.group(nodes, identity,
1511 changed_file_collector(changedfiles)):
1557 changed_file_collector(changedfiles)):
1512 yield chnk
1558 yield chnk
1513 changedfiles = changedfiles.keys()
1559 changedfiles = changedfiles.keys()
1514 changedfiles.sort()
1560 changedfiles.sort()
1515
1561
1516 mnfst = self.manifest
1562 mnfst = self.manifest
1517 nodeiter = gennodelst(mnfst)
1563 nodeiter = gennodelst(mnfst)
1518 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1564 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1519 yield chnk
1565 yield chnk
1520
1566
1521 for fname in changedfiles:
1567 for fname in changedfiles:
1522 filerevlog = self.file(fname)
1568 filerevlog = self.file(fname)
1523 nodeiter = gennodelst(filerevlog)
1569 nodeiter = gennodelst(filerevlog)
1524 nodeiter = list(nodeiter)
1570 nodeiter = list(nodeiter)
1525 if nodeiter:
1571 if nodeiter:
1526 yield changegroup.genchunk(fname)
1572 yield changegroup.genchunk(fname)
1527 lookup = lookuprevlink_func(filerevlog)
1573 lookup = lookuprevlink_func(filerevlog)
1528 for chnk in filerevlog.group(nodeiter, lookup):
1574 for chnk in filerevlog.group(nodeiter, lookup):
1529 yield chnk
1575 yield chnk
1530
1576
1531 yield changegroup.closechunk()
1577 yield changegroup.closechunk()
1532
1578
1533 if nodes:
1579 if nodes:
1534 self.hook('outgoing', node=hex(nodes[0]), source=source)
1580 self.hook('outgoing', node=hex(nodes[0]), source=source)
1535
1581
1536 return util.chunkbuffer(gengroup())
1582 return util.chunkbuffer(gengroup())
1537
1583
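# In the simpler changegroup() above, a manifest or file revision is shipped
# exactly when its linkrev (the changeset that introduced it) belongs to the
# outgoing revision set; that is the filter gennodelst() applies to each
# revlog.  A toy version over plain lists (illustrative only, not the revlog
# API):

def nodes_to_send(revlog_nodes, linkrev, outgoing_revs):
    """Keep the revisions whose introducing changeset is being sent."""
    outgoing = set(outgoing_revs)
    return [n for n in revlog_nodes if linkrev(n) in outgoing]

# file revisions f0..f3 introduced by changesets 0, 1, 2 and 4; sending 2..4
linkrevs = {'f0': 0, 'f1': 1, 'f2': 2, 'f3': 4}
print(nodes_to_send(['f0', 'f1', 'f2', 'f3'], linkrevs.get, [2, 3, 4]))  # ['f2', 'f3']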
1538 def addchangegroup(self, source, srctype):
1584 def addchangegroup(self, source, srctype):
1539 """add changegroup to repo.
1585 """add changegroup to repo.
1540 returns number of heads modified or added + 1."""
1586 returns number of heads modified or added + 1."""
1541
1587
1542 def csmap(x):
1588 def csmap(x):
1543 self.ui.debug(_("add changeset %s\n") % short(x))
1589 self.ui.debug(_("add changeset %s\n") % short(x))
1544 return cl.count()
1590 return cl.count()
1545
1591
1546 def revmap(x):
1592 def revmap(x):
1547 return cl.rev(x)
1593 return cl.rev(x)
1548
1594
1549 if not source:
1595 if not source:
1550 return 0
1596 return 0
1551
1597
1552 self.hook('prechangegroup', throw=True, source=srctype)
1598 self.hook('prechangegroup', throw=True, source=srctype)
1553
1599
1554 changesets = files = revisions = 0
1600 changesets = files = revisions = 0
1555
1601
1556 tr = self.transaction()
1602 tr = self.transaction()
1557
1603
1558 # write changelog data to temp files so concurrent readers will not see
1604 # write changelog data to temp files so concurrent readers will not see
1559         # an inconsistent view
1605         # an inconsistent view
1560 cl = None
1606 cl = None
1561 try:
1607 try:
1562 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1608 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1563
1609
1564 oldheads = len(cl.heads())
1610 oldheads = len(cl.heads())
1565
1611
1566 # pull off the changeset group
1612 # pull off the changeset group
1567 self.ui.status(_("adding changesets\n"))
1613 self.ui.status(_("adding changesets\n"))
1568 cor = cl.count() - 1
1614 cor = cl.count() - 1
1569 chunkiter = changegroup.chunkiter(source)
1615 chunkiter = changegroup.chunkiter(source)
1570 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1616 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1571 raise util.Abort(_("received changelog group is empty"))
1617 raise util.Abort(_("received changelog group is empty"))
1572 cnr = cl.count() - 1
1618 cnr = cl.count() - 1
1573 changesets = cnr - cor
1619 changesets = cnr - cor
1574
1620
1575 # pull off the manifest group
1621 # pull off the manifest group
1576 self.ui.status(_("adding manifests\n"))
1622 self.ui.status(_("adding manifests\n"))
1577 chunkiter = changegroup.chunkiter(source)
1623 chunkiter = changegroup.chunkiter(source)
1578 # no need to check for empty manifest group here:
1624 # no need to check for empty manifest group here:
1579 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1625 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1580 # no new manifest will be created and the manifest group will
1626 # no new manifest will be created and the manifest group will
1581 # be empty during the pull
1627 # be empty during the pull
1582 self.manifest.addgroup(chunkiter, revmap, tr)
1628 self.manifest.addgroup(chunkiter, revmap, tr)
1583
1629
1584 # process the files
1630 # process the files
1585 self.ui.status(_("adding file changes\n"))
1631 self.ui.status(_("adding file changes\n"))
1586 while 1:
1632 while 1:
1587 f = changegroup.getchunk(source)
1633 f = changegroup.getchunk(source)
1588 if not f:
1634 if not f:
1589 break
1635 break
1590 self.ui.debug(_("adding %s revisions\n") % f)
1636 self.ui.debug(_("adding %s revisions\n") % f)
1591 fl = self.file(f)
1637 fl = self.file(f)
1592 o = fl.count()
1638 o = fl.count()
1593 chunkiter = changegroup.chunkiter(source)
1639 chunkiter = changegroup.chunkiter(source)
1594 if fl.addgroup(chunkiter, revmap, tr) is None:
1640 if fl.addgroup(chunkiter, revmap, tr) is None:
1595 raise util.Abort(_("received file revlog group is empty"))
1641 raise util.Abort(_("received file revlog group is empty"))
1596 revisions += fl.count() - o
1642 revisions += fl.count() - o
1597 files += 1
1643 files += 1
1598
1644
1599 cl.writedata()
1645 cl.writedata()
1600 finally:
1646 finally:
1601 if cl:
1647 if cl:
1602 cl.cleanup()
1648 cl.cleanup()
1603
1649
1604 # make changelog see real files again
1650 # make changelog see real files again
1605 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1651 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1606 self.changelog.checkinlinesize(tr)
1652 self.changelog.checkinlinesize(tr)
1607
1653
1608 newheads = len(self.changelog.heads())
1654 newheads = len(self.changelog.heads())
1609 heads = ""
1655 heads = ""
1610 if oldheads and newheads != oldheads:
1656 if oldheads and newheads != oldheads:
1611 heads = _(" (%+d heads)") % (newheads - oldheads)
1657 heads = _(" (%+d heads)") % (newheads - oldheads)
1612
1658
1613 self.ui.status(_("added %d changesets"
1659 self.ui.status(_("added %d changesets"
1614 " with %d changes to %d files%s\n")
1660 " with %d changes to %d files%s\n")
1615 % (changesets, revisions, files, heads))
1661 % (changesets, revisions, files, heads))
1616
1662
1617 if changesets > 0:
1663 if changesets > 0:
1618 self.hook('pretxnchangegroup', throw=True,
1664 self.hook('pretxnchangegroup', throw=True,
1619 node=hex(self.changelog.node(cor+1)), source=srctype)
1665 node=hex(self.changelog.node(cor+1)), source=srctype)
1620
1666
1621 tr.close()
1667 tr.close()
1622
1668
1623 if changesets > 0:
1669 if changesets > 0:
1624 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1670 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1625 source=srctype)
1671 source=srctype)
1626
1672
1627 for i in range(cor + 1, cnr + 1):
1673 for i in range(cor + 1, cnr + 1):
1628 self.hook("incoming", node=hex(self.changelog.node(i)),
1674 self.hook("incoming", node=hex(self.changelog.node(i)),
1629 source=srctype)
1675 source=srctype)
1630
1676
1631 return newheads - oldheads + 1
1677 return newheads - oldheads + 1
1632
1678
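The reader loop above fixes the changegroup stream layout: one chunk group of changesets, one of manifests, then a run of (filename chunk, file revlog group) pairs ended by an empty filename chunk. A minimal sketch of a consumer that follows the same framing, using only the changegroup helpers already referenced in this code (what it does with each chunk is left as a stub):

from mercurial import changegroup

def walk_changegroup(source):
    """drain a changegroup stream in the order addchangegroup reads it"""
    for group in ('changesets', 'manifests'):
        for chunk in changegroup.chunkiter(source):
            pass                       # a real consumer feeds these to addgroup()
    while 1:
        fname = changegroup.getchunk(source)
        if not fname:                  # empty chunk terminates the stream
            break
        for chunk in changegroup.chunkiter(source):
            pass                       # revisions of file `fname`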
1633 def update(self, node, allow=False, force=False, choose=None,
1679 def update(self, node, allow=False, force=False, choose=None,
1634 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1680 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1635 pl = self.dirstate.parents()
1681 pl = self.dirstate.parents()
1636 if not force and pl[1] != nullid:
1682 if not force and pl[1] != nullid:
1637 raise util.Abort(_("outstanding uncommitted merges"))
1683 raise util.Abort(_("outstanding uncommitted merges"))
1638
1684
1639 err = False
1685 err = False
1640
1686
1641 p1, p2 = pl[0], node
1687 p1, p2 = pl[0], node
1642 pa = self.changelog.ancestor(p1, p2)
1688 pa = self.changelog.ancestor(p1, p2)
1643 m1n = self.changelog.read(p1)[0]
1689 m1n = self.changelog.read(p1)[0]
1644 m2n = self.changelog.read(p2)[0]
1690 m2n = self.changelog.read(p2)[0]
1645 man = self.manifest.ancestor(m1n, m2n)
1691 man = self.manifest.ancestor(m1n, m2n)
1646 m1 = self.manifest.read(m1n)
1692 m1 = self.manifest.read(m1n)
1647 mf1 = self.manifest.readflags(m1n)
1693 mf1 = self.manifest.readflags(m1n)
1648 m2 = self.manifest.read(m2n).copy()
1694 m2 = self.manifest.read(m2n).copy()
1649 mf2 = self.manifest.readflags(m2n)
1695 mf2 = self.manifest.readflags(m2n)
1650 ma = self.manifest.read(man)
1696 ma = self.manifest.read(man)
1651 mfa = self.manifest.readflags(man)
1697 mfa = self.manifest.readflags(man)
1652
1698
1653 modified, added, removed, deleted, unknown = self.changes()
1699 modified, added, removed, deleted, unknown = self.changes()
1654
1700
1655 # is this a jump, or a merge? i.e. is there a linear path
1701 # is this a jump, or a merge? i.e. is there a linear path
1656 # from p1 to p2?
1702 # from p1 to p2?
1657 linear_path = (pa == p1 or pa == p2)
1703 linear_path = (pa == p1 or pa == p2)
1658
1704
1659 if allow and linear_path:
1705 if allow and linear_path:
1660 raise util.Abort(_("there is nothing to merge, just use "
1706 raise util.Abort(_("there is nothing to merge, just use "
1661 "'hg update' or look at 'hg heads'"))
1707 "'hg update' or look at 'hg heads'"))
1662 if allow and not forcemerge:
1708 if allow and not forcemerge:
1663 if modified or added or removed:
1709 if modified or added or removed:
1664 raise util.Abort(_("outstanding uncommitted changes"))
1710 raise util.Abort(_("outstanding uncommitted changes"))
1665
1711
1666 if not forcemerge and not force:
1712 if not forcemerge and not force:
1667 for f in unknown:
1713 for f in unknown:
1668 if f in m2:
1714 if f in m2:
1669 t1 = self.wread(f)
1715 t1 = self.wread(f)
1670 t2 = self.file(f).read(m2[f])
1716 t2 = self.file(f).read(m2[f])
1671 if cmp(t1, t2) != 0:
1717 if cmp(t1, t2) != 0:
1672 raise util.Abort(_("'%s' already exists in the working"
1718 raise util.Abort(_("'%s' already exists in the working"
1673 " dir and differs from remote") % f)
1719 " dir and differs from remote") % f)
1674
1720
1675 # resolve the manifest to determine which files
1721 # resolve the manifest to determine which files
1676 # we care about merging
1722 # we care about merging
1677 self.ui.note(_("resolving manifests\n"))
1723 self.ui.note(_("resolving manifests\n"))
1678 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1724 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1679 (force, allow, moddirstate, linear_path))
1725 (force, allow, moddirstate, linear_path))
1680 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1726 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1681 (short(man), short(m1n), short(m2n)))
1727 (short(man), short(m1n), short(m2n)))
1682
1728
1683 merge = {}
1729 merge = {}
1684 get = {}
1730 get = {}
1685 remove = []
1731 remove = []
1686
1732
1687 # construct a working dir manifest
1733 # construct a working dir manifest
1688 mw = m1.copy()
1734 mw = m1.copy()
1689 mfw = mf1.copy()
1735 mfw = mf1.copy()
1690 umap = dict.fromkeys(unknown)
1736 umap = dict.fromkeys(unknown)
1691
1737
1692 for f in added + modified + unknown:
1738 for f in added + modified + unknown:
1693 mw[f] = ""
1739 mw[f] = ""
1694 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1740 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1695
1741
1696 if moddirstate and not wlock:
1742 if moddirstate and not wlock:
1697 wlock = self.wlock()
1743 wlock = self.wlock()
1698
1744
1699 for f in deleted + removed:
1745 for f in deleted + removed:
1700 if f in mw:
1746 if f in mw:
1701 del mw[f]
1747 del mw[f]
1702
1748
1703 # If we're jumping between revisions (as opposed to merging),
1749 # If we're jumping between revisions (as opposed to merging),
1704 # and if neither the working directory nor the target rev has
1750 # and if neither the working directory nor the target rev has
1705 # the file, then we need to remove it from the dirstate, to
1751 # the file, then we need to remove it from the dirstate, to
1706 # prevent the dirstate from listing the file when it is no
1752 # prevent the dirstate from listing the file when it is no
1707 # longer in the manifest.
1753 # longer in the manifest.
1708 if moddirstate and linear_path and f not in m2:
1754 if moddirstate and linear_path and f not in m2:
1709 self.dirstate.forget((f,))
1755 self.dirstate.forget((f,))
1710
1756
1711 # Compare manifests
1757 # Compare manifests
1712 for f, n in mw.iteritems():
1758 for f, n in mw.iteritems():
1713 if choose and not choose(f):
1759 if choose and not choose(f):
1714 continue
1760 continue
1715 if f in m2:
1761 if f in m2:
1716 s = 0
1762 s = 0
1717
1763
1718 # is the wfile new since m1, and match m2?
1764 # is the wfile new since m1, and match m2?
1719 if f not in m1:
1765 if f not in m1:
1720 t1 = self.wread(f)
1766 t1 = self.wread(f)
1721 t2 = self.file(f).read(m2[f])
1767 t2 = self.file(f).read(m2[f])
1722 if cmp(t1, t2) == 0:
1768 if cmp(t1, t2) == 0:
1723 n = m2[f]
1769 n = m2[f]
1724 del t1, t2
1770 del t1, t2
1725
1771
1726 # are files different?
1772 # are files different?
1727 if n != m2[f]:
1773 if n != m2[f]:
1728 a = ma.get(f, nullid)
1774 a = ma.get(f, nullid)
1729 # are both different from the ancestor?
1775 # are both different from the ancestor?
1730 if n != a and m2[f] != a:
1776 if n != a and m2[f] != a:
1731 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1777 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1732 # merge executable bits
1778 # merge executable bits
1733 # "if we changed or they changed, change in merge"
1779 # "if we changed or they changed, change in merge"
1734 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1780 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1735 mode = ((a^b) | (a^c)) ^ a
1781 mode = ((a^b) | (a^c)) ^ a
1736 merge[f] = (m1.get(f, nullid), m2[f], mode)
1782 merge[f] = (m1.get(f, nullid), m2[f], mode)
1737 s = 1
1783 s = 1
1738 # are we clobbering?
1784 # are we clobbering?
1739 # is remote's version newer?
1785 # is remote's version newer?
1740 # or are we going back in time?
1786 # or are we going back in time?
1741 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1787 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1742 self.ui.debug(_(" remote %s is newer, get\n") % f)
1788 self.ui.debug(_(" remote %s is newer, get\n") % f)
1743 get[f] = m2[f]
1789 get[f] = m2[f]
1744 s = 1
1790 s = 1
1745 elif f in umap or f in added:
1791 elif f in umap or f in added:
1746 # this unknown file is the same as the checkout
1792 # this unknown file is the same as the checkout
1747 # we need to reset the dirstate if the file was added
1793 # we need to reset the dirstate if the file was added
1748 get[f] = m2[f]
1794 get[f] = m2[f]
1749
1795
1750 if not s and mfw[f] != mf2[f]:
1796 if not s and mfw[f] != mf2[f]:
1751 if force:
1797 if force:
1752 self.ui.debug(_(" updating permissions for %s\n") % f)
1798 self.ui.debug(_(" updating permissions for %s\n") % f)
1753 util.set_exec(self.wjoin(f), mf2[f])
1799 util.set_exec(self.wjoin(f), mf2[f])
1754 else:
1800 else:
1755 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1801 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1756 mode = ((a^b) | (a^c)) ^ a
1802 mode = ((a^b) | (a^c)) ^ a
1757 if mode != b:
1803 if mode != b:
1758 self.ui.debug(_(" updating permissions for %s\n")
1804 self.ui.debug(_(" updating permissions for %s\n")
1759 % f)
1805 % f)
1760 util.set_exec(self.wjoin(f), mode)
1806 util.set_exec(self.wjoin(f), mode)
1761 del m2[f]
1807 del m2[f]
1762 elif f in ma:
1808 elif f in ma:
1763 if n != ma[f]:
1809 if n != ma[f]:
1764 r = _("d")
1810 r = _("d")
1765 if not force and (linear_path or allow):
1811 if not force and (linear_path or allow):
1766 r = self.ui.prompt(
1812 r = self.ui.prompt(
1767 (_(" local changed %s which remote deleted\n") % f) +
1813 (_(" local changed %s which remote deleted\n") % f) +
1768 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1814 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1769 if r == _("d"):
1815 if r == _("d"):
1770 remove.append(f)
1816 remove.append(f)
1771 else:
1817 else:
1772 self.ui.debug(_("other deleted %s\n") % f)
1818 self.ui.debug(_("other deleted %s\n") % f)
1773 remove.append(f) # other deleted it
1819 remove.append(f) # other deleted it
1774 else:
1820 else:
1775 # file is created on branch or in working directory
1821 # file is created on branch or in working directory
1776 if force and f not in umap:
1822 if force and f not in umap:
1777 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1823 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1778 remove.append(f)
1824 remove.append(f)
1779 elif n == m1.get(f, nullid): # same as parent
1825 elif n == m1.get(f, nullid): # same as parent
1780 if p2 == pa: # going backwards?
1826 if p2 == pa: # going backwards?
1781 self.ui.debug(_("remote deleted %s\n") % f)
1827 self.ui.debug(_("remote deleted %s\n") % f)
1782 remove.append(f)
1828 remove.append(f)
1783 else:
1829 else:
1784 self.ui.debug(_("local modified %s, keeping\n") % f)
1830 self.ui.debug(_("local modified %s, keeping\n") % f)
1785 else:
1831 else:
1786 self.ui.debug(_("working dir created %s, keeping\n") % f)
1832 self.ui.debug(_("working dir created %s, keeping\n") % f)
1787
1833
1788 for f, n in m2.iteritems():
1834 for f, n in m2.iteritems():
1789 if choose and not choose(f):
1835 if choose and not choose(f):
1790 continue
1836 continue
1791 if f[0] == "/":
1837 if f[0] == "/":
1792 continue
1838 continue
1793 if f in ma and n != ma[f]:
1839 if f in ma and n != ma[f]:
1794 r = _("k")
1840 r = _("k")
1795 if not force and (linear_path or allow):
1841 if not force and (linear_path or allow):
1796 r = self.ui.prompt(
1842 r = self.ui.prompt(
1797 (_("remote changed %s which local deleted\n") % f) +
1843 (_("remote changed %s which local deleted\n") % f) +
1798 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1844 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1799 if r == _("k"):
1845 if r == _("k"):
1800 get[f] = n
1846 get[f] = n
1801 elif f not in ma:
1847 elif f not in ma:
1802 self.ui.debug(_("remote created %s\n") % f)
1848 self.ui.debug(_("remote created %s\n") % f)
1803 get[f] = n
1849 get[f] = n
1804 else:
1850 else:
1805 if force or p2 == pa: # going backwards?
1851 if force or p2 == pa: # going backwards?
1806 self.ui.debug(_("local deleted %s, recreating\n") % f)
1852 self.ui.debug(_("local deleted %s, recreating\n") % f)
1807 get[f] = n
1853 get[f] = n
1808 else:
1854 else:
1809 self.ui.debug(_("local deleted %s\n") % f)
1855 self.ui.debug(_("local deleted %s\n") % f)
1810
1856
1811 del mw, m1, m2, ma
1857 del mw, m1, m2, ma
1812
1858
1813 if force:
1859 if force:
1814 for f in merge:
1860 for f in merge:
1815 get[f] = merge[f][1]
1861 get[f] = merge[f][1]
1816 merge = {}
1862 merge = {}
1817
1863
1818 if linear_path or force:
1864 if linear_path or force:
1819 # we don't need to do any magic, just jump to the new rev
1865 # we don't need to do any magic, just jump to the new rev
1820 branch_merge = False
1866 branch_merge = False
1821 p1, p2 = p2, nullid
1867 p1, p2 = p2, nullid
1822 else:
1868 else:
1823 if not allow:
1869 if not allow:
1824 self.ui.status(_("this update spans a branch"
1870 self.ui.status(_("this update spans a branch"
1825 " affecting the following files:\n"))
1871 " affecting the following files:\n"))
1826 fl = merge.keys() + get.keys()
1872 fl = merge.keys() + get.keys()
1827 fl.sort()
1873 fl.sort()
1828 for f in fl:
1874 for f in fl:
1829 cf = ""
1875 cf = ""
1830 if f in merge:
1876 if f in merge:
1831 cf = _(" (resolve)")
1877 cf = _(" (resolve)")
1832 self.ui.status(" %s%s\n" % (f, cf))
1878 self.ui.status(" %s%s\n" % (f, cf))
1833 self.ui.warn(_("aborting update spanning branches!\n"))
1879 self.ui.warn(_("aborting update spanning branches!\n"))
1834 self.ui.status(_("(use 'hg merge' to merge across branches"
1880 self.ui.status(_("(use 'hg merge' to merge across branches"
1835 " or 'hg update -C' to lose changes)\n"))
1881 " or 'hg update -C' to lose changes)\n"))
1836 return 1
1882 return 1
1837 branch_merge = True
1883 branch_merge = True
1838
1884
1839 xp1 = hex(p1)
1885 xp1 = hex(p1)
1840 xp2 = hex(p2)
1886 xp2 = hex(p2)
1841 if p2 == nullid: xxp2 = ''
1887 if p2 == nullid: xxp2 = ''
1842 else: xxp2 = xp2
1888 else: xxp2 = xp2
1843
1889
1844 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1890 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1845
1891
1846 # get the files we don't need to change
1892 # get the files we don't need to change
1847 files = get.keys()
1893 files = get.keys()
1848 files.sort()
1894 files.sort()
1849 for f in files:
1895 for f in files:
1850 if f[0] == "/":
1896 if f[0] == "/":
1851 continue
1897 continue
1852 self.ui.note(_("getting %s\n") % f)
1898 self.ui.note(_("getting %s\n") % f)
1853 t = self.file(f).read(get[f])
1899 t = self.file(f).read(get[f])
1854 self.wwrite(f, t)
1900 self.wwrite(f, t)
1855 util.set_exec(self.wjoin(f), mf2[f])
1901 util.set_exec(self.wjoin(f), mf2[f])
1856 if moddirstate:
1902 if moddirstate:
1857 if branch_merge:
1903 if branch_merge:
1858 self.dirstate.update([f], 'n', st_mtime=-1)
1904 self.dirstate.update([f], 'n', st_mtime=-1)
1859 else:
1905 else:
1860 self.dirstate.update([f], 'n')
1906 self.dirstate.update([f], 'n')
1861
1907
1862 # merge the tricky bits
1908 # merge the tricky bits
1863 failedmerge = []
1909 failedmerge = []
1864 files = merge.keys()
1910 files = merge.keys()
1865 files.sort()
1911 files.sort()
1866 for f in files:
1912 for f in files:
1867 self.ui.status(_("merging %s\n") % f)
1913 self.ui.status(_("merging %s\n") % f)
1868 my, other, flag = merge[f]
1914 my, other, flag = merge[f]
1869 ret = self.merge3(f, my, other, xp1, xp2)
1915 ret = self.merge3(f, my, other, xp1, xp2)
1870 if ret:
1916 if ret:
1871 err = True
1917 err = True
1872 failedmerge.append(f)
1918 failedmerge.append(f)
1873 util.set_exec(self.wjoin(f), flag)
1919 util.set_exec(self.wjoin(f), flag)
1874 if moddirstate:
1920 if moddirstate:
1875 if branch_merge:
1921 if branch_merge:
1876 # We've done a branch merge, mark this file as merged
1922 # We've done a branch merge, mark this file as merged
1877 # so that we properly record the merger later
1923 # so that we properly record the merger later
1878 self.dirstate.update([f], 'm')
1924 self.dirstate.update([f], 'm')
1879 else:
1925 else:
1880 # We've update-merged a locally modified file, so
1926 # We've update-merged a locally modified file, so
1881 # we set the dirstate to emulate a normal checkout
1927 # we set the dirstate to emulate a normal checkout
1882 # of that file some time in the past. Thus our
1928 # of that file some time in the past. Thus our
1883 # merge will appear as a normal local file
1929 # merge will appear as a normal local file
1884 # modification.
1930 # modification.
1885 f_len = len(self.file(f).read(other))
1931 f_len = len(self.file(f).read(other))
1886 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1932 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1887
1933
1888 remove.sort()
1934 remove.sort()
1889 for f in remove:
1935 for f in remove:
1890 self.ui.note(_("removing %s\n") % f)
1936 self.ui.note(_("removing %s\n") % f)
1891 util.audit_path(f)
1937 util.audit_path(f)
1892 try:
1938 try:
1893 util.unlink(self.wjoin(f))
1939 util.unlink(self.wjoin(f))
1894 except OSError, inst:
1940 except OSError, inst:
1895 if inst.errno != errno.ENOENT:
1941 if inst.errno != errno.ENOENT:
1896 self.ui.warn(_("update failed to remove %s: %s!\n") %
1942 self.ui.warn(_("update failed to remove %s: %s!\n") %
1897 (f, inst.strerror))
1943 (f, inst.strerror))
1898 if moddirstate:
1944 if moddirstate:
1899 if branch_merge:
1945 if branch_merge:
1900 self.dirstate.update(remove, 'r')
1946 self.dirstate.update(remove, 'r')
1901 else:
1947 else:
1902 self.dirstate.forget(remove)
1948 self.dirstate.forget(remove)
1903
1949
1904 if moddirstate:
1950 if moddirstate:
1905 self.dirstate.setparents(p1, p2)
1951 self.dirstate.setparents(p1, p2)
1906
1952
1907 if show_stats:
1953 if show_stats:
1908 stats = ((len(get), _("updated")),
1954 stats = ((len(get), _("updated")),
1909 (len(merge) - len(failedmerge), _("merged")),
1955 (len(merge) - len(failedmerge), _("merged")),
1910 (len(remove), _("removed")),
1956 (len(remove), _("removed")),
1911 (len(failedmerge), _("unresolved")))
1957 (len(failedmerge), _("unresolved")))
1912 note = ", ".join([_("%d files %s") % s for s in stats])
1958 note = ", ".join([_("%d files %s") % s for s in stats])
1913 self.ui.status("%s\n" % note)
1959 self.ui.status("%s\n" % note)
1914 if moddirstate:
1960 if moddirstate:
1915 if branch_merge:
1961 if branch_merge:
1916 if failedmerge:
1962 if failedmerge:
1917 self.ui.status(_("There are unresolved merges,"
1963 self.ui.status(_("There are unresolved merges,"
1918 " you can redo the full merge using:\n"
1964 " you can redo the full merge using:\n"
1919 " hg update -C %s\n"
1965 " hg update -C %s\n"
1920 " hg merge %s\n"
1966 " hg merge %s\n"
1921 % (self.changelog.rev(p1),
1967 % (self.changelog.rev(p1),
1922 self.changelog.rev(p2))))
1968 self.changelog.rev(p2))))
1923 else:
1969 else:
1924 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1970 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1925 elif failedmerge:
1971 elif failedmerge:
1926 self.ui.status(_("There are unresolved merges with"
1972 self.ui.status(_("There are unresolved merges with"
1927 " locally modified files.\n"))
1973 " locally modified files.\n"))
1928
1974
1929 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1975 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1930 return err
1976 return err
1931
1977
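The exec-bit handling in the loop above leans on the identity mode = ((a^b) | (a^c)) ^ a, with a the ancestor's flag and b, c the two sides: whichever side changed the bit relative to the ancestor wins, and a change beats no change. A throwaway check of the interesting cases:

def merge_exec_bit(a, b, c):
    # a = ancestor flag, b = local (working dir) flag, c = remote flag; all 0 or 1
    return ((a ^ b) | (a ^ c)) ^ a

assert merge_exec_bit(0, 0, 0) == 0   # untouched on both sides
assert merge_exec_bit(0, 1, 0) == 1   # we set it, remote did not: keep it set
assert merge_exec_bit(0, 0, 1) == 1   # remote set it: take the change
assert merge_exec_bit(1, 0, 1) == 0   # we cleared it: the clear survives the merge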
1932 def merge3(self, fn, my, other, p1, p2):
1978 def merge3(self, fn, my, other, p1, p2):
1933 """perform a 3-way merge in the working directory"""
1979 """perform a 3-way merge in the working directory"""
1934
1980
1935 def temp(prefix, node):
1981 def temp(prefix, node):
1936 pre = "%s~%s." % (os.path.basename(fn), prefix)
1982 pre = "%s~%s." % (os.path.basename(fn), prefix)
1937 (fd, name) = tempfile.mkstemp(prefix=pre)
1983 (fd, name) = tempfile.mkstemp(prefix=pre)
1938 f = os.fdopen(fd, "wb")
1984 f = os.fdopen(fd, "wb")
1939 self.wwrite(fn, fl.read(node), f)
1985 self.wwrite(fn, fl.read(node), f)
1940 f.close()
1986 f.close()
1941 return name
1987 return name
1942
1988
1943 fl = self.file(fn)
1989 fl = self.file(fn)
1944 base = fl.ancestor(my, other)
1990 base = fl.ancestor(my, other)
1945 a = self.wjoin(fn)
1991 a = self.wjoin(fn)
1946 b = temp("base", base)
1992 b = temp("base", base)
1947 c = temp("other", other)
1993 c = temp("other", other)
1948
1994
1949 self.ui.note(_("resolving %s\n") % fn)
1995 self.ui.note(_("resolving %s\n") % fn)
1950 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1996 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1951 (fn, short(my), short(other), short(base)))
1997 (fn, short(my), short(other), short(base)))
1952
1998
1953 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1999 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1954 or "hgmerge")
2000 or "hgmerge")
1955 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
2001 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1956 environ={'HG_FILE': fn,
2002 environ={'HG_FILE': fn,
1957 'HG_MY_NODE': p1,
2003 'HG_MY_NODE': p1,
1958 'HG_OTHER_NODE': p2,
2004 'HG_OTHER_NODE': p2,
1959 'HG_FILE_MY_NODE': hex(my),
2005 'HG_FILE_MY_NODE': hex(my),
1960 'HG_FILE_OTHER_NODE': hex(other),
2006 'HG_FILE_OTHER_NODE': hex(other),
1961 'HG_FILE_BASE_NODE': hex(base)})
2007 'HG_FILE_BASE_NODE': hex(base)})
1962 if r:
2008 if r:
1963 self.ui.warn(_("merging %s failed!\n") % fn)
2009 self.ui.warn(_("merging %s failed!\n") % fn)
1964
2010
1965 os.unlink(b)
2011 os.unlink(b)
1966 os.unlink(c)
2012 os.unlink(c)
1967 return r
2013 return r
1968
2014
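merge3() hands the conflict to an external program named by $HGMERGE, the ui.merge setting, or the hgmerge fallback, passing the working file plus temporary base and other copies on the command line and the HG_* names shown above in the environment. A toy merge tool is easy to sketch; the script name and its take-the-other-side strategy are invented purely for illustration:

#!/usr/bin/env python
# toy-merge.py: resolve every conflict by taking the "other" version wholesale.
# merge3 invokes it as:  toy-merge.py <working file> <base copy> <other copy>
import os, shutil, sys

local, base, other = sys.argv[1:4]
sys.stderr.write("merging %s\n" % os.environ.get('HG_FILE', local))
shutil.copyfile(other, local)
sys.exit(0)   # a non-zero exit makes merge3 print "merging ... failed!"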
1969 def verify(self):
2015 def verify(self):
1970 filelinkrevs = {}
2016 filelinkrevs = {}
1971 filenodes = {}
2017 filenodes = {}
1972 changesets = revisions = files = 0
2018 changesets = revisions = files = 0
1973 errors = [0]
2019 errors = [0]
1974 warnings = [0]
2020 warnings = [0]
1975 neededmanifests = {}
2021 neededmanifests = {}
1976
2022
1977 def err(msg):
2023 def err(msg):
1978 self.ui.warn(msg + "\n")
2024 self.ui.warn(msg + "\n")
1979 errors[0] += 1
2025 errors[0] += 1
1980
2026
1981 def warn(msg):
2027 def warn(msg):
1982 self.ui.warn(msg + "\n")
2028 self.ui.warn(msg + "\n")
1983 warnings[0] += 1
2029 warnings[0] += 1
1984
2030
1985 def checksize(obj, name):
2031 def checksize(obj, name):
1986 d = obj.checksize()
2032 d = obj.checksize()
1987 if d[0]:
2033 if d[0]:
1988 err(_("%s data length off by %d bytes") % (name, d[0]))
2034 err(_("%s data length off by %d bytes") % (name, d[0]))
1989 if d[1]:
2035 if d[1]:
1990 err(_("%s index contains %d extra bytes") % (name, d[1]))
2036 err(_("%s index contains %d extra bytes") % (name, d[1]))
1991
2037
1992 def checkversion(obj, name):
2038 def checkversion(obj, name):
1993 if obj.version != revlog.REVLOGV0:
2039 if obj.version != revlog.REVLOGV0:
1994 if not revlogv1:
2040 if not revlogv1:
1995 warn(_("warning: `%s' uses revlog format 1") % name)
2041 warn(_("warning: `%s' uses revlog format 1") % name)
1996 elif revlogv1:
2042 elif revlogv1:
1997 warn(_("warning: `%s' uses revlog format 0") % name)
2043 warn(_("warning: `%s' uses revlog format 0") % name)
1998
2044
1999 revlogv1 = self.revlogversion != revlog.REVLOGV0
2045 revlogv1 = self.revlogversion != revlog.REVLOGV0
2000 if self.ui.verbose or revlogv1 != self.revlogv1:
2046 if self.ui.verbose or revlogv1 != self.revlogv1:
2001 self.ui.status(_("repository uses revlog format %d\n") %
2047 self.ui.status(_("repository uses revlog format %d\n") %
2002 (revlogv1 and 1 or 0))
2048 (revlogv1 and 1 or 0))
2003
2049
2004 seen = {}
2050 seen = {}
2005 self.ui.status(_("checking changesets\n"))
2051 self.ui.status(_("checking changesets\n"))
2006 checksize(self.changelog, "changelog")
2052 checksize(self.changelog, "changelog")
2007
2053
2008 for i in range(self.changelog.count()):
2054 for i in range(self.changelog.count()):
2009 changesets += 1
2055 changesets += 1
2010 n = self.changelog.node(i)
2056 n = self.changelog.node(i)
2011 l = self.changelog.linkrev(n)
2057 l = self.changelog.linkrev(n)
2012 if l != i:
2058 if l != i:
2013 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2059 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2014 if n in seen:
2060 if n in seen:
2015 err(_("duplicate changeset at revision %d") % i)
2061 err(_("duplicate changeset at revision %d") % i)
2016 seen[n] = 1
2062 seen[n] = 1
2017
2063
2018 for p in self.changelog.parents(n):
2064 for p in self.changelog.parents(n):
2019 if p not in self.changelog.nodemap:
2065 if p not in self.changelog.nodemap:
2020 err(_("changeset %s has unknown parent %s") %
2066 err(_("changeset %s has unknown parent %s") %
2021 (short(n), short(p)))
2067 (short(n), short(p)))
2022 try:
2068 try:
2023 changes = self.changelog.read(n)
2069 changes = self.changelog.read(n)
2024 except KeyboardInterrupt:
2070 except KeyboardInterrupt:
2025 self.ui.warn(_("interrupted"))
2071 self.ui.warn(_("interrupted"))
2026 raise
2072 raise
2027 except Exception, inst:
2073 except Exception, inst:
2028 err(_("unpacking changeset %s: %s") % (short(n), inst))
2074 err(_("unpacking changeset %s: %s") % (short(n), inst))
2029 continue
2075 continue
2030
2076
2031 neededmanifests[changes[0]] = n
2077 neededmanifests[changes[0]] = n
2032
2078
2033 for f in changes[3]:
2079 for f in changes[3]:
2034 filelinkrevs.setdefault(f, []).append(i)
2080 filelinkrevs.setdefault(f, []).append(i)
2035
2081
2036 seen = {}
2082 seen = {}
2037 self.ui.status(_("checking manifests\n"))
2083 self.ui.status(_("checking manifests\n"))
2038 checkversion(self.manifest, "manifest")
2084 checkversion(self.manifest, "manifest")
2039 checksize(self.manifest, "manifest")
2085 checksize(self.manifest, "manifest")
2040
2086
2041 for i in range(self.manifest.count()):
2087 for i in range(self.manifest.count()):
2042 n = self.manifest.node(i)
2088 n = self.manifest.node(i)
2043 l = self.manifest.linkrev(n)
2089 l = self.manifest.linkrev(n)
2044
2090
2045 if l < 0 or l >= self.changelog.count():
2091 if l < 0 or l >= self.changelog.count():
2046 err(_("bad manifest link (%d) at revision %d") % (l, i))
2092 err(_("bad manifest link (%d) at revision %d") % (l, i))
2047
2093
2048 if n in neededmanifests:
2094 if n in neededmanifests:
2049 del neededmanifests[n]
2095 del neededmanifests[n]
2050
2096
2051 if n in seen:
2097 if n in seen:
2052 err(_("duplicate manifest at revision %d") % i)
2098 err(_("duplicate manifest at revision %d") % i)
2053
2099
2054 seen[n] = 1
2100 seen[n] = 1
2055
2101
2056 for p in self.manifest.parents(n):
2102 for p in self.manifest.parents(n):
2057 if p not in self.manifest.nodemap:
2103 if p not in self.manifest.nodemap:
2058 err(_("manifest %s has unknown parent %s") %
2104 err(_("manifest %s has unknown parent %s") %
2059 (short(n), short(p)))
2105 (short(n), short(p)))
2060
2106
2061 try:
2107 try:
2062 delta = mdiff.patchtext(self.manifest.delta(n))
2108 delta = mdiff.patchtext(self.manifest.delta(n))
2063 except KeyboardInterrupt:
2109 except KeyboardInterrupt:
2064 self.ui.warn(_("interrupted"))
2110 self.ui.warn(_("interrupted"))
2065 raise
2111 raise
2066 except Exception, inst:
2112 except Exception, inst:
2067 err(_("unpacking manifest %s: %s") % (short(n), inst))
2113 err(_("unpacking manifest %s: %s") % (short(n), inst))
2068 continue
2114 continue
2069
2115
2070 try:
2116 try:
2071 ff = [ l.split('\0') for l in delta.splitlines() ]
2117 ff = [ l.split('\0') for l in delta.splitlines() ]
2072 for f, fn in ff:
2118 for f, fn in ff:
2073 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2119 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2074 except (ValueError, TypeError), inst:
2120 except (ValueError, TypeError), inst:
2075 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2121 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2076
2122
2077 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2123 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2078
2124
2079 for m, c in neededmanifests.items():
2125 for m, c in neededmanifests.items():
2080 err(_("Changeset %s refers to unknown manifest %s") %
2126 err(_("Changeset %s refers to unknown manifest %s") %
2081 (short(m), short(c)))
2127 (short(m), short(c)))
2082 del neededmanifests
2128 del neededmanifests
2083
2129
2084 for f in filenodes:
2130 for f in filenodes:
2085 if f not in filelinkrevs:
2131 if f not in filelinkrevs:
2086 err(_("file %s in manifest but not in changesets") % f)
2132 err(_("file %s in manifest but not in changesets") % f)
2087
2133
2088 for f in filelinkrevs:
2134 for f in filelinkrevs:
2089 if f not in filenodes:
2135 if f not in filenodes:
2090 err(_("file %s in changeset but not in manifest") % f)
2136 err(_("file %s in changeset but not in manifest") % f)
2091
2137
2092 self.ui.status(_("checking files\n"))
2138 self.ui.status(_("checking files\n"))
2093 ff = filenodes.keys()
2139 ff = filenodes.keys()
2094 ff.sort()
2140 ff.sort()
2095 for f in ff:
2141 for f in ff:
2096 if f == "/dev/null":
2142 if f == "/dev/null":
2097 continue
2143 continue
2098 files += 1
2144 files += 1
2099 if not f:
2145 if not f:
2100 err(_("file without name in manifest %s") % short(n))
2146 err(_("file without name in manifest %s") % short(n))
2101 continue
2147 continue
2102 fl = self.file(f)
2148 fl = self.file(f)
2103 checkversion(fl, f)
2149 checkversion(fl, f)
2104 checksize(fl, f)
2150 checksize(fl, f)
2105
2151
2106 nodes = {nullid: 1}
2152 nodes = {nullid: 1}
2107 seen = {}
2153 seen = {}
2108 for i in range(fl.count()):
2154 for i in range(fl.count()):
2109 revisions += 1
2155 revisions += 1
2110 n = fl.node(i)
2156 n = fl.node(i)
2111
2157
2112 if n in seen:
2158 if n in seen:
2113 err(_("%s: duplicate revision %d") % (f, i))
2159 err(_("%s: duplicate revision %d") % (f, i))
2114 if n not in filenodes[f]:
2160 if n not in filenodes[f]:
2115 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2161 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2116 else:
2162 else:
2117 del filenodes[f][n]
2163 del filenodes[f][n]
2118
2164
2119 flr = fl.linkrev(n)
2165 flr = fl.linkrev(n)
2120 if flr not in filelinkrevs.get(f, []):
2166 if flr not in filelinkrevs.get(f, []):
2121 err(_("%s:%s points to unexpected changeset %d")
2167 err(_("%s:%s points to unexpected changeset %d")
2122 % (f, short(n), flr))
2168 % (f, short(n), flr))
2123 else:
2169 else:
2124 filelinkrevs[f].remove(flr)
2170 filelinkrevs[f].remove(flr)
2125
2171
2126 # verify contents
2172 # verify contents
2127 try:
2173 try:
2128 t = fl.read(n)
2174 t = fl.read(n)
2129 except KeyboardInterrupt:
2175 except KeyboardInterrupt:
2130 self.ui.warn(_("interrupted"))
2176 self.ui.warn(_("interrupted"))
2131 raise
2177 raise
2132 except Exception, inst:
2178 except Exception, inst:
2133 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2179 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2134
2180
2135 # verify parents
2181 # verify parents
2136 (p1, p2) = fl.parents(n)
2182 (p1, p2) = fl.parents(n)
2137 if p1 not in nodes:
2183 if p1 not in nodes:
2138 err(_("file %s:%s unknown parent 1 %s") %
2184 err(_("file %s:%s unknown parent 1 %s") %
2139 (f, short(n), short(p1)))
2185 (f, short(n), short(p1)))
2140 if p2 not in nodes:
2186 if p2 not in nodes:
2141 err(_("file %s:%s unknown parent 2 %s") %
2187 err(_("file %s:%s unknown parent 2 %s") %
2142 (f, short(n), short(p2)))
2188 (f, short(n), short(p2)))
2143 nodes[n] = 1
2189 nodes[n] = 1
2144
2190
2145 # cross-check
2191 # cross-check
2146 for node in filenodes[f]:
2192 for node in filenodes[f]:
2147 err(_("node %s in manifests not in %s") % (hex(node), f))
2193 err(_("node %s in manifests not in %s") % (hex(node), f))
2148
2194
2149 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2195 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2150 (files, changesets, revisions))
2196 (files, changesets, revisions))
2151
2197
2152 if warnings[0]:
2198 if warnings[0]:
2153 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2199 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2154 if errors[0]:
2200 if errors[0]:
2155 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2201 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2156 return 1
2202 return 1
2157
2203
2158 # used to avoid circular references so destructors work
2204 # used to avoid circular references so destructors work
2159 def aftertrans(base):
2205 def aftertrans(base):
2160 p = base
2206 p = base
2161 def a():
2207 def a():
2162 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2208 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2163 util.rename(os.path.join(p, "journal.dirstate"),
2209 util.rename(os.path.join(p, "journal.dirstate"),
2164 os.path.join(p, "undo.dirstate"))
2210 os.path.join(p, "undo.dirstate"))
2165 return a
2211 return a
2166
2212
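aftertrans() deliberately closes over the base path instead of the repository object, so the undo-file renames can run from the transaction's destructor without a reference cycle (as the comment above notes). Calling the returned closure after a successful transaction performs the two renames; a small illustration, assuming this module is importable as mercurial.localrepo and that the journal lives in the usual .hg directory:

from mercurial import localrepo

undoer = localrepo.aftertrans('.hg')
undoer()   # .hg/journal -> .hg/undo, .hg/journal.dirstate -> .hg/undo.dirstate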
@@ -1,950 +1,957 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform-specific implementations
2 util.py - Mercurial utility functions and platform-specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 from i18n import gettext as _
13 from i18n import gettext as _
14 from demandload import *
14 from demandload import *
15 demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
15 demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
16 demandload(globals(), "os threading time")
16 demandload(globals(), "os threading time")
17
17
18 class SignalInterrupt(Exception):
18 class SignalInterrupt(Exception):
19 """Exception raised on SIGTERM and SIGHUP."""
19 """Exception raised on SIGTERM and SIGHUP."""
20
20
21 def pipefilter(s, cmd):
21 def pipefilter(s, cmd):
22 '''filter string S through command CMD, returning its output'''
22 '''filter string S through command CMD, returning its output'''
23 (pout, pin) = popen2.popen2(cmd, -1, 'b')
23 (pout, pin) = popen2.popen2(cmd, -1, 'b')
24 def writer():
24 def writer():
25 try:
25 try:
26 pin.write(s)
26 pin.write(s)
27 pin.close()
27 pin.close()
28 except IOError, inst:
28 except IOError, inst:
29 if inst.errno != errno.EPIPE:
29 if inst.errno != errno.EPIPE:
30 raise
30 raise
31
31
32 # we should use select instead on UNIX, but this will work on most
32 # we should use select instead on UNIX, but this will work on most
33 # systems, including Windows
33 # systems, including Windows
34 w = threading.Thread(target=writer)
34 w = threading.Thread(target=writer)
35 w.start()
35 w.start()
36 f = pout.read()
36 f = pout.read()
37 pout.close()
37 pout.close()
38 w.join()
38 w.join()
39 return f
39 return f
40
40
41 def tempfilter(s, cmd):
41 def tempfilter(s, cmd):
42 '''filter string S through a pair of temporary files with CMD.
42 '''filter string S through a pair of temporary files with CMD.
43 CMD is used as a template to create the real command to be run,
43 CMD is used as a template to create the real command to be run,
44 with the strings INFILE and OUTFILE replaced by the real names of
44 with the strings INFILE and OUTFILE replaced by the real names of
45 the temporary files generated.'''
45 the temporary files generated.'''
46 inname, outname = None, None
46 inname, outname = None, None
47 try:
47 try:
48 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
48 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
49 fp = os.fdopen(infd, 'wb')
49 fp = os.fdopen(infd, 'wb')
50 fp.write(s)
50 fp.write(s)
51 fp.close()
51 fp.close()
52 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
52 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
53 os.close(outfd)
53 os.close(outfd)
54 cmd = cmd.replace('INFILE', inname)
54 cmd = cmd.replace('INFILE', inname)
55 cmd = cmd.replace('OUTFILE', outname)
55 cmd = cmd.replace('OUTFILE', outname)
56 code = os.system(cmd)
56 code = os.system(cmd)
57 if code: raise Abort(_("command '%s' failed: %s") %
57 if code: raise Abort(_("command '%s' failed: %s") %
58 (cmd, explain_exit(code)))
58 (cmd, explain_exit(code)))
59 return open(outname, 'rb').read()
59 return open(outname, 'rb').read()
60 finally:
60 finally:
61 try:
61 try:
62 if inname: os.unlink(inname)
62 if inname: os.unlink(inname)
63 except: pass
63 except: pass
64 try:
64 try:
65 if outname: os.unlink(outname)
65 if outname: os.unlink(outname)
66 except: pass
66 except: pass
67
67
68 filtertable = {
68 filtertable = {
69 'tempfile:': tempfilter,
69 'tempfile:': tempfilter,
70 'pipe:': pipefilter,
70 'pipe:': pipefilter,
71 }
71 }
72
72
73 def filter(s, cmd):
73 def filter(s, cmd):
74 "filter a string through a command that transforms its input to its output"
74 "filter a string through a command that transforms its input to its output"
75 for name, fn in filtertable.iteritems():
75 for name, fn in filtertable.iteritems():
76 if cmd.startswith(name):
76 if cmd.startswith(name):
77 return fn(s, cmd[len(name):].lstrip())
77 return fn(s, cmd[len(name):].lstrip())
78 return pipefilter(s, cmd)
78 return pipefilter(s, cmd)
79
79
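filter() picks the 'tempfile:' or 'pipe:' strategy from filtertable and falls back to pipefilter for a bare command. A usage sketch; the tr invocations are only examples of external filter programs and assume a POSIX shell:

from mercurial import util

# pipe: feed the string to the command's stdin and read its stdout
assert util.filter("hello\n", "pipe: tr a-z A-Z") == "HELLO\n"

# tempfile: INFILE/OUTFILE in the template are replaced with real temp file names
assert util.filter("hello\n", "tempfile: tr a-z A-Z < INFILE > OUTFILE") == "HELLO\n"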
80 def find_in_path(name, path, default=None):
80 def find_in_path(name, path, default=None):
81 '''find name in search path. path can be string (will be split
81 '''find name in search path. path can be string (will be split
82 with os.pathsep), or iterable thing that returns strings. if name
82 with os.pathsep), or iterable thing that returns strings. if name
83 found, return path to name. else return default.'''
83 found, return path to name. else return default.'''
84 if isinstance(path, str):
84 if isinstance(path, str):
85 path = path.split(os.pathsep)
85 path = path.split(os.pathsep)
86 for p in path:
86 for p in path:
87 p_name = os.path.join(p, name)
87 p_name = os.path.join(p, name)
88 if os.path.exists(p_name):
88 if os.path.exists(p_name):
89 return p_name
89 return p_name
90 return default
90 return default
91
91
92 def patch(strip, patchname, ui):
92 def patch(strip, patchname, ui):
93 """apply the patch <patchname> to the working directory.
93 """apply the patch <patchname> to the working directory.
94 a list of patched files is returned"""
94 a list of patched files is returned"""
95 patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
95 patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
96 fp = os.popen('%s -p%d < "%s"' % (patcher, strip, patchname))
96 fp = os.popen('%s -p%d < "%s"' % (patcher, strip, patchname))
97 files = {}
97 files = {}
98 for line in fp:
98 for line in fp:
99 line = line.rstrip()
99 line = line.rstrip()
100 ui.status("%s\n" % line)
100 ui.status("%s\n" % line)
101 if line.startswith('patching file '):
101 if line.startswith('patching file '):
102 pf = parse_patch_output(line)
102 pf = parse_patch_output(line)
103 files.setdefault(pf, 1)
103 files.setdefault(pf, 1)
104 code = fp.close()
104 code = fp.close()
105 if code:
105 if code:
106 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
106 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
107 return files.keys()
107 return files.keys()
108
108
109 def binary(s):
109 def binary(s):
110 """return true if a string is binary data using diff's heuristic"""
110 """return true if a string is binary data using diff's heuristic"""
111 if s and '\0' in s[:4096]:
111 if s and '\0' in s[:4096]:
112 return True
112 return True
113 return False
113 return False
114
114
115 def unique(g):
115 def unique(g):
116 """return the uniq elements of iterable g"""
116 """return the uniq elements of iterable g"""
117 seen = {}
117 seen = {}
118 for f in g:
118 for f in g:
119 if f not in seen:
119 if f not in seen:
120 seen[f] = 1
120 seen[f] = 1
121 yield f
121 yield f
122
122
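Unlike building a set, unique() is a generator that preserves first-seen order; for instance:

from mercurial import util
assert list(util.unique(['a', 'b', 'a', 'c'])) == ['a', 'b', 'c']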
123 class Abort(Exception):
123 class Abort(Exception):
124 """Raised if a command needs to print an error and exit."""
124 """Raised if a command needs to print an error and exit."""
125
125
126 def always(fn): return True
126 def always(fn): return True
127 def never(fn): return False
127 def never(fn): return False
128
128
129 def patkind(name, dflt_pat='glob'):
129 def patkind(name, dflt_pat='glob'):
130 """Split a string into an optional pattern kind prefix and the
130 """Split a string into an optional pattern kind prefix and the
131 actual pattern."""
131 actual pattern."""
132 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
132 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
133 if name.startswith(prefix + ':'): return name.split(':', 1)
133 if name.startswith(prefix + ':'): return name.split(':', 1)
134 return dflt_pat, name
134 return dflt_pat, name
135
135
136 def globre(pat, head='^', tail='$'):
136 def globre(pat, head='^', tail='$'):
137 "convert a glob pattern into a regexp"
137 "convert a glob pattern into a regexp"
138 i, n = 0, len(pat)
138 i, n = 0, len(pat)
139 res = ''
139 res = ''
140 group = False
140 group = False
141 def peek(): return i < n and pat[i]
141 def peek(): return i < n and pat[i]
142 while i < n:
142 while i < n:
143 c = pat[i]
143 c = pat[i]
144 i = i+1
144 i = i+1
145 if c == '*':
145 if c == '*':
146 if peek() == '*':
146 if peek() == '*':
147 i += 1
147 i += 1
148 res += '.*'
148 res += '.*'
149 else:
149 else:
150 res += '[^/]*'
150 res += '[^/]*'
151 elif c == '?':
151 elif c == '?':
152 res += '.'
152 res += '.'
153 elif c == '[':
153 elif c == '[':
154 j = i
154 j = i
155 if j < n and pat[j] in '!]':
155 if j < n and pat[j] in '!]':
156 j += 1
156 j += 1
157 while j < n and pat[j] != ']':
157 while j < n and pat[j] != ']':
158 j += 1
158 j += 1
159 if j >= n:
159 if j >= n:
160 res += '\\['
160 res += '\\['
161 else:
161 else:
162 stuff = pat[i:j].replace('\\','\\\\')
162 stuff = pat[i:j].replace('\\','\\\\')
163 i = j + 1
163 i = j + 1
164 if stuff[0] == '!':
164 if stuff[0] == '!':
165 stuff = '^' + stuff[1:]
165 stuff = '^' + stuff[1:]
166 elif stuff[0] == '^':
166 elif stuff[0] == '^':
167 stuff = '\\' + stuff
167 stuff = '\\' + stuff
168 res = '%s[%s]' % (res, stuff)
168 res = '%s[%s]' % (res, stuff)
169 elif c == '{':
169 elif c == '{':
170 group = True
170 group = True
171 res += '(?:'
171 res += '(?:'
172 elif c == '}' and group:
172 elif c == '}' and group:
173 res += ')'
173 res += ')'
174 group = False
174 group = False
175 elif c == ',' and group:
175 elif c == ',' and group:
176 res += '|'
176 res += '|'
177 elif c == '\\':
177 elif c == '\\':
178 p = peek()
178 p = peek()
179 if p:
179 if p:
180 i += 1
180 i += 1
181 res += re.escape(p)
181 res += re.escape(p)
182 else:
182 else:
183 res += re.escape(c)
183 res += re.escape(c)
184 else:
184 else:
185 res += re.escape(c)
185 res += re.escape(c)
186 return head + res + tail
186 return head + res + tail
187
187
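In globre(), a single '*' compiles to '[^/]*' and therefore stops at directory separators, while '**' compiles to '.*' and crosses them; '{a,b}' becomes a non-capturing alternation. A quick, illustrative check of the resulting matching behaviour:

import re
from mercurial import util

deep = re.compile(util.globre('src/**.py')).match
flat = re.compile(util.globre('src/*.py')).match
assert deep('src/a.py') and deep('src/pkg/b.py')
assert flat('src/a.py') and not flat('src/pkg/b.py')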
188 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
188 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
189
189
190 def pathto(n1, n2):
190 def pathto(n1, n2):
191 '''return the relative path from one place to another.
191 '''return the relative path from one place to another.
192 this returns a path in the form used by the local filesystem, not hg.'''
192 this returns a path in the form used by the local filesystem, not hg.'''
193 if not n1: return localpath(n2)
193 if not n1: return localpath(n2)
194 a, b = n1.split('/'), n2.split('/')
194 a, b = n1.split('/'), n2.split('/')
195 a.reverse()
195 a.reverse()
196 b.reverse()
196 b.reverse()
197 while a and b and a[-1] == b[-1]:
197 while a and b and a[-1] == b[-1]:
198 a.pop()
198 a.pop()
199 b.pop()
199 b.pop()
200 b.reverse()
200 b.reverse()
201 return os.sep.join((['..'] * len(a)) + b)
201 return os.sep.join((['..'] * len(a)) + b)
202
202
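pathto() strips the common prefix of the two repository paths and climbs back down with '..' components, returning a path in local filesystem form. For example, with a POSIX os.sep:

from mercurial import util
assert util.pathto('foo/bar', 'foo/baz/quux') == '../baz/quux'
assert util.pathto('foo/bar', 'foo/bar/baz') == 'baz'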
203 def canonpath(root, cwd, myname):
203 def canonpath(root, cwd, myname):
204 """return the canonical path of myname, given cwd and root"""
204 """return the canonical path of myname, given cwd and root"""
205 if root == os.sep:
205 if root == os.sep:
206 rootsep = os.sep
206 rootsep = os.sep
207 elif root.endswith(os.sep):
207 elif root.endswith(os.sep):
208 rootsep = root
208 rootsep = root
209 else:
209 else:
210 rootsep = root + os.sep
210 rootsep = root + os.sep
211 name = myname
211 name = myname
212 if not os.path.isabs(name):
212 if not os.path.isabs(name):
213 name = os.path.join(root, cwd, name)
213 name = os.path.join(root, cwd, name)
214 name = os.path.normpath(name)
214 name = os.path.normpath(name)
215 if name != rootsep and name.startswith(rootsep):
215 if name != rootsep and name.startswith(rootsep):
216 name = name[len(rootsep):]
216 name = name[len(rootsep):]
217 audit_path(name)
217 audit_path(name)
218 return pconvert(name)
218 return pconvert(name)
219 elif name == root:
219 elif name == root:
220 return ''
220 return ''
221 else:
221 else:
222 # Determine whether `name' is in the hierarchy at or beneath `root',
222 # Determine whether `name' is in the hierarchy at or beneath `root',
223 # by iterating name=dirname(name) until that causes no change (can't
223 # by iterating name=dirname(name) until that causes no change (can't
224 # check name == '/', because that doesn't work on windows). For each
224 # check name == '/', because that doesn't work on windows). For each
225 # `name', compare dev/inode numbers. If they match, the list `rel'
225 # `name', compare dev/inode numbers. If they match, the list `rel'
226 # holds the reversed list of components making up the relative file
226 # holds the reversed list of components making up the relative file
227 # name we want.
227 # name we want.
228 root_st = os.stat(root)
228 root_st = os.stat(root)
229 rel = []
229 rel = []
230 while True:
230 while True:
231 try:
231 try:
232 name_st = os.stat(name)
232 name_st = os.stat(name)
233 except OSError:
233 except OSError:
234 break
234 break
235 if samestat(name_st, root_st):
235 if samestat(name_st, root_st):
236 rel.reverse()
236 rel.reverse()
237 name = os.path.join(*rel)
237 name = os.path.join(*rel)
238 audit_path(name)
238 audit_path(name)
239 return pconvert(name)
239 return pconvert(name)
240 dirname, basename = os.path.split(name)
240 dirname, basename = os.path.split(name)
241 rel.append(basename)
241 rel.append(basename)
242 if dirname == name:
242 if dirname == name:
243 break
243 break
244 name = dirname
244 name = dirname
245
245
246 raise Abort('%s not under root' % myname)
246 raise Abort('%s not under root' % myname)
247
247
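canonpath() anchors a user-supplied name to the repository root, producing a root-relative, slash-separated path or raising Abort when the name escapes the root. A sketch, assuming a POSIX system with the repository checked out at /repo:

from mercurial import util

assert util.canonpath('/repo', 'src', 'util.py') == 'src/util.py'
assert util.canonpath('/repo', '', '/repo/src/util.py') == 'src/util.py'
try:
    util.canonpath('/repo', '', '/etc/passwd')
except util.Abort:
    pass   # anything outside the root is rejected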
248 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
248 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
249 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
249 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
250
250
251 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
251 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
252 if os.name == 'nt':
252 if os.name == 'nt':
253 dflt_pat = 'glob'
253 dflt_pat = 'glob'
254 else:
254 else:
255 dflt_pat = 'relpath'
255 dflt_pat = 'relpath'
256 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
256 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
257
257
258 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
258 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
259 """build a function to match a set of file patterns
259 """build a function to match a set of file patterns
260
260
261 arguments:
261 arguments:
262 canonroot - the canonical root of the tree you're matching against
262 canonroot - the canonical root of the tree you're matching against
263 cwd - the current working directory, if relevant
263 cwd - the current working directory, if relevant
264 names - patterns to find
264 names - patterns to find
265 inc - patterns to include
265 inc - patterns to include
266 exc - patterns to exclude
266 exc - patterns to exclude
267 head - a regex to prepend to patterns to control whether a match is rooted
267 head - a regex to prepend to patterns to control whether a match is rooted
268
268
269 a pattern is one of:
269 a pattern is one of:
270 'glob:<rooted glob>'
270 'glob:<rooted glob>'
271 're:<rooted regexp>'
271 're:<rooted regexp>'
272 'path:<rooted path>'
272 'path:<rooted path>'
273 'relglob:<relative glob>'
273 'relglob:<relative glob>'
274 'relpath:<relative path>'
274 'relpath:<relative path>'
275 'relre:<relative regexp>'
275 'relre:<relative regexp>'
276 '<rooted path or regexp>'
276 '<rooted path or regexp>'
277
277
278 returns:
278 returns:
279 a 3-tuple containing
279 a 3-tuple containing
280 - list of explicit non-pattern names passed in
280 - list of explicit non-pattern names passed in
281 - a bool match(filename) function
281 - a bool match(filename) function
282 - a bool indicating if any patterns were passed in
282 - a bool indicating if any patterns were passed in
283
283
284 todo:
284 todo:
285 make head regex a rooted bool
285 make head regex a rooted bool
286 """
286 """
287
287
288 def contains_glob(name):
288 def contains_glob(name):
289 for c in name:
289 for c in name:
290 if c in _globchars: return True
290 if c in _globchars: return True
291 return False
291 return False
292
292
293 def regex(kind, name, tail):
293 def regex(kind, name, tail):
294 '''convert a pattern into a regular expression'''
294 '''convert a pattern into a regular expression'''
295 if kind == 're':
295 if kind == 're':
296 return name
296 return name
297 elif kind == 'path':
297 elif kind == 'path':
298 return '^' + re.escape(name) + '(?:/|$)'
298 return '^' + re.escape(name) + '(?:/|$)'
299 elif kind == 'relglob':
299 elif kind == 'relglob':
300 return head + globre(name, '(?:|.*/)', tail)
300 return head + globre(name, '(?:|.*/)', tail)
301 elif kind == 'relpath':
301 elif kind == 'relpath':
302 return head + re.escape(name) + tail
302 return head + re.escape(name) + tail
303 elif kind == 'relre':
303 elif kind == 'relre':
304 if name.startswith('^'):
304 if name.startswith('^'):
305 return name
305 return name
306 return '.*' + name
306 return '.*' + name
307 return head + globre(name, '', tail)
307 return head + globre(name, '', tail)
308
308
309 def matchfn(pats, tail):
309 def matchfn(pats, tail):
310 """build a matching function from a set of patterns"""
310 """build a matching function from a set of patterns"""
311 if not pats:
311 if not pats:
312 return
312 return
313 matches = []
313 matches = []
314 for k, p in pats:
314 for k, p in pats:
315 try:
315 try:
316 pat = '(?:%s)' % regex(k, p, tail)
316 pat = '(?:%s)' % regex(k, p, tail)
317 matches.append(re.compile(pat).match)
317 matches.append(re.compile(pat).match)
318 except re.error:
318 except re.error:
319 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
319 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
320 else: raise Abort("invalid pattern (%s): %s" % (k, p))
320 else: raise Abort("invalid pattern (%s): %s" % (k, p))
321
321
322 def buildfn(text):
322 def buildfn(text):
323 for m in matches:
323 for m in matches:
324 r = m(text)
324 r = m(text)
325 if r:
325 if r:
326 return r
326 return r
327
327
328 return buildfn
328 return buildfn
329
329
330 def globprefix(pat):
330 def globprefix(pat):
331 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
331 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
332 root = []
332 root = []
333 for p in pat.split(os.sep):
333 for p in pat.split(os.sep):
334 if contains_glob(p): break
334 if contains_glob(p): break
335 root.append(p)
335 root.append(p)
336 return '/'.join(root)
336 return '/'.join(root)
337
337
338 pats = []
338 pats = []
339 files = []
339 files = []
340 roots = []
340 roots = []
341 for kind, name in [patkind(p, dflt_pat) for p in names]:
341 for kind, name in [patkind(p, dflt_pat) for p in names]:
342 if kind in ('glob', 'relpath'):
342 if kind in ('glob', 'relpath'):
343 name = canonpath(canonroot, cwd, name)
343 name = canonpath(canonroot, cwd, name)
344 if name == '':
344 if name == '':
345 kind, name = 'glob', '**'
345 kind, name = 'glob', '**'
346 if kind in ('glob', 'path', 're'):
346 if kind in ('glob', 'path', 're'):
347 pats.append((kind, name))
347 pats.append((kind, name))
348 if kind == 'glob':
348 if kind == 'glob':
349 root = globprefix(name)
349 root = globprefix(name)
350 if root: roots.append(root)
350 if root: roots.append(root)
351 elif kind == 'relpath':
351 elif kind == 'relpath':
352 files.append((kind, name))
352 files.append((kind, name))
353 roots.append(name)
353 roots.append(name)
354
354
355 patmatch = matchfn(pats, '$') or always
355 patmatch = matchfn(pats, '$') or always
356 filematch = matchfn(files, '(?:/|$)') or always
356 filematch = matchfn(files, '(?:/|$)') or always
357 incmatch = always
357 incmatch = always
358 if inc:
358 if inc:
359 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
359 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
360 incmatch = matchfn(inckinds, '(?:/|$)')
360 incmatch = matchfn(inckinds, '(?:/|$)')
361 excmatch = lambda fn: False
361 excmatch = lambda fn: False
362 if exc:
362 if exc:
363 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
363 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
364 excmatch = matchfn(exckinds, '(?:/|$)')
364 excmatch = matchfn(exckinds, '(?:/|$)')
365
365
366 return (roots,
366 return (roots,
367 lambda fn: (incmatch(fn) and not excmatch(fn) and
367 lambda fn: (incmatch(fn) and not excmatch(fn) and
368 (fn.endswith('/') or
368 (fn.endswith('/') or
369 (not pats and not files) or
369 (not pats and not files) or
370 (pats and patmatch(fn)) or
370 (pats and patmatch(fn)) or
371 (files and filematch(fn)))),
371 (files and filematch(fn)))),
372 (inc or exc or (pats and pats != [('glob', '**')])) and True)
372 (inc or exc or (pats and pats != [('glob', '**')])) and True)
373
373
374 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
374 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
375 '''enhanced shell command execution.
375 '''enhanced shell command execution.
376 run with environment maybe modified, maybe in different dir.
376 run with environment maybe modified, maybe in different dir.
377
377
378 if command fails and onerr is None, return status. if onerr is a ui object,
378 if command fails and onerr is None, return status. if onerr is a ui object,
379 print error message and return status, else raise onerr object as
379 print error message and return status, else raise onerr object as
380 exception.'''
380 exception.'''
381 def py2shell(val):
382 'convert python object into string that is useful to shell'
383 if val in (None, False):
384 return '0'
385 if val == True:
386 return '1'
387 return str(val)
381 oldenv = {}
388 oldenv = {}
382 for k in environ:
389 for k in environ:
383 oldenv[k] = os.environ.get(k)
390 oldenv[k] = os.environ.get(k)
384 if cwd is not None:
391 if cwd is not None:
385 oldcwd = os.getcwd()
392 oldcwd = os.getcwd()
386 try:
393 try:
387 for k, v in environ.iteritems():
394 for k, v in environ.iteritems():
388 os.environ[k] = str(v)
395 os.environ[k] = py2shell(v)
389 if cwd is not None and oldcwd != cwd:
396 if cwd is not None and oldcwd != cwd:
390 os.chdir(cwd)
397 os.chdir(cwd)
391 rc = os.system(cmd)
398 rc = os.system(cmd)
392 if rc and onerr:
399 if rc and onerr:
393 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
400 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
394 explain_exit(rc)[0])
401 explain_exit(rc)[0])
395 if errprefix:
402 if errprefix:
396 errmsg = '%s: %s' % (errprefix, errmsg)
403 errmsg = '%s: %s' % (errprefix, errmsg)
397 try:
404 try:
398 onerr.warn(errmsg + '\n')
405 onerr.warn(errmsg + '\n')
399 except AttributeError:
406 except AttributeError:
400 raise onerr(errmsg)
407 raise onerr(errmsg)
401 return rc
408 return rc
402 finally:
409 finally:
403 for k, v in oldenv.iteritems():
410 for k, v in oldenv.iteritems():
404 if v is None:
411 if v is None:
405 del os.environ[k]
412 del os.environ[k]
406 else:
413 else:
407 os.environ[k] = v
414 os.environ[k] = v
408 if cwd is not None and oldcwd != cwd:
415 if cwd is not None and oldcwd != cwd:
409 os.chdir(oldcwd)
416 os.chdir(oldcwd)
410
417
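A minimal usage sketch of system() (the command, directory and environment values here are hypothetical; the helpers are the ones defined just above):

    # True is exported as '1' by py2shell; the old environment is restored afterwards
    rc = system('make tests', environ={'VERBOSE': True}, cwd='/tmp/build',
                onerr=Abort, errprefix='make failed')
    # with onerr=Abort, a non-zero status raises Abort('make failed: make exited with status N');
    # with onerr set to a ui object, the message is only printed and rc is returned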
411 def rename(src, dst):
418 def rename(src, dst):
412 """forcibly rename a file"""
419 """forcibly rename a file"""
413 try:
420 try:
414 os.rename(src, dst)
421 os.rename(src, dst)
415 except OSError, err:
422 except OSError, err:
416 # on windows, rename to existing file is not allowed, so we
423 # on windows, rename to existing file is not allowed, so we
417 # must delete destination first. but if file is open, unlink
424 # must delete destination first. but if file is open, unlink
418 # schedules it for delete but does not delete it. rename
425 # schedules it for delete but does not delete it. rename
419 # happens immediately even for open files, so we create
426 # happens immediately even for open files, so we create
420 # temporary file, delete it, rename destination to that name,
427 # temporary file, delete it, rename destination to that name,
421 # then delete that. then rename is safe to do.
428 # then delete that. then rename is safe to do.
422 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
429 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
423 os.close(fd)
430 os.close(fd)
424 os.unlink(temp)
431 os.unlink(temp)
425 os.rename(dst, temp)
432 os.rename(dst, temp)
426 os.unlink(temp)
433 os.unlink(temp)
427 os.rename(src, dst)
434 os.rename(src, dst)
428
435
429 def unlink(f):
436 def unlink(f):
430 """unlink and remove the directory if it is empty"""
437 """unlink and remove the directory if it is empty"""
431 os.unlink(f)
438 os.unlink(f)
432 # try removing directories that might now be empty
439 # try removing directories that might now be empty
433 try:
440 try:
434 os.removedirs(os.path.dirname(f))
441 os.removedirs(os.path.dirname(f))
435 except OSError:
442 except OSError:
436 pass
443 pass
437
444
438 def copyfiles(src, dst, hardlink=None):
445 def copyfiles(src, dst, hardlink=None):
439 """Copy a directory tree using hardlinks if possible"""
446 """Copy a directory tree using hardlinks if possible"""
440
447
441 if hardlink is None:
448 if hardlink is None:
442 hardlink = (os.stat(src).st_dev ==
449 hardlink = (os.stat(src).st_dev ==
443 os.stat(os.path.dirname(dst)).st_dev)
450 os.stat(os.path.dirname(dst)).st_dev)
444
451
445 if os.path.isdir(src):
452 if os.path.isdir(src):
446 os.mkdir(dst)
453 os.mkdir(dst)
447 for name in os.listdir(src):
454 for name in os.listdir(src):
448 srcname = os.path.join(src, name)
455 srcname = os.path.join(src, name)
449 dstname = os.path.join(dst, name)
456 dstname = os.path.join(dst, name)
450 copyfiles(srcname, dstname, hardlink)
457 copyfiles(srcname, dstname, hardlink)
451 else:
458 else:
452 if hardlink:
459 if hardlink:
453 try:
460 try:
454 os_link(src, dst)
461 os_link(src, dst)
455 except (IOError, OSError):
462 except (IOError, OSError):
456 hardlink = False
463 hardlink = False
457 shutil.copy(src, dst)
464 shutil.copy(src, dst)
458 else:
465 else:
459 shutil.copy(src, dst)
466 shutil.copy(src, dst)
460
467
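A usage sketch of copyfiles() (the paths are hypothetical): when src and the parent of dst sit on the same device the tree is hardlinked; otherwise, or as soon as a link fails, plain copies are made.

    copyfiles('/repos/orig/.hg', '/repos/clone/.hg')                  # hardlink if possible
    copyfiles('/repos/orig/.hg', '/mnt/backup/.hg', hardlink=False)   # force real copies
    # for a directory tree, dst must not exist yet; os.mkdir(dst) creates it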
461 def audit_path(path):
468 def audit_path(path):
462 """Abort if path contains dangerous components"""
469 """Abort if path contains dangerous components"""
463 parts = os.path.normcase(path).split(os.sep)
470 parts = os.path.normcase(path).split(os.sep)
464 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
471 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
465 or os.pardir in parts):
472 or os.pardir in parts):
466 raise Abort(_("path contains illegal component: %s\n") % path)
473 raise Abort(_("path contains illegal component: %s\n") % path)
467
474
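A few hypothetical inputs to show what audit_path() accepts and rejects (the rejected ones raise Abort):

    audit_path('foo/bar.txt')   # accepted
    audit_path('.hg/hgrc')      # rejected: first component is .hg
    audit_path('../escape')     # rejected: contains os.pardir
    audit_path('/etc/passwd')   # rejected: the leading separator leaves an empty first part
    # on Windows, any path carrying a drive letter is rejected as well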
468 def _makelock_file(info, pathname):
475 def _makelock_file(info, pathname):
469 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
476 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
470 os.write(ld, info)
477 os.write(ld, info)
471 os.close(ld)
478 os.close(ld)
472
479
473 def _readlock_file(pathname):
480 def _readlock_file(pathname):
474 return posixfile(pathname).read()
481 return posixfile(pathname).read()
475
482
476 def nlinks(pathname):
483 def nlinks(pathname):
477 """Return number of hardlinks for the given file."""
484 """Return number of hardlinks for the given file."""
478 return os.lstat(pathname).st_nlink
485 return os.lstat(pathname).st_nlink
479
486
480 if hasattr(os, 'link'):
487 if hasattr(os, 'link'):
481 os_link = os.link
488 os_link = os.link
482 else:
489 else:
483 def os_link(src, dst):
490 def os_link(src, dst):
484 raise OSError(0, _("Hardlinks not supported"))
491 raise OSError(0, _("Hardlinks not supported"))
485
492
486 def fstat(fp):
493 def fstat(fp):
487 '''stat file object that may not have fileno method.'''
494 '''stat file object that may not have fileno method.'''
488 try:
495 try:
489 return os.fstat(fp.fileno())
496 return os.fstat(fp.fileno())
490 except AttributeError:
497 except AttributeError:
491 return os.stat(fp.name)
498 return os.stat(fp.name)
492
499
493 posixfile = file
500 posixfile = file
494
501
495 def is_win_9x():
502 def is_win_9x():
496 '''return true if run on windows 95, 98 or me.'''
503 '''return true if run on windows 95, 98 or me.'''
497 try:
504 try:
498 return sys.getwindowsversion()[3] == 1
505 return sys.getwindowsversion()[3] == 1
499 except AttributeError:
506 except AttributeError:
500 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
507 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
501
508
502 # Platform specific variants
509 # Platform specific variants
503 if os.name == 'nt':
510 if os.name == 'nt':
504 demandload(globals(), "msvcrt")
511 demandload(globals(), "msvcrt")
505 nulldev = 'NUL:'
512 nulldev = 'NUL:'
506
513
507 class winstdout:
514 class winstdout:
508 '''stdout on windows misbehaves if sent through a pipe'''
515 '''stdout on windows misbehaves if sent through a pipe'''
509
516
510 def __init__(self, fp):
517 def __init__(self, fp):
511 self.fp = fp
518 self.fp = fp
512
519
513 def __getattr__(self, key):
520 def __getattr__(self, key):
514 return getattr(self.fp, key)
521 return getattr(self.fp, key)
515
522
516 def close(self):
523 def close(self):
517 try:
524 try:
518 self.fp.close()
525 self.fp.close()
519 except: pass
526 except: pass
520
527
521 def write(self, s):
528 def write(self, s):
522 try:
529 try:
523 return self.fp.write(s)
530 return self.fp.write(s)
524 except IOError, inst:
531 except IOError, inst:
525 if inst.errno != 0: raise
532 if inst.errno != 0: raise
526 self.close()
533 self.close()
527 raise IOError(errno.EPIPE, 'Broken pipe')
534 raise IOError(errno.EPIPE, 'Broken pipe')
528
535
529 sys.stdout = winstdout(sys.stdout)
536 sys.stdout = winstdout(sys.stdout)
530
537
531 def system_rcpath():
538 def system_rcpath():
532 try:
539 try:
533 return system_rcpath_win32()
540 return system_rcpath_win32()
534 except:
541 except:
535 return [r'c:\mercurial\mercurial.ini']
542 return [r'c:\mercurial\mercurial.ini']
536
543
537 def os_rcpath():
544 def os_rcpath():
538 '''return default os-specific hgrc search path'''
545 '''return default os-specific hgrc search path'''
539 path = system_rcpath()
546 path = system_rcpath()
540 path.append(user_rcpath())
547 path.append(user_rcpath())
541 userprofile = os.environ.get('USERPROFILE')
548 userprofile = os.environ.get('USERPROFILE')
542 if userprofile:
549 if userprofile:
543 path.append(os.path.join(userprofile, 'mercurial.ini'))
550 path.append(os.path.join(userprofile, 'mercurial.ini'))
544 return path
551 return path
545
552
546 def user_rcpath():
553 def user_rcpath():
547 '''return os-specific hgrc search path to the user dir'''
554 '''return os-specific hgrc search path to the user dir'''
548 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
555 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
549
556
550 def parse_patch_output(output_line):
557 def parse_patch_output(output_line):
551 """parses the output produced by patch and returns the file name"""
558 """parses the output produced by patch and returns the file name"""
552 pf = output_line[14:]
559 pf = output_line[14:]
553 if pf[0] == '`':
560 if pf[0] == '`':
554 pf = pf[1:-1] # Remove the quotes
561 pf = pf[1:-1] # Remove the quotes
555 return pf
562 return pf
556
563
557 def testpid(pid):
564 def testpid(pid):
558 '''return False if pid dead, True if running or not known'''
565 '''return False if pid dead, True if running or not known'''
559 return True
566 return True
560
567
561 def is_exec(f, last):
568 def is_exec(f, last):
562 return last
569 return last
563
570
564 def set_exec(f, mode):
571 def set_exec(f, mode):
565 pass
572 pass
566
573
567 def set_binary(fd):
574 def set_binary(fd):
568 msvcrt.setmode(fd.fileno(), os.O_BINARY)
575 msvcrt.setmode(fd.fileno(), os.O_BINARY)
569
576
570 def pconvert(path):
577 def pconvert(path):
571 return path.replace("\\", "/")
578 return path.replace("\\", "/")
572
579
573 def localpath(path):
580 def localpath(path):
574 return path.replace('/', '\\')
581 return path.replace('/', '\\')
575
582
576 def normpath(path):
583 def normpath(path):
577 return pconvert(os.path.normpath(path))
584 return pconvert(os.path.normpath(path))
578
585
579 makelock = _makelock_file
586 makelock = _makelock_file
580 readlock = _readlock_file
587 readlock = _readlock_file
581
588
582 def samestat(s1, s2):
589 def samestat(s1, s2):
583 return False
590 return False
584
591
585 def explain_exit(code):
592 def explain_exit(code):
586 return _("exited with status %d") % code, code
593 return _("exited with status %d") % code, code
587
594
588 try:
595 try:
589 # override functions with win32 versions if possible
596 # override functions with win32 versions if possible
590 from util_win32 import *
597 from util_win32 import *
591 if not is_win_9x():
598 if not is_win_9x():
592 posixfile = posixfile_nt
599 posixfile = posixfile_nt
593 except ImportError:
600 except ImportError:
594 pass
601 pass
595
602
596 else:
603 else:
597 nulldev = '/dev/null'
604 nulldev = '/dev/null'
598
605
599 def rcfiles(path):
606 def rcfiles(path):
600 rcs = [os.path.join(path, 'hgrc')]
607 rcs = [os.path.join(path, 'hgrc')]
601 rcdir = os.path.join(path, 'hgrc.d')
608 rcdir = os.path.join(path, 'hgrc.d')
602 try:
609 try:
603 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
610 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
604 if f.endswith(".rc")])
611 if f.endswith(".rc")])
605 except OSError, inst: pass
612 except OSError, inst: pass
606 return rcs
613 return rcs
607
614
608 def os_rcpath():
615 def os_rcpath():
609 '''return default os-specific hgrc search path'''
616 '''return default os-specific hgrc search path'''
610 path = []
617 path = []
611 # old mod_python does not set sys.argv
618 # old mod_python does not set sys.argv
612 if len(getattr(sys, 'argv', [])) > 0:
619 if len(getattr(sys, 'argv', [])) > 0:
613 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
620 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
614 '/../etc/mercurial'))
621 '/../etc/mercurial'))
615 path.extend(rcfiles('/etc/mercurial'))
622 path.extend(rcfiles('/etc/mercurial'))
616 path.append(os.path.expanduser('~/.hgrc'))
623 path.append(os.path.expanduser('~/.hgrc'))
617 path = [os.path.normpath(f) for f in path]
624 path = [os.path.normpath(f) for f in path]
618 return path
625 return path
619
626
620 def parse_patch_output(output_line):
627 def parse_patch_output(output_line):
621 """parses the output produced by patch and returns the file name"""
628 """parses the output produced by patch and returns the file name"""
622 pf = output_line[14:]
629 pf = output_line[14:]
623 if pf.startswith("'") and pf.endswith("'") and " " in pf:
630 if pf.startswith("'") and pf.endswith("'") and " " in pf:
624 pf = pf[1:-1] # Remove the quotes
631 pf = pf[1:-1] # Remove the quotes
625 return pf
632 return pf
626
633
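Both parse_patch_output() variants rely on patch printing a fixed 14-character prefix, "patching file ", ahead of the name; a quoted name (the quoting style differs by platform and patch version) also loses its surrounding quotes. Hypothetical output lines:

    parse_patch_output('patching file foo/bar.c')     # -> 'foo/bar.c'
    parse_patch_output("patching file 'my file.c'")   # -> 'my file.c' (quotes stripped because
                                                      #    the name contains a space)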
627 def is_exec(f, last):
634 def is_exec(f, last):
628 """check whether a file is executable"""
635 """check whether a file is executable"""
629 return (os.lstat(f).st_mode & 0100 != 0)
636 return (os.lstat(f).st_mode & 0100 != 0)
630
637
631 def set_exec(f, mode):
638 def set_exec(f, mode):
632 s = os.lstat(f).st_mode
639 s = os.lstat(f).st_mode
633 if (s & 0100 != 0) == mode:
640 if (s & 0100 != 0) == mode:
634 return
641 return
635 if mode:
642 if mode:
636 # Turn on +x for every +r bit when making a file executable
643 # Turn on +x for every +r bit when making a file executable
637 # and obey umask.
644 # and obey umask.
638 umask = os.umask(0)
645 umask = os.umask(0)
639 os.umask(umask)
646 os.umask(umask)
640 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
647 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
641 else:
648 else:
642 os.chmod(f, s & 0666)
649 os.chmod(f, s & 0666)
643
650
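A worked example of the chmod expression above, using the Python 2 octal notation of the surrounding code (mode and umask values are hypothetical):

    >>> s, umask = 0644, 022
    >>> oct(s | (s & 0444) >> 2 & ~umask)    # every +r bit gains +x, nothing masked by 022
    '0755'
    >>> umask = 027
    >>> oct(s | (s & 0444) >> 2 & ~umask)    # the umask strips execute for others
    '0754'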
644 def set_binary(fd):
651 def set_binary(fd):
645 pass
652 pass
646
653
647 def pconvert(path):
654 def pconvert(path):
648 return path
655 return path
649
656
650 def localpath(path):
657 def localpath(path):
651 return path
658 return path
652
659
653 normpath = os.path.normpath
660 normpath = os.path.normpath
654 samestat = os.path.samestat
661 samestat = os.path.samestat
655
662
656 def makelock(info, pathname):
663 def makelock(info, pathname):
657 try:
664 try:
658 os.symlink(info, pathname)
665 os.symlink(info, pathname)
659 except OSError, why:
666 except OSError, why:
660 if why.errno == errno.EEXIST:
667 if why.errno == errno.EEXIST:
661 raise
668 raise
662 else:
669 else:
663 _makelock_file(info, pathname)
670 _makelock_file(info, pathname)
664
671
665 def readlock(pathname):
672 def readlock(pathname):
666 try:
673 try:
667 return os.readlink(pathname)
674 return os.readlink(pathname)
668 except OSError, why:
675 except OSError, why:
669 if why.errno == errno.EINVAL:
676 if why.errno == errno.EINVAL:
670 return _readlock_file(pathname)
677 return _readlock_file(pathname)
671 else:
678 else:
672 raise
679 raise
673
680
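On POSIX the lock's contents travel as the target of a symlink: os.symlink is a single atomic system call, so the lock can never exist without its holder information. The plain-file helpers above are only the fallback for filesystems without symlinks. A sketch with a hypothetical lock path:

    makelock('somehost:12345', '/repo/.hg/lock')   # created atomically
    readlock('/repo/.hg/lock')                     # -> 'somehost:12345'
    makelock('otherhost:999', '/repo/.hg/lock')    # raises OSError(EEXIST): already locked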
674 def testpid(pid):
681 def testpid(pid):
675 '''return False if pid dead, True if running or not sure'''
682 '''return False if pid dead, True if running or not sure'''
676 try:
683 try:
677 os.kill(pid, 0)
684 os.kill(pid, 0)
678 return True
685 return True
679 except OSError, inst:
686 except OSError, inst:
680 return inst.errno != errno.ESRCH
687 return inst.errno != errno.ESRCH
681
688
682 def explain_exit(code):
689 def explain_exit(code):
683 """return a 2-tuple (desc, code) describing a process's status"""
690 """return a 2-tuple (desc, code) describing a process's status"""
684 if os.WIFEXITED(code):
691 if os.WIFEXITED(code):
685 val = os.WEXITSTATUS(code)
692 val = os.WEXITSTATUS(code)
686 return _("exited with status %d") % val, val
693 return _("exited with status %d") % val, val
687 elif os.WIFSIGNALED(code):
694 elif os.WIFSIGNALED(code):
688 val = os.WTERMSIG(code)
695 val = os.WTERMSIG(code)
689 return _("killed by signal %d") % val, val
696 return _("killed by signal %d") % val, val
690 elif os.WIFSTOPPED(code):
697 elif os.WIFSTOPPED(code):
691 val = os.WSTOPSIG(code)
698 val = os.WSTOPSIG(code)
692 return _("stopped by signal %d") % val, val
699 return _("stopped by signal %d") % val, val
693 raise ValueError(_("invalid exit code"))
700 raise ValueError(_("invalid exit code"))
694
701
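The POSIX explain_exit() decodes the 16-bit status word that os.system() and os.wait() return; hypothetical status values:

    >>> explain_exit(0)
    ('exited with status 0', 0)
    >>> explain_exit(256)     # the exit code lives in the high byte
    ('exited with status 1', 1)
    >>> explain_exit(15)      # the low seven bits carry the terminating signal (here SIGTERM)
    ('killed by signal 15', 15)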
695 def opener(base, audit=True):
702 def opener(base, audit=True):
696 """
703 """
697 return a function that opens files relative to base
704 return a function that opens files relative to base
698
705
699 this function is used to hide the details of COW semantics and
706 this function is used to hide the details of COW semantics and
700 remote file access from higher level code.
707 remote file access from higher level code.
701 """
708 """
702 p = base
709 p = base
703 audit_p = audit
710 audit_p = audit
704
711
705 def mktempcopy(name):
712 def mktempcopy(name):
706 d, fn = os.path.split(name)
713 d, fn = os.path.split(name)
707 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
714 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
708 os.close(fd)
715 os.close(fd)
709 ofp = posixfile(temp, "wb")
716 ofp = posixfile(temp, "wb")
710 try:
717 try:
711 try:
718 try:
712 ifp = posixfile(name, "rb")
719 ifp = posixfile(name, "rb")
713 except IOError, inst:
720 except IOError, inst:
714 if not getattr(inst, 'filename', None):
721 if not getattr(inst, 'filename', None):
715 inst.filename = name
722 inst.filename = name
716 raise
723 raise
717 for chunk in filechunkiter(ifp):
724 for chunk in filechunkiter(ifp):
718 ofp.write(chunk)
725 ofp.write(chunk)
719 ifp.close()
726 ifp.close()
720 ofp.close()
727 ofp.close()
721 except:
728 except:
722 try: os.unlink(temp)
729 try: os.unlink(temp)
723 except: pass
730 except: pass
724 raise
731 raise
725 st = os.lstat(name)
732 st = os.lstat(name)
726 os.chmod(temp, st.st_mode)
733 os.chmod(temp, st.st_mode)
727 return temp
734 return temp
728
735
729 class atomictempfile(posixfile):
736 class atomictempfile(posixfile):
730 """the file will only be copied when rename is called"""
737 """the file will only be copied when rename is called"""
731 def __init__(self, name, mode):
738 def __init__(self, name, mode):
732 self.__name = name
739 self.__name = name
733 self.temp = mktempcopy(name)
740 self.temp = mktempcopy(name)
734 posixfile.__init__(self, self.temp, mode)
741 posixfile.__init__(self, self.temp, mode)
735 def rename(self):
742 def rename(self):
736 if not self.closed:
743 if not self.closed:
737 posixfile.close(self)
744 posixfile.close(self)
738 rename(self.temp, localpath(self.__name))
745 rename(self.temp, localpath(self.__name))
739 def __del__(self):
746 def __del__(self):
740 if not self.closed:
747 if not self.closed:
741 try:
748 try:
742 os.unlink(self.temp)
749 os.unlink(self.temp)
743 except: pass
750 except: pass
744 posixfile.close(self)
751 posixfile.close(self)
745
752
746 class atomicfile(atomictempfile):
753 class atomicfile(atomictempfile):
747 """the file will only be copied on close"""
754 """the file will only be copied on close"""
748 def __init__(self, name, mode):
755 def __init__(self, name, mode):
749 atomictempfile.__init__(self, name, mode)
756 atomictempfile.__init__(self, name, mode)
750 def close(self):
757 def close(self):
751 self.rename()
758 self.rename()
752 def __del__(self):
759 def __del__(self):
753 self.rename()
760 self.rename()
754
761
755 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
762 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
756 if audit_p:
763 if audit_p:
757 audit_path(path)
764 audit_path(path)
758 f = os.path.join(p, path)
765 f = os.path.join(p, path)
759
766
760 if not text:
767 if not text:
761 mode += "b" # for that other OS
768 mode += "b" # for that other OS
762
769
763 if mode[0] != "r":
770 if mode[0] != "r":
764 try:
771 try:
765 nlink = nlinks(f)
772 nlink = nlinks(f)
766 except OSError:
773 except OSError:
767 d = os.path.dirname(f)
774 d = os.path.dirname(f)
768 if not os.path.isdir(d):
775 if not os.path.isdir(d):
769 os.makedirs(d)
776 os.makedirs(d)
770 else:
777 else:
771 if atomic:
778 if atomic:
772 return atomicfile(f, mode)
779 return atomicfile(f, mode)
773 elif atomictemp:
780 elif atomictemp:
774 return atomictempfile(f, mode)
781 return atomictempfile(f, mode)
775 if nlink > 1:
782 if nlink > 1:
776 rename(mktempcopy(f), f)
783 rename(mktempcopy(f), f)
777 return posixfile(f, mode)
784 return posixfile(f, mode)
778
785
779 return o
786 return o
780
787
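A usage sketch of the function opener() returns (the store root and file names are hypothetical): every path is audited, files are opened in binary mode unless text=True, and writes either break copy-on-write hardlinks or go through one of the atomic wrappers.

    o = opener('/repo/.hg')
    fp = o('data/foo.i')                      # read-only, opened 'rb'
    fp = o('00changelog.i', 'a')              # copies the file first if st_nlink > 1
    fp = o('dirstate', 'w', atomictemp=True)  # rewrites an existing file via a temp copy;
                                              # the new data appears only after fp.rename()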
781 class chunkbuffer(object):
788 class chunkbuffer(object):
782 """Allow arbitrary sized chunks of data to be efficiently read from an
789 """Allow arbitrary sized chunks of data to be efficiently read from an
783 iterator over chunks of arbitrary size."""
790 iterator over chunks of arbitrary size."""
784
791
785 def __init__(self, in_iter, targetsize = 2**16):
792 def __init__(self, in_iter, targetsize = 2**16):
786 """in_iter is the iterator that's iterating over the input chunks.
793 """in_iter is the iterator that's iterating over the input chunks.
787 targetsize is how big a buffer to try to maintain."""
794 targetsize is how big a buffer to try to maintain."""
788 self.in_iter = iter(in_iter)
795 self.in_iter = iter(in_iter)
789 self.buf = ''
796 self.buf = ''
790 self.targetsize = int(targetsize)
797 self.targetsize = int(targetsize)
791 if self.targetsize <= 0:
798 if self.targetsize <= 0:
792 raise ValueError(_("targetsize must be greater than 0, was %d") %
799 raise ValueError(_("targetsize must be greater than 0, was %d") %
793 targetsize)
800 targetsize)
794 self.iterempty = False
801 self.iterempty = False
795
802
796 def fillbuf(self):
803 def fillbuf(self):
797 """Ignore target size; read every chunk from iterator until empty."""
804 """Ignore target size; read every chunk from iterator until empty."""
798 if not self.iterempty:
805 if not self.iterempty:
799 collector = cStringIO.StringIO()
806 collector = cStringIO.StringIO()
800 collector.write(self.buf)
807 collector.write(self.buf)
801 for ch in self.in_iter:
808 for ch in self.in_iter:
802 collector.write(ch)
809 collector.write(ch)
803 self.buf = collector.getvalue()
810 self.buf = collector.getvalue()
804 self.iterempty = True
811 self.iterempty = True
805
812
806 def read(self, l):
813 def read(self, l):
807 """Read L bytes of data from the iterator of chunks of data.
814 """Read L bytes of data from the iterator of chunks of data.
808 Returns less than L bytes if the iterator runs dry."""
815 Returns less than L bytes if the iterator runs dry."""
809 if l > len(self.buf) and not self.iterempty:
816 if l > len(self.buf) and not self.iterempty:
810 # Clamp to a multiple of self.targetsize
817 # Clamp to a multiple of self.targetsize
811 targetsize = self.targetsize * ((l // self.targetsize) + 1)
818 targetsize = self.targetsize * ((l // self.targetsize) + 1)
812 collector = cStringIO.StringIO()
819 collector = cStringIO.StringIO()
813 collector.write(self.buf)
820 collector.write(self.buf)
814 collected = len(self.buf)
821 collected = len(self.buf)
815 for chunk in self.in_iter:
822 for chunk in self.in_iter:
816 collector.write(chunk)
823 collector.write(chunk)
817 collected += len(chunk)
824 collected += len(chunk)
818 if collected >= targetsize:
825 if collected >= targetsize:
819 break
826 break
820 if collected < targetsize:
827 if collected < targetsize:
821 self.iterempty = True
828 self.iterempty = True
822 self.buf = collector.getvalue()
829 self.buf = collector.getvalue()
823 s, self.buf = self.buf[:l], buffer(self.buf, l)
830 s, self.buf = self.buf[:l], buffer(self.buf, l)
824 return s
831 return s
825
832
826 def filechunkiter(f, size=65536, limit=None):
833 def filechunkiter(f, size=65536, limit=None):
827 """Create a generator that produces the data in the file size
834 """Create a generator that produces the data in the file size
828 (default 65536) bytes at a time, up to optional limit (default is
835 (default 65536) bytes at a time, up to optional limit (default is
829 to read all data). Chunks may be less than size bytes if the
836 to read all data). Chunks may be less than size bytes if the
830 chunk is the last chunk in the file, or the file is a socket or
837 chunk is the last chunk in the file, or the file is a socket or
831 some other type of file that sometimes reads less data than is
838 some other type of file that sometimes reads less data than is
832 requested."""
839 requested."""
833 assert size >= 0
840 assert size >= 0
834 assert limit is None or limit >= 0
841 assert limit is None or limit >= 0
835 while True:
842 while True:
836 if limit is None: nbytes = size
843 if limit is None: nbytes = size
837 else: nbytes = min(limit, size)
844 else: nbytes = min(limit, size)
838 s = nbytes and f.read(nbytes)
845 s = nbytes and f.read(nbytes)
839 if not s: break
846 if not s: break
840 if limit: limit -= len(s)
847 if limit: limit -= len(s)
841 yield s
848 yield s
842
849
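filechunkiter() and chunkbuffer() are meant to be combined: the former turns a file into an iterator of raw chunks, the latter re-slices any chunk iterator into exact-size reads. A sketch with a hypothetical bundle file:

    f = posixfile('changes.hg', 'rb')
    buf = chunkbuffer(filechunkiter(f, size=4096))
    header = buf.read(6)             # exactly 6 bytes unless the stream ran dry
    while True:
        data = buf.read(65536)
        if not data:
            break
        # ... consume data ...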
843 def makedate():
850 def makedate():
844 lt = time.localtime()
851 lt = time.localtime()
845 if lt[8] == 1 and time.daylight:
852 if lt[8] == 1 and time.daylight:
846 tz = time.altzone
853 tz = time.altzone
847 else:
854 else:
848 tz = time.timezone
855 tz = time.timezone
849 return time.mktime(lt), tz
856 return time.mktime(lt), tz
850
857
851 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
858 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
852 """represent a (unixtime, offset) tuple as a localized time.
859 """represent a (unixtime, offset) tuple as a localized time.
853 unixtime is seconds since the epoch, and offset is the time zone's
860 unixtime is seconds since the epoch, and offset is the time zone's
854 number of seconds away from UTC. if timezone is false, do not
861 number of seconds away from UTC. if timezone is false, do not
855 append time zone to string."""
862 append time zone to string."""
856 t, tz = date or makedate()
863 t, tz = date or makedate()
857 s = time.strftime(format, time.gmtime(float(t) - tz))
864 s = time.strftime(format, time.gmtime(float(t) - tz))
858 if timezone:
865 if timezone:
859 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
866 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
860 return s
867 return s
861
868
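makedate() follows time.timezone's sign convention, so the offset is seconds west of UTC and zones east of UTC come out negative; hypothetical tuples:

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((0, -3600))    # UTC+1, one hour east of UTC
    'Thu Jan 01 01:00:00 1970 +0100'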
862 def strdate(string, format='%a %b %d %H:%M:%S %Y'):
869 def strdate(string, format='%a %b %d %H:%M:%S %Y'):
863 """parse a localized time string and return a (unixtime, offset) tuple.
870 """parse a localized time string and return a (unixtime, offset) tuple.
864 if the string cannot be parsed, ValueError is raised."""
871 if the string cannot be parsed, ValueError is raised."""
865 def hastimezone(string):
872 def hastimezone(string):
866 return (string[-4:].isdigit() and
873 return (string[-4:].isdigit() and
867 (string[-5] == '+' or string[-5] == '-') and
874 (string[-5] == '+' or string[-5] == '-') and
868 string[-6].isspace())
875 string[-6].isspace())
869
876
870 if hastimezone(string):
877 if hastimezone(string):
871 date, tz = string[:-6], string[-5:]
878 date, tz = string[:-6], string[-5:]
872 tz = int(tz)
879 tz = int(tz)
873 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
880 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
874 else:
881 else:
875 date, offset = string, 0
882 date, offset = string, 0
876 when = int(time.mktime(time.strptime(date, format))) + offset
883 when = int(time.mktime(time.strptime(date, format))) + offset
877 return when, offset
884 return when, offset
878
885
879 def parsedate(string, formats=('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M')):
886 def parsedate(string, formats=('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M')):
880 """parse a localized time string and return a (unixtime, offset) tuple.
887 """parse a localized time string and return a (unixtime, offset) tuple.
881 The date may be a "unixtime offset" string or in one of the specified
888 The date may be a "unixtime offset" string or in one of the specified
882 formats."""
889 formats."""
883 try:
890 try:
884 when, offset = map(int, string.split(' '))
891 when, offset = map(int, string.split(' '))
885 except ValueError:
892 except ValueError:
886 for format in formats:
893 for format in formats:
887 try:
894 try:
888 when, offset = strdate(string, format)
895 when, offset = strdate(string, format)
889 except ValueError:
896 except ValueError:
890 pass
897 pass
891 else:
898 else:
892 break
899 break
893 else:
900 else:
894 raise ValueError(_('invalid date: %r') % string)
901 raise ValueError(_('invalid date: %r') % string)
895 # validate explicit (probably user-specified) date and
902 # validate explicit (probably user-specified) date and
896 # time zone offset. values must fit in signed 32 bits for
903 # time zone offset. values must fit in signed 32 bits for
897 # current 32-bit linux runtimes. timezones go from UTC-12
904 # current 32-bit linux runtimes. timezones go from UTC-12
898 # to UTC+14
905 # to UTC+14
899 if abs(when) > 0x7fffffff:
906 if abs(when) > 0x7fffffff:
900 raise ValueError(_('date exceeds 32 bits: %d') % when)
907 raise ValueError(_('date exceeds 32 bits: %d') % when)
901 if offset < -50400 or offset > 43200:
908 if offset < -50400 or offset > 43200:
902 raise ValueError(_('impossible time zone offset: %d') % offset)
909 raise ValueError(_('impossible time zone offset: %d') % offset)
903 return when, offset
910 return when, offset
904
911
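Hypothetical inputs for the two shapes parsedate() accepts:

    when, offset = parsedate('1149142045 7200')      # raw 'unixtime offset' pair
    when, offset = parsedate('2006-06-01 10:20:30')
    # no zone in the string, so strdate() reports offset 0 and the timestamp is
    # whatever time.mktime makes of it in the local zone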
905 def shortuser(user):
912 def shortuser(user):
906 """Return a short representation of a user name or email address."""
913 """Return a short representation of a user name or email address."""
907 f = user.find('@')
914 f = user.find('@')
908 if f >= 0:
915 if f >= 0:
909 user = user[:f]
916 user = user[:f]
910 f = user.find('<')
917 f = user.find('<')
911 if f >= 0:
918 if f >= 0:
912 user = user[f+1:]
919 user = user[f+1:]
913 return user
920 return user
914
921
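shortuser() keeps only the login part of a committer string; hypothetical inputs:

    >>> shortuser('Foo Bar <foo@example.com>')
    'foo'
    >>> shortuser('foo@example.com')
    'foo'
    >>> shortuser('Foo Bar')
    'Foo Bar'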
915 def walkrepos(path):
922 def walkrepos(path):
916 '''yield every hg repository under path, recursively.'''
923 '''yield every hg repository under path, recursively.'''
917 def errhandler(err):
924 def errhandler(err):
918 if err.filename == path:
925 if err.filename == path:
919 raise err
926 raise err
920
927
921 for root, dirs, files in os.walk(path, onerror=errhandler):
928 for root, dirs, files in os.walk(path, onerror=errhandler):
922 for d in dirs:
929 for d in dirs:
923 if d == '.hg':
930 if d == '.hg':
924 yield root
931 yield root
925 dirs[:] = []
932 dirs[:] = []
926 break
933 break
927
934
928 _rcpath = None
935 _rcpath = None
929
936
930 def rcpath():
937 def rcpath():
931 '''return hgrc search path. if env var HGRCPATH is set, use it.
938 '''return hgrc search path. if env var HGRCPATH is set, use it.
932 for each item in path, if directory, use files ending in .rc,
939 for each item in path, if directory, use files ending in .rc,
933 else use item.
940 else use item.
934 make HGRCPATH empty to only look in .hg/hgrc of current repo.
941 make HGRCPATH empty to only look in .hg/hgrc of current repo.
935 if no HGRCPATH, use default os-specific path.'''
942 if no HGRCPATH, use default os-specific path.'''
936 global _rcpath
943 global _rcpath
937 if _rcpath is None:
944 if _rcpath is None:
938 if 'HGRCPATH' in os.environ:
945 if 'HGRCPATH' in os.environ:
939 _rcpath = []
946 _rcpath = []
940 for p in os.environ['HGRCPATH'].split(os.pathsep):
947 for p in os.environ['HGRCPATH'].split(os.pathsep):
941 if not p: continue
948 if not p: continue
942 if os.path.isdir(p):
949 if os.path.isdir(p):
943 for f in os.listdir(p):
950 for f in os.listdir(p):
944 if f.endswith('.rc'):
951 if f.endswith('.rc'):
945 _rcpath.append(os.path.join(p, f))
952 _rcpath.append(os.path.join(p, f))
946 else:
953 else:
947 _rcpath.append(p)
954 _rcpath.append(p)
948 else:
955 else:
949 _rcpath = os_rcpath()
956 _rcpath = os_rcpath()
950 return _rcpath
957 return _rcpath
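A sketch of the HGRCPATH handling with hypothetical paths: directory entries contribute their *.rc files, plain entries are used as given, and the result is cached in _rcpath, so later changes to the environment are ignored.

    os.environ['HGRCPATH'] = '/etc/mercurial/hgrc.d' + os.pathsep + '/home/user/extra.rc'
    rcpath()    # every *.rc file under hgrc.d, then /home/user/extra.rc
    os.environ['HGRCPATH'] = ''
    rcpath()    # still the cached list; an empty HGRCPATH only matters on the first call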