clone: do not make streaming default. add --stream option instead.
Vadim Gelfer
r2613:479e26af default
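With this change, "hg clone" no longer uses the streaming protocol by default; streaming has to be requested explicitly. A minimal usage sketch of the new flag, assuming a repository served at http://example.com/repo (hypothetical URL):

    hg clone http://example.com/repo local-copy            # default clone, streaming not used
    hg clone --stream http://example.com/repo local-copy   # explicitly request a streaming clone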
@@ -1,3507 +1,3509 @@
# commands.py - command processing for mercurial
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal shutil imp urllib pdb")
demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
demandload(globals(), "fnmatch mdiff random signal tempfile time")
demandload(globals(), "traceback errno socket version struct atexit sets bz2")
demandload(globals(), "archival cStringIO changegroup email.Parser")
demandload(globals(), "hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted, unknown = repo.changes()
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def filterfiles(filters, files):
    l = [x for x in files if x in filters]

    for t in filters:
        if t and t[-1] != "/":
            t += "/"
        l += [x for x in files if x.startswith(t)]
    return l

def relpath(repo, args):
    cwd = repo.getcwd()
    if cwd:
        return [util.normpath(os.path.join(cwd, x)) for x in args]
    return args

def matchpats(repo, pats=[], opts={}, head=''):
    cwd = repo.getcwd()
    if not pats and cwd:
        opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
        opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)

def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    exact = dict(zip(files, files))
    def walk():
        for src, fn in repo.walk(node=node, files=files, match=matchfn,
                                 badmatch=badmatch):
            yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
    return files, matchfn, walk()

def walk(repo, pats, opts, node=None, head='', badmatch=None):
    files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
    for r in results:
        yield r

def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


    files, matchfn, anypats = matchpats(repo, pats, opts)

    if repo.changelog.count() == 0:
        return [], False, matchfn

    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}
    slowpath = anypats
    fncache = {}

    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog):
            for i, window in increasing_windows(filelog.count()-1, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file_ in files:
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn

revrangesep = ':'

def revfix(repo, val, defval):
    '''turn user-level id of changeset into rev number.
    user-level id can be tag, changeset, rev number, or negative rev
    number relative to number of revs (-1 is tip, etc).'''
    if not val:
        return defval
    try:
        num = int(val)
        if str(num) != val:
            raise ValueError
        if num < 0:
            num += repo.changelog.count()
        if num < 0:
            num = 0
        elif num >= repo.changelog.count():
            raise ValueError
    except ValueError:
        try:
            num = repo.changelog.rev(repo.lookup(val))
        except KeyError:
            raise util.Abort(_('invalid revision identifier %s'), val)
    return num

def revpair(ui, repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''
    if not revs:
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        start = revs[0]
        if revrangesep in start:
            start, end = start.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
        else:
            start = revfix(repo, start, None)
    elif len(revs) == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    if end is not None: end = repo.lookup(str(end))
    return repo.lookup(str(start)), end

def revrange(ui, repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    seen = {}
    for spec in revs:
        if revrangesep in spec:
            start, end = spec.split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, repo.changelog.count() - 1)
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = revfix(repo, spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)

def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])

def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    if not pat or pat == '-':
        return 'w' in mode and sys.stdout or sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    return open(make_filename(repo, pat, node, total, seqno, revwidth,
                              pathname),
                mode)

def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists"), filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, "wb")
        cleanup = filename

        if compress:
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
        fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)

def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    if not node1:
        node1 = repo.dirstate.parents()[0]
    # reading the data for node1 early allows it to play nicely
    # with repo.changes and the revlog cache.
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        _date2 = util.datestr(change[2])
        def date2(f):
            return _date2
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        tz = util.makedate()[1]
        _date2 = util.datestr()
        def date2(f):
            try:
                return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
            except OSError, err:
                if err.errno != errno.ENOENT: raise
                return _date2
        def read(f):
            return repo.wread(f)

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    ignorewsamount = opts.get('ignore_space_change') or \
                     diffopts['ignorewsamount']
    ignoreblanklines = opts.get('ignore_blank_lines') or \
                       diffopts['ignoreblanklines']
    for f in modified:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))
    for f in added:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))
    for f in removed:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews,
                               ignorewsamount=ignorewsamount,
                               ignoreblanklines=ignoreblanklines))

def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    user = revcache.get(rev)
    if user is None:
        user = revcache[rev] = ui.shortuser(name)
    return user

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])

        parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
                   for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        if self.ui.verbose:
            self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
        else:
            self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.changes(log.parents(changenode)[0], changenode)
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")

def show_changeset(ui, repo, opts):
    '''show one changeset. uses template or regular display. caller
    can pass in 'style' and 'template' options in opts.'''

    tmpl = opts.get('template')
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl: tmpl = templater.parsestring(tmpl)
    mapfile = opts.get('style') or ui.config('ui', 'style')
    if tmpl or mapfile:
        if mapfile:
            if not os.path.isfile(mapfile):
                mapname = templater.templatepath('map-cmdline.' + mapfile)
                if not mapname: mapname = templater.templatepath(mapfile)
                if mapname: mapfile = mapname
        try:
            t = templater.changeset_templater(ui, repo, mapfile)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo)

def show_version(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    option_lists = []

    def helpcmd(name):
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = findcmd(name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    def helplist(select=None):
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    def helpext(name):
        try:
            mod = findext(name)
        except KeyError:
            raise UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')
        if ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v %s" '
                        'to show aliases and global options):\n\n') % name)

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        try:
            helpcmd(name)
        except UnknownCommand:
            helpext(name)

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        helplist()

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files (DEPRECATED)

    (DEPRECATED)
    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    This command is now deprecated and will be removed in a future
    release. Please use add and remove --after instead.
    """
    ui.warn(_('(the addremove command is deprecated; use add and remove '
              '--after instead)\n'))
    return addremove_lock(ui, repo, pats, opts)

def addremove_lock(ui, repo, pats, opts, wlock=None):
    add, remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            add.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            remove.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not opts.get('dry_run'):
        repo.add(add, wlock=wlock)
        repo.remove(remove, wlock=wlock)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}
    def getname(rev):
        try:
            return ucache[rev]
        except:
            u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
            ucache[rev] = u
            return u

    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])

    for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate()
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

798 def archive(ui, repo, dest, **opts):
798 def archive(ui, repo, dest, **opts):
799 '''create unversioned archive of a repository revision
799 '''create unversioned archive of a repository revision
800
800
801 By default, the revision used is the parent of the working
801 By default, the revision used is the parent of the working
802 directory; use "-r" to specify a different revision.
802 directory; use "-r" to specify a different revision.
803
803
804 To specify the type of archive to create, use "-t". Valid
804 To specify the type of archive to create, use "-t". Valid
805 types are:
805 types are:
806
806
807 "files" (default): a directory full of files
807 "files" (default): a directory full of files
808 "tar": tar archive, uncompressed
808 "tar": tar archive, uncompressed
809 "tbz2": tar archive, compressed using bzip2
809 "tbz2": tar archive, compressed using bzip2
810 "tgz": tar archive, compressed using gzip
810 "tgz": tar archive, compressed using gzip
811 "uzip": zip archive, uncompressed
811 "uzip": zip archive, uncompressed
812 "zip": zip archive, compressed using deflate
812 "zip": zip archive, compressed using deflate
813
813
814 The exact name of the destination archive or directory is given
814 The exact name of the destination archive or directory is given
815 using a format string; see "hg help export" for details.
815 using a format string; see "hg help export" for details.
816
816
817 Each member added to an archive file has a directory prefix
817 Each member added to an archive file has a directory prefix
818 prepended. Use "-p" to specify a format string for the prefix.
818 prepended. Use "-p" to specify a format string for the prefix.
819 The default is the basename of the archive, with suffixes removed.
819 The default is the basename of the archive, with suffixes removed.
820 '''
820 '''
821
821
822 if opts['rev']:
822 if opts['rev']:
823 node = repo.lookup(opts['rev'])
823 node = repo.lookup(opts['rev'])
824 else:
824 else:
825 node, p2 = repo.dirstate.parents()
825 node, p2 = repo.dirstate.parents()
826 if p2 != nullid:
826 if p2 != nullid:
827 raise util.Abort(_('uncommitted merge - please provide a '
827 raise util.Abort(_('uncommitted merge - please provide a '
828 'specific revision'))
828 'specific revision'))
829
829
830 dest = make_filename(repo, dest, node)
830 dest = make_filename(repo, dest, node)
831 if os.path.realpath(dest) == repo.root:
831 if os.path.realpath(dest) == repo.root:
832 raise util.Abort(_('repository root cannot be destination'))
832 raise util.Abort(_('repository root cannot be destination'))
833 dummy, matchfn, dummy = matchpats(repo, [], opts)
833 dummy, matchfn, dummy = matchpats(repo, [], opts)
834 kind = opts.get('type') or 'files'
834 kind = opts.get('type') or 'files'
835 prefix = opts['prefix']
835 prefix = opts['prefix']
836 if dest == '-':
836 if dest == '-':
837 if kind == 'files':
837 if kind == 'files':
838 raise util.Abort(_('cannot archive plain files to stdout'))
838 raise util.Abort(_('cannot archive plain files to stdout'))
839 dest = sys.stdout
839 dest = sys.stdout
840 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
840 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
841 prefix = make_filename(repo, prefix, node)
841 prefix = make_filename(repo, prefix, node)
842 archival.archive(repo, dest, node, kind, not opts['no_decode'],
842 archival.archive(repo, dest, node, kind, not opts['no_decode'],
843 matchfn, prefix)
843 matchfn, prefix)
844
844
845 def backout(ui, repo, rev, **opts):
845 def backout(ui, repo, rev, **opts):
846 '''reverse effect of earlier changeset
846 '''reverse effect of earlier changeset
847
847
848 Commit the backed out changes as a new changeset. The new
848 Commit the backed out changes as a new changeset. The new
849 changeset is a child of the backed out changeset.
849 changeset is a child of the backed out changeset.
850
850
851 If you back out a changeset other than the tip, a new head is
851 If you back out a changeset other than the tip, a new head is
852 created. This head is the parent of the working directory. If
852 created. This head is the parent of the working directory. If
853 you back out an old changeset, your working directory will appear
853 you back out an old changeset, your working directory will appear
854 old after the backout. You should merge the backout changeset
854 old after the backout. You should merge the backout changeset
855 with another head.
855 with another head.
856
856
857 The --merge option remembers the parent of the working directory
857 The --merge option remembers the parent of the working directory
858 before starting the backout, then merges the new head with that
858 before starting the backout, then merges the new head with that
859 changeset afterwards. This saves you from doing the merge by
859 changeset afterwards. This saves you from doing the merge by
860 hand. The result of this merge is not committed, as for a normal
860 hand. The result of this merge is not committed, as for a normal
861 merge.'''
861 merge.'''
862
862
863 bail_if_changed(repo)
863 bail_if_changed(repo)
864 op1, op2 = repo.dirstate.parents()
864 op1, op2 = repo.dirstate.parents()
865 if op2 != nullid:
865 if op2 != nullid:
866 raise util.Abort(_('outstanding uncommitted merge'))
866 raise util.Abort(_('outstanding uncommitted merge'))
867 node = repo.lookup(rev)
867 node = repo.lookup(rev)
868 parent, p2 = repo.changelog.parents(node)
868 parent, p2 = repo.changelog.parents(node)
869 if parent == nullid:
869 if parent == nullid:
870 raise util.Abort(_('cannot back out a change with no parents'))
870 raise util.Abort(_('cannot back out a change with no parents'))
871 if p2 != nullid:
871 if p2 != nullid:
872 raise util.Abort(_('cannot back out a merge'))
872 raise util.Abort(_('cannot back out a merge'))
873 repo.update(node, force=True, show_stats=False)
873 repo.update(node, force=True, show_stats=False)
874 revert_opts = opts.copy()
874 revert_opts = opts.copy()
875 revert_opts['rev'] = hex(parent)
875 revert_opts['rev'] = hex(parent)
876 revert(ui, repo, **revert_opts)
876 revert(ui, repo, **revert_opts)
877 commit_opts = opts.copy()
877 commit_opts = opts.copy()
878 commit_opts['addremove'] = False
878 commit_opts['addremove'] = False
879 if not commit_opts['message'] and not commit_opts['logfile']:
879 if not commit_opts['message'] and not commit_opts['logfile']:
880 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
880 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
881 commit_opts['force_editor'] = True
881 commit_opts['force_editor'] = True
882 commit(ui, repo, **commit_opts)
882 commit(ui, repo, **commit_opts)
883 def nice(node):
883 def nice(node):
884 return '%d:%s' % (repo.changelog.rev(node), short(node))
884 return '%d:%s' % (repo.changelog.rev(node), short(node))
885 ui.status(_('changeset %s backs out changeset %s\n') %
885 ui.status(_('changeset %s backs out changeset %s\n') %
886 (nice(repo.changelog.tip()), nice(node)))
886 (nice(repo.changelog.tip()), nice(node)))
887 if op1 != node:
887 if op1 != node:
888 if opts['merge']:
888 if opts['merge']:
889 ui.status(_('merging with changeset %s\n') % nice(op1))
889 ui.status(_('merging with changeset %s\n') % nice(op1))
890 doupdate(ui, repo, hex(op1), **opts)
890 doupdate(ui, repo, hex(op1), **opts)
891 else:
891 else:
892 ui.status(_('the backout changeset is a new head - '
892 ui.status(_('the backout changeset is a new head - '
893 'do not forget to merge\n'))
893 'do not forget to merge\n'))
894 ui.status(_('(use "backout -m" if you want to auto-merge)\n'))
894 ui.status(_('(use "backout -m" if you want to auto-merge)\n'))
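# Note on the flow above: backout first updates the working directory to
# the target changeset, reverts it to that changeset's parent, and commits
# the result as a new child of the backed-out changeset.  If the working
# directory originally pointed elsewhere (op1 != node), the new commit is
# an extra head; with --merge it is merged back into op1 automatically,
# otherwise the user is reminded to merge by hand.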
895
895
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    cg = repo.changegroup(o, 'bundle')
    write_bundle(cg, fname)
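# Illustrative usage (repository paths are placeholders): "hg bundle
# changes.hg" collects the changesets missing from the default-push (or
# default) path into changes.hg, while "hg bundle changes.hg ../other"
# compares against ../other instead.  The file can later be applied in
# another repository with "hg unbundle changes.hg".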
916
916
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'] or -1)
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
        fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())
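# A small illustration of the format string described above (file names
# are placeholders): with an output pattern of "%p.out", the contents of
# "src/foo.c" would be written to a file named "src/foo.c.out"; with
# "%s.out" the name would be "foo.c.out".  %s, %d and %p come from the
# table above, the remaining codes from the export rules that cat shares.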
935
935
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem. Some filesystems,
    such as AFS, implement hardlinking incorrectly, but do not report
    errors. In these cases, use the --pull option to avoid
    hardlinking.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc will be created on the remote side. Look at the help text
    for the pull command for important details about ssh:// URLs.
    """
    ui.setconfig_remoteopts(**opts)
    hg.clone(ui, ui.expandpath(source), dest,
             pull=opts['pull'],
             stream=opts['stream'],
             rev=opts['rev'],
             update=not opts['noupdate'])
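# Note: streaming is no longer the default.  A plain "hg clone SOURCE"
# (SOURCE is a placeholder) follows the normal clone path described above,
# and the streaming protocol is only requested when the new --stream
# option is given; the flag is simply passed through to hg.clone() as
# opts['stream'].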
964
965
965 def commit(ui, repo, *pats, **opts):
966 def commit(ui, repo, *pats, **opts):
966 """commit the specified files or all outstanding changes
967 """commit the specified files or all outstanding changes
967
968
968 Commit changes to the given files into the repository.
969 Commit changes to the given files into the repository.
969
970
970 If a list of files is omitted, all changes reported by "hg status"
971 If a list of files is omitted, all changes reported by "hg status"
971 will be committed.
972 will be committed.
972
973
973 If no commit message is specified, the editor configured in your hgrc
974 If no commit message is specified, the editor configured in your hgrc
974 or in the EDITOR environment variable is started to enter a message.
975 or in the EDITOR environment variable is started to enter a message.
975 """
976 """
976 message = opts['message']
977 message = opts['message']
977 logfile = opts['logfile']
978 logfile = opts['logfile']
978
979
979 if message and logfile:
980 if message and logfile:
980 raise util.Abort(_('options --message and --logfile are mutually '
981 raise util.Abort(_('options --message and --logfile are mutually '
981 'exclusive'))
982 'exclusive'))
982 if not message and logfile:
983 if not message and logfile:
983 try:
984 try:
984 if logfile == '-':
985 if logfile == '-':
985 message = sys.stdin.read()
986 message = sys.stdin.read()
986 else:
987 else:
987 message = open(logfile).read()
988 message = open(logfile).read()
988 except IOError, inst:
989 except IOError, inst:
989 raise util.Abort(_("can't read commit message '%s': %s") %
990 raise util.Abort(_("can't read commit message '%s': %s") %
990 (logfile, inst.strerror))
991 (logfile, inst.strerror))
991
992
992 if opts['addremove']:
993 if opts['addremove']:
993 addremove_lock(ui, repo, pats, opts)
994 addremove_lock(ui, repo, pats, opts)
994 fns, match, anypats = matchpats(repo, pats, opts)
995 fns, match, anypats = matchpats(repo, pats, opts)
995 if pats:
996 if pats:
996 modified, added, removed, deleted, unknown = (
997 modified, added, removed, deleted, unknown = (
997 repo.changes(files=fns, match=match))
998 repo.changes(files=fns, match=match))
998 files = modified + added + removed
999 files = modified + added + removed
999 else:
1000 else:
1000 files = []
1001 files = []
1001 try:
1002 try:
1002 repo.commit(files, message, opts['user'], opts['date'], match,
1003 repo.commit(files, message, opts['user'], opts['date'], match,
1003 force_editor=opts.get('force_editor'))
1004 force_editor=opts.get('force_editor'))
1004 except ValueError, inst:
1005 except ValueError, inst:
1005 raise util.Abort(str(inst))
1006 raise util.Abort(str(inst))
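# Illustrative invocations (file names are placeholders): "hg commit foo.c"
# commits only the changes to foo.c, while a plain "hg commit" commits
# everything "hg status" reports.  --message and --logfile are mutually
# exclusive, and "--logfile -" reads the commit message from stdin, as
# handled above.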
1006
1007
1007 def docopy(ui, repo, pats, opts, wlock):
1008 def docopy(ui, repo, pats, opts, wlock):
1008 # called with the repo lock held
1009 # called with the repo lock held
1009 cwd = repo.getcwd()
1010 cwd = repo.getcwd()
1010 errors = 0
1011 errors = 0
1011 copied = []
1012 copied = []
1012 targets = {}
1013 targets = {}
1013
1014
1014 def okaytocopy(abs, rel, exact):
1015 def okaytocopy(abs, rel, exact):
1015 reasons = {'?': _('is not managed'),
1016 reasons = {'?': _('is not managed'),
1016 'a': _('has been marked for add'),
1017 'a': _('has been marked for add'),
1017 'r': _('has been marked for remove')}
1018 'r': _('has been marked for remove')}
1018 state = repo.dirstate.state(abs)
1019 state = repo.dirstate.state(abs)
1019 reason = reasons.get(state)
1020 reason = reasons.get(state)
1020 if reason:
1021 if reason:
1021 if state == 'a':
1022 if state == 'a':
1022 origsrc = repo.dirstate.copied(abs)
1023 origsrc = repo.dirstate.copied(abs)
1023 if origsrc is not None:
1024 if origsrc is not None:
1024 return origsrc
1025 return origsrc
1025 if exact:
1026 if exact:
1026 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1027 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1027 else:
1028 else:
1028 return abs
1029 return abs
1029
1030
1030 def copy(origsrc, abssrc, relsrc, target, exact):
1031 def copy(origsrc, abssrc, relsrc, target, exact):
1031 abstarget = util.canonpath(repo.root, cwd, target)
1032 abstarget = util.canonpath(repo.root, cwd, target)
1032 reltarget = util.pathto(cwd, abstarget)
1033 reltarget = util.pathto(cwd, abstarget)
1033 prevsrc = targets.get(abstarget)
1034 prevsrc = targets.get(abstarget)
1034 if prevsrc is not None:
1035 if prevsrc is not None:
1035 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1036 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1036 (reltarget, abssrc, prevsrc))
1037 (reltarget, abssrc, prevsrc))
1037 return
1038 return
1038 if (not opts['after'] and os.path.exists(reltarget) or
1039 if (not opts['after'] and os.path.exists(reltarget) or
1039 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1040 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1040 if not opts['force']:
1041 if not opts['force']:
1041 ui.warn(_('%s: not overwriting - file exists\n') %
1042 ui.warn(_('%s: not overwriting - file exists\n') %
1042 reltarget)
1043 reltarget)
1043 return
1044 return
1044 if not opts['after'] and not opts.get('dry_run'):
1045 if not opts['after'] and not opts.get('dry_run'):
1045 os.unlink(reltarget)
1046 os.unlink(reltarget)
1046 if opts['after']:
1047 if opts['after']:
1047 if not os.path.exists(reltarget):
1048 if not os.path.exists(reltarget):
1048 return
1049 return
1049 else:
1050 else:
1050 targetdir = os.path.dirname(reltarget) or '.'
1051 targetdir = os.path.dirname(reltarget) or '.'
1051 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1052 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1052 os.makedirs(targetdir)
1053 os.makedirs(targetdir)
1053 try:
1054 try:
1054 restore = repo.dirstate.state(abstarget) == 'r'
1055 restore = repo.dirstate.state(abstarget) == 'r'
1055 if restore and not opts.get('dry_run'):
1056 if restore and not opts.get('dry_run'):
1056 repo.undelete([abstarget], wlock)
1057 repo.undelete([abstarget], wlock)
1057 try:
1058 try:
1058 if not opts.get('dry_run'):
1059 if not opts.get('dry_run'):
1059 shutil.copyfile(relsrc, reltarget)
1060 shutil.copyfile(relsrc, reltarget)
1060 shutil.copymode(relsrc, reltarget)
1061 shutil.copymode(relsrc, reltarget)
1061 restore = False
1062 restore = False
1062 finally:
1063 finally:
1063 if restore:
1064 if restore:
1064 repo.remove([abstarget], wlock)
1065 repo.remove([abstarget], wlock)
1065 except shutil.Error, inst:
1066 except shutil.Error, inst:
1066 raise util.Abort(str(inst))
1067 raise util.Abort(str(inst))
1067 except IOError, inst:
1068 except IOError, inst:
1068 if inst.errno == errno.ENOENT:
1069 if inst.errno == errno.ENOENT:
1069 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1070 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1070 else:
1071 else:
1071 ui.warn(_('%s: cannot copy - %s\n') %
1072 ui.warn(_('%s: cannot copy - %s\n') %
1072 (relsrc, inst.strerror))
1073 (relsrc, inst.strerror))
1073 errors += 1
1074 errors += 1
1074 return
1075 return
1075 if ui.verbose or not exact:
1076 if ui.verbose or not exact:
1076 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1077 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1077 targets[abstarget] = abssrc
1078 targets[abstarget] = abssrc
1078 if abstarget != origsrc and not opts.get('dry_run'):
1079 if abstarget != origsrc and not opts.get('dry_run'):
1079 repo.copy(origsrc, abstarget, wlock)
1080 repo.copy(origsrc, abstarget, wlock)
1080 copied.append((abssrc, relsrc, exact))
1081 copied.append((abssrc, relsrc, exact))
1081
1082
1082 def targetpathfn(pat, dest, srcs):
1083 def targetpathfn(pat, dest, srcs):
1083 if os.path.isdir(pat):
1084 if os.path.isdir(pat):
1084 abspfx = util.canonpath(repo.root, cwd, pat)
1085 abspfx = util.canonpath(repo.root, cwd, pat)
1085 if destdirexists:
1086 if destdirexists:
1086 striplen = len(os.path.split(abspfx)[0])
1087 striplen = len(os.path.split(abspfx)[0])
1087 else:
1088 else:
1088 striplen = len(abspfx)
1089 striplen = len(abspfx)
1089 if striplen:
1090 if striplen:
1090 striplen += len(os.sep)
1091 striplen += len(os.sep)
1091 res = lambda p: os.path.join(dest, p[striplen:])
1092 res = lambda p: os.path.join(dest, p[striplen:])
1092 elif destdirexists:
1093 elif destdirexists:
1093 res = lambda p: os.path.join(dest, os.path.basename(p))
1094 res = lambda p: os.path.join(dest, os.path.basename(p))
1094 else:
1095 else:
1095 res = lambda p: dest
1096 res = lambda p: dest
1096 return res
1097 return res
1097
1098
1098 def targetpathafterfn(pat, dest, srcs):
1099 def targetpathafterfn(pat, dest, srcs):
1099 if util.patkind(pat, None)[0]:
1100 if util.patkind(pat, None)[0]:
1100 # a mercurial pattern
1101 # a mercurial pattern
1101 res = lambda p: os.path.join(dest, os.path.basename(p))
1102 res = lambda p: os.path.join(dest, os.path.basename(p))
1102 else:
1103 else:
1103 abspfx = util.canonpath(repo.root, cwd, pat)
1104 abspfx = util.canonpath(repo.root, cwd, pat)
1104 if len(abspfx) < len(srcs[0][0]):
1105 if len(abspfx) < len(srcs[0][0]):
1105 # A directory. Either the target path contains the last
1106 # A directory. Either the target path contains the last
1106 # component of the source path or it does not.
1107 # component of the source path or it does not.
1107 def evalpath(striplen):
1108 def evalpath(striplen):
1108 score = 0
1109 score = 0
1109 for s in srcs:
1110 for s in srcs:
1110 t = os.path.join(dest, s[0][striplen:])
1111 t = os.path.join(dest, s[0][striplen:])
1111 if os.path.exists(t):
1112 if os.path.exists(t):
1112 score += 1
1113 score += 1
1113 return score
1114 return score
1114
1115
1115 striplen = len(abspfx)
1116 striplen = len(abspfx)
1116 if striplen:
1117 if striplen:
1117 striplen += len(os.sep)
1118 striplen += len(os.sep)
1118 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1119 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1119 score = evalpath(striplen)
1120 score = evalpath(striplen)
1120 striplen1 = len(os.path.split(abspfx)[0])
1121 striplen1 = len(os.path.split(abspfx)[0])
1121 if striplen1:
1122 if striplen1:
1122 striplen1 += len(os.sep)
1123 striplen1 += len(os.sep)
1123 if evalpath(striplen1) > score:
1124 if evalpath(striplen1) > score:
1124 striplen = striplen1
1125 striplen = striplen1
1125 res = lambda p: os.path.join(dest, p[striplen:])
1126 res = lambda p: os.path.join(dest, p[striplen:])
1126 else:
1127 else:
1127 # a file
1128 # a file
1128 if destdirexists:
1129 if destdirexists:
1129 res = lambda p: os.path.join(dest, os.path.basename(p))
1130 res = lambda p: os.path.join(dest, os.path.basename(p))
1130 else:
1131 else:
1131 res = lambda p: dest
1132 res = lambda p: dest
1132 return res
1133 return res
1133
1134
1134
1135
1135 pats = list(pats)
1136 pats = list(pats)
1136 if not pats:
1137 if not pats:
1137 raise util.Abort(_('no source or destination specified'))
1138 raise util.Abort(_('no source or destination specified'))
1138 if len(pats) == 1:
1139 if len(pats) == 1:
1139 raise util.Abort(_('no destination specified'))
1140 raise util.Abort(_('no destination specified'))
1140 dest = pats.pop()
1141 dest = pats.pop()
1141 destdirexists = os.path.isdir(dest)
1142 destdirexists = os.path.isdir(dest)
1142 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1143 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1143 raise util.Abort(_('with multiple sources, destination must be an '
1144 raise util.Abort(_('with multiple sources, destination must be an '
1144 'existing directory'))
1145 'existing directory'))
1145 if opts['after']:
1146 if opts['after']:
1146 tfn = targetpathafterfn
1147 tfn = targetpathafterfn
1147 else:
1148 else:
1148 tfn = targetpathfn
1149 tfn = targetpathfn
1149 copylist = []
1150 copylist = []
1150 for pat in pats:
1151 for pat in pats:
1151 srcs = []
1152 srcs = []
1152 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1153 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1153 origsrc = okaytocopy(abssrc, relsrc, exact)
1154 origsrc = okaytocopy(abssrc, relsrc, exact)
1154 if origsrc:
1155 if origsrc:
1155 srcs.append((origsrc, abssrc, relsrc, exact))
1156 srcs.append((origsrc, abssrc, relsrc, exact))
1156 if not srcs:
1157 if not srcs:
1157 continue
1158 continue
1158 copylist.append((tfn(pat, dest, srcs), srcs))
1159 copylist.append((tfn(pat, dest, srcs), srcs))
1159 if not copylist:
1160 if not copylist:
1160 raise util.Abort(_('no files to copy'))
1161 raise util.Abort(_('no files to copy'))
1161
1162
1162 for targetpath, srcs in copylist:
1163 for targetpath, srcs in copylist:
1163 for origsrc, abssrc, relsrc, exact in srcs:
1164 for origsrc, abssrc, relsrc, exact in srcs:
1164 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1165 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1165
1166
1166 if errors:
1167 if errors:
1167 ui.warn(_('(consider using --after)\n'))
1168 ui.warn(_('(consider using --after)\n'))
1168 return errors, copied
1169 return errors, copied
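# How destinations are resolved above: targetpathfn handles the normal
# case -- a source directory is re-rooted at dest (keeping its own name
# when dest is an existing directory), and a plain file goes to dest
# itself or to dest/basename.  targetpathafterfn handles --after, where
# the source may no longer exist on disk: evalpath scores the two
# candidate prefix lengths by counting how many already-present target
# files each would explain, and the higher-scoring striplen is used.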
1169
1170
1170 def copy(ui, repo, *pats, **opts):
1171 def copy(ui, repo, *pats, **opts):
1171 """mark files as copied for the next commit
1172 """mark files as copied for the next commit
1172
1173
1173 Mark dest as having copies of source files. If dest is a
1174 Mark dest as having copies of source files. If dest is a
1174 directory, copies are put in that directory. If dest is a file,
1175 directory, copies are put in that directory. If dest is a file,
1175 there can only be one source.
1176 there can only be one source.
1176
1177
1177 By default, this command copies the contents of files as they
1178 By default, this command copies the contents of files as they
1178 stand in the working directory. If invoked with --after, the
1179 stand in the working directory. If invoked with --after, the
1179 operation is recorded, but no copying is performed.
1180 operation is recorded, but no copying is performed.
1180
1181
1181 This command takes effect in the next commit.
1182 This command takes effect in the next commit.
1182
1183
1183 NOTE: This command should be treated as experimental. While it
1184 NOTE: This command should be treated as experimental. While it
1184 should properly record copied files, this information is not yet
1185 should properly record copied files, this information is not yet
1185 fully used by merge, nor fully reported by log.
1186 fully used by merge, nor fully reported by log.
1186 """
1187 """
1187 wlock = repo.wlock(0)
1188 wlock = repo.wlock(0)
1188 errs, copied = docopy(ui, repo, pats, opts, wlock)
1189 errs, copied = docopy(ui, repo, pats, opts, wlock)
1189 return errs
1190 return errs
1190
1191
1191 def debugancestor(ui, index, rev1, rev2):
1192 def debugancestor(ui, index, rev1, rev2):
1192 """find the ancestor revision of two revisions in a given index"""
1193 """find the ancestor revision of two revisions in a given index"""
1193 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1194 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1194 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1195 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1195 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1196 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1196
1197
1197 def debugcomplete(ui, cmd='', **opts):
1198 def debugcomplete(ui, cmd='', **opts):
1198 """returns the completion list associated with the given command"""
1199 """returns the completion list associated with the given command"""
1199
1200
1200 if opts['options']:
1201 if opts['options']:
1201 options = []
1202 options = []
1202 otables = [globalopts]
1203 otables = [globalopts]
1203 if cmd:
1204 if cmd:
1204 aliases, entry = findcmd(cmd)
1205 aliases, entry = findcmd(cmd)
1205 otables.append(entry[1])
1206 otables.append(entry[1])
1206 for t in otables:
1207 for t in otables:
1207 for o in t:
1208 for o in t:
1208 if o[0]:
1209 if o[0]:
1209 options.append('-%s' % o[0])
1210 options.append('-%s' % o[0])
1210 options.append('--%s' % o[1])
1211 options.append('--%s' % o[1])
1211 ui.write("%s\n" % "\n".join(options))
1212 ui.write("%s\n" % "\n".join(options))
1212 return
1213 return
1213
1214
1214 clist = findpossible(cmd).keys()
1215 clist = findpossible(cmd).keys()
1215 clist.sort()
1216 clist.sort()
1216 ui.write("%s\n" % "\n".join(clist))
1217 ui.write("%s\n" % "\n".join(clist))
1217
1218
def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look for the given revision"""
    if not rev:
        rev = repo.changelog.tip()
    else:
        rev = repo.lookup(rev)
    change = repo.changelog.read(rev)
    n = change[0]
    files = repo.manifest.readflags(n)
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files.iteritems())
1229
1230
1230 def debugcheckstate(ui, repo):
1231 def debugcheckstate(ui, repo):
1231 """validate the correctness of the current dirstate"""
1232 """validate the correctness of the current dirstate"""
1232 parent1, parent2 = repo.dirstate.parents()
1233 parent1, parent2 = repo.dirstate.parents()
1233 repo.dirstate.read()
1234 repo.dirstate.read()
1234 dc = repo.dirstate.map
1235 dc = repo.dirstate.map
1235 keys = dc.keys()
1236 keys = dc.keys()
1236 keys.sort()
1237 keys.sort()
1237 m1n = repo.changelog.read(parent1)[0]
1238 m1n = repo.changelog.read(parent1)[0]
1238 m2n = repo.changelog.read(parent2)[0]
1239 m2n = repo.changelog.read(parent2)[0]
1239 m1 = repo.manifest.read(m1n)
1240 m1 = repo.manifest.read(m1n)
1240 m2 = repo.manifest.read(m2n)
1241 m2 = repo.manifest.read(m2n)
1241 errors = 0
1242 errors = 0
1242 for f in dc:
1243 for f in dc:
1243 state = repo.dirstate.state(f)
1244 state = repo.dirstate.state(f)
1244 if state in "nr" and f not in m1:
1245 if state in "nr" and f not in m1:
1245 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1246 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1246 errors += 1
1247 errors += 1
1247 if state in "a" and f in m1:
1248 if state in "a" and f in m1:
1248 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1249 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1249 errors += 1
1250 errors += 1
1250 if state in "m" and f not in m1 and f not in m2:
1251 if state in "m" and f not in m1 and f not in m2:
1251 ui.warn(_("%s in state %s, but not in either manifest\n") %
1252 ui.warn(_("%s in state %s, but not in either manifest\n") %
1252 (f, state))
1253 (f, state))
1253 errors += 1
1254 errors += 1
1254 for f in m1:
1255 for f in m1:
1255 state = repo.dirstate.state(f)
1256 state = repo.dirstate.state(f)
1256 if state not in "nrm":
1257 if state not in "nrm":
1257 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1258 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1258 errors += 1
1259 errors += 1
1259 if errors:
1260 if errors:
1260 error = _(".hg/dirstate inconsistent with current parent's manifest")
1261 error = _(".hg/dirstate inconsistent with current parent's manifest")
1261 raise util.Abort(error)
1262 raise util.Abort(error)
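# The checks above cross-reference the two data sources: dirstate entries
# in state 'n' or 'r' must exist in the first parent's manifest, 'a'
# entries must not, 'm' entries must appear in at least one of the two
# parent manifests, and every file in manifest1 must be tracked with
# state 'n', 'r' or 'm'.  Any mismatch is counted and reported as an
# inconsistency.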
1262
1263
1263 def debugconfig(ui, repo, *values):
1264 def debugconfig(ui, repo, *values):
1264 """show combined config settings from all hgrc files
1265 """show combined config settings from all hgrc files
1265
1266
1266 With no args, print names and values of all config items.
1267 With no args, print names and values of all config items.
1267
1268
1268 With one arg of the form section.name, print just the value of
1269 With one arg of the form section.name, print just the value of
1269 that config item.
1270 that config item.
1270
1271
1271 With multiple args, print names and values of all config items
1272 With multiple args, print names and values of all config items
1272 with matching section names."""
1273 with matching section names."""
1273
1274
1274 if values:
1275 if values:
1275 if len([v for v in values if '.' in v]) > 1:
1276 if len([v for v in values if '.' in v]) > 1:
1276 raise util.Abort(_('only one config item permitted'))
1277 raise util.Abort(_('only one config item permitted'))
1277 for section, name, value in ui.walkconfig():
1278 for section, name, value in ui.walkconfig():
1278 sectname = section + '.' + name
1279 sectname = section + '.' + name
1279 if values:
1280 if values:
1280 for v in values:
1281 for v in values:
1281 if v == section:
1282 if v == section:
1282 ui.write('%s=%s\n' % (sectname, value))
1283 ui.write('%s=%s\n' % (sectname, value))
1283 elif v == sectname:
1284 elif v == sectname:
1284 ui.write(value, '\n')
1285 ui.write(value, '\n')
1285 else:
1286 else:
1286 ui.write('%s=%s\n' % (sectname, value))
1287 ui.write('%s=%s\n' % (sectname, value))
1287
1288
1288 def debugsetparents(ui, repo, rev1, rev2=None):
1289 def debugsetparents(ui, repo, rev1, rev2=None):
1289 """manually set the parents of the current working directory
1290 """manually set the parents of the current working directory
1290
1291
1291 This is useful for writing repository conversion tools, but should
1292 This is useful for writing repository conversion tools, but should
1292 be used with care.
1293 be used with care.
1293 """
1294 """
1294
1295
1295 if not rev2:
1296 if not rev2:
1296 rev2 = hex(nullid)
1297 rev2 = hex(nullid)
1297
1298
1298 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1299 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1299
1300
1300 def debugstate(ui, repo):
1301 def debugstate(ui, repo):
1301 """show the contents of the current dirstate"""
1302 """show the contents of the current dirstate"""
1302 repo.dirstate.read()
1303 repo.dirstate.read()
1303 dc = repo.dirstate.map
1304 dc = repo.dirstate.map
1304 keys = dc.keys()
1305 keys = dc.keys()
1305 keys.sort()
1306 keys.sort()
1306 for file_ in keys:
1307 for file_ in keys:
1307 ui.write("%c %3o %10d %s %s\n"
1308 ui.write("%c %3o %10d %s %s\n"
1308 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1309 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1309 time.strftime("%x %X",
1310 time.strftime("%x %X",
1310 time.localtime(dc[file_][3])), file_))
1311 time.localtime(dc[file_][3])), file_))
1311 for f in repo.dirstate.copies:
1312 for f in repo.dirstate.copies:
1312 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1313 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1313
1314
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s'), rev)
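# Note: file_ is expected to be the ".d" data file of a revlog; the
# matching ".i" index name is derived above by replacing the last two
# characters (file_[:-2] + ".i").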
1322
1323
1323 def debugindex(ui, file_):
1324 def debugindex(ui, file_):
1324 """dump the contents of an index file"""
1325 """dump the contents of an index file"""
1325 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1326 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1326 ui.write(" rev offset length base linkrev" +
1327 ui.write(" rev offset length base linkrev" +
1327 " nodeid p1 p2\n")
1328 " nodeid p1 p2\n")
1328 for i in range(r.count()):
1329 for i in range(r.count()):
1329 node = r.node(i)
1330 node = r.node(i)
1330 pp = r.parents(node)
1331 pp = r.parents(node)
1331 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1332 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1332 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1333 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1333 short(node), short(pp[0]), short(pp[1])))
1334 short(node), short(pp[0]), short(pp[1])))
1334
1335
1335 def debugindexdot(ui, file_):
1336 def debugindexdot(ui, file_):
1336 """dump an index DAG as a .dot file"""
1337 """dump an index DAG as a .dot file"""
1337 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1338 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1338 ui.write("digraph G {\n")
1339 ui.write("digraph G {\n")
1339 for i in range(r.count()):
1340 for i in range(r.count()):
1340 node = r.node(i)
1341 node = r.node(i)
1341 pp = r.parents(node)
1342 pp = r.parents(node)
1342 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1343 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1343 if pp[1] != nullid:
1344 if pp[1] != nullid:
1344 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1345 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1345 ui.write("}\n")
1346 ui.write("}\n")
1346
1347
1347 def debugrename(ui, repo, file, rev=None):
1348 def debugrename(ui, repo, file, rev=None):
1348 """dump rename information"""
1349 """dump rename information"""
1349 r = repo.file(relpath(repo, [file])[0])
1350 r = repo.file(relpath(repo, [file])[0])
1350 if rev:
1351 if rev:
1351 try:
1352 try:
1352 # assume all revision numbers are for changesets
1353 # assume all revision numbers are for changesets
1353 n = repo.lookup(rev)
1354 n = repo.lookup(rev)
1354 change = repo.changelog.read(n)
1355 change = repo.changelog.read(n)
1355 m = repo.manifest.read(change[0])
1356 m = repo.manifest.read(change[0])
1356 n = m[relpath(repo, [file])[0]]
1357 n = m[relpath(repo, [file])[0]]
1357 except (hg.RepoError, KeyError):
1358 except (hg.RepoError, KeyError):
1358 n = r.lookup(rev)
1359 n = r.lookup(rev)
1359 else:
1360 else:
1360 n = r.tip()
1361 n = r.tip()
1361 m = r.renamed(n)
1362 m = r.renamed(n)
1362 if m:
1363 if m:
1363 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1364 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1364 else:
1365 else:
1365 ui.write(_("not renamed\n"))
1366 ui.write(_("not renamed\n"))
1366
1367
1367 def debugwalk(ui, repo, *pats, **opts):
1368 def debugwalk(ui, repo, *pats, **opts):
1368 """show how files match on given patterns"""
1369 """show how files match on given patterns"""
1369 items = list(walk(repo, pats, opts))
1370 items = list(walk(repo, pats, opts))
1370 if not items:
1371 if not items:
1371 return
1372 return
1372 fmt = '%%s %%-%ds %%-%ds %%s' % (
1373 fmt = '%%s %%-%ds %%-%ds %%s' % (
1373 max([len(abs) for (src, abs, rel, exact) in items]),
1374 max([len(abs) for (src, abs, rel, exact) in items]),
1374 max([len(rel) for (src, abs, rel, exact) in items]))
1375 max([len(rel) for (src, abs, rel, exact) in items]))
1375 for src, abs, rel, exact in items:
1376 for src, abs, rel, exact in items:
1376 line = fmt % (src, abs, rel, exact and 'exact' or '')
1377 line = fmt % (src, abs, rel, exact and 'exact' or '')
1377 ui.write("%s\n" % line.rstrip())
1378 ui.write("%s\n" % line.rstrip())
1378
1379
1379 def diff(ui, repo, *pats, **opts):
1380 def diff(ui, repo, *pats, **opts):
1380 """diff repository (or selected files)
1381 """diff repository (or selected files)
1381
1382
1382 Show differences between revisions for the specified files.
1383 Show differences between revisions for the specified files.
1383
1384
1384 Differences between files are shown using the unified diff format.
1385 Differences between files are shown using the unified diff format.
1385
1386
1386 When two revision arguments are given, then changes are shown
1387 When two revision arguments are given, then changes are shown
1387 between those revisions. If only one revision is specified then
1388 between those revisions. If only one revision is specified then
1388 that revision is compared to the working directory, and, when no
1389 that revision is compared to the working directory, and, when no
1389 revisions are specified, the working directory files are compared
1390 revisions are specified, the working directory files are compared
1390 to its parent.
1391 to its parent.
1391
1392
1392 Without the -a option, diff will avoid generating diffs of files
1393 Without the -a option, diff will avoid generating diffs of files
1393 it detects as binary. With -a, diff will generate a diff anyway,
1394 it detects as binary. With -a, diff will generate a diff anyway,
1394 probably with undesirable results.
1395 probably with undesirable results.
1395 """
1396 """
1396 node1, node2 = revpair(ui, repo, opts['rev'])
1397 node1, node2 = revpair(ui, repo, opts['rev'])
1397
1398
1398 fns, matchfn, anypats = matchpats(repo, pats, opts)
1399 fns, matchfn, anypats = matchpats(repo, pats, opts)
1399
1400
1400 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1401 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1401 text=opts['text'], opts=opts)
1402 text=opts['text'], opts=opts)
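# Illustrative calls (revisions and paths are placeholders): "hg diff"
# compares the working directory with its parent, "hg diff -r 10" compares
# it with revision 10, and "hg diff -r 10 -r 20 foo.c" shows the changes
# to foo.c between the two revisions, as described in the docstring.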
1402
1403
1403 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1404 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1404 node = repo.lookup(changeset)
1405 node = repo.lookup(changeset)
1405 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1406 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1406 if opts['switch_parent']:
1407 if opts['switch_parent']:
1407 parents.reverse()
1408 parents.reverse()
1408 prev = (parents and parents[0]) or nullid
1409 prev = (parents and parents[0]) or nullid
1409 change = repo.changelog.read(node)
1410 change = repo.changelog.read(node)
1410
1411
1411 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1412 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1412 revwidth=revwidth)
1413 revwidth=revwidth)
1413 if fp != sys.stdout:
1414 if fp != sys.stdout:
1414 ui.note("%s\n" % fp.name)
1415 ui.note("%s\n" % fp.name)
1415
1416
1416 fp.write("# HG changeset patch\n")
1417 fp.write("# HG changeset patch\n")
1417 fp.write("# User %s\n" % change[1])
1418 fp.write("# User %s\n" % change[1])
1418 fp.write("# Date %d %d\n" % change[2])
1419 fp.write("# Date %d %d\n" % change[2])
1419 fp.write("# Node ID %s\n" % hex(node))
1420 fp.write("# Node ID %s\n" % hex(node))
1420 fp.write("# Parent %s\n" % hex(prev))
1421 fp.write("# Parent %s\n" % hex(prev))
1421 if len(parents) > 1:
1422 if len(parents) > 1:
1422 fp.write("# Parent %s\n" % hex(parents[1]))
1423 fp.write("# Parent %s\n" % hex(parents[1]))
1423 fp.write(change[4].rstrip())
1424 fp.write(change[4].rstrip())
1424 fp.write("\n\n")
1425 fp.write("\n\n")
1425
1426
1426 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1427 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1427 if fp != sys.stdout:
1428 if fp != sys.stdout:
1428 fp.close()
1429 fp.close()
1429
1430
1430 def export(ui, repo, *changesets, **opts):
1431 def export(ui, repo, *changesets, **opts):
1431 """dump the header and diffs for one or more changesets
1432 """dump the header and diffs for one or more changesets
1432
1433
1433 Print the changeset header and diffs for one or more revisions.
1434 Print the changeset header and diffs for one or more revisions.
1434
1435
1435 The information shown in the changeset header is: author,
1436 The information shown in the changeset header is: author,
1436 changeset hash, parent and commit comment.
1437 changeset hash, parent and commit comment.
1437
1438
1438 Output may be to a file, in which case the name of the file is
1439 Output may be to a file, in which case the name of the file is
1439 given using a format string. The formatting rules are as follows:
1440 given using a format string. The formatting rules are as follows:
1440
1441
1441 %% literal "%" character
1442 %% literal "%" character
1442 %H changeset hash (40 bytes of hexadecimal)
1443 %H changeset hash (40 bytes of hexadecimal)
1443 %N number of patches being generated
1444 %N number of patches being generated
1444 %R changeset revision number
1445 %R changeset revision number
1445 %b basename of the exporting repository
1446 %b basename of the exporting repository
1446 %h short-form changeset hash (12 bytes of hexadecimal)
1447 %h short-form changeset hash (12 bytes of hexadecimal)
1447 %n zero-padded sequence number, starting at 1
1448 %n zero-padded sequence number, starting at 1
1448 %r zero-padded changeset revision number
1449 %r zero-padded changeset revision number
1449
1450
1450 Without the -a option, export will avoid generating diffs of files
1451 Without the -a option, export will avoid generating diffs of files
1451 it detects as binary. With -a, export will generate a diff anyway,
1452 it detects as binary. With -a, export will generate a diff anyway,
1452 probably with undesirable results.
1453 probably with undesirable results.
1453
1454
1454 With the --switch-parent option, the diff will be against the second
1455 With the --switch-parent option, the diff will be against the second
1455 parent. It can be useful to review a merge.
1456 parent. It can be useful to review a merge.
1456 """
1457 """
1457 if not changesets:
1458 if not changesets:
1458 raise util.Abort(_("export requires at least one changeset"))
1459 raise util.Abort(_("export requires at least one changeset"))
1459 seqno = 0
1460 seqno = 0
1460 revs = list(revrange(ui, repo, changesets))
1461 revs = list(revrange(ui, repo, changesets))
1461 total = len(revs)
1462 total = len(revs)
1462 revwidth = max(map(len, revs))
1463 revwidth = max(map(len, revs))
1463 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1464 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1464 ui.note(msg)
1465 ui.note(msg)
1465 for cset in revs:
1466 for cset in revs:
1466 seqno += 1
1467 seqno += 1
1467 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1468 doexport(ui, repo, cset, seqno, total, revwidth, opts)
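# A small example of the format codes above (values are illustrative):
# exporting revision 42 in a three-patch run from a repository whose
# basename is "hg", with an output pattern of "%b-%r-of-%N.patch", could
# produce a name like "hg-042-of-3.patch" -- the zero padding of %r
# follows the widest revision number in the run (revwidth above).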
1468
1469
1469 def forget(ui, repo, *pats, **opts):
1470 def forget(ui, repo, *pats, **opts):
1470 """don't add the specified files on the next commit (DEPRECATED)
1471 """don't add the specified files on the next commit (DEPRECATED)
1471
1472
1472 (DEPRECATED)
1473 (DEPRECATED)
1473 Undo an 'hg add' scheduled for the next commit.
1474 Undo an 'hg add' scheduled for the next commit.
1474
1475
1475 This command is now deprecated and will be removed in a future
1476 This command is now deprecated and will be removed in a future
1476 release. Please use revert instead.
1477 release. Please use revert instead.
1477 """
1478 """
1478 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1479 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1479 forget = []
1480 forget = []
1480 for src, abs, rel, exact in walk(repo, pats, opts):
1481 for src, abs, rel, exact in walk(repo, pats, opts):
1481 if repo.dirstate.state(abs) == 'a':
1482 if repo.dirstate.state(abs) == 'a':
1482 forget.append(abs)
1483 forget.append(abs)
1483 if ui.verbose or not exact:
1484 if ui.verbose or not exact:
1484 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1485 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1485 repo.forget(forget)
1486 repo.forget(forget)
1486
1487
1487 def grep(ui, repo, pattern, *pats, **opts):
1488 def grep(ui, repo, pattern, *pats, **opts):
1488 """search for a pattern in specified files and revisions
1489 """search for a pattern in specified files and revisions
1489
1490
1490 Search revisions of files for a regular expression.
1491 Search revisions of files for a regular expression.
1491
1492
1492 This command behaves differently than Unix grep. It only accepts
1493 This command behaves differently than Unix grep. It only accepts
1493 Python/Perl regexps. It searches repository history, not the
1494 Python/Perl regexps. It searches repository history, not the
1494 working directory. It always prints the revision number in which
1495 working directory. It always prints the revision number in which
1495 a match appears.
1496 a match appears.
1496
1497
1497 By default, grep only prints output for the first revision of a
1498 By default, grep only prints output for the first revision of a
1498 file in which it finds a match. To get it to print every revision
1499 file in which it finds a match. To get it to print every revision
1499 that contains a change in match status ("-" for a match that
1500 that contains a change in match status ("-" for a match that
1500 becomes a non-match, or "+" for a non-match that becomes a match),
1501 becomes a non-match, or "+" for a non-match that becomes a match),
1501 use the --all flag.
1502 use the --all flag.
1502 """
1503 """
1503 reflags = 0
1504 reflags = 0
1504 if opts['ignore_case']:
1505 if opts['ignore_case']:
1505 reflags |= re.I
1506 reflags |= re.I
1506 regexp = re.compile(pattern, reflags)
1507 regexp = re.compile(pattern, reflags)
1507 sep, eol = ':', '\n'
1508 sep, eol = ':', '\n'
1508 if opts['print0']:
1509 if opts['print0']:
1509 sep = eol = '\0'
1510 sep = eol = '\0'
1510
1511
1511 fcache = {}
1512 fcache = {}
1512 def getfile(fn):
1513 def getfile(fn):
1513 if fn not in fcache:
1514 if fn not in fcache:
1514 fcache[fn] = repo.file(fn)
1515 fcache[fn] = repo.file(fn)
1515 return fcache[fn]
1516 return fcache[fn]
1516
1517
1517 def matchlines(body):
1518 def matchlines(body):
1518 begin = 0
1519 begin = 0
1519 linenum = 0
1520 linenum = 0
1520 while True:
1521 while True:
1521 match = regexp.search(body, begin)
1522 match = regexp.search(body, begin)
1522 if not match:
1523 if not match:
1523 break
1524 break
1524 mstart, mend = match.span()
1525 mstart, mend = match.span()
1525 linenum += body.count('\n', begin, mstart) + 1
1526 linenum += body.count('\n', begin, mstart) + 1
1526 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1527 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1527 lend = body.find('\n', mend)
1528 lend = body.find('\n', mend)
1528 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1529 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1529 begin = lend + 1
1530 begin = lend + 1
1530
1531
1531 class linestate(object):
1532 class linestate(object):
1532 def __init__(self, line, linenum, colstart, colend):
1533 def __init__(self, line, linenum, colstart, colend):
1533 self.line = line
1534 self.line = line
1534 self.linenum = linenum
1535 self.linenum = linenum
1535 self.colstart = colstart
1536 self.colstart = colstart
1536 self.colend = colend
1537 self.colend = colend
1537 def __eq__(self, other):
1538 def __eq__(self, other):
1538 return self.line == other.line
1539 return self.line == other.line
1539 def __hash__(self):
1540 def __hash__(self):
1540 return hash(self.line)
1541 return hash(self.line)
1541
1542
1542 matches = {}
1543 matches = {}
1543 def grepbody(fn, rev, body):
1544 def grepbody(fn, rev, body):
1544 matches[rev].setdefault(fn, {})
1545 matches[rev].setdefault(fn, {})
1545 m = matches[rev][fn]
1546 m = matches[rev][fn]
1546 for lnum, cstart, cend, line in matchlines(body):
1547 for lnum, cstart, cend, line in matchlines(body):
1547 s = linestate(line, lnum, cstart, cend)
1548 s = linestate(line, lnum, cstart, cend)
1548 m[s] = s
1549 m[s] = s
1549
1550
1550 # FIXME: prev isn't used, why ?
1551 # FIXME: prev isn't used, why ?
1551 prev = {}
1552 prev = {}
1552 ucache = {}
1553 ucache = {}
1553 def display(fn, rev, states, prevstates):
1554 def display(fn, rev, states, prevstates):
1554 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1555 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1555 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1556 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1556 counts = {'-': 0, '+': 0}
1557 counts = {'-': 0, '+': 0}
1557 filerevmatches = {}
1558 filerevmatches = {}
1558 for l in diff:
1559 for l in diff:
1559 if incrementing or not opts['all']:
1560 if incrementing or not opts['all']:
1560 change = ((l in prevstates) and '-') or '+'
1561 change = ((l in prevstates) and '-') or '+'
1561 r = rev
1562 r = rev
1562 else:
1563 else:
1563 change = ((l in states) and '-') or '+'
1564 change = ((l in states) and '-') or '+'
1564 r = prev[fn]
1565 r = prev[fn]
1565 cols = [fn, str(rev)]
1566 cols = [fn, str(rev)]
1566 if opts['line_number']:
1567 if opts['line_number']:
1567 cols.append(str(l.linenum))
1568 cols.append(str(l.linenum))
1568 if opts['all']:
1569 if opts['all']:
1569 cols.append(change)
1570 cols.append(change)
1570 if opts['user']:
1571 if opts['user']:
1571 cols.append(trimuser(ui, getchange(rev)[1], rev,
1572 cols.append(trimuser(ui, getchange(rev)[1], rev,
1572 ucache))
1573 ucache))
1573 if opts['files_with_matches']:
1574 if opts['files_with_matches']:
1574 c = (fn, rev)
1575 c = (fn, rev)
1575 if c in filerevmatches:
1576 if c in filerevmatches:
1576 continue
1577 continue
1577 filerevmatches[c] = 1
1578 filerevmatches[c] = 1
1578 else:
1579 else:
1579 cols.append(l.line)
1580 cols.append(l.line)
1580 ui.write(sep.join(cols), eol)
1581 ui.write(sep.join(cols), eol)
1581 counts[change] += 1
1582 counts[change] += 1
1582 return counts['+'], counts['-']
1583 return counts['+'], counts['-']
1583
1584
1584 fstate = {}
1585 fstate = {}
1585 skip = {}
1586 skip = {}
1586 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1587 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1587 count = 0
1588 count = 0
1588 incrementing = False
1589 incrementing = False
1589 for st, rev, fns in changeiter:
1590 for st, rev, fns in changeiter:
1590 if st == 'window':
1591 if st == 'window':
1591 incrementing = rev
1592 incrementing = rev
1592 matches.clear()
1593 matches.clear()
1593 elif st == 'add':
1594 elif st == 'add':
1594 change = repo.changelog.read(repo.lookup(str(rev)))
1595 change = repo.changelog.read(repo.lookup(str(rev)))
1595 mf = repo.manifest.read(change[0])
1596 mf = repo.manifest.read(change[0])
1596 matches[rev] = {}
1597 matches[rev] = {}
1597 for fn in fns:
1598 for fn in fns:
1598 if fn in skip:
1599 if fn in skip:
1599 continue
1600 continue
1600 fstate.setdefault(fn, {})
1601 fstate.setdefault(fn, {})
1601 try:
1602 try:
1602 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1603 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1603 except KeyError:
1604 except KeyError:
1604 pass
1605 pass
1605 elif st == 'iter':
1606 elif st == 'iter':
1606 states = matches[rev].items()
1607 states = matches[rev].items()
1607 states.sort()
1608 states.sort()
1608 for fn, m in states:
1609 for fn, m in states:
1609 if fn in skip:
1610 if fn in skip:
1610 continue
1611 continue
1611 if incrementing or not opts['all'] or fstate[fn]:
1612 if incrementing or not opts['all'] or fstate[fn]:
1612 pos, neg = display(fn, rev, m, fstate[fn])
1613 pos, neg = display(fn, rev, m, fstate[fn])
1613 count += pos + neg
1614 count += pos + neg
1614 if pos and not opts['all']:
1615 if pos and not opts['all']:
1615 skip[fn] = True
1616 skip[fn] = True
1616 fstate[fn] = m
1617 fstate[fn] = m
1617 prev[fn] = rev
1618 prev[fn] = rev
1618
1619
1619 if not incrementing:
1620 if not incrementing:
1620 fstate = fstate.items()
1621 fstate = fstate.items()
1621 fstate.sort()
1622 fstate.sort()
1622 for fn, state in fstate:
1623 for fn, state in fstate:
1623 if fn in skip:
1624 if fn in skip:
1624 continue
1625 continue
1625 display(fn, rev, {}, state)
1626 display(fn, rev, {}, state)
1626 return (count == 0 and 1) or 0
1627 return (count == 0 and 1) or 0
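# Sketch of the traversal above: walkchangerevs yields 'window', 'add' and
# 'iter' events.  Each 'add' loads the manifest for a revision and records
# matching lines per file; each 'iter' compares that set with the
# previously displayed state, so by default only the first matching
# revision of a file is printed, while --all keeps going and marks lines
# with '+' or '-' as they start or stop matching.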
1627
1628
1628 def heads(ui, repo, **opts):
1629 def heads(ui, repo, **opts):
1629 """show current repository heads
1630 """show current repository heads
1630
1631
1631 Show all repository head changesets.
1632 Show all repository head changesets.
1632
1633
1633 Repository "heads" are changesets that don't have children
1634 Repository "heads" are changesets that don't have children
1634 changesets. They are where development generally takes place and
1635 changesets. They are where development generally takes place and
1635 are the usual targets for update and merge operations.
1636 are the usual targets for update and merge operations.
1636 """
1637 """
1637 if opts['rev']:
1638 if opts['rev']:
1638 heads = repo.heads(repo.lookup(opts['rev']))
1639 heads = repo.heads(repo.lookup(opts['rev']))
1639 else:
1640 else:
1640 heads = repo.heads()
1641 heads = repo.heads()
1641 br = None
1642 br = None
1642 if opts['branches']:
1643 if opts['branches']:
1643 br = repo.branchlookup(heads)
1644 br = repo.branchlookup(heads)
1644 displayer = show_changeset(ui, repo, opts)
1645 displayer = show_changeset(ui, repo, opts)
1645 for n in heads:
1646 for n in heads:
1646 displayer.show(changenode=n, brinfo=br)
1647 displayer.show(changenode=n, brinfo=br)
1647
1648
1648 def identify(ui, repo):
1649 def identify(ui, repo):
1649 """print information about the working copy
1650 """print information about the working copy
1650
1651
1651 Print a short summary of the current state of the repo.
1652 Print a short summary of the current state of the repo.
1652
1653
1653 This summary identifies the repository state using one or two parent
1654 This summary identifies the repository state using one or two parent
1654 hash identifiers, followed by a "+" if there are uncommitted changes
1655 hash identifiers, followed by a "+" if there are uncommitted changes
1655 in the working directory, followed by a list of tags for this revision.
1656 in the working directory, followed by a list of tags for this revision.
1656 """
1657 """
1657 parents = [p for p in repo.dirstate.parents() if p != nullid]
1658 parents = [p for p in repo.dirstate.parents() if p != nullid]
1658 if not parents:
1659 if not parents:
1659 ui.write(_("unknown\n"))
1660 ui.write(_("unknown\n"))
1660 return
1661 return
1661
1662
1662 hexfunc = ui.verbose and hex or short
1663 hexfunc = ui.verbose and hex or short
1663 modified, added, removed, deleted, unknown = repo.changes()
1664 modified, added, removed, deleted, unknown = repo.changes()
1664 output = ["%s%s" %
1665 output = ["%s%s" %
1665 ('+'.join([hexfunc(parent) for parent in parents]),
1666 ('+'.join([hexfunc(parent) for parent in parents]),
1666 (modified or added or removed or deleted) and "+" or "")]
1667 (modified or added or removed or deleted) and "+" or "")]
1667
1668
1668 if not ui.quiet:
1669 if not ui.quiet:
1669 # multiple tags for a single parent separated by '/'
1670 # multiple tags for a single parent separated by '/'
1670 parenttags = ['/'.join(tags)
1671 parenttags = ['/'.join(tags)
1671 for tags in map(repo.nodetags, parents) if tags]
1672 for tags in map(repo.nodetags, parents) if tags]
1672 # tags for multiple parents separated by ' + '
1673 # tags for multiple parents separated by ' + '
1673 if parenttags:
1674 if parenttags:
1674 output.append(' + '.join(parenttags))
1675 output.append(' + '.join(parenttags))
1675
1676
1676 ui.write("%s\n" % ' '.join(output))
1677 ui.write("%s\n" % ' '.join(output))
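# Example output (hashes and tag names are illustrative): a clean working
# directory on a tagged revision might print "a1b2c3d4e5f6 tip", while
# uncommitted changes on top of a merge would look like
# "a1b2c3d4e5f6+9f8e7d6c5b4a+ tip" -- the short parent hashes joined by
# '+', a trailing '+' for local modifications, then any tags.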
1677
1678
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    if not opts['force']:
        bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    mailre = re.compile(r'(?:From |[\w-]+:)')

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])', re.MULTILINE)

    for patch in patches:
        pf = os.path.join(d, patch)

        message = None
        user = None
        date = None
        hgpatch = False

        p = email.Parser.Parser()
        if pf == '-':
            msg = p.parse(sys.stdin)
            ui.status(_("applying patch from stdin\n"))
        else:
            msg = p.parse(file(pf))
            ui.status(_("applying %s\n") % patch)

        fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
        tmpfp = os.fdopen(fd, 'w')
        try:
            message = msg['Subject']
            if message:
                message = message.replace('\n\t', ' ')
                ui.debug('Subject: %s\n' % message)
            user = msg['From']
            if user:
                ui.debug('From: %s\n' % user)
            diffs_seen = 0
            ok_types = ('text/plain', 'text/x-patch')
            for part in msg.walk():
                content_type = part.get_content_type()
                ui.debug('Content-Type: %s\n' % content_type)
                if content_type not in ok_types:
                    continue
                payload = part.get_payload(decode=True)
                m = diffre.search(payload)
                if m:
                    ui.debug(_('found patch at byte %d\n') % m.start(0))
                    diffs_seen += 1
                    hgpatch = False
                    fp = cStringIO.StringIO()
                    if message:
                        fp.write(message)
                        fp.write('\n')
                    for line in payload[:m.start(0)].splitlines():
                        if line.startswith('# HG changeset patch'):
                            ui.debug(_('patch generated by hg export\n'))
                            hgpatch = True
                            # drop earlier commit message content
                            fp.seek(0)
                            fp.truncate()
                        elif hgpatch:
                            if line.startswith('# User '):
                                user = line[7:]
                                ui.debug('From: %s\n' % user)
                            elif line.startswith("# Date "):
                                date = line[7:]
                        if not line.startswith('# '):
                            fp.write(line)
                            fp.write('\n')
                    message = fp.getvalue()
                    if tmpfp:
                        tmpfp.write(payload)
                        if not payload.endswith('\n'):
                            tmpfp.write('\n')
                elif not diffs_seen and message and content_type == 'text/plain':
                    message += '\n' + payload

            if opts['message']:
                # pickup the cmdline msg
                message = opts['message']
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            tmpfp.close()
            if not diffs_seen:
                raise util.Abort(_('no diffs found'))

            files = util.patch(strip, tmpname, ui)
            if len(files) > 0:
                addremove_lock(ui, repo, files, {})
            repo.commit(files, message, user, date)
        finally:
            os.unlink(tmpname)

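# The loop above scans each text/plain or text/x-patch part for the first
# line that looks like the start of a diff. The helper below is a small,
# self-contained sketch of that heuristic (same regex, hypothetical sample
# mail body); it is illustrative only and not part of Mercurial's API.
def _example_patch_heuristic():
    # Illustrative sketch only: split a mail body into description and
    # patch using the same quilt-style regex as import_() above.
    import re
    body = ("Fix a crash when the manifest is empty.\n"
            "\n"
            "diff -r aaaaaaaaaaaa -r bbbbbbbbbbbb mercurial/commands.py\n"
            "--- a/mercurial/commands.py\n"
            "+++ b/mercurial/commands.py\n")
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                        r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        r'(---|\*\*\*)[ \t])', re.MULTILINE)
    m = diffre.search(body)
    description, patch = body[:m.start(0)], body[m.start(0):]
    return description, patch
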
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For a remote repository, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, source)
    incoming = repo.findincoming(other, force=opts["force"])
    if not incoming:
        ui.status(_("no changes found\n"))
        return

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            cg = other.changegroup(incoming, "incoming")
            fname = cleanup = write_bundle(cg, fname, compress=other.local())
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        revs = None
        if opts['rev']:
            revs = [other.lookup(rev) for rev in opts['rev']]
        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(ui, ui, other, prev, n)
                ui.write("\n")
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)

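# incoming() above writes the changegroup to a temporary bundle and removes
# it in the finally block only when the caller did not ask to keep it (the
# 'cleanup' variable). A minimal, self-contained sketch of that pattern
# follows; the file handling is hypothetical and not Mercurial code.
def _example_temp_bundle_cleanup(keep=False):
    # Illustrative sketch only: create a temp file, hand it to a consumer,
    # and remove it afterwards unless the caller asked to keep it.
    import os, tempfile
    fd, fname = tempfile.mkstemp(prefix='hg-example-bundle-')
    os.close(fd)
    cleanup = fname
    if keep:
        cleanup = None
    try:
        return fname            # a real caller would read the bundle here
    finally:
        if cleanup:
            os.unlink(cleanup)
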
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    ui.setconfig_remoteopts(**opts)
    hg.repository(ui, dest, create=1)

def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    end = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     head='(?:.*/|)'):
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), end)
        else:
            ui.write(((pats and rel) or abs), end)

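# locate's --print0 mode terminates each name with NUL instead of newline so
# that "xargs -0" can reconstruct names containing spaces. A tiny sketch of
# why that matters (the file names are hypothetical); illustrative only.
def _example_print0(names=('a file.txt', 'b.txt')):
    # Illustrative sketch only: newline-separated output would split
    # 'a file.txt' into two words under plain xargs; NUL-separated
    # output keeps it as one name.
    import sys
    end = '\0'                  # what --print0 selects instead of '\n'
    for name in names:
        sys.stdout.write(name + end)
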
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.
    """
    class dui(object):
        # Implement and delegate some ui protocol. Save hunks of
        # output for later display in the desired order.
        def __init__(self, ui):
            self.ui = ui
            self.hunk = {}
            self.header = {}
        def bump(self, rev):
            self.rev = rev
            self.hunk[rev] = []
            self.header[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            self.hunk[self.rev].append(args)
        def write_header(self, *args):
            self.header[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            return getattr(self.ui, key)

    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    displayer = show_changeset(ui, repo, opts)
    for st, rev, fns in changeiter:
        if st == 'window':
            du = dui(ui)
            displayer.ui = du
        elif st == 'add':
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parents(changenode)
                       if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if opts['keyword']:
                changes = getchange(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            br = None
            if opts['branches']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            displayer.show(rev, brinfo=br)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(du, du, repo, prev, changenode, match=matchfn)
                du.write("\n\n")
        elif st == 'iter':
            if count == limit: break
            if du.header[rev]:
                for args in du.header[rev]:
                    ui.write_header(*args)
            if du.hunk[rev]:
                count += 1
                for args in du.hunk[rev]:
                    ui.write(*args)

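# The dui class above buffers every hunk of output per revision so that
# walkchangerevs can visit revisions out of order while log output is still
# emitted in the desired order. A stripped-down, self-contained sketch of
# that buffering idea follows; it is not the real ui protocol.
def _example_buffered_output():
    # Illustrative sketch only: collect writes keyed by revision, then
    # replay them in ascending revision order.
    import sys
    hunks = {}
    def write(rev, text):
        hunks.setdefault(rev, []).append(text)
    write(3, 'changeset 3\n')   # visited first
    write(1, 'changeset 1\n')   # visited later
    for rev in sorted(hunks):
        for text in hunks[rev]:
            sys.stdout.write(text)
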
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the tip is used.
    """
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            n = change[0]
        except hg.RepoError:
            n = repo.manifest.lookup(rev)
    else:
        n = repo.manifest.tip()
    m = repo.manifest.read(n)
    mf = repo.manifest.readflags(n)
    files = m.keys()
    files.sort()

    for f in files:
        ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))

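# manifest() prints "755" or "644" with the pre-2.5 "X and A or B" idiom.
# A short sketch of that idiom and its usual caveat follows; the helper is
# illustrative only and not part of Mercurial.
def _example_and_or_idiom(is_exec=True):
    # Illustrative sketch only: old Python spelling of a conditional
    # expression, as used in manifest() above.
    mode = is_exec and "755" or "644"
    # Caveat: this form is only safe because "755" is always truthy;
    # 'flag and "" or "x"' would always yield "x".
    return mode
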
def merge(ui, repo, node=None, **opts):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.
    """
    return doupdate(ui, repo, node=node, merge=True, **opts)

def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    ui.setconfig_remoteopts(**opts)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]

    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)
        if opts['patch']:
            prev = (parents and parents[0]) or nullid
            dodiff(ui, ui, repo, prev, n)
            ui.write("\n")

def parents(ui, repo, rev=None, branches=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    if rev:
        p = repo.changelog.parents(repo.lookup(rev))
    else:
        p = repo.dirstate.parents()

    br = None
    if branches is not None:
        br = repo.branchlookup(p)
    displayer = show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n, brinfo=br)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))

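# paths() lists entries from the [paths] section of the hgrc files. The
# sketch below shows what such a section looks like and how the standard
# ConfigParser module would read it; it illustrates the file format, not
# how Mercurial's ui object actually parses configuration. The URLs are
# hypothetical.
def _example_paths_section():
    # Illustrative sketch only: parse an hgrc-style [paths] section with
    # the standard library instead of Mercurial's ui machinery.
    import ConfigParser, StringIO
    text = ("[paths]\n"
            "default = http://example.com/hg/project\n"
            "default-push = ssh://example.com//srv/hg/project\n")
    cp = ConfigParser.RawConfigParser()
    cp.readfp(StringIO.StringIO(text))
    return cp.items('paths')
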
def postincoming(ui, repo, modheads, optupdate):
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return doupdate(ui, repo)
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path, or specified with the
      --remotecmd option.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/ssh_config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression off
        Host *
          Compression on
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev'] and not other.local():
        raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
    elif opts['rev']:
        revs = [other.lookup(rev) for rev in opts['rev']]
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])

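# pull's docstring lists the URL forms Mercurial accepts. The sketch below
# only shows how the pieces of an ssh:// URL break apart with the standard
# urlparse module; it is illustrative and not how hg.repository() itself
# dispatches on the scheme. The user, host and port are hypothetical.
def _example_pull_url():
    # Illustrative sketch only: decompose ssh://user@host:port//absolute/path
    import urlparse
    url = 'ssh://user@example.com:2222//tmp/repository'
    parts = urlparse.urlsplit(url)
    scheme, netloc, path = parts[0], parts[1], parts[2]
    # scheme == 'ssh', netloc == 'user@example.com:2222',
    # path == '//tmp/repository' (the extra slash means an absolute path)
    return scheme, netloc, path
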
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is possible, too, if this
    feature is enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    ui.setconfig_remoteopts(**opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0

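# push refuses, by default, to add new heads to the remote repository. The
# sketch below counts heads in a toy parent-map to illustrate what "new
# heads" means; it is a simplified illustration, not the algorithm that
# repo.push() actually uses.
def _example_count_heads():
    # Illustrative sketch only: a head is a node that no other node lists
    # as its parent. Toy history: 0 <- 1 <- 2 and 0 <- 3 (two heads).
    parents = {0: [], 1: [0], 2: [1], 3: [0]}
    has_child = {}
    for node, ps in parents.items():
        for p in ps:
            has_child[p] = True
    heads = [n for n in parents if n not in has_child]
    return len(heads)           # 2: committing node 3 created a new head
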
def rawcommit(ui, repo, *flist, **rc):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Lowlevel commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = rc['message']
    if not message and rc['logfile']:
        try:
            message = open(rc['logfile']).read()
        except IOError:
            pass
    if not message and not rc['logfile']:
        raise util.Abort(_("missing commit message"))

    files = relpath(repo, list(flist))
    if rc['files']:
        files += open(rc['files']).read().splitlines()

    rc['parent'] = map(repo.lookup, rc['parent'])

    try:
        repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return repo.verify()
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record renamed files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs

def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state. If the working directory has two parents, you must
    explicitly specify the revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to
    their contents as of a specific revision. This can be helpful to
    "roll back" some or all of a change that should not have been
    committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    parent, p2 = repo.dirstate.parents()
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    elif p2 != nullid:
        raise util.Abort(_('working dir has two parents; '
                           'you must specify the revision to revert to'))
    else:
        node = parent
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.changes(match=names.has_key, wlock=wlock)
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    shutil.copyfile(rel, bakname)
                    shutil.copymode(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n' % rel))
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n' % rel))
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = repo.update(node, False, True, update.has_key, False, wlock=wlock,
                        show_stats=False)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r

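# revert() drives its decisions from the disptable above and relies on
# Python's for/else so the "file has not changed in dirstate" branch runs
# only when no table matched. A minimal, self-contained sketch of that
# control flow follows (toy states and actions, not the real tables).
def _example_dispatch(state='modified'):
    # Illustrative sketch only: the first matching row wins and breaks;
    # the for loop's else clause runs only when no row matched.
    table = (
        ('modified', 'revert'),
        ('added', 'forget'),
        ('removed', 'undelete'),
    )
    action = None
    for known_state, known_action in table:
        if state != known_state:
            continue
        action = known_action
        break
    else:
        action = 'unchanged'    # nothing in the table matched
    return action
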
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_('no repo found'))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, opts[o])

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_('no repo found'))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    try:
        httpd = hgweb.server.create_server(ui, repo)
    except socket.error, inst:
        raise util.Abort(_('cannot start server: ') + inst.args[1])

    if ui.verbose:
        addr, port = httpd.socket.getsockname()
        if addr == '0.0.0.0':
            addr = socket.gethostname()
        else:
            try:
                addr = socket.gethostbyaddr(addr)[0]
            except socket.error:
                pass
        if port != 80:
            ui.status(_('listening at http://%s:%d/\n') % (addr, port))
        else:
            ui.status(_('listening at http://%s/\n') % addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

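# serve() daemonizes by re-running itself with --daemon-pipefds and blocking
# on a pipe until the child writes one byte once the server is ready. The
# sketch below reproduces just that pipe handshake, with a thread standing
# in for the child process; it is a simplified illustration, not the
# spawnvp logic above.
def _example_daemon_handshake():
    # Illustrative sketch only: the parent blocks on the read end of a pipe
    # until the "child" (here a thread) signals readiness with one byte.
    import os, threading
    rfd, wfd = os.pipe()
    def child():
        # ... a real child would bind its listening socket here ...
        os.write(wfd, 'y')
        os.close(wfd)
    t = threading.Thread(target=child)
    t.start()
    os.read(rfd, 1)             # returns once the child is ready
    os.close(rfd)
    t.join()
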
2583 def status(ui, repo, *pats, **opts):
2584 def status(ui, repo, *pats, **opts):
2584 """show changed files in the working directory
2585 """show changed files in the working directory
2585
2586
2586 Show changed files in the repository. If names are
2587 Show changed files in the repository. If names are
2587 given, only files that match are shown.
2588 given, only files that match are shown.
2588
2589
2589 The codes used to show the status of files are:
2590 The codes used to show the status of files are:
2590 M = modified
2591 M = modified
2591 A = added
2592 A = added
2592 R = removed
2593 R = removed
2593 ! = deleted, but still tracked
2594 ! = deleted, but still tracked
2594 ? = not tracked
2595 ? = not tracked
2595 I = ignored (not shown by default)
2596 I = ignored (not shown by default)
2596 """
2597 """
2597
2598
2598 show_ignored = opts['ignored'] and True or False
2599 show_ignored = opts['ignored'] and True or False
2599 files, matchfn, anypats = matchpats(repo, pats, opts)
2600 files, matchfn, anypats = matchpats(repo, pats, opts)
2600 cwd = (pats and repo.getcwd()) or ''
2601 cwd = (pats and repo.getcwd()) or ''
2601 modified, added, removed, deleted, unknown, ignored = [
2602 modified, added, removed, deleted, unknown, ignored = [
2602 [util.pathto(cwd, x) for x in n]
2603 [util.pathto(cwd, x) for x in n]
2603 for n in repo.changes(files=files, match=matchfn,
2604 for n in repo.changes(files=files, match=matchfn,
2604 show_ignored=show_ignored)]
2605 show_ignored=show_ignored)]
2605
2606
2606 changetypes = [('modified', 'M', modified),
2607 changetypes = [('modified', 'M', modified),
2607 ('added', 'A', added),
2608 ('added', 'A', added),
2608 ('removed', 'R', removed),
2609 ('removed', 'R', removed),
2609 ('deleted', '!', deleted),
2610 ('deleted', '!', deleted),
2610 ('unknown', '?', unknown),
2611 ('unknown', '?', unknown),
2611 ('ignored', 'I', ignored)]
2612 ('ignored', 'I', ignored)]
2612
2613
2613 end = opts['print0'] and '\0' or '\n'
2614 end = opts['print0'] and '\0' or '\n'
2614
2615
2615 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2616 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2616 or changetypes):
2617 or changetypes):
2617 if opts['no_status']:
2618 if opts['no_status']:
2618 format = "%%s%s" % end
2619 format = "%%s%s" % end
2619 else:
2620 else:
2620 format = "%s %%s%s" % (char, end)
2621 format = "%s %%s%s" % (char, end)
2621
2622
2622 for f in changes:
2623 for f in changes:
2623 ui.write(format % f)
2624 ui.write(format % f)
2624
2625
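# --- editor's illustrative sketch; not part of the original commands.py ---
# status() above builds its output format with a doubled '%%': the status
# letter is substituted immediately, the filename on each write.  A tiny,
# hypothetical demonstration (the file name is invented for illustration):

def _status_format_sketch():
    """Show how "%s %%s%s" expands for a modified file 'foo.py'."""
    end = '\n'                         # opts['print0'] not set
    format = "%s %%s%s" % ('M', end)   # -> 'M %s\n'
    return format % 'foo.py'           # -> 'M foo.py\n'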
2625 def tag(ui, repo, name, rev_=None, **opts):
2626 def tag(ui, repo, name, rev_=None, **opts):
2626 """add a tag for the current tip or a given revision
2627 """add a tag for the current tip or a given revision
2627
2628
2628 Name a particular revision using <name>.
2629 Name a particular revision using <name>.
2629
2630
2630 Tags are used to name particular revisions of the repository and are
2631 Tags are used to name particular revisions of the repository and are
2631 very useful to compare different revisions, to go back to significant
2632 very useful to compare different revisions, to go back to significant
2632 earlier versions or to mark branch points as releases, etc.
2633 earlier versions or to mark branch points as releases, etc.
2633
2634
2634 If no revision is given, the tip is used.
2635 If no revision is given, the tip is used.
2635
2636
2636 To facilitate version control, distribution, and merging of tags,
2637 To facilitate version control, distribution, and merging of tags,
2637 they are stored as a file named ".hgtags" which is managed
2638 they are stored as a file named ".hgtags" which is managed
2638 similarly to other project files and can be hand-edited if
2639 similarly to other project files and can be hand-edited if
2639 necessary. The file '.hg/localtags' is used for local tags (not
2640 necessary. The file '.hg/localtags' is used for local tags (not
2640 shared among repositories).
2641 shared among repositories).
2641 """
2642 """
2642 if name == "tip":
2643 if name == "tip":
2643 raise util.Abort(_("the name 'tip' is reserved"))
2644 raise util.Abort(_("the name 'tip' is reserved"))
2644 if rev_ is not None:
2645 if rev_ is not None:
2645 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2646 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2646 "please use 'hg tag [-r REV] NAME' instead\n"))
2647 "please use 'hg tag [-r REV] NAME' instead\n"))
2647 if opts['rev']:
2648 if opts['rev']:
2648 raise util.Abort(_("use only one form to specify the revision"))
2649 raise util.Abort(_("use only one form to specify the revision"))
2649 if opts['rev']:
2650 if opts['rev']:
2650 rev_ = opts['rev']
2651 rev_ = opts['rev']
2651 if rev_:
2652 if rev_:
2652 r = hex(repo.lookup(rev_))
2653 r = hex(repo.lookup(rev_))
2653 else:
2654 else:
2654 r = hex(repo.changelog.tip())
2655 r = hex(repo.changelog.tip())
2655
2656
2656 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2657 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2657 opts['date'])
2658 opts['date'])
2658
2659
2659 def tags(ui, repo):
2660 def tags(ui, repo):
2660 """list repository tags
2661 """list repository tags
2661
2662
2662 List the repository tags.
2663 List the repository tags.
2663
2664
2664 This lists both regular and local tags.
2665 This lists both regular and local tags.
2665 """
2666 """
2666
2667
2667 l = repo.tagslist()
2668 l = repo.tagslist()
2668 l.reverse()
2669 l.reverse()
2669 for t, n in l:
2670 for t, n in l:
2670 try:
2671 try:
2671 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2672 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2672 except KeyError:
2673 except KeyError:
2673 r = " ?:?"
2674 r = " ?:?"
2674 if ui.quiet:
2675 if ui.quiet:
2675 ui.write("%s\n" % t)
2676 ui.write("%s\n" % t)
2676 else:
2677 else:
2677 ui.write("%-30s %s\n" % (t, r))
2678 ui.write("%-30s %s\n" % (t, r))
2678
2679
2679 def tip(ui, repo, **opts):
2680 def tip(ui, repo, **opts):
2680 """show the tip revision
2681 """show the tip revision
2681
2682
2682 Show the tip revision.
2683 Show the tip revision.
2683 """
2684 """
2684 n = repo.changelog.tip()
2685 n = repo.changelog.tip()
2685 br = None
2686 br = None
2686 if opts['branches']:
2687 if opts['branches']:
2687 br = repo.branchlookup([n])
2688 br = repo.branchlookup([n])
2688 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2689 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2689 if opts['patch']:
2690 if opts['patch']:
2690 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2691 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2691
2692
2692 def unbundle(ui, repo, fname, **opts):
2693 def unbundle(ui, repo, fname, **opts):
2693 """apply a changegroup file
2694 """apply a changegroup file
2694
2695
2695 Apply a compressed changegroup file generated by the bundle
2696 Apply a compressed changegroup file generated by the bundle
2696 command.
2697 command.
2697 """
2698 """
2698 f = urllib.urlopen(fname)
2699 f = urllib.urlopen(fname)
2699
2700
2700 header = f.read(6)
2701 header = f.read(6)
2701 if not header.startswith("HG"):
2702 if not header.startswith("HG"):
2702 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2703 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2703 elif not header.startswith("HG10"):
2704 elif not header.startswith("HG10"):
2704 raise util.Abort(_("%s: unknown bundle version") % fname)
2705 raise util.Abort(_("%s: unknown bundle version") % fname)
2705 elif header == "HG10BZ":
2706 elif header == "HG10BZ":
2706 def generator(f):
2707 def generator(f):
2707 zd = bz2.BZ2Decompressor()
2708 zd = bz2.BZ2Decompressor()
2708 zd.decompress("BZ")
2709 zd.decompress("BZ")
2709 for chunk in f:
2710 for chunk in f:
2710 yield zd.decompress(chunk)
2711 yield zd.decompress(chunk)
2711 elif header == "HG10UN":
2712 elif header == "HG10UN":
2712 def generator(f):
2713 def generator(f):
2713 for chunk in f:
2714 for chunk in f:
2714 yield chunk
2715 yield chunk
2715 else:
2716 else:
2716 raise util.Abort(_("%s: unknown bundle compression type")
2717 raise util.Abort(_("%s: unknown bundle compression type")
2717 % fname)
2718 % fname)
2718 gen = generator(util.filechunkiter(f, 4096))
2719 gen = generator(util.filechunkiter(f, 4096))
2719 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
2720 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle')
2720 return postincoming(ui, repo, modheads, opts['update'])
2721 return postincoming(ui, repo, modheads, opts['update'])
2721
2722
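# --- editor's illustrative sketch; not part of the original commands.py ---
# unbundle() above sniffs a six-byte header to choose a decompressor.  The
# hypothetical helper below restates that decision table on its own; it is a
# sketch of the logic, not an API used elsewhere in this file.

def _bundle_compression_sketch(header):
    """Map a 6-byte bundle header to 'BZ' or 'UN', else raise ValueError."""
    if not header.startswith("HG"):
        raise ValueError("not a Mercurial bundle file")
    if not header.startswith("HG10"):
        raise ValueError("unknown bundle version")
    if header == "HG10BZ":
        return "BZ"    # bz2-compressed changegroup follows
    if header == "HG10UN":
        return "UN"    # uncompressed changegroup follows
    raise ValueError("unknown bundle compression type")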
2722 def undo(ui, repo):
2723 def undo(ui, repo):
2723 """undo the last commit or pull (DEPRECATED)
2724 """undo the last commit or pull (DEPRECATED)
2724
2725
2725 (DEPRECATED)
2726 (DEPRECATED)
2726 This command is now deprecated and will be removed in a future
2727 This command is now deprecated and will be removed in a future
2727 release. Please use the rollback command instead. For usage
2728 release. Please use the rollback command instead. For usage
2728 instructions, see the rollback command.
2729 instructions, see the rollback command.
2729 """
2730 """
2730 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2731 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2731 repo.rollback()
2732 repo.rollback()
2732
2733
2733 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2734 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2734 branch=None, **opts):
2735 branch=None, **opts):
2735 """update or merge working directory
2736 """update or merge working directory
2736
2737
2737 Update the working directory to the specified revision.
2738 Update the working directory to the specified revision.
2738
2739
2739 If there are no outstanding changes in the working directory and
2740 If there are no outstanding changes in the working directory and
2740 there is a linear relationship between the current version and the
2741 there is a linear relationship between the current version and the
2741 requested version, the result is the requested version.
2742 requested version, the result is the requested version.
2742
2743
2743 To merge the working directory with another revision, use the
2744 To merge the working directory with another revision, use the
2744 merge command.
2745 merge command.
2745
2746
2746 By default, update will refuse to run if doing so would require
2747 By default, update will refuse to run if doing so would require
2747 merging or discarding local changes.
2748 merging or discarding local changes.
2748 """
2749 """
2749 if merge:
2750 if merge:
2750 ui.warn(_('(the -m/--merge option is deprecated; '
2751 ui.warn(_('(the -m/--merge option is deprecated; '
2751 'use the merge command instead)\n'))
2752 'use the merge command instead)\n'))
2752 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2753 return doupdate(ui, repo, node, merge, clean, force, branch, **opts)
2753
2754
2754 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2755 def doupdate(ui, repo, node=None, merge=False, clean=False, force=None,
2755 branch=None, **opts):
2756 branch=None, **opts):
2756 if branch:
2757 if branch:
2757 br = repo.branchlookup(branch=branch)
2758 br = repo.branchlookup(branch=branch)
2758 found = []
2759 found = []
2759 for x in br:
2760 for x in br:
2760 if branch in br[x]:
2761 if branch in br[x]:
2761 found.append(x)
2762 found.append(x)
2762 if len(found) > 1:
2763 if len(found) > 1:
2763 ui.warn(_("Found multiple heads for %s\n") % branch)
2764 ui.warn(_("Found multiple heads for %s\n") % branch)
2764 for x in found:
2765 for x in found:
2765 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2766 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2766 return 1
2767 return 1
2767 if len(found) == 1:
2768 if len(found) == 1:
2768 node = found[0]
2769 node = found[0]
2769 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2770 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2770 else:
2771 else:
2771 ui.warn(_("branch %s not found\n") % (branch))
2772 ui.warn(_("branch %s not found\n") % (branch))
2772 return 1
2773 return 1
2773 else:
2774 else:
2774 node = node and repo.lookup(node) or repo.changelog.tip()
2775 node = node and repo.lookup(node) or repo.changelog.tip()
2775 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2776 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2776
2777
2777 def verify(ui, repo):
2778 def verify(ui, repo):
2778 """verify the integrity of the repository
2779 """verify the integrity of the repository
2779
2780
2780 Verify the integrity of the current repository.
2781 Verify the integrity of the current repository.
2781
2782
2782 This will perform an extensive check of the repository's
2783 This will perform an extensive check of the repository's
2783 integrity, validating the hashes and checksums of each entry in
2784 integrity, validating the hashes and checksums of each entry in
2784 the changelog, manifest, and tracked files, as well as the
2785 the changelog, manifest, and tracked files, as well as the
2785 integrity of their crosslinks and indices.
2786 integrity of their crosslinks and indices.
2786 """
2787 """
2787 return repo.verify()
2788 return repo.verify()
2788
2789
2789 # Command options and aliases are listed here, alphabetically
2790 # Command options and aliases are listed here, alphabetically
2790
2791
2791 table = {
2792 table = {
2792 "^add":
2793 "^add":
2793 (add,
2794 (add,
2794 [('I', 'include', [], _('include names matching the given patterns')),
2795 [('I', 'include', [], _('include names matching the given patterns')),
2795 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2796 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2796 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2797 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2797 _('hg add [OPTION]... [FILE]...')),
2798 _('hg add [OPTION]... [FILE]...')),
2798 "debugaddremove|addremove":
2799 "debugaddremove|addremove":
2799 (addremove,
2800 (addremove,
2800 [('I', 'include', [], _('include names matching the given patterns')),
2801 [('I', 'include', [], _('include names matching the given patterns')),
2801 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2802 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2802 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2803 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2803 _('hg addremove [OPTION]... [FILE]...')),
2804 _('hg addremove [OPTION]... [FILE]...')),
2804 "^annotate":
2805 "^annotate":
2805 (annotate,
2806 (annotate,
2806 [('r', 'rev', '', _('annotate the specified revision')),
2807 [('r', 'rev', '', _('annotate the specified revision')),
2807 ('a', 'text', None, _('treat all files as text')),
2808 ('a', 'text', None, _('treat all files as text')),
2808 ('u', 'user', None, _('list the author')),
2809 ('u', 'user', None, _('list the author')),
2809 ('d', 'date', None, _('list the date')),
2810 ('d', 'date', None, _('list the date')),
2810 ('n', 'number', None, _('list the revision number (default)')),
2811 ('n', 'number', None, _('list the revision number (default)')),
2811 ('c', 'changeset', None, _('list the changeset')),
2812 ('c', 'changeset', None, _('list the changeset')),
2812 ('I', 'include', [], _('include names matching the given patterns')),
2813 ('I', 'include', [], _('include names matching the given patterns')),
2813 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2814 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2814 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2815 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2815 "archive":
2816 "archive":
2816 (archive,
2817 (archive,
2817 [('', 'no-decode', None, _('do not pass files through decoders')),
2818 [('', 'no-decode', None, _('do not pass files through decoders')),
2818 ('p', 'prefix', '', _('directory prefix for files in archive')),
2819 ('p', 'prefix', '', _('directory prefix for files in archive')),
2819 ('r', 'rev', '', _('revision to distribute')),
2820 ('r', 'rev', '', _('revision to distribute')),
2820 ('t', 'type', '', _('type of distribution to create')),
2821 ('t', 'type', '', _('type of distribution to create')),
2821 ('I', 'include', [], _('include names matching the given patterns')),
2822 ('I', 'include', [], _('include names matching the given patterns')),
2822 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2823 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2823 _('hg archive [OPTION]... DEST')),
2824 _('hg archive [OPTION]... DEST')),
2824 "backout":
2825 "backout":
2825 (backout,
2826 (backout,
2826 [('', 'merge', None,
2827 [('', 'merge', None,
2827 _('merge with old dirstate parent after backout')),
2828 _('merge with old dirstate parent after backout')),
2828 ('m', 'message', '', _('use <text> as commit message')),
2829 ('m', 'message', '', _('use <text> as commit message')),
2829 ('l', 'logfile', '', _('read commit message from <file>')),
2830 ('l', 'logfile', '', _('read commit message from <file>')),
2830 ('d', 'date', '', _('record datecode as commit date')),
2831 ('d', 'date', '', _('record datecode as commit date')),
2831 ('u', 'user', '', _('record user as committer')),
2832 ('u', 'user', '', _('record user as committer')),
2832 ('I', 'include', [], _('include names matching the given patterns')),
2833 ('I', 'include', [], _('include names matching the given patterns')),
2833 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2834 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2834 _('hg backout [OPTION]... REV')),
2835 _('hg backout [OPTION]... REV')),
2835 "bundle":
2836 "bundle":
2836 (bundle,
2837 (bundle,
2837 [('f', 'force', None,
2838 [('f', 'force', None,
2838 _('run even when remote repository is unrelated'))],
2839 _('run even when remote repository is unrelated'))],
2839 _('hg bundle FILE DEST')),
2840 _('hg bundle FILE DEST')),
2840 "cat":
2841 "cat":
2841 (cat,
2842 (cat,
2842 [('o', 'output', '', _('print output to file with formatted name')),
2843 [('o', 'output', '', _('print output to file with formatted name')),
2843 ('r', 'rev', '', _('print the given revision')),
2844 ('r', 'rev', '', _('print the given revision')),
2844 ('I', 'include', [], _('include names matching the given patterns')),
2845 ('I', 'include', [], _('include names matching the given patterns')),
2845 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2846 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2846 _('hg cat [OPTION]... FILE...')),
2847 _('hg cat [OPTION]... FILE...')),
2847 "^clone":
2848 "^clone":
2848 (clone,
2849 (clone,
2849 [('U', 'noupdate', None, _('do not update the new working directory')),
2850 [('U', 'noupdate', None, _('do not update the new working directory')),
2850 ('r', 'rev', [],
2851 ('r', 'rev', [],
2851 _('a changeset you would like to have after cloning')),
2852 _('a changeset you would like to have after cloning')),
2852 ('', 'pull', None, _('use pull protocol to copy metadata')),
2853 ('', 'pull', None, _('use pull protocol to copy metadata')),
2854 ('', 'stream', None, _('use streaming protocol (fast over LAN)')),
2853 ('e', 'ssh', '', _('specify ssh command to use')),
2855 ('e', 'ssh', '', _('specify ssh command to use')),
2854 ('', 'remotecmd', '',
2856 ('', 'remotecmd', '',
2855 _('specify hg command to run on the remote side'))],
2857 _('specify hg command to run on the remote side'))],
2856 _('hg clone [OPTION]... SOURCE [DEST]')),
2858 _('hg clone [OPTION]... SOURCE [DEST]')),
2857 "^commit|ci":
2859 "^commit|ci":
2858 (commit,
2860 (commit,
2859 [('A', 'addremove', None,
2861 [('A', 'addremove', None,
2860 _('mark new/missing files as added/removed before committing')),
2862 _('mark new/missing files as added/removed before committing')),
2861 ('m', 'message', '', _('use <text> as commit message')),
2863 ('m', 'message', '', _('use <text> as commit message')),
2862 ('l', 'logfile', '', _('read the commit message from <file>')),
2864 ('l', 'logfile', '', _('read the commit message from <file>')),
2863 ('d', 'date', '', _('record datecode as commit date')),
2865 ('d', 'date', '', _('record datecode as commit date')),
2864 ('u', 'user', '', _('record user as committer')),
2866 ('u', 'user', '', _('record user as committer')),
2865 ('I', 'include', [], _('include names matching the given patterns')),
2867 ('I', 'include', [], _('include names matching the given patterns')),
2866 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2868 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2867 _('hg commit [OPTION]... [FILE]...')),
2869 _('hg commit [OPTION]... [FILE]...')),
2868 "copy|cp":
2870 "copy|cp":
2869 (copy,
2871 (copy,
2870 [('A', 'after', None, _('record a copy that has already occurred')),
2872 [('A', 'after', None, _('record a copy that has already occurred')),
2871 ('f', 'force', None,
2873 ('f', 'force', None,
2872 _('forcibly copy over an existing managed file')),
2874 _('forcibly copy over an existing managed file')),
2873 ('I', 'include', [], _('include names matching the given patterns')),
2875 ('I', 'include', [], _('include names matching the given patterns')),
2874 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2876 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2875 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2877 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2876 _('hg copy [OPTION]... [SOURCE]... DEST')),
2878 _('hg copy [OPTION]... [SOURCE]... DEST')),
2877 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2879 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2878 "debugcomplete":
2880 "debugcomplete":
2879 (debugcomplete,
2881 (debugcomplete,
2880 [('o', 'options', None, _('show the command options'))],
2882 [('o', 'options', None, _('show the command options'))],
2881 _('debugcomplete [-o] CMD')),
2883 _('debugcomplete [-o] CMD')),
2882 "debugrebuildstate":
2884 "debugrebuildstate":
2883 (debugrebuildstate,
2885 (debugrebuildstate,
2884 [('r', 'rev', '', _('revision to rebuild to'))],
2886 [('r', 'rev', '', _('revision to rebuild to'))],
2885 _('debugrebuildstate [-r REV] [REV]')),
2887 _('debugrebuildstate [-r REV] [REV]')),
2886 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2888 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2887 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2889 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2888 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2890 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2889 "debugstate": (debugstate, [], _('debugstate')),
2891 "debugstate": (debugstate, [], _('debugstate')),
2890 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2892 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2891 "debugindex": (debugindex, [], _('debugindex FILE')),
2893 "debugindex": (debugindex, [], _('debugindex FILE')),
2892 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2894 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2893 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2895 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2894 "debugwalk":
2896 "debugwalk":
2895 (debugwalk,
2897 (debugwalk,
2896 [('I', 'include', [], _('include names matching the given patterns')),
2898 [('I', 'include', [], _('include names matching the given patterns')),
2897 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2899 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2898 _('debugwalk [OPTION]... [FILE]...')),
2900 _('debugwalk [OPTION]... [FILE]...')),
2899 "^diff":
2901 "^diff":
2900 (diff,
2902 (diff,
2901 [('r', 'rev', [], _('revision')),
2903 [('r', 'rev', [], _('revision')),
2902 ('a', 'text', None, _('treat all files as text')),
2904 ('a', 'text', None, _('treat all files as text')),
2903 ('p', 'show-function', None,
2905 ('p', 'show-function', None,
2904 _('show which function each change is in')),
2906 _('show which function each change is in')),
2905 ('w', 'ignore-all-space', None,
2907 ('w', 'ignore-all-space', None,
2906 _('ignore white space when comparing lines')),
2908 _('ignore white space when comparing lines')),
2907 ('b', 'ignore-space-change', None,
2909 ('b', 'ignore-space-change', None,
2908 _('ignore changes in the amount of white space')),
2910 _('ignore changes in the amount of white space')),
2909 ('B', 'ignore-blank-lines', None,
2911 ('B', 'ignore-blank-lines', None,
2910 _('ignore changes whose lines are all blank')),
2912 _('ignore changes whose lines are all blank')),
2911 ('I', 'include', [], _('include names matching the given patterns')),
2913 ('I', 'include', [], _('include names matching the given patterns')),
2912 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2914 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2913 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2915 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2914 "^export":
2916 "^export":
2915 (export,
2917 (export,
2916 [('o', 'output', '', _('print output to file with formatted name')),
2918 [('o', 'output', '', _('print output to file with formatted name')),
2917 ('a', 'text', None, _('treat all files as text')),
2919 ('a', 'text', None, _('treat all files as text')),
2918 ('', 'switch-parent', None, _('diff against the second parent'))],
2920 ('', 'switch-parent', None, _('diff against the second parent'))],
2919 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2921 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2920 "debugforget|forget":
2922 "debugforget|forget":
2921 (forget,
2923 (forget,
2922 [('I', 'include', [], _('include names matching the given patterns')),
2924 [('I', 'include', [], _('include names matching the given patterns')),
2923 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2925 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2924 _('hg forget [OPTION]... FILE...')),
2926 _('hg forget [OPTION]... FILE...')),
2925 "grep":
2927 "grep":
2926 (grep,
2928 (grep,
2927 [('0', 'print0', None, _('end fields with NUL')),
2929 [('0', 'print0', None, _('end fields with NUL')),
2928 ('', 'all', None, _('print all revisions that match')),
2930 ('', 'all', None, _('print all revisions that match')),
2929 ('i', 'ignore-case', None, _('ignore case when matching')),
2931 ('i', 'ignore-case', None, _('ignore case when matching')),
2930 ('l', 'files-with-matches', None,
2932 ('l', 'files-with-matches', None,
2931 _('print only filenames and revs that match')),
2933 _('print only filenames and revs that match')),
2932 ('n', 'line-number', None, _('print matching line numbers')),
2934 ('n', 'line-number', None, _('print matching line numbers')),
2933 ('r', 'rev', [], _('search in given revision range')),
2935 ('r', 'rev', [], _('search in given revision range')),
2934 ('u', 'user', None, _('print user who committed change')),
2936 ('u', 'user', None, _('print user who committed change')),
2935 ('I', 'include', [], _('include names matching the given patterns')),
2937 ('I', 'include', [], _('include names matching the given patterns')),
2936 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2938 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2937 _('hg grep [OPTION]... PATTERN [FILE]...')),
2939 _('hg grep [OPTION]... PATTERN [FILE]...')),
2938 "heads":
2940 "heads":
2939 (heads,
2941 (heads,
2940 [('b', 'branches', None, _('show branches')),
2942 [('b', 'branches', None, _('show branches')),
2941 ('', 'style', '', _('display using template map file')),
2943 ('', 'style', '', _('display using template map file')),
2942 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2944 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2943 ('', 'template', '', _('display with template'))],
2945 ('', 'template', '', _('display with template'))],
2944 _('hg heads [-b] [-r <rev>]')),
2946 _('hg heads [-b] [-r <rev>]')),
2945 "help": (help_, [], _('hg help [COMMAND]')),
2947 "help": (help_, [], _('hg help [COMMAND]')),
2946 "identify|id": (identify, [], _('hg identify')),
2948 "identify|id": (identify, [], _('hg identify')),
2947 "import|patch":
2949 "import|patch":
2948 (import_,
2950 (import_,
2949 [('p', 'strip', 1,
2951 [('p', 'strip', 1,
2950 _('directory strip option for patch. This has the same\n'
2952 _('directory strip option for patch. This has the same\n'
2951 'meaning as the corresponding patch option')),
2953 'meaning as the corresponding patch option')),
2952 ('m', 'message', '', _('use <text> as commit message')),
2954 ('m', 'message', '', _('use <text> as commit message')),
2953 ('b', 'base', '', _('base path')),
2955 ('b', 'base', '', _('base path')),
2954 ('f', 'force', None,
2956 ('f', 'force', None,
2955 _('skip check for outstanding uncommitted changes'))],
2957 _('skip check for outstanding uncommitted changes'))],
2956 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2958 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2957 "incoming|in": (incoming,
2959 "incoming|in": (incoming,
2958 [('M', 'no-merges', None, _('do not show merges')),
2960 [('M', 'no-merges', None, _('do not show merges')),
2959 ('f', 'force', None,
2961 ('f', 'force', None,
2960 _('run even when remote repository is unrelated')),
2962 _('run even when remote repository is unrelated')),
2961 ('', 'style', '', _('display using template map file')),
2963 ('', 'style', '', _('display using template map file')),
2962 ('n', 'newest-first', None, _('show newest record first')),
2964 ('n', 'newest-first', None, _('show newest record first')),
2963 ('', 'bundle', '', _('file to store the bundles into')),
2965 ('', 'bundle', '', _('file to store the bundles into')),
2964 ('p', 'patch', None, _('show patch')),
2966 ('p', 'patch', None, _('show patch')),
2965 ('r', 'rev', [], _('a specific revision you would like to pull')),
2967 ('r', 'rev', [], _('a specific revision you would like to pull')),
2966 ('', 'template', '', _('display with template')),
2968 ('', 'template', '', _('display with template')),
2967 ('e', 'ssh', '', _('specify ssh command to use')),
2969 ('e', 'ssh', '', _('specify ssh command to use')),
2968 ('', 'remotecmd', '',
2970 ('', 'remotecmd', '',
2969 _('specify hg command to run on the remote side'))],
2971 _('specify hg command to run on the remote side'))],
2970 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2972 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2971 ' [--bundle FILENAME] [SOURCE]')),
2973 ' [--bundle FILENAME] [SOURCE]')),
2972 "^init":
2974 "^init":
2973 (init,
2975 (init,
2974 [('e', 'ssh', '', _('specify ssh command to use')),
2976 [('e', 'ssh', '', _('specify ssh command to use')),
2975 ('', 'remotecmd', '',
2977 ('', 'remotecmd', '',
2976 _('specify hg command to run on the remote side'))],
2978 _('specify hg command to run on the remote side'))],
2977 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2979 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2978 "locate":
2980 "locate":
2979 (locate,
2981 (locate,
2980 [('r', 'rev', '', _('search the repository as it stood at rev')),
2982 [('r', 'rev', '', _('search the repository as it stood at rev')),
2981 ('0', 'print0', None,
2983 ('0', 'print0', None,
2982 _('end filenames with NUL, for use with xargs')),
2984 _('end filenames with NUL, for use with xargs')),
2983 ('f', 'fullpath', None,
2985 ('f', 'fullpath', None,
2984 _('print complete paths from the filesystem root')),
2986 _('print complete paths from the filesystem root')),
2985 ('I', 'include', [], _('include names matching the given patterns')),
2987 ('I', 'include', [], _('include names matching the given patterns')),
2986 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2988 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2987 _('hg locate [OPTION]... [PATTERN]...')),
2989 _('hg locate [OPTION]... [PATTERN]...')),
2988 "^log|history":
2990 "^log|history":
2989 (log,
2991 (log,
2990 [('b', 'branches', None, _('show branches')),
2992 [('b', 'branches', None, _('show branches')),
2991 ('k', 'keyword', [], _('search for a keyword')),
2993 ('k', 'keyword', [], _('search for a keyword')),
2992 ('l', 'limit', '', _('limit number of changes displayed')),
2994 ('l', 'limit', '', _('limit number of changes displayed')),
2993 ('r', 'rev', [], _('show the specified revision or range')),
2995 ('r', 'rev', [], _('show the specified revision or range')),
2994 ('M', 'no-merges', None, _('do not show merges')),
2996 ('M', 'no-merges', None, _('do not show merges')),
2995 ('', 'style', '', _('display using template map file')),
2997 ('', 'style', '', _('display using template map file')),
2996 ('m', 'only-merges', None, _('show only merges')),
2998 ('m', 'only-merges', None, _('show only merges')),
2997 ('p', 'patch', None, _('show patch')),
2999 ('p', 'patch', None, _('show patch')),
2998 ('', 'template', '', _('display with template')),
3000 ('', 'template', '', _('display with template')),
2999 ('I', 'include', [], _('include names matching the given patterns')),
3001 ('I', 'include', [], _('include names matching the given patterns')),
3000 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3002 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3001 _('hg log [OPTION]... [FILE]')),
3003 _('hg log [OPTION]... [FILE]')),
3002 "manifest": (manifest, [], _('hg manifest [REV]')),
3004 "manifest": (manifest, [], _('hg manifest [REV]')),
3003 "merge":
3005 "merge":
3004 (merge,
3006 (merge,
3005 [('b', 'branch', '', _('merge with head of a specific branch')),
3007 [('b', 'branch', '', _('merge with head of a specific branch')),
3006 ('f', 'force', None, _('force a merge with outstanding changes'))],
3008 ('f', 'force', None, _('force a merge with outstanding changes'))],
3007 _('hg merge [-b TAG] [-f] [REV]')),
3009 _('hg merge [-b TAG] [-f] [REV]')),
3008 "outgoing|out": (outgoing,
3010 "outgoing|out": (outgoing,
3009 [('M', 'no-merges', None, _('do not show merges')),
3011 [('M', 'no-merges', None, _('do not show merges')),
3010 ('f', 'force', None,
3012 ('f', 'force', None,
3011 _('run even when remote repository is unrelated')),
3013 _('run even when remote repository is unrelated')),
3012 ('p', 'patch', None, _('show patch')),
3014 ('p', 'patch', None, _('show patch')),
3013 ('', 'style', '', _('display using template map file')),
3015 ('', 'style', '', _('display using template map file')),
3014 ('r', 'rev', [], _('a specific revision you would like to push')),
3016 ('r', 'rev', [], _('a specific revision you would like to push')),
3015 ('n', 'newest-first', None, _('show newest record first')),
3017 ('n', 'newest-first', None, _('show newest record first')),
3016 ('', 'template', '', _('display with template')),
3018 ('', 'template', '', _('display with template')),
3017 ('e', 'ssh', '', _('specify ssh command to use')),
3019 ('e', 'ssh', '', _('specify ssh command to use')),
3018 ('', 'remotecmd', '',
3020 ('', 'remotecmd', '',
3019 _('specify hg command to run on the remote side'))],
3021 _('specify hg command to run on the remote side'))],
3020 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3022 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3021 "^parents":
3023 "^parents":
3022 (parents,
3024 (parents,
3023 [('b', 'branches', None, _('show branches')),
3025 [('b', 'branches', None, _('show branches')),
3024 ('', 'style', '', _('display using template map file')),
3026 ('', 'style', '', _('display using template map file')),
3025 ('', 'template', '', _('display with template'))],
3027 ('', 'template', '', _('display with template'))],
3026 _('hg parents [-b] [REV]')),
3028 _('hg parents [-b] [REV]')),
3027 "paths": (paths, [], _('hg paths [NAME]')),
3029 "paths": (paths, [], _('hg paths [NAME]')),
3028 "^pull":
3030 "^pull":
3029 (pull,
3031 (pull,
3030 [('u', 'update', None,
3032 [('u', 'update', None,
3031 _('update the working directory to tip after pull')),
3033 _('update the working directory to tip after pull')),
3032 ('e', 'ssh', '', _('specify ssh command to use')),
3034 ('e', 'ssh', '', _('specify ssh command to use')),
3033 ('f', 'force', None,
3035 ('f', 'force', None,
3034 _('run even when remote repository is unrelated')),
3036 _('run even when remote repository is unrelated')),
3035 ('r', 'rev', [], _('a specific revision you would like to pull')),
3037 ('r', 'rev', [], _('a specific revision you would like to pull')),
3036 ('', 'remotecmd', '',
3038 ('', 'remotecmd', '',
3037 _('specify hg command to run on the remote side'))],
3039 _('specify hg command to run on the remote side'))],
3038 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3040 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3039 "^push":
3041 "^push":
3040 (push,
3042 (push,
3041 [('f', 'force', None, _('force push')),
3043 [('f', 'force', None, _('force push')),
3042 ('e', 'ssh', '', _('specify ssh command to use')),
3044 ('e', 'ssh', '', _('specify ssh command to use')),
3043 ('r', 'rev', [], _('a specific revision you would like to push')),
3045 ('r', 'rev', [], _('a specific revision you would like to push')),
3044 ('', 'remotecmd', '',
3046 ('', 'remotecmd', '',
3045 _('specify hg command to run on the remote side'))],
3047 _('specify hg command to run on the remote side'))],
3046 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3048 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3047 "debugrawcommit|rawcommit":
3049 "debugrawcommit|rawcommit":
3048 (rawcommit,
3050 (rawcommit,
3049 [('p', 'parent', [], _('parent')),
3051 [('p', 'parent', [], _('parent')),
3050 ('d', 'date', '', _('date code')),
3052 ('d', 'date', '', _('date code')),
3051 ('u', 'user', '', _('user')),
3053 ('u', 'user', '', _('user')),
3052 ('F', 'files', '', _('file list')),
3054 ('F', 'files', '', _('file list')),
3053 ('m', 'message', '', _('commit message')),
3055 ('m', 'message', '', _('commit message')),
3054 ('l', 'logfile', '', _('commit message file'))],
3056 ('l', 'logfile', '', _('commit message file'))],
3055 _('hg debugrawcommit [OPTION]... [FILE]...')),
3057 _('hg debugrawcommit [OPTION]... [FILE]...')),
3056 "recover": (recover, [], _('hg recover')),
3058 "recover": (recover, [], _('hg recover')),
3057 "^remove|rm":
3059 "^remove|rm":
3058 (remove,
3060 (remove,
3059 [('A', 'after', None, _('record a remove that has already occurred')),
3061 [('A', 'after', None, _('record a remove that has already occurred')),
3060 ('f', 'force', None, _('remove file even if modified')),
3062 ('f', 'force', None, _('remove file even if modified')),
3061 ('I', 'include', [], _('include names matching the given patterns')),
3063 ('I', 'include', [], _('include names matching the given patterns')),
3062 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3064 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3063 _('hg remove [OPTION]... FILE...')),
3065 _('hg remove [OPTION]... FILE...')),
3064 "rename|mv":
3066 "rename|mv":
3065 (rename,
3067 (rename,
3066 [('A', 'after', None, _('record a rename that has already occurred')),
3068 [('A', 'after', None, _('record a rename that has already occurred')),
3067 ('f', 'force', None,
3069 ('f', 'force', None,
3068 _('forcibly copy over an existing managed file')),
3070 _('forcibly copy over an existing managed file')),
3069 ('I', 'include', [], _('include names matching the given patterns')),
3071 ('I', 'include', [], _('include names matching the given patterns')),
3070 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3072 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3071 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3073 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3072 _('hg rename [OPTION]... SOURCE... DEST')),
3074 _('hg rename [OPTION]... SOURCE... DEST')),
3073 "^revert":
3075 "^revert":
3074 (revert,
3076 (revert,
3075 [('r', 'rev', '', _('revision to revert to')),
3077 [('r', 'rev', '', _('revision to revert to')),
3076 ('', 'no-backup', None, _('do not save backup copies of files')),
3078 ('', 'no-backup', None, _('do not save backup copies of files')),
3077 ('I', 'include', [], _('include names matching given patterns')),
3079 ('I', 'include', [], _('include names matching given patterns')),
3078 ('X', 'exclude', [], _('exclude names matching given patterns')),
3080 ('X', 'exclude', [], _('exclude names matching given patterns')),
3079 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3081 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3080 _('hg revert [-r REV] [NAME]...')),
3082 _('hg revert [-r REV] [NAME]...')),
3081 "rollback": (rollback, [], _('hg rollback')),
3083 "rollback": (rollback, [], _('hg rollback')),
3082 "root": (root, [], _('hg root')),
3084 "root": (root, [], _('hg root')),
3083 "^serve":
3085 "^serve":
3084 (serve,
3086 (serve,
3085 [('A', 'accesslog', '', _('name of access log file to write to')),
3087 [('A', 'accesslog', '', _('name of access log file to write to')),
3086 ('d', 'daemon', None, _('run server in background')),
3088 ('d', 'daemon', None, _('run server in background')),
3087 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3089 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3088 ('E', 'errorlog', '', _('name of error log file to write to')),
3090 ('E', 'errorlog', '', _('name of error log file to write to')),
3089 ('p', 'port', 0, _('port to use (default: 8000)')),
3091 ('p', 'port', 0, _('port to use (default: 8000)')),
3090 ('a', 'address', '', _('address to use')),
3092 ('a', 'address', '', _('address to use')),
3091 ('n', 'name', '',
3093 ('n', 'name', '',
3092 _('name to show in web pages (default: working dir)')),
3094 _('name to show in web pages (default: working dir)')),
3093 ('', 'webdir-conf', '', _('name of the webdir config file'
3095 ('', 'webdir-conf', '', _('name of the webdir config file'
3094 ' (serve more than one repo)')),
3096 ' (serve more than one repo)')),
3095 ('', 'pid-file', '', _('name of file to write process ID to')),
3097 ('', 'pid-file', '', _('name of file to write process ID to')),
3096 ('', 'stdio', None, _('for remote clients')),
3098 ('', 'stdio', None, _('for remote clients')),
3097 ('t', 'templates', '', _('web templates to use')),
3099 ('t', 'templates', '', _('web templates to use')),
3098 ('', 'style', '', _('template style to use')),
3100 ('', 'style', '', _('template style to use')),
3099 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3101 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3100 _('hg serve [OPTION]...')),
3102 _('hg serve [OPTION]...')),
3101 "^status|st":
3103 "^status|st":
3102 (status,
3104 (status,
3103 [('m', 'modified', None, _('show only modified files')),
3105 [('m', 'modified', None, _('show only modified files')),
3104 ('a', 'added', None, _('show only added files')),
3106 ('a', 'added', None, _('show only added files')),
3105 ('r', 'removed', None, _('show only removed files')),
3107 ('r', 'removed', None, _('show only removed files')),
3106 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3108 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3107 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3109 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3108 ('i', 'ignored', None, _('show ignored files')),
3110 ('i', 'ignored', None, _('show ignored files')),
3109 ('n', 'no-status', None, _('hide status prefix')),
3111 ('n', 'no-status', None, _('hide status prefix')),
3110 ('0', 'print0', None,
3112 ('0', 'print0', None,
3111 _('end filenames with NUL, for use with xargs')),
3113 _('end filenames with NUL, for use with xargs')),
3112 ('I', 'include', [], _('include names matching the given patterns')),
3114 ('I', 'include', [], _('include names matching the given patterns')),
3113 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3115 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3114 _('hg status [OPTION]... [FILE]...')),
3116 _('hg status [OPTION]... [FILE]...')),
3115 "tag":
3117 "tag":
3116 (tag,
3118 (tag,
3117 [('l', 'local', None, _('make the tag local')),
3119 [('l', 'local', None, _('make the tag local')),
3118 ('m', 'message', '', _('message for tag commit log entry')),
3120 ('m', 'message', '', _('message for tag commit log entry')),
3119 ('d', 'date', '', _('record datecode as commit date')),
3121 ('d', 'date', '', _('record datecode as commit date')),
3120 ('u', 'user', '', _('record user as committer')),
3122 ('u', 'user', '', _('record user as committer')),
3121 ('r', 'rev', '', _('revision to tag'))],
3123 ('r', 'rev', '', _('revision to tag'))],
3122 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3124 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3123 "tags": (tags, [], _('hg tags')),
3125 "tags": (tags, [], _('hg tags')),
3124 "tip":
3126 "tip":
3125 (tip,
3127 (tip,
3126 [('b', 'branches', None, _('show branches')),
3128 [('b', 'branches', None, _('show branches')),
3127 ('', 'style', '', _('display using template map file')),
3129 ('', 'style', '', _('display using template map file')),
3128 ('p', 'patch', None, _('show patch')),
3130 ('p', 'patch', None, _('show patch')),
3129 ('', 'template', '', _('display with template'))],
3131 ('', 'template', '', _('display with template'))],
3130 _('hg tip [-b] [-p]')),
3132 _('hg tip [-b] [-p]')),
3131 "unbundle":
3133 "unbundle":
3132 (unbundle,
3134 (unbundle,
3133 [('u', 'update', None,
3135 [('u', 'update', None,
3134 _('update the working directory to tip after unbundle'))],
3136 _('update the working directory to tip after unbundle'))],
3135 _('hg unbundle [-u] FILE')),
3137 _('hg unbundle [-u] FILE')),
3136 "debugundo|undo": (undo, [], _('hg undo')),
3138 "debugundo|undo": (undo, [], _('hg undo')),
3137 "^update|up|checkout|co":
3139 "^update|up|checkout|co":
3138 (update,
3140 (update,
3139 [('b', 'branch', '', _('checkout the head of a specific branch')),
3141 [('b', 'branch', '', _('checkout the head of a specific branch')),
3140 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3142 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3141 ('C', 'clean', None, _('overwrite locally modified files')),
3143 ('C', 'clean', None, _('overwrite locally modified files')),
3142 ('f', 'force', None, _('force a merge with outstanding changes'))],
3144 ('f', 'force', None, _('force a merge with outstanding changes'))],
3143 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3145 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3144 "verify": (verify, [], _('hg verify')),
3146 "verify": (verify, [], _('hg verify')),
3145 "version": (show_version, [], _('hg version')),
3147 "version": (show_version, [], _('hg version')),
3146 }
3148 }
3147
3149
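# --- editor's illustrative sketch; not part of the original commands.py ---
# Each entry in `table` above maps "alias|alias" -> (function, options,
# synopsis); a leading '^' only marks the command for the short help list,
# and each option is a (short, long, default, help) tuple.  dispatch() below
# merges `cmdtable` dicts of the same shape from extensions via
# table.update().  A hypothetical extension module could therefore add a
# command like this (all names are invented for illustration):
#
#     def hello(ui, repo, **opts):
#         """print a greeting (illustrative only)"""
#         ui.write("%s from %s\n" % (opts['greeting'], repo.root))
#
#     cmdtable = {
#         "hello":
#             (hello,
#              [('g', 'greeting', 'hello', 'text to print')],
#              'hg hello [-g TEXT]'),
#     }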
3148 globalopts = [
3150 globalopts = [
3149 ('R', 'repository', '',
3151 ('R', 'repository', '',
3150 _('repository root directory or symbolic path name')),
3152 _('repository root directory or symbolic path name')),
3151 ('', 'cwd', '', _('change working directory')),
3153 ('', 'cwd', '', _('change working directory')),
3152 ('y', 'noninteractive', None,
3154 ('y', 'noninteractive', None,
3153 _('do not prompt, assume \'yes\' for any required answers')),
3155 _('do not prompt, assume \'yes\' for any required answers')),
3154 ('q', 'quiet', None, _('suppress output')),
3156 ('q', 'quiet', None, _('suppress output')),
3155 ('v', 'verbose', None, _('enable additional output')),
3157 ('v', 'verbose', None, _('enable additional output')),
3156 ('', 'config', [], _('set/override config option')),
3158 ('', 'config', [], _('set/override config option')),
3157 ('', 'debug', None, _('enable debugging output')),
3159 ('', 'debug', None, _('enable debugging output')),
3158 ('', 'debugger', None, _('start debugger')),
3160 ('', 'debugger', None, _('start debugger')),
3159 ('', 'lsprof', None, _('print improved command execution profile')),
3161 ('', 'lsprof', None, _('print improved command execution profile')),
3160 ('', 'traceback', None, _('print traceback on exception')),
3162 ('', 'traceback', None, _('print traceback on exception')),
3161 ('', 'time', None, _('time how long the command takes')),
3163 ('', 'time', None, _('time how long the command takes')),
3162 ('', 'profile', None, _('print command execution profile')),
3164 ('', 'profile', None, _('print command execution profile')),
3163 ('', 'version', None, _('output version information and exit')),
3165 ('', 'version', None, _('output version information and exit')),
3164 ('h', 'help', None, _('display help and exit')),
3166 ('h', 'help', None, _('display help and exit')),
3165 ]
3167 ]
3166
3168
3167 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3169 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3168 " debugindex debugindexdot")
3170 " debugindex debugindexdot")
3169 optionalrepo = ("paths serve debugconfig")
3171 optionalrepo = ("paths serve debugconfig")
3170
3172
3171 def findpossible(cmd):
3173 def findpossible(cmd):
3172 """
3174 """
3173 Return cmd -> (aliases, command table entry)
3175 Return cmd -> (aliases, command table entry)
3174 for each matching command.
3176 for each matching command.
3175 Return debug commands (or their aliases) only if no normal command matches.
3177 Return debug commands (or their aliases) only if no normal command matches.
3176 """
3178 """
3177 choice = {}
3179 choice = {}
3178 debugchoice = {}
3180 debugchoice = {}
3179 for e in table.keys():
3181 for e in table.keys():
3180 aliases = e.lstrip("^").split("|")
3182 aliases = e.lstrip("^").split("|")
3181 found = None
3183 found = None
3182 if cmd in aliases:
3184 if cmd in aliases:
3183 found = cmd
3185 found = cmd
3184 else:
3186 else:
3185 for a in aliases:
3187 for a in aliases:
3186 if a.startswith(cmd):
3188 if a.startswith(cmd):
3187 found = a
3189 found = a
3188 break
3190 break
3189 if found is not None:
3191 if found is not None:
3190 if aliases[0].startswith("debug"):
3192 if aliases[0].startswith("debug"):
3191 debugchoice[found] = (aliases, table[e])
3193 debugchoice[found] = (aliases, table[e])
3192 else:
3194 else:
3193 choice[found] = (aliases, table[e])
3195 choice[found] = (aliases, table[e])
3194
3196
3195 if not choice and debugchoice:
3197 if not choice and debugchoice:
3196 choice = debugchoice
3198 choice = debugchoice
3197
3199
3198 return choice
3200 return choice
3199
3201
3200 def findcmd(cmd):
3202 def findcmd(cmd):
3201 """Return (aliases, command table entry) for command string."""
3203 """Return (aliases, command table entry) for command string."""
3202 choice = findpossible(cmd)
3204 choice = findpossible(cmd)
3203
3205
3204 if choice.has_key(cmd):
3206 if choice.has_key(cmd):
3205 return choice[cmd]
3207 return choice[cmd]
3206
3208
3207 if len(choice) > 1:
3209 if len(choice) > 1:
3208 clist = choice.keys()
3210 clist = choice.keys()
3209 clist.sort()
3211 clist.sort()
3210 raise AmbiguousCommand(cmd, clist)
3212 raise AmbiguousCommand(cmd, clist)
3211
3213
3212 if choice:
3214 if choice:
3213 return choice.values()[0]
3215 return choice.values()[0]
3214
3216
3215 raise UnknownCommand(cmd)
3217 raise UnknownCommand(cmd)
3216
3218
3217 def catchterm(*args):
3219 def catchterm(*args):
3218 raise util.SignalInterrupt
3220 raise util.SignalInterrupt
3219
3221
3220 def run():
3222 def run():
3221 sys.exit(dispatch(sys.argv[1:]))
3223 sys.exit(dispatch(sys.argv[1:]))
3222
3224
3223 class ParseError(Exception):
3225 class ParseError(Exception):
3224 """Exception raised on errors in parsing the command line."""
3226 """Exception raised on errors in parsing the command line."""
3225
3227
3226 def parse(ui, args):
3228 def parse(ui, args):
3227 options = {}
3229 options = {}
3228 cmdoptions = {}
3230 cmdoptions = {}
3229
3231
3230 try:
3232 try:
3231 args = fancyopts.fancyopts(args, globalopts, options)
3233 args = fancyopts.fancyopts(args, globalopts, options)
3232 except fancyopts.getopt.GetoptError, inst:
3234 except fancyopts.getopt.GetoptError, inst:
3233 raise ParseError(None, inst)
3235 raise ParseError(None, inst)
3234
3236
3235 if args:
3237 if args:
3236 cmd, args = args[0], args[1:]
3238 cmd, args = args[0], args[1:]
3237 aliases, i = findcmd(cmd)
3239 aliases, i = findcmd(cmd)
3238 cmd = aliases[0]
3240 cmd = aliases[0]
3239 defaults = ui.config("defaults", cmd)
3241 defaults = ui.config("defaults", cmd)
3240 if defaults:
3242 if defaults:
3241 args = defaults.split() + args
3243 args = defaults.split() + args
3242 c = list(i[1])
3244 c = list(i[1])
3243 else:
3245 else:
3244 cmd = None
3246 cmd = None
3245 c = []
3247 c = []
3246
3248
3247 # combine global options into local
3249 # combine global options into local
3248 for o in globalopts:
3250 for o in globalopts:
3249 c.append((o[0], o[1], options[o[1]], o[3]))
3251 c.append((o[0], o[1], options[o[1]], o[3]))
3250
3252
3251 try:
3253 try:
3252 args = fancyopts.fancyopts(args, c, cmdoptions)
3254 args = fancyopts.fancyopts(args, c, cmdoptions)
3253 except fancyopts.getopt.GetoptError, inst:
3255 except fancyopts.getopt.GetoptError, inst:
3254 raise ParseError(cmd, inst)
3256 raise ParseError(cmd, inst)
3255
3257
3256 # separate global options back out
3258 # separate global options back out
3257 for o in globalopts:
3259 for o in globalopts:
3258 n = o[1]
3260 n = o[1]
3259 options[n] = cmdoptions[n]
3261 options[n] = cmdoptions[n]
3260 del cmdoptions[n]
3262 del cmdoptions[n]
3261
3263
3262 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3264 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3263
3265
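# --- editor's illustrative sketch; not part of the original commands.py ---
# parse() above prepends per-command defaults from the [defaults] hgrc
# section before the user's own arguments.  So with a configuration such as
# (hypothetical example):
#
#     [defaults]
#     log = -l 10
#
# running "hg log -v" is parsed as if the command line had been
# "hg log -l 10 -v": defaults.split() + args == ['-l', '10', '-v'].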
3264 external = {}
3266 external = {}
3265
3267
3266 def findext(name):
3268 def findext(name):
3267 '''return module with given extension name'''
3269 '''return module with given extension name'''
3268 try:
3270 try:
3269 return sys.modules[external[name]]
3271 return sys.modules[external[name]]
3270 except KeyError:
3272 except KeyError:
3271 dotname = '.' + name
3273 dotname = '.' + name
3272 for k, v in external.iteritems():
3274 for k, v in external.iteritems():
3273 if k.endswith('.' + name) or v == name:
3275 if k.endswith('.' + name) or v == name:
3274 return sys.modules[v]
3276 return sys.modules[v]
3275 raise KeyError(name)
3277 raise KeyError(name)
3276
3278
3277 def dispatch(args):
3279 def dispatch(args):
3278 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3280 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3279 num = getattr(signal, name, None)
3281 num = getattr(signal, name, None)
3280 if num: signal.signal(num, catchterm)
3282 if num: signal.signal(num, catchterm)
3281
3283
3282 try:
3284 try:
3283 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3285 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3284 except util.Abort, inst:
3286 except util.Abort, inst:
3285 sys.stderr.write(_("abort: %s\n") % inst)
3287 sys.stderr.write(_("abort: %s\n") % inst)
3286 return -1
3288 return -1
3287
3289
3288 for ext_name, load_from_name in u.extensions():
3290 for ext_name, load_from_name in u.extensions():
3289 try:
3291 try:
3290 if load_from_name:
3292 if load_from_name:
3291 # the module will be loaded in sys.modules
3293 # the module will be loaded in sys.modules
3292 # choose a unique name so that it doesn't
3294 # choose a unique name so that it doesn't
3293 # conflict with other modules
3295 # conflict with other modules
3294 module_name = "hgext_%s" % ext_name.replace('.', '_')
3296 module_name = "hgext_%s" % ext_name.replace('.', '_')
3295 mod = imp.load_source(module_name, load_from_name)
3297 mod = imp.load_source(module_name, load_from_name)
3296 else:
3298 else:
3297 def importh(name):
3299 def importh(name):
3298 mod = __import__(name)
3300 mod = __import__(name)
3299 components = name.split('.')
3301 components = name.split('.')
3300 for comp in components[1:]:
3302 for comp in components[1:]:
3301 mod = getattr(mod, comp)
3303 mod = getattr(mod, comp)
3302 return mod
3304 return mod
3303 try:
3305 try:
3304 mod = importh("hgext.%s" % ext_name)
3306 mod = importh("hgext.%s" % ext_name)
3305 except ImportError:
3307 except ImportError:
3306 mod = importh(ext_name)
3308 mod = importh(ext_name)
3307 external[ext_name] = mod.__name__
3309 external[ext_name] = mod.__name__
3308 except (util.SignalInterrupt, KeyboardInterrupt):
3310 except (util.SignalInterrupt, KeyboardInterrupt):
3309 raise
3311 raise
3310 except Exception, inst:
3312 except Exception, inst:
3311 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3313 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3312 if u.print_exc():
3314 if u.print_exc():
3313 return 1
3315 return 1
3314
3316
3315 for name in external.itervalues():
3317 for name in external.itervalues():
3316 mod = sys.modules[name]
3318 mod = sys.modules[name]
3317 uisetup = getattr(mod, 'uisetup', None)
3319 uisetup = getattr(mod, 'uisetup', None)
3318 if uisetup:
3320 if uisetup:
3319 uisetup(u)
3321 uisetup(u)
3320 cmdtable = getattr(mod, 'cmdtable', {})
3322 cmdtable = getattr(mod, 'cmdtable', {})
3321 for t in cmdtable:
3323 for t in cmdtable:
3322 if t in table:
3324 if t in table:
3323 u.warn(_("module %s overrides %s\n") % (name, t))
3325 u.warn(_("module %s overrides %s\n") % (name, t))
3324 table.update(cmdtable)
3326 table.update(cmdtable)
3325
3327
3326 try:
3328 try:
3327 cmd, func, args, options, cmdoptions = parse(u, args)
3329 cmd, func, args, options, cmdoptions = parse(u, args)
3328 if options["time"]:
3330 if options["time"]:
3329 def get_times():
3331 def get_times():
3330 t = os.times()
3332 t = os.times()
3331 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3333 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3332 t = (t[0], t[1], t[2], t[3], time.clock())
3334 t = (t[0], t[1], t[2], t[3], time.clock())
3333 return t
3335 return t
3334 s = get_times()
3336 s = get_times()
3335 def print_time():
3337 def print_time():
3336 t = get_times()
3338 t = get_times()
3337 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3339 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3338 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3340 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3339 atexit.register(print_time)
3341 atexit.register(print_time)
3340
3342
3341 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3343 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3342 not options["noninteractive"], options["traceback"],
3344 not options["noninteractive"], options["traceback"],
3343 options["config"])
3345 options["config"])
3344
3346
3345 # enter the debugger before command execution
3347 # enter the debugger before command execution
3346 if options['debugger']:
3348 if options['debugger']:
3347 pdb.set_trace()
3349 pdb.set_trace()
3348
3350
3349 try:
3351 try:
3350 if options['cwd']:
3352 if options['cwd']:
3351 try:
3353 try:
3352 os.chdir(options['cwd'])
3354 os.chdir(options['cwd'])
3353 except OSError, inst:
3355 except OSError, inst:
3354 raise util.Abort('%s: %s' %
3356 raise util.Abort('%s: %s' %
3355 (options['cwd'], inst.strerror))
3357 (options['cwd'], inst.strerror))
3356
3358
3357 path = u.expandpath(options["repository"]) or ""
3359 path = u.expandpath(options["repository"]) or ""
3358 repo = path and hg.repository(u, path=path) or None
3360 repo = path and hg.repository(u, path=path) or None
3359
3361
3360 if options['help']:
3362 if options['help']:
3361 return help_(u, cmd, options['version'])
3363 return help_(u, cmd, options['version'])
3362 elif options['version']:
3364 elif options['version']:
3363 return show_version(u)
3365 return show_version(u)
3364 elif not cmd:
3366 elif not cmd:
3365 return help_(u, 'shortlist')
3367 return help_(u, 'shortlist')
3366
3368
3367 if cmd not in norepo.split():
3369 if cmd not in norepo.split():
3368 try:
3370 try:
3369 if not repo:
3371 if not repo:
3370 repo = hg.repository(u, path=path)
3372 repo = hg.repository(u, path=path)
3371 u = repo.ui
3373 u = repo.ui
3372 for name in external.itervalues():
3374 for name in external.itervalues():
3373 mod = sys.modules[name]
3375 mod = sys.modules[name]
3374 if hasattr(mod, 'reposetup'):
3376 if hasattr(mod, 'reposetup'):
3375 mod.reposetup(u, repo)
3377 mod.reposetup(u, repo)
3376 except hg.RepoError:
3378 except hg.RepoError:
3377 if cmd not in optionalrepo.split():
3379 if cmd not in optionalrepo.split():
3378 raise
3380 raise
3379 d = lambda: func(u, repo, *args, **cmdoptions)
3381 d = lambda: func(u, repo, *args, **cmdoptions)
3380 else:
3382 else:
3381 d = lambda: func(u, *args, **cmdoptions)
3383 d = lambda: func(u, *args, **cmdoptions)
3382
3384
3383 try:
3385 try:
3384 if options['profile']:
3386 if options['profile']:
3385 import hotshot, hotshot.stats
3387 import hotshot, hotshot.stats
3386 prof = hotshot.Profile("hg.prof")
3388 prof = hotshot.Profile("hg.prof")
3387 try:
3389 try:
3388 try:
3390 try:
3389 return prof.runcall(d)
3391 return prof.runcall(d)
3390 except:
3392 except:
3391 try:
3393 try:
3392 u.warn(_('exception raised - generating '
3394 u.warn(_('exception raised - generating '
3393 'profile anyway\n'))
3395 'profile anyway\n'))
3394 except:
3396 except:
3395 pass
3397 pass
3396 raise
3398 raise
3397 finally:
3399 finally:
3398 prof.close()
3400 prof.close()
3399 stats = hotshot.stats.load("hg.prof")
3401 stats = hotshot.stats.load("hg.prof")
3400 stats.strip_dirs()
3402 stats.strip_dirs()
3401 stats.sort_stats('time', 'calls')
3403 stats.sort_stats('time', 'calls')
3402 stats.print_stats(40)
3404 stats.print_stats(40)
3403 elif options['lsprof']:
3405 elif options['lsprof']:
3404 try:
3406 try:
3405 from mercurial import lsprof
3407 from mercurial import lsprof
3406 except ImportError:
3408 except ImportError:
3407 raise util.Abort(_(
3409 raise util.Abort(_(
3408 'lsprof not available - install from '
3410 'lsprof not available - install from '
3409 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3411 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3410 p = lsprof.Profiler()
3412 p = lsprof.Profiler()
3411 p.enable(subcalls=True)
3413 p.enable(subcalls=True)
3412 try:
3414 try:
3413 return d()
3415 return d()
3414 finally:
3416 finally:
3415 p.disable()
3417 p.disable()
3416 stats = lsprof.Stats(p.getstats())
3418 stats = lsprof.Stats(p.getstats())
3417 stats.sort()
3419 stats.sort()
3418 stats.pprint(top=10, file=sys.stderr, climit=5)
3420 stats.pprint(top=10, file=sys.stderr, climit=5)
3419 else:
3421 else:
3420 return d()
3422 return d()
3421 finally:
3423 finally:
3422 u.flush()
3424 u.flush()
3423 except:
3425 except:
3424 # enter the debugger when we hit an exception
3426 # enter the debugger when we hit an exception
3425 if options['debugger']:
3427 if options['debugger']:
3426 pdb.post_mortem(sys.exc_info()[2])
3428 pdb.post_mortem(sys.exc_info()[2])
3427 u.print_exc()
3429 u.print_exc()
3428 raise
3430 raise
3429 except ParseError, inst:
3431 except ParseError, inst:
3430 if inst.args[0]:
3432 if inst.args[0]:
3431 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3433 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3432 help_(u, inst.args[0])
3434 help_(u, inst.args[0])
3433 else:
3435 else:
3434 u.warn(_("hg: %s\n") % inst.args[1])
3436 u.warn(_("hg: %s\n") % inst.args[1])
3435 help_(u, 'shortlist')
3437 help_(u, 'shortlist')
3436 except AmbiguousCommand, inst:
3438 except AmbiguousCommand, inst:
3437 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3439 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3438 (inst.args[0], " ".join(inst.args[1])))
3440 (inst.args[0], " ".join(inst.args[1])))
3439 except UnknownCommand, inst:
3441 except UnknownCommand, inst:
3440 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3442 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3441 help_(u, 'shortlist')
3443 help_(u, 'shortlist')
3442 except hg.RepoError, inst:
3444 except hg.RepoError, inst:
3443 u.warn(_("abort: %s!\n") % inst)
3445 u.warn(_("abort: %s!\n") % inst)
3444 except lock.LockHeld, inst:
3446 except lock.LockHeld, inst:
3445 if inst.errno == errno.ETIMEDOUT:
3447 if inst.errno == errno.ETIMEDOUT:
3446 reason = _('timed out waiting for lock held by %s') % inst.locker
3448 reason = _('timed out waiting for lock held by %s') % inst.locker
3447 else:
3449 else:
3448 reason = _('lock held by %s') % inst.locker
3450 reason = _('lock held by %s') % inst.locker
3449 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3451 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3450 except lock.LockUnavailable, inst:
3452 except lock.LockUnavailable, inst:
3451 u.warn(_("abort: could not lock %s: %s\n") %
3453 u.warn(_("abort: could not lock %s: %s\n") %
3452 (inst.desc or inst.filename, inst.strerror))
3454 (inst.desc or inst.filename, inst.strerror))
3453 except revlog.RevlogError, inst:
3455 except revlog.RevlogError, inst:
3454 u.warn(_("abort: "), inst, "!\n")
3456 u.warn(_("abort: "), inst, "!\n")
3455 except util.SignalInterrupt:
3457 except util.SignalInterrupt:
3456 u.warn(_("killed!\n"))
3458 u.warn(_("killed!\n"))
3457 except KeyboardInterrupt:
3459 except KeyboardInterrupt:
3458 try:
3460 try:
3459 u.warn(_("interrupted!\n"))
3461 u.warn(_("interrupted!\n"))
3460 except IOError, inst:
3462 except IOError, inst:
3461 if inst.errno == errno.EPIPE:
3463 if inst.errno == errno.EPIPE:
3462 if u.debugflag:
3464 if u.debugflag:
3463 u.warn(_("\nbroken pipe\n"))
3465 u.warn(_("\nbroken pipe\n"))
3464 else:
3466 else:
3465 raise
3467 raise
3466 except IOError, inst:
3468 except IOError, inst:
3467 if hasattr(inst, "code"):
3469 if hasattr(inst, "code"):
3468 u.warn(_("abort: %s\n") % inst)
3470 u.warn(_("abort: %s\n") % inst)
3469 elif hasattr(inst, "reason"):
3471 elif hasattr(inst, "reason"):
3470 u.warn(_("abort: error: %s\n") % inst.reason[1])
3472 u.warn(_("abort: error: %s\n") % inst.reason[1])
3471 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3473 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3472 if u.debugflag:
3474 if u.debugflag:
3473 u.warn(_("broken pipe\n"))
3475 u.warn(_("broken pipe\n"))
3474 elif getattr(inst, "strerror", None):
3476 elif getattr(inst, "strerror", None):
3475 if getattr(inst, "filename", None):
3477 if getattr(inst, "filename", None):
3476 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3478 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3477 else:
3479 else:
3478 u.warn(_("abort: %s\n") % inst.strerror)
3480 u.warn(_("abort: %s\n") % inst.strerror)
3479 else:
3481 else:
3480 raise
3482 raise
3481 except OSError, inst:
3483 except OSError, inst:
3482 if hasattr(inst, "filename"):
3484 if hasattr(inst, "filename"):
3483 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3485 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3484 else:
3486 else:
3485 u.warn(_("abort: %s\n") % inst.strerror)
3487 u.warn(_("abort: %s\n") % inst.strerror)
3486 except util.Abort, inst:
3488 except util.Abort, inst:
3487 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3489 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3488 except TypeError, inst:
3490 except TypeError, inst:
3489 # was this an argument error?
3491 # was this an argument error?
3490 tb = traceback.extract_tb(sys.exc_info()[2])
3492 tb = traceback.extract_tb(sys.exc_info()[2])
3491 if len(tb) > 2: # no
3493 if len(tb) > 2: # no
3492 raise
3494 raise
3493 u.debug(inst, "\n")
3495 u.debug(inst, "\n")
3494 u.warn(_("%s: invalid arguments\n") % cmd)
3496 u.warn(_("%s: invalid arguments\n") % cmd)
3495 help_(u, cmd)
3497 help_(u, cmd)
3496 except SystemExit, inst:
3498 except SystemExit, inst:
3497 # Commands shouldn't sys.exit directly, but give a return code.
3499 # Commands shouldn't sys.exit directly, but give a return code.
3498 # Just in case, catch this and pass the exit code to the caller.
3500 # Just in case, catch this and pass the exit code to the caller.
3499 return inst.code
3501 return inst.code
3500 except:
3502 except:
3501 u.warn(_("** unknown exception encountered, details follow\n"))
3503 u.warn(_("** unknown exception encountered, details follow\n"))
3502 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3504 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3503 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3505 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3504 % version.get_version())
3506 % version.get_version())
3505 raise
3507 raise
3506
3508
3507 return -1
3509 return -1
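The dispatch() code above spells out the extension contract: each name listed in the [extensions] hgrc section is imported (under an hgext_<name> alias when loaded from a file path), its optional uisetup(u) is called once, reposetup(u, repo) is called when a repository is opened, and any cmdtable entries are merged into the command table. A minimal sketch of such a module follows; the module and command names ("hello") are made up for illustration and are not part of this changeset.

# hello.py - illustrative extension skeleton (hypothetical module)
from mercurial.i18n import gettext as _

def uisetup(ui):
    # run once by dispatch() after the extension is imported
    ui.note(_("hello extension loaded\n"))

def reposetup(ui, repo):
    # run for each repository the command operates on
    ui.note(_("hello: using repository %s\n") % repo.root)

def hello(ui, repo, **opts):
    """print a greeting (illustrative command only)"""
    ui.write(_("hello from %s\n") % repo.root)

cmdtable = {
    # same (function, options, synopsis) shape as entries in commands.table
    "hello": (hello, [], _("hg hello")),
}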
@@ -1,205 +1,208 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from repo import *
9 from repo import *
10 from demandload import *
10 from demandload import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
12 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
13 demandload(globals(), "errno lock os shutil util")
13 demandload(globals(), "errno lock os shutil util")
14
14
15 def bundle(ui, path):
15 def bundle(ui, path):
16 if path.startswith('bundle://'):
16 if path.startswith('bundle://'):
17 path = path[9:]
17 path = path[9:]
18 else:
18 else:
19 path = path[7:]
19 path = path[7:]
20 s = path.split("+", 1)
20 s = path.split("+", 1)
21 if len(s) == 1:
21 if len(s) == 1:
22 repopath, bundlename = "", s[0]
22 repopath, bundlename = "", s[0]
23 else:
23 else:
24 repopath, bundlename = s
24 repopath, bundlename = s
25 return bundlerepo.bundlerepository(ui, repopath, bundlename)
25 return bundlerepo.bundlerepository(ui, repopath, bundlename)
26
26
27 def hg(ui, path):
27 def hg(ui, path):
28 ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
28 ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
29 return httprepo.httprepository(ui, path.replace("hg://", "http://"))
29 return httprepo.httprepository(ui, path.replace("hg://", "http://"))
30
30
31 def local_(ui, path, create=0):
31 def local_(ui, path, create=0):
32 if path.startswith('file:'):
32 if path.startswith('file:'):
33 path = path[5:]
33 path = path[5:]
34 return localrepo.localrepository(ui, path, create)
34 return localrepo.localrepository(ui, path, create)
35
35
36 def ssh_(ui, path, create=0):
36 def ssh_(ui, path, create=0):
37 return sshrepo.sshrepository(ui, path, create)
37 return sshrepo.sshrepository(ui, path, create)
38
38
39 def old_http(ui, path):
39 def old_http(ui, path):
40 ui.warn(_("old-http:// syntax is deprecated, "
40 ui.warn(_("old-http:// syntax is deprecated, "
41 "please use static-http:// instead\n"))
41 "please use static-http:// instead\n"))
42 return statichttprepo.statichttprepository(
42 return statichttprepo.statichttprepository(
43 ui, path.replace("old-http://", "http://"))
43 ui, path.replace("old-http://", "http://"))
44
44
45 def static_http(ui, path):
45 def static_http(ui, path):
46 return statichttprepo.statichttprepository(
46 return statichttprepo.statichttprepository(
47 ui, path.replace("static-http://", "http://"))
47 ui, path.replace("static-http://", "http://"))
48
48
49 schemes = {
49 schemes = {
50 'bundle': bundle,
50 'bundle': bundle,
51 'file': local_,
51 'file': local_,
52 'hg': hg,
52 'hg': hg,
53 'http': lambda ui, path: httprepo.httprepository(ui, path),
53 'http': lambda ui, path: httprepo.httprepository(ui, path),
54 'https': lambda ui, path: httprepo.httpsrepository(ui, path),
54 'https': lambda ui, path: httprepo.httpsrepository(ui, path),
55 'old-http': old_http,
55 'old-http': old_http,
56 'ssh': ssh_,
56 'ssh': ssh_,
57 'static-http': static_http,
57 'static-http': static_http,
58 }
58 }
59
59
60 def repository(ui, path=None, create=0):
60 def repository(ui, path=None, create=0):
61 scheme = None
61 scheme = None
62 if path:
62 if path:
63 c = path.find(':')
63 c = path.find(':')
64 if c > 0:
64 if c > 0:
65 scheme = schemes.get(path[:c])
65 scheme = schemes.get(path[:c])
66 else:
66 else:
67 path = ''
67 path = ''
68 ctor = scheme or schemes['file']
68 ctor = scheme or schemes['file']
69 if create:
69 if create:
70 try:
70 try:
71 return ctor(ui, path, create)
71 return ctor(ui, path, create)
72 except TypeError:
72 except TypeError:
73 raise util.Abort(_('cannot create new repository over "%s" protocol') %
73 raise util.Abort(_('cannot create new repository over "%s" protocol') %
74 scheme)
74 scheme)
75 return ctor(ui, path)
75 return ctor(ui, path)
76
76
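repository() above simply maps the URL scheme to a constructor from the schemes table and falls back to 'file' for plain paths; create only succeeds for constructors that accept it (local_ and ssh_ here), others raise TypeError and abort. A usage sketch with made-up paths and URLs:

from mercurial import ui, hg

u = ui.ui()
r1 = hg.repository(u, "/path/to/work")                 # no scheme -> local_
r2 = hg.repository(u, "ssh://user@host//repos/proj")   # 'ssh' -> sshrepo
r3 = hg.repository(u, "http://example.com/hg/proj")    # 'http' -> httprepo
r4 = hg.repository(u, "/tmp/newrepo", create=1)        # new local repo; http/https would abort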
77 def clone(ui, source, dest=None, pull=False, rev=None, update=True):
77 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
78 stream=False):
78 """Make a copy of an existing repository.
79 """Make a copy of an existing repository.
79
80
80 Create a copy of an existing repository in a new directory. The
81 Create a copy of an existing repository in a new directory. The
81 source and destination are URLs, as passed to the repository
82 source and destination are URLs, as passed to the repository
82 function. Returns a pair of repository objects, the source and
83 function. Returns a pair of repository objects, the source and
83 newly created destination.
84 newly created destination.
84
85
85 The location of the source is added to the new repository's
86 The location of the source is added to the new repository's
86 .hg/hgrc file, as the default to be used for future pulls and
87 .hg/hgrc file, as the default to be used for future pulls and
87 pushes.
88 pushes.
88
89
89 If an exception is raised, the partly cloned/updated destination
90 If an exception is raised, the partly cloned/updated destination
90 repository will be deleted.
91 repository will be deleted.
91
92
92 Keyword arguments:
93 Keyword arguments:
93
94
94 dest: URL of destination repository to create (defaults to base
95 dest: URL of destination repository to create (defaults to base
95 name of source repository)
96 name of source repository)
96
97
97 pull: always pull from source repository, even in local case
98 pull: always pull from source repository, even in local case
98
99
100 stream: stream raw data uncompressed from the repository (fast over LAN, slow over WAN)
101
99 rev: revision to clone up to (implies pull=True)
102 rev: revision to clone up to (implies pull=True)
100
103
101 update: update working directory after clone completes, if
104 update: update working directory after clone completes, if
102 destination is local repository
105 destination is local repository
103 """
106 """
104 if dest is None:
107 if dest is None:
105 dest = os.path.basename(os.path.normpath(source))
108 dest = os.path.basename(os.path.normpath(source))
106
109
107 if os.path.exists(dest):
110 if os.path.exists(dest):
108 raise util.Abort(_("destination '%s' already exists"), dest)
111 raise util.Abort(_("destination '%s' already exists"), dest)
109
112
110 class DirCleanup(object):
113 class DirCleanup(object):
111 def __init__(self, dir_):
114 def __init__(self, dir_):
112 self.rmtree = shutil.rmtree
115 self.rmtree = shutil.rmtree
113 self.dir_ = dir_
116 self.dir_ = dir_
114 def close(self):
117 def close(self):
115 self.dir_ = None
118 self.dir_ = None
116 def __del__(self):
119 def __del__(self):
117 if self.dir_:
120 if self.dir_:
118 self.rmtree(self.dir_, True)
121 self.rmtree(self.dir_, True)
119
122
120 src_repo = repository(ui, source)
123 src_repo = repository(ui, source)
121
124
122 dest_repo = None
125 dest_repo = None
123 try:
126 try:
124 dest_repo = repository(ui, dest)
127 dest_repo = repository(ui, dest)
125 raise util.Abort(_("destination '%s' already exists.") % dest)
128 raise util.Abort(_("destination '%s' already exists.") % dest)
126 except RepoError:
129 except RepoError:
127 dest_repo = repository(ui, dest, create=True)
130 dest_repo = repository(ui, dest, create=True)
128
131
129 dest_path = None
132 dest_path = None
130 dir_cleanup = None
133 dir_cleanup = None
131 if dest_repo.local():
134 if dest_repo.local():
132 dest_path = os.path.realpath(dest)
135 dest_path = os.path.realpath(dest)
133 dir_cleanup = DirCleanup(dest_path)
136 dir_cleanup = DirCleanup(dest_path)
134
137
135 abspath = source
138 abspath = source
136 copy = False
139 copy = False
137 if src_repo.local() and dest_repo.local():
140 if src_repo.local() and dest_repo.local():
138 abspath = os.path.abspath(source)
141 abspath = os.path.abspath(source)
139 copy = not pull and not rev
142 copy = not pull and not rev
140
143
141 src_lock, dest_lock = None, None
144 src_lock, dest_lock = None, None
142 if copy:
145 if copy:
143 try:
146 try:
144 # we use a lock here because if we race with commit, we
147 # we use a lock here because if we race with commit, we
145 # can end up with extra data in the cloned revlogs that's
148 # can end up with extra data in the cloned revlogs that's
146 # not pointed to by changesets, thus causing verify to
149 # not pointed to by changesets, thus causing verify to
147 # fail
150 # fail
148 src_lock = src_repo.lock()
151 src_lock = src_repo.lock()
149 except lock.LockException:
152 except lock.LockException:
150 copy = False
153 copy = False
151
154
152 if copy:
155 if copy:
153 # we lock here to avoid premature writing to the target
156 # we lock here to avoid premature writing to the target
154 dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock"))
157 dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock"))
155
158
156 # we need to remove the (empty) data dir in dest so copyfiles
159 # we need to remove the (empty) data dir in dest so copyfiles
157 # can do its work
160 # can do its work
158 os.rmdir(os.path.join(dest_path, ".hg", "data"))
161 os.rmdir(os.path.join(dest_path, ".hg", "data"))
159 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
162 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
160 for f in files.split():
163 for f in files.split():
161 src = os.path.join(source, ".hg", f)
164 src = os.path.join(source, ".hg", f)
162 dst = os.path.join(dest_path, ".hg", f)
165 dst = os.path.join(dest_path, ".hg", f)
163 try:
166 try:
164 util.copyfiles(src, dst)
167 util.copyfiles(src, dst)
165 except OSError, inst:
168 except OSError, inst:
166 if inst.errno != errno.ENOENT:
169 if inst.errno != errno.ENOENT:
167 raise
170 raise
168
171
169 # we need to re-init the repo after manually copying the data
172 # we need to re-init the repo after manually copying the data
170 # into it
173 # into it
171 dest_repo = repository(ui, dest)
174 dest_repo = repository(ui, dest)
172
175
173 else:
176 else:
174 revs = None
177 revs = None
175 if rev:
178 if rev:
176 if not src_repo.local():
179 if not src_repo.local():
177 raise util.Abort(_("clone by revision not supported yet "
180 raise util.Abort(_("clone by revision not supported yet "
178 "for remote repositories"))
181 "for remote repositories"))
179 revs = [src_repo.lookup(r) for r in rev]
182 revs = [src_repo.lookup(r) for r in rev]
180
183
181 if dest_repo.local():
184 if dest_repo.local():
182 dest_repo.clone(src_repo, heads=revs, pull=pull)
185 dest_repo.clone(src_repo, heads=revs, stream=stream)
183 elif src_repo.local():
186 elif src_repo.local():
184 src_repo.push(dest_repo, revs=revs)
187 src_repo.push(dest_repo, revs=revs)
185 else:
188 else:
186 raise util.Abort(_("clone from remote to remote not supported"))
189 raise util.Abort(_("clone from remote to remote not supported"))
187
190
188 if src_lock:
191 if src_lock:
189 src_lock.release()
192 src_lock.release()
190
193
191 if dest_repo.local():
194 if dest_repo.local():
192 fp = dest_repo.opener("hgrc", "w", text=True)
195 fp = dest_repo.opener("hgrc", "w", text=True)
193 fp.write("[paths]\n")
196 fp.write("[paths]\n")
194 fp.write("default = %s\n" % abspath)
197 fp.write("default = %s\n" % abspath)
195 fp.close()
198 fp.close()
196
199
197 if dest_lock:
200 if dest_lock:
198 dest_lock.release()
201 dest_lock.release()
199
202
200 if update:
203 if update:
201 dest_repo.update(dest_repo.changelog.tip())
204 dest_repo.update(dest_repo.changelog.tip())
202 if dir_cleanup:
205 if dir_cleanup:
203 dir_cleanup.close()
206 dir_cleanup.close()
204
207
205 return src_repo, dest_repo
208 return src_repo, dest_repo
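This is the API side of the change described in the commit message: streaming is no longer implied, so callers (and the new `hg clone --stream` flag) must ask for it explicitly. A minimal sketch, with an illustrative URL:

from mercurial import ui, hg

u = ui.ui()
# default behaviour: hardlink/copy for local sources, pull for remote ones
src, dst = hg.clone(u, "http://example.com/hg/proj", "proj")
# opt in to streaming the raw data (fast over LAN, slow over WAN)
src2, dst2 = hg.clone(u, "http://example.com/hg/proj", "proj-stream", stream=True)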
@@ -1,2255 +1,2254 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("no repo found"))
30 raise repo.RepoError(_("no repo found"))
31 path = p
31 path = p
32 self.path = os.path.join(path, ".hg")
32 self.path = os.path.join(path, ".hg")
33
33
34 if not create and not os.path.isdir(self.path):
34 if not create and not os.path.isdir(self.path):
35 raise repo.RepoError(_("repository %s not found") % path)
35 raise repo.RepoError(_("repository %s not found") % path)
36
36
37 self.root = os.path.abspath(path)
37 self.root = os.path.abspath(path)
38 self.origroot = path
38 self.origroot = path
39 self.ui = ui.ui(parentui=parentui)
39 self.ui = ui.ui(parentui=parentui)
40 self.opener = util.opener(self.path)
40 self.opener = util.opener(self.path)
41 self.wopener = util.opener(self.root)
41 self.wopener = util.opener(self.root)
42
42
43 try:
43 try:
44 self.ui.readconfig(self.join("hgrc"), self.root)
44 self.ui.readconfig(self.join("hgrc"), self.root)
45 except IOError:
45 except IOError:
46 pass
46 pass
47
47
48 v = self.ui.revlogopts
48 v = self.ui.revlogopts
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 fl = v.get('flags', None)
51 fl = v.get('flags', None)
52 flags = 0
52 flags = 0
53 if fl != None:
53 if fl != None:
54 for x in fl.split():
54 for x in fl.split():
55 flags |= revlog.flagstr(x)
55 flags |= revlog.flagstr(x)
56 elif self.revlogv1:
56 elif self.revlogv1:
57 flags = revlog.REVLOG_DEFAULT_FLAGS
57 flags = revlog.REVLOG_DEFAULT_FLAGS
58
58
59 v = self.revlogversion | flags
59 v = self.revlogversion | flags
60 self.manifest = manifest.manifest(self.opener, v)
60 self.manifest = manifest.manifest(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
62
62
63 # the changelog might not have the inline index flag
63 # the changelog might not have the inline index flag
64 # on. If the format of the changelog is the same as found in
64 # on. If the format of the changelog is the same as found in
65 # .hgrc, apply any flags found in the .hgrc as well.
65 # .hgrc, apply any flags found in the .hgrc as well.
66 # Otherwise, just use the version from the changelog
66 # Otherwise, just use the version from the changelog
67 v = self.changelog.version
67 v = self.changelog.version
68 if v == self.revlogversion:
68 if v == self.revlogversion:
69 v |= flags
69 v |= flags
70 self.revlogversion = v
70 self.revlogversion = v
71
71
72 self.tagscache = None
72 self.tagscache = None
73 self.nodetagscache = None
73 self.nodetagscache = None
74 self.encodepats = None
74 self.encodepats = None
75 self.decodepats = None
75 self.decodepats = None
76 self.transhandle = None
76 self.transhandle = None
77
77
78 if create:
78 if create:
79 if not os.path.exists(path):
79 if not os.path.exists(path):
80 os.mkdir(path)
80 os.mkdir(path)
81 os.mkdir(self.path)
81 os.mkdir(self.path)
82 os.mkdir(self.join("data"))
82 os.mkdir(self.join("data"))
83
83
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85
85
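The constructor reads ui.revlogopts for a 'format' number and a whitespace-separated 'flags' list, mapping each flag name through revlog.flagstr(). The hgrc sketch below assumes the section behind ui.revlogopts is [revlog] and that 'inline' is an accepted flag name; both are assumptions about ui/revlog internals not shown in this file.

[revlog]
# revlog version number; non-zero selects the RevlogNG format (assumed value)
format = 1
# each token is passed to revlog.flagstr(); 'inline' is an assumed flag name
flags = inline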
86 def hook(self, name, throw=False, **args):
86 def hook(self, name, throw=False, **args):
87 def callhook(hname, funcname):
87 def callhook(hname, funcname):
88 '''call python hook. hook is callable object, looked up as
88 '''call python hook. hook is callable object, looked up as
89 name in python module. if callable returns "true", hook
89 name in python module. if callable returns "true", hook
90 fails, else passes. if hook raises exception, treated as
90 fails, else passes. if hook raises exception, treated as
91 hook failure. exception propagates if throw is "true".
91 hook failure. exception propagates if throw is "true".
92
92
93 reason for "true" meaning "hook failed" is so that
93 reason for "true" meaning "hook failed" is so that
94 unmodified commands (e.g. mercurial.commands.update) can
94 unmodified commands (e.g. mercurial.commands.update) can
95 be run as hooks without wrappers to convert return values.'''
95 be run as hooks without wrappers to convert return values.'''
96
96
97 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
97 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
98 d = funcname.rfind('.')
98 d = funcname.rfind('.')
99 if d == -1:
99 if d == -1:
100 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
100 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
101 % (hname, funcname))
101 % (hname, funcname))
102 modname = funcname[:d]
102 modname = funcname[:d]
103 try:
103 try:
104 obj = __import__(modname)
104 obj = __import__(modname)
105 except ImportError:
105 except ImportError:
106 try:
106 try:
107 # extensions are loaded with hgext_ prefix
107 # extensions are loaded with hgext_ prefix
108 obj = __import__("hgext_%s" % modname)
108 obj = __import__("hgext_%s" % modname)
109 except ImportError:
109 except ImportError:
110 raise util.Abort(_('%s hook is invalid '
110 raise util.Abort(_('%s hook is invalid '
111 '(import of "%s" failed)') %
111 '(import of "%s" failed)') %
112 (hname, modname))
112 (hname, modname))
113 try:
113 try:
114 for p in funcname.split('.')[1:]:
114 for p in funcname.split('.')[1:]:
115 obj = getattr(obj, p)
115 obj = getattr(obj, p)
116 except AttributeError, err:
116 except AttributeError, err:
117 raise util.Abort(_('%s hook is invalid '
117 raise util.Abort(_('%s hook is invalid '
118 '("%s" is not defined)') %
118 '("%s" is not defined)') %
119 (hname, funcname))
119 (hname, funcname))
120 if not callable(obj):
120 if not callable(obj):
121 raise util.Abort(_('%s hook is invalid '
121 raise util.Abort(_('%s hook is invalid '
122 '("%s" is not callable)') %
122 '("%s" is not callable)') %
123 (hname, funcname))
123 (hname, funcname))
124 try:
124 try:
125 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
125 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
126 except (KeyboardInterrupt, util.SignalInterrupt):
126 except (KeyboardInterrupt, util.SignalInterrupt):
127 raise
127 raise
128 except Exception, exc:
128 except Exception, exc:
129 if isinstance(exc, util.Abort):
129 if isinstance(exc, util.Abort):
130 self.ui.warn(_('error: %s hook failed: %s\n') %
130 self.ui.warn(_('error: %s hook failed: %s\n') %
131 (hname, exc.args[0] % exc.args[1:]))
131 (hname, exc.args[0] % exc.args[1:]))
132 else:
132 else:
133 self.ui.warn(_('error: %s hook raised an exception: '
133 self.ui.warn(_('error: %s hook raised an exception: '
134 '%s\n') % (hname, exc))
134 '%s\n') % (hname, exc))
135 if throw:
135 if throw:
136 raise
136 raise
137 self.ui.print_exc()
137 self.ui.print_exc()
138 return True
138 return True
139 if r:
139 if r:
140 if throw:
140 if throw:
141 raise util.Abort(_('%s hook failed') % hname)
141 raise util.Abort(_('%s hook failed') % hname)
142 self.ui.warn(_('warning: %s hook failed\n') % hname)
142 self.ui.warn(_('warning: %s hook failed\n') % hname)
143 return r
143 return r
144
144
145 def runhook(name, cmd):
145 def runhook(name, cmd):
146 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
146 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
147 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
147 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
148 r = util.system(cmd, environ=env, cwd=self.root)
148 r = util.system(cmd, environ=env, cwd=self.root)
149 if r:
149 if r:
150 desc, r = util.explain_exit(r)
150 desc, r = util.explain_exit(r)
151 if throw:
151 if throw:
152 raise util.Abort(_('%s hook %s') % (name, desc))
152 raise util.Abort(_('%s hook %s') % (name, desc))
153 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
153 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
154 return r
154 return r
155
155
156 r = False
156 r = False
157 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
157 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
158 if hname.split(".", 1)[0] == name and cmd]
158 if hname.split(".", 1)[0] == name and cmd]
159 hooks.sort()
159 hooks.sort()
160 for hname, cmd in hooks:
160 for hname, cmd in hooks:
161 if cmd.startswith('python:'):
161 if cmd.startswith('python:'):
162 r = callhook(hname, cmd[7:].strip()) or r
162 r = callhook(hname, cmd[7:].strip()) or r
163 else:
163 else:
164 r = runhook(hname, cmd) or r
164 r = runhook(hname, cmd) or r
165 return r
165 return r
166
166
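hook() runs both shell hooks (through util.system, with the keyword arguments exported as HG_* environment variables) and in-process hooks declared with a "python:" prefix and resolved as callhook() shows. A configuration sketch follows; the module name hgcheck is hypothetical, and only hook names that appear in this file (precommit, pretag) are used.

[hooks]
# shell hook: precommit passes parent1/parent2, exported as HG_PARENT1/HG_PARENT2
precommit = echo "committing on top of $HG_PARENT1"
# in-process hook: module.function after the "python:" prefix; the suffix after
# the dot only distinguishes entries, hname.split(".", 1)[0] selects the hook
pretag.naming = python:hgcheck.prettag

# hgcheck.py (hypothetical module on the import path)
def prettag(ui, repo, hooktype, **kwargs):
    # a true return value means "hook failed"; pretag is called with throw=True,
    # so failure aborts the tag (see callhook() above and tag() below)
    tag = kwargs.get('tag', '')
    if tag and not tag.startswith('v'):
        ui.warn("tag '%s' does not follow the naming convention\n" % tag)
        return True
    return False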
167 tag_disallowed = ':\r\n'
167 tag_disallowed = ':\r\n'
168
168
169 def tag(self, name, node, local=False, message=None, user=None, date=None):
169 def tag(self, name, node, local=False, message=None, user=None, date=None):
170 '''tag a revision with a symbolic name.
170 '''tag a revision with a symbolic name.
171
171
172 if local is True, the tag is stored in a per-repository file.
172 if local is True, the tag is stored in a per-repository file.
173 otherwise, it is stored in the .hgtags file, and a new
173 otherwise, it is stored in the .hgtags file, and a new
174 changeset is committed with the change.
174 changeset is committed with the change.
175
175
176 keyword arguments:
176 keyword arguments:
177
177
178 local: whether to store tag in non-version-controlled file
178 local: whether to store tag in non-version-controlled file
179 (default False)
179 (default False)
180
180
181 message: commit message to use if committing
181 message: commit message to use if committing
182
182
183 user: name of user to use if committing
183 user: name of user to use if committing
184
184
185 date: date tuple to use if committing'''
185 date: date tuple to use if committing'''
186
186
187 for c in self.tag_disallowed:
187 for c in self.tag_disallowed:
188 if c in name:
188 if c in name:
189 raise util.Abort(_('%r cannot be used in a tag name') % c)
189 raise util.Abort(_('%r cannot be used in a tag name') % c)
190
190
191 self.hook('pretag', throw=True, node=node, tag=name, local=local)
191 self.hook('pretag', throw=True, node=node, tag=name, local=local)
192
192
193 if local:
193 if local:
194 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
194 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
195 self.hook('tag', node=node, tag=name, local=local)
195 self.hook('tag', node=node, tag=name, local=local)
196 return
196 return
197
197
198 for x in self.changes():
198 for x in self.changes():
199 if '.hgtags' in x:
199 if '.hgtags' in x:
200 raise util.Abort(_('working copy of .hgtags is changed '
200 raise util.Abort(_('working copy of .hgtags is changed '
201 '(please commit .hgtags manually)'))
201 '(please commit .hgtags manually)'))
202
202
203 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
203 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
204 if self.dirstate.state('.hgtags') == '?':
204 if self.dirstate.state('.hgtags') == '?':
205 self.add(['.hgtags'])
205 self.add(['.hgtags'])
206
206
207 if not message:
207 if not message:
208 message = _('Added tag %s for changeset %s') % (name, node)
208 message = _('Added tag %s for changeset %s') % (name, node)
209
209
210 self.commit(['.hgtags'], message, user, date)
210 self.commit(['.hgtags'], message, user, date)
211 self.hook('tag', node=node, tag=name, local=local)
211 self.hook('tag', node=node, tag=name, local=local)
212
212
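As the docstring notes, tag() either appends to .hgtags and commits, or records a purely local tag in .hg/localtags; the node argument is written out verbatim, so callers pass a hex string. A short sketch:

from mercurial.node import hex

node = hex(repo.changelog.tip())
repo.tag('v1.0', node)               # appends to .hgtags and commits the change
repo.tag('wip', node, local=True)    # recorded only in .hg/localtags, no commit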
213 def tags(self):
213 def tags(self):
214 '''return a mapping of tag to node'''
214 '''return a mapping of tag to node'''
215 if not self.tagscache:
215 if not self.tagscache:
216 self.tagscache = {}
216 self.tagscache = {}
217
217
218 def parsetag(line, context):
218 def parsetag(line, context):
219 if not line:
219 if not line:
220 return
220 return
221 s = line.split(" ", 1)
221 s = line.split(" ", 1)
222 if len(s) != 2:
222 if len(s) != 2:
223 self.ui.warn(_("%s: cannot parse entry\n") % context)
223 self.ui.warn(_("%s: cannot parse entry\n") % context)
224 return
224 return
225 node, key = s
225 node, key = s
226 key = key.strip()
226 key = key.strip()
227 try:
227 try:
228 bin_n = bin(node)
228 bin_n = bin(node)
229 except TypeError:
229 except TypeError:
230 self.ui.warn(_("%s: node '%s' is not well formed\n") %
230 self.ui.warn(_("%s: node '%s' is not well formed\n") %
231 (context, node))
231 (context, node))
232 return
232 return
233 if bin_n not in self.changelog.nodemap:
233 if bin_n not in self.changelog.nodemap:
234 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
234 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
235 (context, key))
235 (context, key))
236 return
236 return
237 self.tagscache[key] = bin_n
237 self.tagscache[key] = bin_n
238
238
239 # read the tags file from each head, ending with the tip,
239 # read the tags file from each head, ending with the tip,
240 # and add each tag found to the map, with "newer" ones
240 # and add each tag found to the map, with "newer" ones
241 # taking precedence
241 # taking precedence
242 heads = self.heads()
242 heads = self.heads()
243 heads.reverse()
243 heads.reverse()
244 fl = self.file(".hgtags")
244 fl = self.file(".hgtags")
245 for node in heads:
245 for node in heads:
246 change = self.changelog.read(node)
246 change = self.changelog.read(node)
247 rev = self.changelog.rev(node)
247 rev = self.changelog.rev(node)
248 fn, ff = self.manifest.find(change[0], '.hgtags')
248 fn, ff = self.manifest.find(change[0], '.hgtags')
249 if fn is None: continue
249 if fn is None: continue
250 count = 0
250 count = 0
251 for l in fl.read(fn).splitlines():
251 for l in fl.read(fn).splitlines():
252 count += 1
252 count += 1
253 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
253 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
254 (rev, short(node), count))
254 (rev, short(node), count))
255 try:
255 try:
256 f = self.opener("localtags")
256 f = self.opener("localtags")
257 count = 0
257 count = 0
258 for l in f:
258 for l in f:
259 count += 1
259 count += 1
260 parsetag(l, _("localtags, line %d") % count)
260 parsetag(l, _("localtags, line %d") % count)
261 except IOError:
261 except IOError:
262 pass
262 pass
263
263
264 self.tagscache['tip'] = self.changelog.tip()
264 self.tagscache['tip'] = self.changelog.tip()
265
265
266 return self.tagscache
266 return self.tagscache
267
267
268 def tagslist(self):
268 def tagslist(self):
269 '''return a list of tags ordered by revision'''
269 '''return a list of tags ordered by revision'''
270 l = []
270 l = []
271 for t, n in self.tags().items():
271 for t, n in self.tags().items():
272 try:
272 try:
273 r = self.changelog.rev(n)
273 r = self.changelog.rev(n)
274 except:
274 except:
275 r = -2 # sort to the beginning of the list if unknown
275 r = -2 # sort to the beginning of the list if unknown
276 l.append((r, t, n))
276 l.append((r, t, n))
277 l.sort()
277 l.sort()
278 return [(t, n) for r, t, n in l]
278 return [(t, n) for r, t, n in l]
279
279
280 def nodetags(self, node):
280 def nodetags(self, node):
281 '''return the tags associated with a node'''
281 '''return the tags associated with a node'''
282 if not self.nodetagscache:
282 if not self.nodetagscache:
283 self.nodetagscache = {}
283 self.nodetagscache = {}
284 for t, n in self.tags().items():
284 for t, n in self.tags().items():
285 self.nodetagscache.setdefault(n, []).append(t)
285 self.nodetagscache.setdefault(n, []).append(t)
286 return self.nodetagscache.get(node, [])
286 return self.nodetagscache.get(node, [])
287
287
288 def lookup(self, key):
288 def lookup(self, key):
289 try:
289 try:
290 return self.tags()[key]
290 return self.tags()[key]
291 except KeyError:
291 except KeyError:
292 try:
292 try:
293 return self.changelog.lookup(key)
293 return self.changelog.lookup(key)
294 except:
294 except:
295 raise repo.RepoError(_("unknown revision '%s'") % key)
295 raise repo.RepoError(_("unknown revision '%s'") % key)
296
296
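lookup() tries the tag map first and falls back to changelog.lookup(), so a tag name, revision number or node prefix all resolve to a binary node. A usage sketch:

from mercurial.node import short

node = repo.lookup('tip')            # tags take precedence over changelog lookup
print repo.nodetags(node)            # tags attached to that node, e.g. ['tip']
for name, n in repo.tagslist():      # tags ordered by revision
    print name, short(n)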
297 def dev(self):
297 def dev(self):
298 return os.lstat(self.path).st_dev
298 return os.lstat(self.path).st_dev
299
299
300 def local(self):
300 def local(self):
301 return True
301 return True
302
302
303 def join(self, f):
303 def join(self, f):
304 return os.path.join(self.path, f)
304 return os.path.join(self.path, f)
305
305
306 def wjoin(self, f):
306 def wjoin(self, f):
307 return os.path.join(self.root, f)
307 return os.path.join(self.root, f)
308
308
309 def file(self, f):
309 def file(self, f):
310 if f[0] == '/':
310 if f[0] == '/':
311 f = f[1:]
311 f = f[1:]
312 return filelog.filelog(self.opener, f, self.revlogversion)
312 return filelog.filelog(self.opener, f, self.revlogversion)
313
313
314 def changectx(self, changeid):
314 def changectx(self, changeid):
315 return context.changectx(self, changeid)
315 return context.changectx(self, changeid)
316
316
317 def filectx(self, path, changeid=None, fileid=None):
317 def filectx(self, path, changeid=None, fileid=None):
318 """changeid can be a changeset revision, node, or tag.
318 """changeid can be a changeset revision, node, or tag.
319 fileid can be a file revision or node."""
319 fileid can be a file revision or node."""
320 return context.filectx(self, path, changeid, fileid)
320 return context.filectx(self, path, changeid, fileid)
321
321
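changectx() and filectx() are thin wrappers over the context module; per the docstring, changeid may be a revision, node or tag, and fileid a file revision or node. For example (file name illustrative):

ctx = repo.changectx('tip')                     # changeset context for the tip
fctx = repo.filectx('README', changeid='tip')   # that file as of the same changeset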
322 def getcwd(self):
322 def getcwd(self):
323 return self.dirstate.getcwd()
323 return self.dirstate.getcwd()
324
324
325 def wfile(self, f, mode='r'):
325 def wfile(self, f, mode='r'):
326 return self.wopener(f, mode)
326 return self.wopener(f, mode)
327
327
328 def wread(self, filename):
328 def wread(self, filename):
329 if self.encodepats == None:
329 if self.encodepats == None:
330 l = []
330 l = []
331 for pat, cmd in self.ui.configitems("encode"):
331 for pat, cmd in self.ui.configitems("encode"):
332 mf = util.matcher(self.root, "", [pat], [], [])[1]
332 mf = util.matcher(self.root, "", [pat], [], [])[1]
333 l.append((mf, cmd))
333 l.append((mf, cmd))
334 self.encodepats = l
334 self.encodepats = l
335
335
336 data = self.wopener(filename, 'r').read()
336 data = self.wopener(filename, 'r').read()
337
337
338 for mf, cmd in self.encodepats:
338 for mf, cmd in self.encodepats:
339 if mf(filename):
339 if mf(filename):
340 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
340 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
341 data = util.filter(data, cmd)
341 data = util.filter(data, cmd)
342 break
342 break
343
343
344 return data
344 return data
345
345
346 def wwrite(self, filename, data, fd=None):
346 def wwrite(self, filename, data, fd=None):
347 if self.decodepats == None:
347 if self.decodepats == None:
348 l = []
348 l = []
349 for pat, cmd in self.ui.configitems("decode"):
349 for pat, cmd in self.ui.configitems("decode"):
350 mf = util.matcher(self.root, "", [pat], [], [])[1]
350 mf = util.matcher(self.root, "", [pat], [], [])[1]
351 l.append((mf, cmd))
351 l.append((mf, cmd))
352 self.decodepats = l
352 self.decodepats = l
353
353
354 for mf, cmd in self.decodepats:
354 for mf, cmd in self.decodepats:
355 if mf(filename):
355 if mf(filename):
356 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
356 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
357 data = util.filter(data, cmd)
357 data = util.filter(data, cmd)
358 break
358 break
359
359
360 if fd:
360 if fd:
361 return fd.write(data)
361 return fd.write(data)
362 return self.wopener(filename, 'w').write(data)
362 return self.wopener(filename, 'w').write(data)
363
363
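wread() and wwrite() run file data through the [encode] and [decode] filters from hgrc: each entry maps a file pattern (matched via util.matcher) to a shell command applied by util.filter(). The pattern and commands below are purely illustrative, not taken from this changeset:

[encode]
# filter applied when a matching working-directory file is read for storage
**.txt = tr -d '\r'

[decode]
# filter applied when the stored data is written back to the working directory
**.txt = cat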
364 def transaction(self):
364 def transaction(self):
365 tr = self.transhandle
365 tr = self.transhandle
366 if tr != None and tr.running():
366 if tr != None and tr.running():
367 return tr.nest()
367 return tr.nest()
368
368
369 # save dirstate for rollback
369 # save dirstate for rollback
370 try:
370 try:
371 ds = self.opener("dirstate").read()
371 ds = self.opener("dirstate").read()
372 except IOError:
372 except IOError:
373 ds = ""
373 ds = ""
374 self.opener("journal.dirstate", "w").write(ds)
374 self.opener("journal.dirstate", "w").write(ds)
375
375
376 tr = transaction.transaction(self.ui.warn, self.opener,
376 tr = transaction.transaction(self.ui.warn, self.opener,
377 self.join("journal"),
377 self.join("journal"),
378 aftertrans(self.path))
378 aftertrans(self.path))
379 self.transhandle = tr
379 self.transhandle = tr
380 return tr
380 return tr
381
381
382 def recover(self):
382 def recover(self):
383 l = self.lock()
383 l = self.lock()
384 if os.path.exists(self.join("journal")):
384 if os.path.exists(self.join("journal")):
385 self.ui.status(_("rolling back interrupted transaction\n"))
385 self.ui.status(_("rolling back interrupted transaction\n"))
386 transaction.rollback(self.opener, self.join("journal"))
386 transaction.rollback(self.opener, self.join("journal"))
387 self.reload()
387 self.reload()
388 return True
388 return True
389 else:
389 else:
390 self.ui.warn(_("no interrupted transaction available\n"))
390 self.ui.warn(_("no interrupted transaction available\n"))
391 return False
391 return False
392
392
393 def rollback(self, wlock=None):
393 def rollback(self, wlock=None):
394 if not wlock:
394 if not wlock:
395 wlock = self.wlock()
395 wlock = self.wlock()
396 l = self.lock()
396 l = self.lock()
397 if os.path.exists(self.join("undo")):
397 if os.path.exists(self.join("undo")):
398 self.ui.status(_("rolling back last transaction\n"))
398 self.ui.status(_("rolling back last transaction\n"))
399 transaction.rollback(self.opener, self.join("undo"))
399 transaction.rollback(self.opener, self.join("undo"))
400 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
400 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
401 self.reload()
401 self.reload()
402 self.wreload()
402 self.wreload()
403 else:
403 else:
404 self.ui.warn(_("no rollback information available\n"))
404 self.ui.warn(_("no rollback information available\n"))
405
405
406 def wreload(self):
406 def wreload(self):
407 self.dirstate.read()
407 self.dirstate.read()
408
408
409 def reload(self):
409 def reload(self):
410 self.changelog.load()
410 self.changelog.load()
411 self.manifest.load()
411 self.manifest.load()
412 self.tagscache = None
412 self.tagscache = None
413 self.nodetagscache = None
413 self.nodetagscache = None
414
414
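transaction(), recover() and rollback() implement the journal/undo scheme: every revlog append is journalled, aftertrans() turns the journal into undo data when the transaction closes, and an interrupted journal is replayed backwards by recover(). A writer sketch, mirroring how rawcommit() and commit() below use it:

tr = repo.transaction()       # opens the journal and saves journal.dirstate
try:
    # ... append file, manifest and changelog revisions under tr ...
    tr.close()                # success: the journal becomes the undo data
except:
    raise                     # journal left behind; repo.recover() rolls it back
# later, repo.rollback() undoes the last closed transaction from the undo data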
415 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
415 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
416 desc=None):
416 desc=None):
417 try:
417 try:
418 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
418 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
419 except lock.LockHeld, inst:
419 except lock.LockHeld, inst:
420 if not wait:
420 if not wait:
421 raise
421 raise
422 self.ui.warn(_("waiting for lock on %s held by %s\n") %
422 self.ui.warn(_("waiting for lock on %s held by %s\n") %
423 (desc, inst.args[0]))
423 (desc, inst.args[0]))
424 # default to 600 seconds timeout
424 # default to 600 seconds timeout
425 l = lock.lock(self.join(lockname),
425 l = lock.lock(self.join(lockname),
426 int(self.ui.config("ui", "timeout") or 600),
426 int(self.ui.config("ui", "timeout") or 600),
427 releasefn, desc=desc)
427 releasefn, desc=desc)
428 if acquirefn:
428 if acquirefn:
429 acquirefn()
429 acquirefn()
430 return l
430 return l
431
431
432 def lock(self, wait=1):
432 def lock(self, wait=1):
433 return self.do_lock("lock", wait, acquirefn=self.reload,
433 return self.do_lock("lock", wait, acquirefn=self.reload,
434 desc=_('repository %s') % self.origroot)
434 desc=_('repository %s') % self.origroot)
435
435
436 def wlock(self, wait=1):
436 def wlock(self, wait=1):
437 return self.do_lock("wlock", wait, self.dirstate.write,
437 return self.do_lock("wlock", wait, self.dirstate.write,
438 self.wreload,
438 self.wreload,
439 desc=_('working directory of %s') % self.origroot)
439 desc=_('working directory of %s') % self.origroot)
440
440
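lock() protects the store and wlock() the working directory; when another process holds a lock, do_lock() waits up to the [ui] timeout setting (600 seconds by default). Callers in this file take the working-directory lock before the store lock, as in this sketch:

wlock = repo.wlock()          # working-directory lock; writes dirstate on release
l = repo.lock()               # store lock; reload() runs on acquisition
try:
    # ... modify the store and dirstate ...
    pass
finally:
    l.release()
    wlock.release()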
441 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
441 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
442 "determine whether a new filenode is needed"
442 "determine whether a new filenode is needed"
443 fp1 = manifest1.get(filename, nullid)
443 fp1 = manifest1.get(filename, nullid)
444 fp2 = manifest2.get(filename, nullid)
444 fp2 = manifest2.get(filename, nullid)
445
445
446 if fp2 != nullid:
446 if fp2 != nullid:
447 # is one parent an ancestor of the other?
447 # is one parent an ancestor of the other?
448 fpa = filelog.ancestor(fp1, fp2)
448 fpa = filelog.ancestor(fp1, fp2)
449 if fpa == fp1:
449 if fpa == fp1:
450 fp1, fp2 = fp2, nullid
450 fp1, fp2 = fp2, nullid
451 elif fpa == fp2:
451 elif fpa == fp2:
452 fp2 = nullid
452 fp2 = nullid
453
453
454 # is the file unmodified from the parent? report existing entry
454 # is the file unmodified from the parent? report existing entry
455 if fp2 == nullid and text == filelog.read(fp1):
455 if fp2 == nullid and text == filelog.read(fp1):
456 return (fp1, None, None)
456 return (fp1, None, None)
457
457
458 return (None, fp1, fp2)
458 return (None, fp1, fp2)
459
459
460 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
460 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
461 orig_parent = self.dirstate.parents()[0] or nullid
461 orig_parent = self.dirstate.parents()[0] or nullid
462 p1 = p1 or self.dirstate.parents()[0] or nullid
462 p1 = p1 or self.dirstate.parents()[0] or nullid
463 p2 = p2 or self.dirstate.parents()[1] or nullid
463 p2 = p2 or self.dirstate.parents()[1] or nullid
464 c1 = self.changelog.read(p1)
464 c1 = self.changelog.read(p1)
465 c2 = self.changelog.read(p2)
465 c2 = self.changelog.read(p2)
466 m1 = self.manifest.read(c1[0])
466 m1 = self.manifest.read(c1[0])
467 mf1 = self.manifest.readflags(c1[0])
467 mf1 = self.manifest.readflags(c1[0])
468 m2 = self.manifest.read(c2[0])
468 m2 = self.manifest.read(c2[0])
469 changed = []
469 changed = []
470
470
471 if orig_parent == p1:
471 if orig_parent == p1:
472 update_dirstate = 1
472 update_dirstate = 1
473 else:
473 else:
474 update_dirstate = 0
474 update_dirstate = 0
475
475
476 if not wlock:
476 if not wlock:
477 wlock = self.wlock()
477 wlock = self.wlock()
478 l = self.lock()
478 l = self.lock()
479 tr = self.transaction()
479 tr = self.transaction()
480 mm = m1.copy()
480 mm = m1.copy()
481 mfm = mf1.copy()
481 mfm = mf1.copy()
482 linkrev = self.changelog.count()
482 linkrev = self.changelog.count()
483 for f in files:
483 for f in files:
484 try:
484 try:
485 t = self.wread(f)
485 t = self.wread(f)
486 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
486 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
487 r = self.file(f)
487 r = self.file(f)
488 mfm[f] = tm
488 mfm[f] = tm
489
489
490 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
490 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
491 if entry:
491 if entry:
492 mm[f] = entry
492 mm[f] = entry
493 continue
493 continue
494
494
495 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
495 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
496 changed.append(f)
496 changed.append(f)
497 if update_dirstate:
497 if update_dirstate:
498 self.dirstate.update([f], "n")
498 self.dirstate.update([f], "n")
499 except IOError:
499 except IOError:
500 try:
500 try:
501 del mm[f]
501 del mm[f]
502 del mfm[f]
502 del mfm[f]
503 if update_dirstate:
503 if update_dirstate:
504 self.dirstate.forget([f])
504 self.dirstate.forget([f])
505 except:
505 except:
506 # deleted from p2?
506 # deleted from p2?
507 pass
507 pass
508
508
509 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
509 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
510 user = user or self.ui.username()
510 user = user or self.ui.username()
511 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
511 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
512 tr.close()
512 tr.close()
513 if update_dirstate:
513 if update_dirstate:
514 self.dirstate.setparents(n, nullid)
514 self.dirstate.setparents(n, nullid)
515
515
516 def commit(self, files=None, text="", user=None, date=None,
516 def commit(self, files=None, text="", user=None, date=None,
517 match=util.always, force=False, lock=None, wlock=None,
517 match=util.always, force=False, lock=None, wlock=None,
518 force_editor=False):
518 force_editor=False):
519 commit = []
519 commit = []
520 remove = []
520 remove = []
521 changed = []
521 changed = []
522
522
523 if files:
523 if files:
524 for f in files:
524 for f in files:
525 s = self.dirstate.state(f)
525 s = self.dirstate.state(f)
526 if s in 'nmai':
526 if s in 'nmai':
527 commit.append(f)
527 commit.append(f)
528 elif s == 'r':
528 elif s == 'r':
529 remove.append(f)
529 remove.append(f)
530 else:
530 else:
531 self.ui.warn(_("%s not tracked!\n") % f)
531 self.ui.warn(_("%s not tracked!\n") % f)
532 else:
532 else:
533 modified, added, removed, deleted, unknown = self.changes(match=match)
533 modified, added, removed, deleted, unknown = self.changes(match=match)
534 commit = modified + added
534 commit = modified + added
535 remove = removed
535 remove = removed
536
536
537 p1, p2 = self.dirstate.parents()
537 p1, p2 = self.dirstate.parents()
538 c1 = self.changelog.read(p1)
538 c1 = self.changelog.read(p1)
539 c2 = self.changelog.read(p2)
539 c2 = self.changelog.read(p2)
540 m1 = self.manifest.read(c1[0])
540 m1 = self.manifest.read(c1[0])
541 mf1 = self.manifest.readflags(c1[0])
541 mf1 = self.manifest.readflags(c1[0])
542 m2 = self.manifest.read(c2[0])
542 m2 = self.manifest.read(c2[0])
543
543
544 if not commit and not remove and not force and p2 == nullid:
544 if not commit and not remove and not force and p2 == nullid:
545 self.ui.status(_("nothing changed\n"))
545 self.ui.status(_("nothing changed\n"))
546 return None
546 return None
547
547
548 xp1 = hex(p1)
548 xp1 = hex(p1)
549 if p2 == nullid: xp2 = ''
549 if p2 == nullid: xp2 = ''
550 else: xp2 = hex(p2)
550 else: xp2 = hex(p2)
551
551
552 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
552 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
553
553
554 if not wlock:
554 if not wlock:
555 wlock = self.wlock()
555 wlock = self.wlock()
556 if not lock:
556 if not lock:
557 lock = self.lock()
557 lock = self.lock()
558 tr = self.transaction()
558 tr = self.transaction()
559
559
560 # check in files
560 # check in files
561 new = {}
561 new = {}
562 linkrev = self.changelog.count()
562 linkrev = self.changelog.count()
563 commit.sort()
563 commit.sort()
564 for f in commit:
564 for f in commit:
565 self.ui.note(f + "\n")
565 self.ui.note(f + "\n")
566 try:
566 try:
567 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
567 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
568 t = self.wread(f)
568 t = self.wread(f)
569 except IOError:
569 except IOError:
570 self.ui.warn(_("trouble committing %s!\n") % f)
570 self.ui.warn(_("trouble committing %s!\n") % f)
571 raise
571 raise
572
572
573 r = self.file(f)
573 r = self.file(f)
574
574
575 meta = {}
575 meta = {}
576 cp = self.dirstate.copied(f)
576 cp = self.dirstate.copied(f)
577 if cp:
577 if cp:
578 meta["copy"] = cp
578 meta["copy"] = cp
579 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
579 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
580 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
580 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
581 fp1, fp2 = nullid, nullid
581 fp1, fp2 = nullid, nullid
582 else:
582 else:
583 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
583 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
584 if entry:
584 if entry:
585 new[f] = entry
585 new[f] = entry
586 continue
586 continue
587
587
588 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
588 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
589 # remember what we've added so that we can later calculate
589 # remember what we've added so that we can later calculate
590 # the files to pull from a set of changesets
590 # the files to pull from a set of changesets
591 changed.append(f)
591 changed.append(f)
592
592
593 # update manifest
593 # update manifest
594 m1 = m1.copy()
594 m1 = m1.copy()
595 m1.update(new)
595 m1.update(new)
596 for f in remove:
596 for f in remove:
597 if f in m1:
597 if f in m1:
598 del m1[f]
598 del m1[f]
599 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
599 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
600 (new, remove))
600 (new, remove))
601
601
602 # add changeset
602 # add changeset
603 new = new.keys()
603 new = new.keys()
604 new.sort()
604 new.sort()
605
605
606 user = user or self.ui.username()
606 user = user or self.ui.username()
607 if not text or force_editor:
607 if not text or force_editor:
608 edittext = []
608 edittext = []
609 if text:
609 if text:
610 edittext.append(text)
610 edittext.append(text)
611 edittext.append("")
611 edittext.append("")
612 if p2 != nullid:
612 if p2 != nullid:
613 edittext.append("HG: branch merge")
613 edittext.append("HG: branch merge")
614 edittext.extend(["HG: changed %s" % f for f in changed])
614 edittext.extend(["HG: changed %s" % f for f in changed])
615 edittext.extend(["HG: removed %s" % f for f in remove])
615 edittext.extend(["HG: removed %s" % f for f in remove])
616 if not changed and not remove:
616 if not changed and not remove:
617 edittext.append("HG: no files changed")
617 edittext.append("HG: no files changed")
618 edittext.append("")
618 edittext.append("")
619 # run editor in the repository root
619 # run editor in the repository root
620 olddir = os.getcwd()
620 olddir = os.getcwd()
621 os.chdir(self.root)
621 os.chdir(self.root)
622 text = self.ui.edit("\n".join(edittext), user)
622 text = self.ui.edit("\n".join(edittext), user)
623 os.chdir(olddir)
623 os.chdir(olddir)
624
624
625 lines = [line.rstrip() for line in text.rstrip().splitlines()]
625 lines = [line.rstrip() for line in text.rstrip().splitlines()]
626 while lines and not lines[0]:
626 while lines and not lines[0]:
627 del lines[0]
627 del lines[0]
628 if not lines:
628 if not lines:
629 return None
629 return None
630 text = '\n'.join(lines)
630 text = '\n'.join(lines)
631 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
631 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
632 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
632 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
633 parent2=xp2)
633 parent2=xp2)
634 tr.close()
634 tr.close()
635
635
636 self.dirstate.setparents(n)
636 self.dirstate.setparents(n)
637 self.dirstate.update(new, "n")
637 self.dirstate.update(new, "n")
638 self.dirstate.forget(remove)
638 self.dirstate.forget(remove)
639
639
640 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
640 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
641 return n
641 return n
642
642
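# A minimal standalone sketch of the message clean-up performed at the end
# of commit() above: strip trailing whitespace from every line, drop leading
# blank lines, and treat an empty result as "abort the commit".  The helper
# name is invented here for illustration only.
def cleanup_commit_message(text):
    lines = [line.rstrip() for line in text.rstrip().splitlines()]
    while lines and not lines[0]:
        del lines[0]
    if not lines:
        return None                     # nothing left: caller gives up
    return '\n'.join(lines)

# cleanup_commit_message("\n\n  fix walk()   \n\ndetails below\n")
#   -> "  fix walk()\n\ndetails below"
# cleanup_commit_message("   \n \n") -> None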
643 def walk(self, node=None, files=[], match=util.always, badmatch=None):
643 def walk(self, node=None, files=[], match=util.always, badmatch=None):
644 if node:
644 if node:
645 fdict = dict.fromkeys(files)
645 fdict = dict.fromkeys(files)
646 for fn in self.manifest.read(self.changelog.read(node)[0]):
646 for fn in self.manifest.read(self.changelog.read(node)[0]):
647 fdict.pop(fn, None)
647 fdict.pop(fn, None)
648 if match(fn):
648 if match(fn):
649 yield 'm', fn
649 yield 'm', fn
650 for fn in fdict:
650 for fn in fdict:
651 if badmatch and badmatch(fn):
651 if badmatch and badmatch(fn):
652 if match(fn):
652 if match(fn):
653 yield 'b', fn
653 yield 'b', fn
654 else:
654 else:
655 self.ui.warn(_('%s: No such file in rev %s\n') % (
655 self.ui.warn(_('%s: No such file in rev %s\n') % (
656 util.pathto(self.getcwd(), fn), short(node)))
656 util.pathto(self.getcwd(), fn), short(node)))
657 else:
657 else:
658 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
658 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
659 yield src, fn
659 yield src, fn
660
660
661 def changes(self, node1=None, node2=None, files=[], match=util.always,
661 def changes(self, node1=None, node2=None, files=[], match=util.always,
662 wlock=None, show_ignored=None):
662 wlock=None, show_ignored=None):
663 """return changes between two nodes or node and working directory
663 """return changes between two nodes or node and working directory
664
664
665 If node1 is None, use the first dirstate parent instead.
665 If node1 is None, use the first dirstate parent instead.
666 If node2 is None, compare node1 with working directory.
666 If node2 is None, compare node1 with working directory.
667 """
667 """
668
668
669 def fcmp(fn, mf):
669 def fcmp(fn, mf):
670 t1 = self.wread(fn)
670 t1 = self.wread(fn)
671 t2 = self.file(fn).read(mf.get(fn, nullid))
671 t2 = self.file(fn).read(mf.get(fn, nullid))
672 return cmp(t1, t2)
672 return cmp(t1, t2)
673
673
674 def mfmatches(node):
674 def mfmatches(node):
675 change = self.changelog.read(node)
675 change = self.changelog.read(node)
676 mf = dict(self.manifest.read(change[0]))
676 mf = dict(self.manifest.read(change[0]))
677 for fn in mf.keys():
677 for fn in mf.keys():
678 if not match(fn):
678 if not match(fn):
679 del mf[fn]
679 del mf[fn]
680 return mf
680 return mf
681
681
682 modified, added, removed, deleted, unknown, ignored = [],[],[],[],[],[]
682 modified, added, removed, deleted, unknown, ignored = [],[],[],[],[],[]
683 compareworking = False
683 compareworking = False
684 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
684 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
685 compareworking = True
685 compareworking = True
686
686
687 if not compareworking:
687 if not compareworking:
688 # read the manifest from node1 before the manifest from node2,
688 # read the manifest from node1 before the manifest from node2,
689 # so that we'll hit the manifest cache if we're going through
689 # so that we'll hit the manifest cache if we're going through
690 # all the revisions in parent->child order.
690 # all the revisions in parent->child order.
691 mf1 = mfmatches(node1)
691 mf1 = mfmatches(node1)
692
692
693 # are we comparing the working directory?
693 # are we comparing the working directory?
694 if not node2:
694 if not node2:
695 if not wlock:
695 if not wlock:
696 try:
696 try:
697 wlock = self.wlock(wait=0)
697 wlock = self.wlock(wait=0)
698 except lock.LockException:
698 except lock.LockException:
699 wlock = None
699 wlock = None
700 lookup, modified, added, removed, deleted, unknown, ignored = (
700 lookup, modified, added, removed, deleted, unknown, ignored = (
701 self.dirstate.changes(files, match, show_ignored))
701 self.dirstate.changes(files, match, show_ignored))
702
702
703 # are we comparing working dir against its parent?
703 # are we comparing working dir against its parent?
704 if compareworking:
704 if compareworking:
705 if lookup:
705 if lookup:
706 # do a full compare of any files that might have changed
706 # do a full compare of any files that might have changed
707 mf2 = mfmatches(self.dirstate.parents()[0])
707 mf2 = mfmatches(self.dirstate.parents()[0])
708 for f in lookup:
708 for f in lookup:
709 if fcmp(f, mf2):
709 if fcmp(f, mf2):
710 modified.append(f)
710 modified.append(f)
711 elif wlock is not None:
711 elif wlock is not None:
712 self.dirstate.update([f], "n")
712 self.dirstate.update([f], "n")
713 else:
713 else:
714 # we are comparing working dir against non-parent
714 # we are comparing working dir against non-parent
715 # generate a pseudo-manifest for the working dir
715 # generate a pseudo-manifest for the working dir
716 mf2 = mfmatches(self.dirstate.parents()[0])
716 mf2 = mfmatches(self.dirstate.parents()[0])
717 for f in lookup + modified + added:
717 for f in lookup + modified + added:
718 mf2[f] = ""
718 mf2[f] = ""
719 for f in removed:
719 for f in removed:
720 if f in mf2:
720 if f in mf2:
721 del mf2[f]
721 del mf2[f]
722 else:
722 else:
723 # we are comparing two revisions
723 # we are comparing two revisions
724 deleted, unknown, ignored = [], [], []
724 deleted, unknown, ignored = [], [], []
725 mf2 = mfmatches(node2)
725 mf2 = mfmatches(node2)
726
726
727 if not compareworking:
727 if not compareworking:
728 # flush lists from dirstate before comparing manifests
728 # flush lists from dirstate before comparing manifests
729 modified, added = [], []
729 modified, added = [], []
730
730
731 # make sure to sort the files so we talk to the disk in a
731 # make sure to sort the files so we talk to the disk in a
732 # reasonable order
732 # reasonable order
733 mf2keys = mf2.keys()
733 mf2keys = mf2.keys()
734 mf2keys.sort()
734 mf2keys.sort()
735 for fn in mf2keys:
735 for fn in mf2keys:
736 if mf1.has_key(fn):
736 if mf1.has_key(fn):
737 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
737 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
738 modified.append(fn)
738 modified.append(fn)
739 del mf1[fn]
739 del mf1[fn]
740 else:
740 else:
741 added.append(fn)
741 added.append(fn)
742
742
743 removed = mf1.keys()
743 removed = mf1.keys()
744
744
745 # sort and return results:
745 # sort and return results:
746 for l in modified, added, removed, deleted, unknown, ignored:
746 for l in modified, added, removed, deleted, unknown, ignored:
747 l.sort()
747 l.sort()
748 if show_ignored is None:
748 if show_ignored is None:
749 return (modified, added, removed, deleted, unknown)
749 return (modified, added, removed, deleted, unknown)
750 else:
750 else:
751 return (modified, added, removed, deleted, unknown, ignored)
751 return (modified, added, removed, deleted, unknown, ignored)
752
752
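# The two-revision branch of changes() above reduces to comparing two
# manifest dictionaries ({filename: filenode}).  A simplified standalone
# sketch of that classification step (it omits the working-directory case
# where an empty node forces a content comparison; names invented for
# illustration):
def classify_manifests(mf1, mf2):
    modified, added = [], []
    remaining = dict(mf1)
    for fn in sorted(mf2):
        if fn in remaining:
            if remaining[fn] != mf2[fn]:
                modified.append(fn)
            del remaining[fn]
        else:
            added.append(fn)
    removed = sorted(remaining)
    return modified, added, removed

# classify_manifests({'a': 'n1', 'b': 'n2'}, {'a': 'n1*', 'c': 'n3'})
#   -> (['a'], ['c'], ['b'])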
753 def add(self, list, wlock=None):
753 def add(self, list, wlock=None):
754 if not wlock:
754 if not wlock:
755 wlock = self.wlock()
755 wlock = self.wlock()
756 for f in list:
756 for f in list:
757 p = self.wjoin(f)
757 p = self.wjoin(f)
758 if not os.path.exists(p):
758 if not os.path.exists(p):
759 self.ui.warn(_("%s does not exist!\n") % f)
759 self.ui.warn(_("%s does not exist!\n") % f)
760 elif not os.path.isfile(p):
760 elif not os.path.isfile(p):
761 self.ui.warn(_("%s not added: only files supported currently\n")
761 self.ui.warn(_("%s not added: only files supported currently\n")
762 % f)
762 % f)
763 elif self.dirstate.state(f) in 'an':
763 elif self.dirstate.state(f) in 'an':
764 self.ui.warn(_("%s already tracked!\n") % f)
764 self.ui.warn(_("%s already tracked!\n") % f)
765 else:
765 else:
766 self.dirstate.update([f], "a")
766 self.dirstate.update([f], "a")
767
767
768 def forget(self, list, wlock=None):
768 def forget(self, list, wlock=None):
769 if not wlock:
769 if not wlock:
770 wlock = self.wlock()
770 wlock = self.wlock()
771 for f in list:
771 for f in list:
772 if self.dirstate.state(f) not in 'ai':
772 if self.dirstate.state(f) not in 'ai':
773 self.ui.warn(_("%s not added!\n") % f)
773 self.ui.warn(_("%s not added!\n") % f)
774 else:
774 else:
775 self.dirstate.forget([f])
775 self.dirstate.forget([f])
776
776
777 def remove(self, list, unlink=False, wlock=None):
777 def remove(self, list, unlink=False, wlock=None):
778 if unlink:
778 if unlink:
779 for f in list:
779 for f in list:
780 try:
780 try:
781 util.unlink(self.wjoin(f))
781 util.unlink(self.wjoin(f))
782 except OSError, inst:
782 except OSError, inst:
783 if inst.errno != errno.ENOENT:
783 if inst.errno != errno.ENOENT:
784 raise
784 raise
785 if not wlock:
785 if not wlock:
786 wlock = self.wlock()
786 wlock = self.wlock()
787 for f in list:
787 for f in list:
788 p = self.wjoin(f)
788 p = self.wjoin(f)
789 if os.path.exists(p):
789 if os.path.exists(p):
790 self.ui.warn(_("%s still exists!\n") % f)
790 self.ui.warn(_("%s still exists!\n") % f)
791 elif self.dirstate.state(f) == 'a':
791 elif self.dirstate.state(f) == 'a':
792 self.dirstate.forget([f])
792 self.dirstate.forget([f])
793 elif f not in self.dirstate:
793 elif f not in self.dirstate:
794 self.ui.warn(_("%s not tracked!\n") % f)
794 self.ui.warn(_("%s not tracked!\n") % f)
795 else:
795 else:
796 self.dirstate.update([f], "r")
796 self.dirstate.update([f], "r")
797
797
798 def undelete(self, list, wlock=None):
798 def undelete(self, list, wlock=None):
799 p = self.dirstate.parents()[0]
799 p = self.dirstate.parents()[0]
800 mn = self.changelog.read(p)[0]
800 mn = self.changelog.read(p)[0]
801 mf = self.manifest.readflags(mn)
801 mf = self.manifest.readflags(mn)
802 m = self.manifest.read(mn)
802 m = self.manifest.read(mn)
803 if not wlock:
803 if not wlock:
804 wlock = self.wlock()
804 wlock = self.wlock()
805 for f in list:
805 for f in list:
806 if self.dirstate.state(f) not in "r":
806 if self.dirstate.state(f) not in "r":
807 self.ui.warn("%s not removed!\n" % f)
807 self.ui.warn("%s not removed!\n" % f)
808 else:
808 else:
809 t = self.file(f).read(m[f])
809 t = self.file(f).read(m[f])
810 self.wwrite(f, t)
810 self.wwrite(f, t)
811 util.set_exec(self.wjoin(f), mf[f])
811 util.set_exec(self.wjoin(f), mf[f])
812 self.dirstate.update([f], "n")
812 self.dirstate.update([f], "n")
813
813
814 def copy(self, source, dest, wlock=None):
814 def copy(self, source, dest, wlock=None):
815 p = self.wjoin(dest)
815 p = self.wjoin(dest)
816 if not os.path.exists(p):
816 if not os.path.exists(p):
817 self.ui.warn(_("%s does not exist!\n") % dest)
817 self.ui.warn(_("%s does not exist!\n") % dest)
818 elif not os.path.isfile(p):
818 elif not os.path.isfile(p):
819 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
819 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
820 else:
820 else:
821 if not wlock:
821 if not wlock:
822 wlock = self.wlock()
822 wlock = self.wlock()
823 if self.dirstate.state(dest) == '?':
823 if self.dirstate.state(dest) == '?':
824 self.dirstate.update([dest], "a")
824 self.dirstate.update([dest], "a")
825 self.dirstate.copy(source, dest)
825 self.dirstate.copy(source, dest)
826
826
827 def heads(self, start=None):
827 def heads(self, start=None):
828 heads = self.changelog.heads(start)
828 heads = self.changelog.heads(start)
829 # sort the output in rev descending order
829 # sort the output in rev descending order
830 heads = [(-self.changelog.rev(h), h) for h in heads]
830 heads = [(-self.changelog.rev(h), h) for h in heads]
831 heads.sort()
831 heads.sort()
832 return [n for (r, n) in heads]
832 return [n for (r, n) in heads]
833
833
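# heads() above sorts head nodes into descending revision order by sorting
# (-rev, node) pairs.  The same decorate-sort-undecorate idea as a
# standalone sketch, with rev() supplied by the caller (invented names):
def sort_heads_descending(heads, rev):
    decorated = [(-rev(h), h) for h in heads]
    decorated.sort()
    return [h for (negrev, h) in decorated]

# With rev = {'x': 3, 'y': 7, 'z': 1}.get:
# sort_heads_descending(['x', 'y', 'z'], rev) -> ['y', 'x', 'z']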
834 # branchlookup returns a dict giving a list of branches for
834 # branchlookup returns a dict giving a list of branches for
835 # each head. A branch is defined as the tag of a node or
835 # each head. A branch is defined as the tag of a node or
836 # the branch of the node's parents. If a node has multiple
836 # the branch of the node's parents. If a node has multiple
837 # branch tags, tags are eliminated if they are visible from other
837 # branch tags, tags are eliminated if they are visible from other
838 # branch tags.
838 # branch tags.
839 #
839 #
840 # So, for this graph: a->b->c->d->e
840 # So, for this graph: a->b->c->d->e
841 # \ /
841 # \ /
842 # aa -----/
842 # aa -----/
843 # a has tag 2.6.12
843 # a has tag 2.6.12
844 # d has tag 2.6.13
844 # d has tag 2.6.13
845 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
845 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
846 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is eliminated
846 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is eliminated
847 # from the list.
847 # from the list.
848 #
848 #
849 # It is possible that more than one head will have the same branch tag.
849 # It is possible that more than one head will have the same branch tag.
850 # Callers need to check the result for multiple heads under the same
850 # Callers need to check the result for multiple heads under the same
851 # branch tag if that is a problem for them (i.e. checkout of a specific
851 # branch tag if that is a problem for them (i.e. checkout of a specific
852 # branch).
852 # branch).
853 #
853 #
854 # passing in a specific branch will limit the depth of the search
854 # passing in a specific branch will limit the depth of the search
855 # through the parents. It won't limit the branches returned in the
855 # through the parents. It won't limit the branches returned in the
856 # result though.
856 # result though.
857 def branchlookup(self, heads=None, branch=None):
857 def branchlookup(self, heads=None, branch=None):
858 if not heads:
858 if not heads:
859 heads = self.heads()
859 heads = self.heads()
860 headt = [ h for h in heads ]
860 headt = [ h for h in heads ]
861 chlog = self.changelog
861 chlog = self.changelog
862 branches = {}
862 branches = {}
863 merges = []
863 merges = []
864 seenmerge = {}
864 seenmerge = {}
865
865
866 # traverse the tree once for each head, recording in the branches
866 # traverse the tree once for each head, recording in the branches
867 # dict which tags are visible from this head. The branches
867 # dict which tags are visible from this head. The branches
868 # dict also records which tags are visible from each tag
868 # dict also records which tags are visible from each tag
869 # while we traverse.
869 # while we traverse.
870 while headt or merges:
870 while headt or merges:
871 if merges:
871 if merges:
872 n, found = merges.pop()
872 n, found = merges.pop()
873 visit = [n]
873 visit = [n]
874 else:
874 else:
875 h = headt.pop()
875 h = headt.pop()
876 visit = [h]
876 visit = [h]
877 found = [h]
877 found = [h]
878 seen = {}
878 seen = {}
879 while visit:
879 while visit:
880 n = visit.pop()
880 n = visit.pop()
881 if n in seen:
881 if n in seen:
882 continue
882 continue
883 pp = chlog.parents(n)
883 pp = chlog.parents(n)
884 tags = self.nodetags(n)
884 tags = self.nodetags(n)
885 if tags:
885 if tags:
886 for x in tags:
886 for x in tags:
887 if x == 'tip':
887 if x == 'tip':
888 continue
888 continue
889 for f in found:
889 for f in found:
890 branches.setdefault(f, {})[n] = 1
890 branches.setdefault(f, {})[n] = 1
891 branches.setdefault(n, {})[n] = 1
891 branches.setdefault(n, {})[n] = 1
892 break
892 break
893 if n not in found:
893 if n not in found:
894 found.append(n)
894 found.append(n)
895 if branch in tags:
895 if branch in tags:
896 continue
896 continue
897 seen[n] = 1
897 seen[n] = 1
898 if pp[1] != nullid and n not in seenmerge:
898 if pp[1] != nullid and n not in seenmerge:
899 merges.append((pp[1], [x for x in found]))
899 merges.append((pp[1], [x for x in found]))
900 seenmerge[n] = 1
900 seenmerge[n] = 1
901 if pp[0] != nullid:
901 if pp[0] != nullid:
902 visit.append(pp[0])
902 visit.append(pp[0])
903 # traverse the branches dict, eliminating branch tags from each
903 # traverse the branches dict, eliminating branch tags from each
904 # head that are visible from another branch tag for that head.
904 # head that are visible from another branch tag for that head.
905 out = {}
905 out = {}
906 viscache = {}
906 viscache = {}
907 for h in heads:
907 for h in heads:
908 def visible(node):
908 def visible(node):
909 if node in viscache:
909 if node in viscache:
910 return viscache[node]
910 return viscache[node]
911 ret = {}
911 ret = {}
912 visit = [node]
912 visit = [node]
913 while visit:
913 while visit:
914 x = visit.pop()
914 x = visit.pop()
915 if x in viscache:
915 if x in viscache:
916 ret.update(viscache[x])
916 ret.update(viscache[x])
917 elif x not in ret:
917 elif x not in ret:
918 ret[x] = 1
918 ret[x] = 1
919 if x in branches:
919 if x in branches:
920 visit[len(visit):] = branches[x].keys()
920 visit[len(visit):] = branches[x].keys()
921 viscache[node] = ret
921 viscache[node] = ret
922 return ret
922 return ret
923 if h not in branches:
923 if h not in branches:
924 continue
924 continue
925 # O(n^2), but somewhat limited. This only searches the
925 # O(n^2), but somewhat limited. This only searches the
926 # tags visible from a specific head, not all the tags in the
926 # tags visible from a specific head, not all the tags in the
927 # whole repo.
927 # whole repo.
928 for b in branches[h]:
928 for b in branches[h]:
929 vis = False
929 vis = False
930 for bb in branches[h].keys():
930 for bb in branches[h].keys():
931 if b != bb:
931 if b != bb:
932 if b in visible(bb):
932 if b in visible(bb):
933 vis = True
933 vis = True
934 break
934 break
935 if not vis:
935 if not vis:
936 l = out.setdefault(h, [])
936 l = out.setdefault(h, [])
937 l[len(l):] = self.nodetags(b)
937 l[len(l):] = self.nodetags(b)
938 return out
938 return out
939
939
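# The visible() helper inside branchlookup() above is a memoised
# reachability walk over the branches mapping: a branch tag B is dropped
# from a head's list when it can be reached from another tag BB of the same
# head.  A simplified standalone sketch of that elimination step, with
# `reachable` as a plain adjacency dict and no memoisation (all names
# invented for illustration):
def eliminate_visible_tags(tag_nodes, reachable):
    def visible_from(start):
        seen, stack = set(), [start]
        while stack:
            x = stack.pop()
            if x not in seen:
                seen.add(x)
                stack.extend(reachable.get(x, ()))
        return seen
    return [b for b in tag_nodes
            if not any(b in visible_from(bb) for bb in tag_nodes if bb != b)]

# eliminate_visible_tags(['2.6.12', '2.6.13'], {'2.6.13': ['2.6.12']})
#   -> ['2.6.13']     # 2.6.12 is visible from 2.6.13, so it is dropped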
940 def branches(self, nodes):
940 def branches(self, nodes):
941 if not nodes:
941 if not nodes:
942 nodes = [self.changelog.tip()]
942 nodes = [self.changelog.tip()]
943 b = []
943 b = []
944 for n in nodes:
944 for n in nodes:
945 t = n
945 t = n
946 while 1:
946 while 1:
947 p = self.changelog.parents(n)
947 p = self.changelog.parents(n)
948 if p[1] != nullid or p[0] == nullid:
948 if p[1] != nullid or p[0] == nullid:
949 b.append((t, n, p[0], p[1]))
949 b.append((t, n, p[0], p[1]))
950 break
950 break
951 n = p[0]
951 n = p[0]
952 return b
952 return b
953
953
954 def between(self, pairs):
954 def between(self, pairs):
955 r = []
955 r = []
956
956
957 for top, bottom in pairs:
957 for top, bottom in pairs:
958 n, l, i = top, [], 0
958 n, l, i = top, [], 0
959 f = 1
959 f = 1
960
960
961 while n != bottom:
961 while n != bottom:
962 p = self.changelog.parents(n)[0]
962 p = self.changelog.parents(n)[0]
963 if i == f:
963 if i == f:
964 l.append(n)
964 l.append(n)
965 f = f * 2
965 f = f * 2
966 n = p
966 n = p
967 i += 1
967 i += 1
968
968
969 r.append(l)
969 r.append(l)
970
970
971 return r
971 return r
972
972
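# between() above samples a linear stretch of history at exponentially
# growing first-parent distances from the top node (1, 2, 4, 8, ...); those
# samples are what findincoming() later bisects.  A standalone sketch over
# a plain {node: first_parent} mapping (invented for illustration):
def sample_between(top, bottom, first_parent):
    n, samples, i, gap = top, [], 0, 1
    while n != bottom:
        if i == gap:
            samples.append(n)
            gap *= 2
        n = first_parent[n]
        i += 1
    return samples

# For a chain 9 -> 8 -> ... -> 0 (first_parent[k] == k - 1):
# sample_between(9, 0, {k: k - 1 for k in range(1, 10)}) -> [8, 7, 5, 1]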
973 def findincoming(self, remote, base=None, heads=None, force=False):
973 def findincoming(self, remote, base=None, heads=None, force=False):
974 """Return list of roots of the subsets of missing nodes from remote
974 """Return list of roots of the subsets of missing nodes from remote
975
975
976 If base dict is specified, assume that these nodes and their parents
976 If base dict is specified, assume that these nodes and their parents
977 exist on the remote side and that no child of a node of base exists
977 exist on the remote side and that no child of a node of base exists
978 in both remote and self.
978 in both remote and self.
979 Furthermore, base will be updated to include the nodes that exist in
979 Furthermore, base will be updated to include the nodes that exist in
980 both self and remote but none of whose children do.
980 both self and remote but none of whose children do.
981 If a list of heads is specified, return only nodes which are heads
981 If a list of heads is specified, return only nodes which are heads
982 or ancestors of these heads.
982 or ancestors of these heads.
983
983
984 All the ancestors of base are in self and in remote.
984 All the ancestors of base are in self and in remote.
985 All the descendants of the list returned are missing in self.
985 All the descendants of the list returned are missing in self.
986 (and so we know that the rest of the nodes are missing in remote, see
986 (and so we know that the rest of the nodes are missing in remote, see
987 outgoing)
987 outgoing)
988 """
988 """
989 m = self.changelog.nodemap
989 m = self.changelog.nodemap
990 search = []
990 search = []
991 fetch = {}
991 fetch = {}
992 seen = {}
992 seen = {}
993 seenbranch = {}
993 seenbranch = {}
994 if base == None:
994 if base == None:
995 base = {}
995 base = {}
996
996
997 if not heads:
997 if not heads:
998 heads = remote.heads()
998 heads = remote.heads()
999
999
1000 if self.changelog.tip() == nullid:
1000 if self.changelog.tip() == nullid:
1001 base[nullid] = 1
1001 base[nullid] = 1
1002 if heads != [nullid]:
1002 if heads != [nullid]:
1003 return [nullid]
1003 return [nullid]
1004 return []
1004 return []
1005
1005
1006 # assume we're closer to the tip than the root
1006 # assume we're closer to the tip than the root
1007 # and start by examining the heads
1007 # and start by examining the heads
1008 self.ui.status(_("searching for changes\n"))
1008 self.ui.status(_("searching for changes\n"))
1009
1009
1010 unknown = []
1010 unknown = []
1011 for h in heads:
1011 for h in heads:
1012 if h not in m:
1012 if h not in m:
1013 unknown.append(h)
1013 unknown.append(h)
1014 else:
1014 else:
1015 base[h] = 1
1015 base[h] = 1
1016
1016
1017 if not unknown:
1017 if not unknown:
1018 return []
1018 return []
1019
1019
1020 req = dict.fromkeys(unknown)
1020 req = dict.fromkeys(unknown)
1021 reqcnt = 0
1021 reqcnt = 0
1022
1022
1023 # search through remote branches
1023 # search through remote branches
1024 # a 'branch' here is a linear segment of history, with four parts:
1024 # a 'branch' here is a linear segment of history, with four parts:
1025 # head, root, first parent, second parent
1025 # head, root, first parent, second parent
1026 # (a branch always has two parents (or none) by definition)
1026 # (a branch always has two parents (or none) by definition)
1027 unknown = remote.branches(unknown)
1027 unknown = remote.branches(unknown)
1028 while unknown:
1028 while unknown:
1029 r = []
1029 r = []
1030 while unknown:
1030 while unknown:
1031 n = unknown.pop(0)
1031 n = unknown.pop(0)
1032 if n[0] in seen:
1032 if n[0] in seen:
1033 continue
1033 continue
1034
1034
1035 self.ui.debug(_("examining %s:%s\n")
1035 self.ui.debug(_("examining %s:%s\n")
1036 % (short(n[0]), short(n[1])))
1036 % (short(n[0]), short(n[1])))
1037 if n[0] == nullid: # found the end of the branch
1037 if n[0] == nullid: # found the end of the branch
1038 pass
1038 pass
1039 elif n in seenbranch:
1039 elif n in seenbranch:
1040 self.ui.debug(_("branch already found\n"))
1040 self.ui.debug(_("branch already found\n"))
1041 continue
1041 continue
1042 elif n[1] and n[1] in m: # do we know the base?
1042 elif n[1] and n[1] in m: # do we know the base?
1043 self.ui.debug(_("found incomplete branch %s:%s\n")
1043 self.ui.debug(_("found incomplete branch %s:%s\n")
1044 % (short(n[0]), short(n[1])))
1044 % (short(n[0]), short(n[1])))
1045 search.append(n) # schedule branch range for scanning
1045 search.append(n) # schedule branch range for scanning
1046 seenbranch[n] = 1
1046 seenbranch[n] = 1
1047 else:
1047 else:
1048 if n[1] not in seen and n[1] not in fetch:
1048 if n[1] not in seen and n[1] not in fetch:
1049 if n[2] in m and n[3] in m:
1049 if n[2] in m and n[3] in m:
1050 self.ui.debug(_("found new changeset %s\n") %
1050 self.ui.debug(_("found new changeset %s\n") %
1051 short(n[1]))
1051 short(n[1]))
1052 fetch[n[1]] = 1 # earliest unknown
1052 fetch[n[1]] = 1 # earliest unknown
1053 for p in n[2:4]:
1053 for p in n[2:4]:
1054 if p in m:
1054 if p in m:
1055 base[p] = 1 # latest known
1055 base[p] = 1 # latest known
1056
1056
1057 for p in n[2:4]:
1057 for p in n[2:4]:
1058 if p not in req and p not in m:
1058 if p not in req and p not in m:
1059 r.append(p)
1059 r.append(p)
1060 req[p] = 1
1060 req[p] = 1
1061 seen[n[0]] = 1
1061 seen[n[0]] = 1
1062
1062
1063 if r:
1063 if r:
1064 reqcnt += 1
1064 reqcnt += 1
1065 self.ui.debug(_("request %d: %s\n") %
1065 self.ui.debug(_("request %d: %s\n") %
1066 (reqcnt, " ".join(map(short, r))))
1066 (reqcnt, " ".join(map(short, r))))
1067 for p in range(0, len(r), 10):
1067 for p in range(0, len(r), 10):
1068 for b in remote.branches(r[p:p+10]):
1068 for b in remote.branches(r[p:p+10]):
1069 self.ui.debug(_("received %s:%s\n") %
1069 self.ui.debug(_("received %s:%s\n") %
1070 (short(b[0]), short(b[1])))
1070 (short(b[0]), short(b[1])))
1071 unknown.append(b)
1071 unknown.append(b)
1072
1072
1073 # do binary search on the branches we found
1073 # do binary search on the branches we found
1074 while search:
1074 while search:
1075 n = search.pop(0)
1075 n = search.pop(0)
1076 reqcnt += 1
1076 reqcnt += 1
1077 l = remote.between([(n[0], n[1])])[0]
1077 l = remote.between([(n[0], n[1])])[0]
1078 l.append(n[1])
1078 l.append(n[1])
1079 p = n[0]
1079 p = n[0]
1080 f = 1
1080 f = 1
1081 for i in l:
1081 for i in l:
1082 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1082 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1083 if i in m:
1083 if i in m:
1084 if f <= 2:
1084 if f <= 2:
1085 self.ui.debug(_("found new branch changeset %s\n") %
1085 self.ui.debug(_("found new branch changeset %s\n") %
1086 short(p))
1086 short(p))
1087 fetch[p] = 1
1087 fetch[p] = 1
1088 base[i] = 1
1088 base[i] = 1
1089 else:
1089 else:
1090 self.ui.debug(_("narrowed branch search to %s:%s\n")
1090 self.ui.debug(_("narrowed branch search to %s:%s\n")
1091 % (short(p), short(i)))
1091 % (short(p), short(i)))
1092 search.append((p, i))
1092 search.append((p, i))
1093 break
1093 break
1094 p, f = i, f * 2
1094 p, f = i, f * 2
1095
1095
1096 # sanity check our fetch list
1096 # sanity check our fetch list
1097 for f in fetch.keys():
1097 for f in fetch.keys():
1098 if f in m:
1098 if f in m:
1099 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1099 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1100
1100
1101 if base.keys() == [nullid]:
1101 if base.keys() == [nullid]:
1102 if force:
1102 if force:
1103 self.ui.warn(_("warning: repository is unrelated\n"))
1103 self.ui.warn(_("warning: repository is unrelated\n"))
1104 else:
1104 else:
1105 raise util.Abort(_("repository is unrelated"))
1105 raise util.Abort(_("repository is unrelated"))
1106
1106
1107 self.ui.note(_("found new changesets starting at ") +
1107 self.ui.note(_("found new changesets starting at ") +
1108 " ".join([short(f) for f in fetch]) + "\n")
1108 " ".join([short(f) for f in fetch]) + "\n")
1109
1109
1110 self.ui.debug(_("%d total queries\n") % reqcnt)
1110 self.ui.debug(_("%d total queries\n") % reqcnt)
1111
1111
1112 return fetch.keys()
1112 return fetch.keys()
1113
1113
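# The "do binary search on the branches we found" loop above narrows an
# unknown->known branch segment using the exponentially spaced samples that
# remote.between() returns, with the branch base appended at the end.  One
# narrowing step in isolation might look like the sketch below, where
# known() says whether a node already exists locally and the last sample is
# known by construction (the names and the return convention are invented
# for illustration):
def narrow_segment(segment_top, samples, known):
    p, gap = segment_top, 1
    for i in samples:
        if known(i):
            if gap <= 2:
                return 'found', p       # p is the earliest unknown node
            return 'search', (p, i)     # bisect this smaller segment next
        p, gap = i, gap * 2
    raise ValueError('samples must end with a locally known node')

# With a chain 9 -> 8 -> ... -> 0 where only revisions <= 5 exist locally:
# narrow_segment(9, [8, 7, 5, 1, 0], lambda n: n <= 5) -> ('search', (7, 5))
# narrow_segment(7, [6, 5],          lambda n: n <= 5) -> ('found', 6)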
1114 def findoutgoing(self, remote, base=None, heads=None, force=False):
1114 def findoutgoing(self, remote, base=None, heads=None, force=False):
1115 """Return list of nodes that are roots of subsets not in remote
1115 """Return list of nodes that are roots of subsets not in remote
1116
1116
1117 If base dict is specified, assume that these nodes and their parents
1117 If base dict is specified, assume that these nodes and their parents
1118 exist on the remote side.
1118 exist on the remote side.
1119 If a list of heads is specified, return only nodes which are heads
1119 If a list of heads is specified, return only nodes which are heads
1120 or ancestors of these heads, and return a second element which
1120 or ancestors of these heads, and return a second element which
1121 contains all remote heads which get new children.
1121 contains all remote heads which get new children.
1122 """
1122 """
1123 if base == None:
1123 if base == None:
1124 base = {}
1124 base = {}
1125 self.findincoming(remote, base, heads, force=force)
1125 self.findincoming(remote, base, heads, force=force)
1126
1126
1127 self.ui.debug(_("common changesets up to ")
1127 self.ui.debug(_("common changesets up to ")
1128 + " ".join(map(short, base.keys())) + "\n")
1128 + " ".join(map(short, base.keys())) + "\n")
1129
1129
1130 remain = dict.fromkeys(self.changelog.nodemap)
1130 remain = dict.fromkeys(self.changelog.nodemap)
1131
1131
1132 # prune everything remote has from the tree
1132 # prune everything remote has from the tree
1133 del remain[nullid]
1133 del remain[nullid]
1134 remove = base.keys()
1134 remove = base.keys()
1135 while remove:
1135 while remove:
1136 n = remove.pop(0)
1136 n = remove.pop(0)
1137 if n in remain:
1137 if n in remain:
1138 del remain[n]
1138 del remain[n]
1139 for p in self.changelog.parents(n):
1139 for p in self.changelog.parents(n):
1140 remove.append(p)
1140 remove.append(p)
1141
1141
1142 # find every node whose parents have been pruned
1142 # find every node whose parents have been pruned
1143 subset = []
1143 subset = []
1144 # find every remote head that will get new children
1144 # find every remote head that will get new children
1145 updated_heads = {}
1145 updated_heads = {}
1146 for n in remain:
1146 for n in remain:
1147 p1, p2 = self.changelog.parents(n)
1147 p1, p2 = self.changelog.parents(n)
1148 if p1 not in remain and p2 not in remain:
1148 if p1 not in remain and p2 not in remain:
1149 subset.append(n)
1149 subset.append(n)
1150 if heads:
1150 if heads:
1151 if p1 in heads:
1151 if p1 in heads:
1152 updated_heads[p1] = True
1152 updated_heads[p1] = True
1153 if p2 in heads:
1153 if p2 in heads:
1154 updated_heads[p2] = True
1154 updated_heads[p2] = True
1155
1155
1156 # this is the set of all roots we have to push
1156 # this is the set of all roots we have to push
1157 if heads:
1157 if heads:
1158 return subset, updated_heads.keys()
1158 return subset, updated_heads.keys()
1159 else:
1159 else:
1160 return subset
1160 return subset
1161
1161
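# findoutgoing() above prunes every node the remote already has (walking
# parents outward from the common "base" nodes) and then reports the roots
# of what is left: the nodes whose parents were all pruned.  A standalone
# sketch over a plain {node: (p1, p2)} parent map, with None standing in
# for nullid (names invented for illustration):
def outgoing_roots(all_nodes, parents, base):
    remain = set(all_nodes)
    queue = list(base)
    while queue:
        n = queue.pop()
        if n in remain:
            remain.discard(n)
            queue.extend(p for p in parents[n] if p is not None)
    return [n for n in all_nodes
            if n in remain and all(p not in remain for p in parents[n])]

# History None -> a -> b -> c where the remote already has 'a':
# outgoing_roots(['a', 'b', 'c'],
#                {'a': (None, None), 'b': ('a', None), 'c': ('b', None)},
#                base=['a'])
#   -> ['b']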
1162 def pull(self, remote, heads=None, force=False):
1162 def pull(self, remote, heads=None, force=False):
1163 l = self.lock()
1163 l = self.lock()
1164
1164
1165 fetch = self.findincoming(remote, force=force)
1165 fetch = self.findincoming(remote, force=force)
1166 if fetch == [nullid]:
1166 if fetch == [nullid]:
1167 self.ui.status(_("requesting all changes\n"))
1167 self.ui.status(_("requesting all changes\n"))
1168
1168
1169 if not fetch:
1169 if not fetch:
1170 self.ui.status(_("no changes found\n"))
1170 self.ui.status(_("no changes found\n"))
1171 return 0
1171 return 0
1172
1172
1173 if heads is None:
1173 if heads is None:
1174 cg = remote.changegroup(fetch, 'pull')
1174 cg = remote.changegroup(fetch, 'pull')
1175 else:
1175 else:
1176 cg = remote.changegroupsubset(fetch, heads, 'pull')
1176 cg = remote.changegroupsubset(fetch, heads, 'pull')
1177 return self.addchangegroup(cg, 'pull')
1177 return self.addchangegroup(cg, 'pull')
1178
1178
1179 def push(self, remote, force=False, revs=None):
1179 def push(self, remote, force=False, revs=None):
1180 # there are two ways to push to remote repo:
1180 # there are two ways to push to remote repo:
1181 #
1181 #
1182 # addchangegroup assumes local user can lock remote
1182 # addchangegroup assumes local user can lock remote
1183 # repo (local filesystem, old ssh servers).
1183 # repo (local filesystem, old ssh servers).
1184 #
1184 #
1185 # unbundle assumes local user cannot lock remote repo (new ssh
1185 # unbundle assumes local user cannot lock remote repo (new ssh
1186 # servers, http servers).
1186 # servers, http servers).
1187
1187
1188 if remote.capable('unbundle'):
1188 if remote.capable('unbundle'):
1189 return self.push_unbundle(remote, force, revs)
1189 return self.push_unbundle(remote, force, revs)
1190 return self.push_addchangegroup(remote, force, revs)
1190 return self.push_addchangegroup(remote, force, revs)
1191
1191
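# push() above is a small capability dispatch: if the remote advertises
# 'unbundle', the bundle is sent for the remote to apply under its own
# lock; otherwise we fall back to taking the remote lock ourselves and
# calling addchangegroup.  The pattern in isolation, with invented names:
def choose_push_strategy(remote_capabilities):
    if 'unbundle' in remote_capabilities:
        return 'push_unbundle'          # new ssh servers, http servers
    return 'push_addchangegroup'        # local filesystem, old ssh servers

# choose_push_strategy({'unbundle'}) -> 'push_unbundle'
# choose_push_strategy(set())        -> 'push_addchangegroup'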
1192 def prepush(self, remote, force, revs):
1192 def prepush(self, remote, force, revs):
1193 base = {}
1193 base = {}
1194 remote_heads = remote.heads()
1194 remote_heads = remote.heads()
1195 inc = self.findincoming(remote, base, remote_heads, force=force)
1195 inc = self.findincoming(remote, base, remote_heads, force=force)
1196 if not force and inc:
1196 if not force and inc:
1197 self.ui.warn(_("abort: unsynced remote changes!\n"))
1197 self.ui.warn(_("abort: unsynced remote changes!\n"))
1198 self.ui.status(_("(did you forget to sync?"
1198 self.ui.status(_("(did you forget to sync?"
1199 " use push -f to force)\n"))
1199 " use push -f to force)\n"))
1200 return None, 1
1200 return None, 1
1201
1201
1202 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1202 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1203 if revs is not None:
1203 if revs is not None:
1204 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1204 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1205 else:
1205 else:
1206 bases, heads = update, self.changelog.heads()
1206 bases, heads = update, self.changelog.heads()
1207
1207
1208 if not bases:
1208 if not bases:
1209 self.ui.status(_("no changes found\n"))
1209 self.ui.status(_("no changes found\n"))
1210 return None, 1
1210 return None, 1
1211 elif not force:
1211 elif not force:
1212 # FIXME we don't properly detect creation of new heads
1212 # FIXME we don't properly detect creation of new heads
1213 # in the push -r case, assume the user knows what he's doing
1213 # in the push -r case, assume the user knows what he's doing
1214 if not revs and len(remote_heads) < len(heads) \
1214 if not revs and len(remote_heads) < len(heads) \
1215 and remote_heads != [nullid]:
1215 and remote_heads != [nullid]:
1216 self.ui.warn(_("abort: push creates new remote branches!\n"))
1216 self.ui.warn(_("abort: push creates new remote branches!\n"))
1217 self.ui.status(_("(did you forget to merge?"
1217 self.ui.status(_("(did you forget to merge?"
1218 " use push -f to force)\n"))
1218 " use push -f to force)\n"))
1219 return None, 1
1219 return None, 1
1220
1220
1221 if revs is None:
1221 if revs is None:
1222 cg = self.changegroup(update, 'push')
1222 cg = self.changegroup(update, 'push')
1223 else:
1223 else:
1224 cg = self.changegroupsubset(update, revs, 'push')
1224 cg = self.changegroupsubset(update, revs, 'push')
1225 return cg, remote_heads
1225 return cg, remote_heads
1226
1226
1227 def push_addchangegroup(self, remote, force, revs):
1227 def push_addchangegroup(self, remote, force, revs):
1228 lock = remote.lock()
1228 lock = remote.lock()
1229
1229
1230 ret = self.prepush(remote, force, revs)
1230 ret = self.prepush(remote, force, revs)
1231 if ret[0] is not None:
1231 if ret[0] is not None:
1232 cg, remote_heads = ret
1232 cg, remote_heads = ret
1233 return remote.addchangegroup(cg, 'push')
1233 return remote.addchangegroup(cg, 'push')
1234 return ret[1]
1234 return ret[1]
1235
1235
1236 def push_unbundle(self, remote, force, revs):
1236 def push_unbundle(self, remote, force, revs):
1237 # local repo finds heads on server, finds out what revs it
1237 # local repo finds heads on server, finds out what revs it
1238 # must push. once revs transferred, if server finds it has
1238 # must push. once revs transferred, if server finds it has
1239 # different heads (someone else won commit/push race), server
1239 # different heads (someone else won commit/push race), server
1240 # aborts.
1240 # aborts.
1241
1241
1242 ret = self.prepush(remote, force, revs)
1242 ret = self.prepush(remote, force, revs)
1243 if ret[0] is not None:
1243 if ret[0] is not None:
1244 cg, remote_heads = ret
1244 cg, remote_heads = ret
1245 if force: remote_heads = ['force']
1245 if force: remote_heads = ['force']
1246 return remote.unbundle(cg, remote_heads, 'push')
1246 return remote.unbundle(cg, remote_heads, 'push')
1247 return ret[1]
1247 return ret[1]
1248
1248
1249 def changegroupsubset(self, bases, heads, source):
1249 def changegroupsubset(self, bases, heads, source):
1250 """This function generates a changegroup consisting of all the nodes
1250 """This function generates a changegroup consisting of all the nodes
1251 that are descendants of any of the bases, and ancestors of any of
1251 that are descendants of any of the bases, and ancestors of any of
1252 the heads.
1252 the heads.
1253
1253
1254 It is fairly complex as determining which filenodes and which
1254 It is fairly complex as determining which filenodes and which
1255 manifest nodes need to be included for the changeset to be complete
1255 manifest nodes need to be included for the changeset to be complete
1256 is non-trivial.
1256 is non-trivial.
1257
1257
1258 Another wrinkle is doing the reverse, figuring out which changeset in
1258 Another wrinkle is doing the reverse, figuring out which changeset in
1259 the changegroup a particular filenode or manifestnode belongs to."""
1259 the changegroup a particular filenode or manifestnode belongs to."""
1260
1260
1261 self.hook('preoutgoing', throw=True, source=source)
1261 self.hook('preoutgoing', throw=True, source=source)
1262
1262
1263 # Set up some initial variables
1263 # Set up some initial variables
1264 # Make it easy to refer to self.changelog
1264 # Make it easy to refer to self.changelog
1265 cl = self.changelog
1265 cl = self.changelog
1266 # msng is short for missing - compute the list of changesets in this
1266 # msng is short for missing - compute the list of changesets in this
1267 # changegroup.
1267 # changegroup.
1268 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1268 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1269 # Some bases may turn out to be superfluous, and some heads may be
1269 # Some bases may turn out to be superfluous, and some heads may be
1270 # too. nodesbetween will return the minimal set of bases and heads
1270 # too. nodesbetween will return the minimal set of bases and heads
1271 # necessary to re-create the changegroup.
1271 # necessary to re-create the changegroup.
1272
1272
1273 # Known heads are the list of heads that it is assumed the recipient
1273 # Known heads are the list of heads that it is assumed the recipient
1274 # of this changegroup will know about.
1274 # of this changegroup will know about.
1275 knownheads = {}
1275 knownheads = {}
1276 # We assume that all parents of bases are known heads.
1276 # We assume that all parents of bases are known heads.
1277 for n in bases:
1277 for n in bases:
1278 for p in cl.parents(n):
1278 for p in cl.parents(n):
1279 if p != nullid:
1279 if p != nullid:
1280 knownheads[p] = 1
1280 knownheads[p] = 1
1281 knownheads = knownheads.keys()
1281 knownheads = knownheads.keys()
1282 if knownheads:
1282 if knownheads:
1283 # Now that we know what heads are known, we can compute which
1283 # Now that we know what heads are known, we can compute which
1284 # changesets are known. The recipient must know about all
1284 # changesets are known. The recipient must know about all
1285 # changesets required to reach the known heads from the null
1285 # changesets required to reach the known heads from the null
1286 # changeset.
1286 # changeset.
1287 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1287 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1288 junk = None
1288 junk = None
1289 # Transform the list into an ersatz set.
1289 # Transform the list into an ersatz set.
1290 has_cl_set = dict.fromkeys(has_cl_set)
1290 has_cl_set = dict.fromkeys(has_cl_set)
1291 else:
1291 else:
1292 # If there were no known heads, the recipient cannot be assumed to
1292 # If there were no known heads, the recipient cannot be assumed to
1293 # know about any changesets.
1293 # know about any changesets.
1294 has_cl_set = {}
1294 has_cl_set = {}
1295
1295
1296 # Make it easy to refer to self.manifest
1296 # Make it easy to refer to self.manifest
1297 mnfst = self.manifest
1297 mnfst = self.manifest
1298 # We don't know which manifests are missing yet
1298 # We don't know which manifests are missing yet
1299 msng_mnfst_set = {}
1299 msng_mnfst_set = {}
1300 # Nor do we know which filenodes are missing.
1300 # Nor do we know which filenodes are missing.
1301 msng_filenode_set = {}
1301 msng_filenode_set = {}
1302
1302
1303 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1303 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1304 junk = None
1304 junk = None
1305
1305
1306 # A changeset always belongs to itself, so the changenode lookup
1306 # A changeset always belongs to itself, so the changenode lookup
1307 # function for a changenode is identity.
1307 # function for a changenode is identity.
1308 def identity(x):
1308 def identity(x):
1309 return x
1309 return x
1310
1310
1311 # A function generating function. Sets up an environment for the
1311 # A function generating function. Sets up an environment for the
1312 # inner function.
1312 # inner function.
1313 def cmp_by_rev_func(revlog):
1313 def cmp_by_rev_func(revlog):
1314 # Compare two nodes by their revision number in the environment's
1314 # Compare two nodes by their revision number in the environment's
1315 # revision history. Since the revision number both represents the
1315 # revision history. Since the revision number both represents the
1316 # most efficient order to read the nodes in, and represents a
1316 # most efficient order to read the nodes in, and represents a
1317 # topological sorting of the nodes, this function is often useful.
1317 # topological sorting of the nodes, this function is often useful.
1318 def cmp_by_rev(a, b):
1318 def cmp_by_rev(a, b):
1319 return cmp(revlog.rev(a), revlog.rev(b))
1319 return cmp(revlog.rev(a), revlog.rev(b))
1320 return cmp_by_rev
1320 return cmp_by_rev
1321
1321
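# cmp_by_rev_func() above is a closure factory: it captures a revlog and
# hands back a comparison function, so node lists can be sorted in revision
# (and therefore topological, most-efficient-to-read) order.  A standalone
# sketch of the same idea expressed as a key function, the more common
# modern spelling of a sort order (invented names):
def rev_key_func(rev):
    def by_rev(node):
        return rev(node)        # rev() maps a node to its revision number
    return by_rev

# revs = {'na': 2, 'nb': 0, 'nc': 1}
# sorted(['na', 'nb', 'nc'], key=rev_key_func(revs.get)) -> ['nb', 'nc', 'na']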
1322 # If we determine that a particular file or manifest node must be a
1322 # If we determine that a particular file or manifest node must be a
1323 # node that the recipient of the changegroup will already have, we can
1323 # node that the recipient of the changegroup will already have, we can
1324 # also assume the recipient will have all of its ancestors. This function
1324 # also assume the recipient will have all of its ancestors. This function
1325 # prunes them from the set of missing nodes.
1325 # prunes them from the set of missing nodes.
1326 def prune_parents(revlog, hasset, msngset):
1326 def prune_parents(revlog, hasset, msngset):
1327 haslst = hasset.keys()
1327 haslst = hasset.keys()
1328 haslst.sort(cmp_by_rev_func(revlog))
1328 haslst.sort(cmp_by_rev_func(revlog))
1329 for node in haslst:
1329 for node in haslst:
1330 parentlst = [p for p in revlog.parents(node) if p != nullid]
1330 parentlst = [p for p in revlog.parents(node) if p != nullid]
1331 while parentlst:
1331 while parentlst:
1332 n = parentlst.pop()
1332 n = parentlst.pop()
1333 if n not in hasset:
1333 if n not in hasset:
1334 hasset[n] = 1
1334 hasset[n] = 1
1335 p = [p for p in revlog.parents(n) if p != nullid]
1335 p = [p for p in revlog.parents(n) if p != nullid]
1336 parentlst.extend(p)
1336 parentlst.extend(p)
1337 for n in hasset:
1337 for n in hasset:
1338 msngset.pop(n, None)
1338 msngset.pop(n, None)
1339
1339
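# prune_parents() above expands the "recipient already has these" set to
# every ancestor and then drops all of those nodes from the missing set.
# The same idea as a standalone sketch over a {node: (p1, p2)} parent map,
# with None standing in for nullid (names invented for illustration):
def prune_known_ancestors(parents, has, missing):
    stack = list(has)
    while stack:
        n = stack.pop()
        for p in parents.get(n, ()):
            if p is not None and p not in has:
                has.add(p)
                stack.append(p)
    for n in has:
        missing.pop(n, None)

# parents = {'c': ('b', None), 'b': ('a', None), 'a': (None, None)}
# missing = {'a': 1, 'b': 1, 'd': 1}
# prune_known_ancestors(parents, {'c'}, missing)   # missing is now {'d': 1}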
1340 # This is a function generating function used to set up an environment
1340 # This is a function generating function used to set up an environment
1341 # for the inner function to execute in.
1341 # for the inner function to execute in.
1342 def manifest_and_file_collector(changedfileset):
1342 def manifest_and_file_collector(changedfileset):
1343 # This is an information gathering function that gathers
1343 # This is an information gathering function that gathers
1344 # information from each changeset node that goes out as part of
1344 # information from each changeset node that goes out as part of
1345 # the changegroup. The information gathered is a list of which
1345 # the changegroup. The information gathered is a list of which
1346 # manifest nodes are potentially required (the recipient may
1346 # manifest nodes are potentially required (the recipient may
1347 # already have them) and the total list of all files which were
1347 # already have them) and the total list of all files which were
1348 # changed in any changeset in the changegroup.
1348 # changed in any changeset in the changegroup.
1349 #
1349 #
1350 # We also remember the first changenode each manifest is referenced
1350 # We also remember the first changenode each manifest is referenced
1351 # by, so we can later determine which changenode 'owns'
1351 # by, so we can later determine which changenode 'owns'
1352 # the manifest.
1352 # the manifest.
1353 def collect_manifests_and_files(clnode):
1353 def collect_manifests_and_files(clnode):
1354 c = cl.read(clnode)
1354 c = cl.read(clnode)
1355 for f in c[3]:
1355 for f in c[3]:
1356 # This is to make sure we only have one instance of each
1356 # This is to make sure we only have one instance of each
1357 # filename string for each filename.
1357 # filename string for each filename.
1358 changedfileset.setdefault(f, f)
1358 changedfileset.setdefault(f, f)
1359 msng_mnfst_set.setdefault(c[0], clnode)
1359 msng_mnfst_set.setdefault(c[0], clnode)
1360 return collect_manifests_and_files
1360 return collect_manifests_and_files
1361
1361
1362 # Figure out which manifest nodes (of the ones we think might be part
1362 # Figure out which manifest nodes (of the ones we think might be part
1363 # of the changegroup) the recipient must know about and remove them
1363 # of the changegroup) the recipient must know about and remove them
1364 # from the changegroup.
1364 # from the changegroup.
1365 def prune_manifests():
1365 def prune_manifests():
1366 has_mnfst_set = {}
1366 has_mnfst_set = {}
1367 for n in msng_mnfst_set:
1367 for n in msng_mnfst_set:
1368 # If a 'missing' manifest thinks it belongs to a changenode
1368 # If a 'missing' manifest thinks it belongs to a changenode
1369 # the recipient is assumed to have, obviously the recipient
1369 # the recipient is assumed to have, obviously the recipient
1370 # must have that manifest.
1370 # must have that manifest.
1371 linknode = cl.node(mnfst.linkrev(n))
1371 linknode = cl.node(mnfst.linkrev(n))
1372 if linknode in has_cl_set:
1372 if linknode in has_cl_set:
1373 has_mnfst_set[n] = 1
1373 has_mnfst_set[n] = 1
1374 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1374 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1375
1375
1376 # Use the information collected in collect_manifests_and_files to say
1376 # Use the information collected in collect_manifests_and_files to say
1377 # which changenode any manifestnode belongs to.
1377 # which changenode any manifestnode belongs to.
1378 def lookup_manifest_link(mnfstnode):
1378 def lookup_manifest_link(mnfstnode):
1379 return msng_mnfst_set[mnfstnode]
1379 return msng_mnfst_set[mnfstnode]
1380
1380
1381 # A function generating function that sets up the initial environment
1381 # A function generating function that sets up the initial environment
1382 # for the inner function.
1382 # for the inner function.
1383 def filenode_collector(changedfiles):
1383 def filenode_collector(changedfiles):
1384 next_rev = [0]
1384 next_rev = [0]
1385 # This gathers information from each manifestnode included in the
1385 # This gathers information from each manifestnode included in the
1386 # changegroup about which filenodes the manifest node references
1386 # changegroup about which filenodes the manifest node references
1387 # so we can include those in the changegroup too.
1387 # so we can include those in the changegroup too.
1388 #
1388 #
1389 # It also remembers which changenode each filenode belongs to. It
1389 # It also remembers which changenode each filenode belongs to. It
1390 # does this by assuming that a filenode belongs to the changenode
1390 # does this by assuming that a filenode belongs to the changenode
1391 # the first manifest that references it belongs to.
1391 # the first manifest that references it belongs to.
1392 def collect_msng_filenodes(mnfstnode):
1392 def collect_msng_filenodes(mnfstnode):
1393 r = mnfst.rev(mnfstnode)
1393 r = mnfst.rev(mnfstnode)
1394 if r == next_rev[0]:
1394 if r == next_rev[0]:
1395 # If the last rev we looked at was the one just previous,
1395 # If the last rev we looked at was the one just previous,
1396 # we only need to see a diff.
1396 # we only need to see a diff.
1397 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1397 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1398 # For each line in the delta
1398 # For each line in the delta
1399 for dline in delta.splitlines():
1399 for dline in delta.splitlines():
1400 # get the filename and filenode for that line
1400 # get the filename and filenode for that line
1401 f, fnode = dline.split('\0')
1401 f, fnode = dline.split('\0')
1402 fnode = bin(fnode[:40])
1402 fnode = bin(fnode[:40])
1403 f = changedfiles.get(f, None)
1403 f = changedfiles.get(f, None)
1404 # And if the file is in the list of files we care
1404 # And if the file is in the list of files we care
1405 # about.
1405 # about.
1406 if f is not None:
1406 if f is not None:
1407 # Get the changenode this manifest belongs to
1407 # Get the changenode this manifest belongs to
1408 clnode = msng_mnfst_set[mnfstnode]
1408 clnode = msng_mnfst_set[mnfstnode]
1409 # Create the set of filenodes for the file if
1409 # Create the set of filenodes for the file if
1410 # there isn't one already.
1410 # there isn't one already.
1411 ndset = msng_filenode_set.setdefault(f, {})
1411 ndset = msng_filenode_set.setdefault(f, {})
1412 # And set the filenode's changelog node to the
1412 # And set the filenode's changelog node to the
1413 # manifest's if it hasn't been set already.
1413 # manifest's if it hasn't been set already.
1414 ndset.setdefault(fnode, clnode)
1414 ndset.setdefault(fnode, clnode)
1415 else:
1415 else:
1416 # Otherwise we need a full manifest.
1416 # Otherwise we need a full manifest.
1417 m = mnfst.read(mnfstnode)
1417 m = mnfst.read(mnfstnode)
1418 # For every file we care about.
1418 # For every file we care about.
1419 for f in changedfiles:
1419 for f in changedfiles:
1420 fnode = m.get(f, None)
1420 fnode = m.get(f, None)
1421 # If it's in the manifest
1421 # If it's in the manifest
1422 if fnode is not None:
1422 if fnode is not None:
1423 # See comments above.
1423 # See comments above.
1424 clnode = msng_mnfst_set[mnfstnode]
1424 clnode = msng_mnfst_set[mnfstnode]
1425 ndset = msng_filenode_set.setdefault(f, {})
1425 ndset = msng_filenode_set.setdefault(f, {})
1426 ndset.setdefault(fnode, clnode)
1426 ndset.setdefault(fnode, clnode)
1427 # Remember the revision we hope to see next.
1427 # Remember the revision we hope to see next.
1428 next_rev[0] = r + 1
1428 next_rev[0] = r + 1
1429 return collect_msng_filenodes
1429 return collect_msng_filenodes
1430
1430
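# The delta branch above relies on the manifest line format
# "<filename>\0<40 hex chars of filenode><optional flag characters>", which
# is why the second field is truncated to 40 characters before being turned
# back into a binary node.  A standalone sketch of that parsing step, using
# binascii where the code above uses the bin() helper (invented names):
import binascii

def parse_manifest_line(line):
    fname, rest = line.split('\0', 1)
    filenode = binascii.unhexlify(rest[:40])
    flags = rest[40:]                   # e.g. 'x' for an executable file
    return fname, filenode, flags

# parse_manifest_line('dir/file.txt\0' + '1f' * 20 + 'x')
#   -> ('dir/file.txt', b'\x1f' * 20, 'x')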
1431 # We have a list of filenodes we think we need for a file, let's remove
1431 # We have a list of filenodes we think we need for a file, let's remove
1432 # all those we know the recipient must have.
1432 # all those we know the recipient must have.
1433 def prune_filenodes(f, filerevlog):
1433 def prune_filenodes(f, filerevlog):
1434 msngset = msng_filenode_set[f]
1434 msngset = msng_filenode_set[f]
1435 hasset = {}
1435 hasset = {}
1436 # If a 'missing' filenode thinks it belongs to a changenode we
1436 # If a 'missing' filenode thinks it belongs to a changenode we
1437 # assume the recipient must have, then the recipient must have
1437 # assume the recipient must have, then the recipient must have
1438 # that filenode.
1438 # that filenode.
1439 for n in msngset:
1439 for n in msngset:
1440 clnode = cl.node(filerevlog.linkrev(n))
1440 clnode = cl.node(filerevlog.linkrev(n))
1441 if clnode in has_cl_set:
1441 if clnode in has_cl_set:
1442 hasset[n] = 1
1442 hasset[n] = 1
1443 prune_parents(filerevlog, hasset, msngset)
1443 prune_parents(filerevlog, hasset, msngset)
1444
1444
1445 # A function generating function that sets up a context for the
1445 # A function generating function that sets up a context for the
1446 # inner function.
1446 # inner function.
1447 def lookup_filenode_link_func(fname):
1447 def lookup_filenode_link_func(fname):
1448 msngset = msng_filenode_set[fname]
1448 msngset = msng_filenode_set[fname]
1449 # Lookup the changenode the filenode belongs to.
1449 # Lookup the changenode the filenode belongs to.
1450 def lookup_filenode_link(fnode):
1450 def lookup_filenode_link(fnode):
1451 return msngset[fnode]
1451 return msngset[fnode]
1452 return lookup_filenode_link
1452 return lookup_filenode_link
1453
1453
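lookup_filenode_link_func above, like manifest_and_file_collector, filenode_collector and cmp_by_rev_func used below, is a closure factory: it captures its argument once and returns a small callback for the group generators to call later. Only the call sites appear in this listing, so the comparator body below is an assumption; it merely illustrates the shape of such a factory, given that revlog.rev(node) maps a node to its local revision number:

def cmp_by_rev_func(revlog):
    # Return an old-style cmp() callable that orders nodes by their
    # position in the given revlog, suitable for list.sort().
    def cmpfunc(a, b):
        ra, rb = revlog.rev(a), revlog.rev(b)
        return (ra > rb) - (ra < rb)
    return cmpfunc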
1454 # Now that we have all these utility functions to help out and
1454 # Now that we have all these utility functions to help out and
1455 # logically divide up the task, generate the group.
1455 # logically divide up the task, generate the group.
1456 def gengroup():
1456 def gengroup():
1457 # The set of changed files starts empty.
1457 # The set of changed files starts empty.
1458 changedfiles = {}
1458 changedfiles = {}
1459 # Create a changenode group generator that will call our functions
1459 # Create a changenode group generator that will call our functions
1460 # back to lookup the owning changenode and collect information.
1460 # back to lookup the owning changenode and collect information.
1461 group = cl.group(msng_cl_lst, identity,
1461 group = cl.group(msng_cl_lst, identity,
1462 manifest_and_file_collector(changedfiles))
1462 manifest_and_file_collector(changedfiles))
1463 for chnk in group:
1463 for chnk in group:
1464 yield chnk
1464 yield chnk
1465
1465
1466 # The list of manifests has been collected by the generator
1466 # The list of manifests has been collected by the generator
1467 # calling our functions back.
1467 # calling our functions back.
1468 prune_manifests()
1468 prune_manifests()
1469 msng_mnfst_lst = msng_mnfst_set.keys()
1469 msng_mnfst_lst = msng_mnfst_set.keys()
1470 # Sort the manifestnodes by revision number.
1470 # Sort the manifestnodes by revision number.
1471 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1471 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1472 # Create a generator for the manifestnodes that calls our lookup
1472 # Create a generator for the manifestnodes that calls our lookup
1473 # and data collection functions back.
1473 # and data collection functions back.
1474 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1474 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1475 filenode_collector(changedfiles))
1475 filenode_collector(changedfiles))
1476 for chnk in group:
1476 for chnk in group:
1477 yield chnk
1477 yield chnk
1478
1478
1479 # These are no longer needed, dereference and toss the memory for
1479 # These are no longer needed, dereference and toss the memory for
1480 # them.
1480 # them.
1481 msng_mnfst_lst = None
1481 msng_mnfst_lst = None
1482 msng_mnfst_set.clear()
1482 msng_mnfst_set.clear()
1483
1483
1484 changedfiles = changedfiles.keys()
1484 changedfiles = changedfiles.keys()
1485 changedfiles.sort()
1485 changedfiles.sort()
1486 # Go through all our files in order sorted by name.
1486 # Go through all our files in order sorted by name.
1487 for fname in changedfiles:
1487 for fname in changedfiles:
1488 filerevlog = self.file(fname)
1488 filerevlog = self.file(fname)
1489 # Toss out the filenodes that the recipient isn't really
1489 # Toss out the filenodes that the recipient isn't really
1490 # missing.
1490 # missing.
1491 if msng_filenode_set.has_key(fname):
1491 if msng_filenode_set.has_key(fname):
1492 prune_filenodes(fname, filerevlog)
1492 prune_filenodes(fname, filerevlog)
1493 msng_filenode_lst = msng_filenode_set[fname].keys()
1493 msng_filenode_lst = msng_filenode_set[fname].keys()
1494 else:
1494 else:
1495 msng_filenode_lst = []
1495 msng_filenode_lst = []
1496 # If any filenodes are left, generate the group for them,
1496 # If any filenodes are left, generate the group for them,
1497 # otherwise don't bother.
1497 # otherwise don't bother.
1498 if len(msng_filenode_lst) > 0:
1498 if len(msng_filenode_lst) > 0:
1499 yield changegroup.genchunk(fname)
1499 yield changegroup.genchunk(fname)
1500 # Sort the filenodes by their revision #
1500 # Sort the filenodes by their revision #
1501 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1501 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1502 # Create a group generator and only pass in a changenode
1502 # Create a group generator and only pass in a changenode
1503 # lookup function, as we don't need to collect any information
1503 # lookup function, as we don't need to collect any information
1504 # from filenodes.
1504 # from filenodes.
1505 group = filerevlog.group(msng_filenode_lst,
1505 group = filerevlog.group(msng_filenode_lst,
1506 lookup_filenode_link_func(fname))
1506 lookup_filenode_link_func(fname))
1507 for chnk in group:
1507 for chnk in group:
1508 yield chnk
1508 yield chnk
1509 if msng_filenode_set.has_key(fname):
1509 if msng_filenode_set.has_key(fname):
1510 # Don't need this anymore, toss it to free memory.
1510 # Don't need this anymore, toss it to free memory.
1511 del msng_filenode_set[fname]
1511 del msng_filenode_set[fname]
1512 # Signal that no more groups are left.
1512 # Signal that no more groups are left.
1513 yield changegroup.closechunk()
1513 yield changegroup.closechunk()
1514
1514
1515 if msng_cl_lst:
1515 if msng_cl_lst:
1516 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1516 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1517
1517
1518 return util.chunkbuffer(gengroup())
1518 return util.chunkbuffer(gengroup())
1519
1519
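The generator above frames its output with changegroup.genchunk() and changegroup.closechunk(), and addchangegroup() below reads the result back through chunkiter() and getchunk(). The framing assumed here is a plain length-prefixed stream: each chunk starts with a 4-byte big-endian length that counts itself, and a zero length closes the current group. A small self-contained sketch under that assumption:

import struct

def genchunk(data):
    # 4-byte big-endian length, including the 4 header bytes themselves.
    return struct.pack(">l", len(data) + 4) + data

def closechunk():
    # A zero length terminates the current group.
    return struct.pack(">l", 0)

def iterchunks(buf):
    # Walk one group out of a flat buffer.
    pos = 0
    while pos + 4 <= len(buf):
        l = struct.unpack(">l", buf[pos:pos + 4])[0]
        if l <= 4:
            break                     # zero-length terminator
        yield buf[pos + 4:pos + l]
        pos += l

stream = genchunk("hello") + genchunk("world") + closechunk()
assert list(iterchunks(stream)) == ["hello", "world"]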
1520 def changegroup(self, basenodes, source):
1520 def changegroup(self, basenodes, source):
1521 """Generate a changegroup of all nodes that we have that a recipient
1521 """Generate a changegroup of all nodes that we have that a recipient
1522 doesn't.
1522 doesn't.
1523
1523
1524 This is much easier than the previous function as we can assume that
1524 This is much easier than the previous function as we can assume that
1525 the recipient has any changenode we aren't sending them."""
1525 the recipient has any changenode we aren't sending them."""
1526
1526
1527 self.hook('preoutgoing', throw=True, source=source)
1527 self.hook('preoutgoing', throw=True, source=source)
1528
1528
1529 cl = self.changelog
1529 cl = self.changelog
1530 nodes = cl.nodesbetween(basenodes, None)[0]
1530 nodes = cl.nodesbetween(basenodes, None)[0]
1531 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1531 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1532
1532
1533 def identity(x):
1533 def identity(x):
1534 return x
1534 return x
1535
1535
1536 def gennodelst(revlog):
1536 def gennodelst(revlog):
1537 for r in xrange(0, revlog.count()):
1537 for r in xrange(0, revlog.count()):
1538 n = revlog.node(r)
1538 n = revlog.node(r)
1539 if revlog.linkrev(n) in revset:
1539 if revlog.linkrev(n) in revset:
1540 yield n
1540 yield n
1541
1541
1542 def changed_file_collector(changedfileset):
1542 def changed_file_collector(changedfileset):
1543 def collect_changed_files(clnode):
1543 def collect_changed_files(clnode):
1544 c = cl.read(clnode)
1544 c = cl.read(clnode)
1545 for fname in c[3]:
1545 for fname in c[3]:
1546 changedfileset[fname] = 1
1546 changedfileset[fname] = 1
1547 return collect_changed_files
1547 return collect_changed_files
1548
1548
1549 def lookuprevlink_func(revlog):
1549 def lookuprevlink_func(revlog):
1550 def lookuprevlink(n):
1550 def lookuprevlink(n):
1551 return cl.node(revlog.linkrev(n))
1551 return cl.node(revlog.linkrev(n))
1552 return lookuprevlink
1552 return lookuprevlink
1553
1553
1554 def gengroup():
1554 def gengroup():
1555 # construct a list of all changed files
1555 # construct a list of all changed files
1556 changedfiles = {}
1556 changedfiles = {}
1557
1557
1558 for chnk in cl.group(nodes, identity,
1558 for chnk in cl.group(nodes, identity,
1559 changed_file_collector(changedfiles)):
1559 changed_file_collector(changedfiles)):
1560 yield chnk
1560 yield chnk
1561 changedfiles = changedfiles.keys()
1561 changedfiles = changedfiles.keys()
1562 changedfiles.sort()
1562 changedfiles.sort()
1563
1563
1564 mnfst = self.manifest
1564 mnfst = self.manifest
1565 nodeiter = gennodelst(mnfst)
1565 nodeiter = gennodelst(mnfst)
1566 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1566 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1567 yield chnk
1567 yield chnk
1568
1568
1569 for fname in changedfiles:
1569 for fname in changedfiles:
1570 filerevlog = self.file(fname)
1570 filerevlog = self.file(fname)
1571 nodeiter = gennodelst(filerevlog)
1571 nodeiter = gennodelst(filerevlog)
1572 nodeiter = list(nodeiter)
1572 nodeiter = list(nodeiter)
1573 if nodeiter:
1573 if nodeiter:
1574 yield changegroup.genchunk(fname)
1574 yield changegroup.genchunk(fname)
1575 lookup = lookuprevlink_func(filerevlog)
1575 lookup = lookuprevlink_func(filerevlog)
1576 for chnk in filerevlog.group(nodeiter, lookup):
1576 for chnk in filerevlog.group(nodeiter, lookup):
1577 yield chnk
1577 yield chnk
1578
1578
1579 yield changegroup.closechunk()
1579 yield changegroup.closechunk()
1580
1580
1581 if nodes:
1581 if nodes:
1582 self.hook('outgoing', node=hex(nodes[0]), source=source)
1582 self.hook('outgoing', node=hex(nodes[0]), source=source)
1583
1583
1584 return util.chunkbuffer(gengroup())
1584 return util.chunkbuffer(gengroup())
1585
1585
1586 def addchangegroup(self, source, srctype):
1586 def addchangegroup(self, source, srctype):
1587 """add changegroup to repo.
1587 """add changegroup to repo.
1588 returns number of heads modified or added + 1."""
1588 returns number of heads modified or added + 1."""
1589
1589
1590 def csmap(x):
1590 def csmap(x):
1591 self.ui.debug(_("add changeset %s\n") % short(x))
1591 self.ui.debug(_("add changeset %s\n") % short(x))
1592 return cl.count()
1592 return cl.count()
1593
1593
1594 def revmap(x):
1594 def revmap(x):
1595 return cl.rev(x)
1595 return cl.rev(x)
1596
1596
1597 if not source:
1597 if not source:
1598 return 0
1598 return 0
1599
1599
1600 self.hook('prechangegroup', throw=True, source=srctype)
1600 self.hook('prechangegroup', throw=True, source=srctype)
1601
1601
1602 changesets = files = revisions = 0
1602 changesets = files = revisions = 0
1603
1603
1604 tr = self.transaction()
1604 tr = self.transaction()
1605
1605
1606 # write changelog data to temp files so concurrent readers will not see
1606 # write changelog data to temp files so concurrent readers will not see
1607 # an inconsistent view
1607 # an inconsistent view
1608 cl = None
1608 cl = None
1609 try:
1609 try:
1610 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1610 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1611
1611
1612 oldheads = len(cl.heads())
1612 oldheads = len(cl.heads())
1613
1613
1614 # pull off the changeset group
1614 # pull off the changeset group
1615 self.ui.status(_("adding changesets\n"))
1615 self.ui.status(_("adding changesets\n"))
1616 cor = cl.count() - 1
1616 cor = cl.count() - 1
1617 chunkiter = changegroup.chunkiter(source)
1617 chunkiter = changegroup.chunkiter(source)
1618 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1618 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1619 raise util.Abort(_("received changelog group is empty"))
1619 raise util.Abort(_("received changelog group is empty"))
1620 cnr = cl.count() - 1
1620 cnr = cl.count() - 1
1621 changesets = cnr - cor
1621 changesets = cnr - cor
1622
1622
1623 # pull off the manifest group
1623 # pull off the manifest group
1624 self.ui.status(_("adding manifests\n"))
1624 self.ui.status(_("adding manifests\n"))
1625 chunkiter = changegroup.chunkiter(source)
1625 chunkiter = changegroup.chunkiter(source)
1626 # no need to check for empty manifest group here:
1626 # no need to check for empty manifest group here:
1627 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1627 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1628 # no new manifest will be created and the manifest group will
1628 # no new manifest will be created and the manifest group will
1629 # be empty during the pull
1629 # be empty during the pull
1630 self.manifest.addgroup(chunkiter, revmap, tr)
1630 self.manifest.addgroup(chunkiter, revmap, tr)
1631
1631
1632 # process the files
1632 # process the files
1633 self.ui.status(_("adding file changes\n"))
1633 self.ui.status(_("adding file changes\n"))
1634 while 1:
1634 while 1:
1635 f = changegroup.getchunk(source)
1635 f = changegroup.getchunk(source)
1636 if not f:
1636 if not f:
1637 break
1637 break
1638 self.ui.debug(_("adding %s revisions\n") % f)
1638 self.ui.debug(_("adding %s revisions\n") % f)
1639 fl = self.file(f)
1639 fl = self.file(f)
1640 o = fl.count()
1640 o = fl.count()
1641 chunkiter = changegroup.chunkiter(source)
1641 chunkiter = changegroup.chunkiter(source)
1642 if fl.addgroup(chunkiter, revmap, tr) is None:
1642 if fl.addgroup(chunkiter, revmap, tr) is None:
1643 raise util.Abort(_("received file revlog group is empty"))
1643 raise util.Abort(_("received file revlog group is empty"))
1644 revisions += fl.count() - o
1644 revisions += fl.count() - o
1645 files += 1
1645 files += 1
1646
1646
1647 cl.writedata()
1647 cl.writedata()
1648 finally:
1648 finally:
1649 if cl:
1649 if cl:
1650 cl.cleanup()
1650 cl.cleanup()
1651
1651
1652 # make changelog see real files again
1652 # make changelog see real files again
1653 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1653 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1654 self.changelog.checkinlinesize(tr)
1654 self.changelog.checkinlinesize(tr)
1655
1655
1656 newheads = len(self.changelog.heads())
1656 newheads = len(self.changelog.heads())
1657 heads = ""
1657 heads = ""
1658 if oldheads and newheads != oldheads:
1658 if oldheads and newheads != oldheads:
1659 heads = _(" (%+d heads)") % (newheads - oldheads)
1659 heads = _(" (%+d heads)") % (newheads - oldheads)
1660
1660
1661 self.ui.status(_("added %d changesets"
1661 self.ui.status(_("added %d changesets"
1662 " with %d changes to %d files%s\n")
1662 " with %d changes to %d files%s\n")
1663 % (changesets, revisions, files, heads))
1663 % (changesets, revisions, files, heads))
1664
1664
1665 if changesets > 0:
1665 if changesets > 0:
1666 self.hook('pretxnchangegroup', throw=True,
1666 self.hook('pretxnchangegroup', throw=True,
1667 node=hex(self.changelog.node(cor+1)), source=srctype)
1667 node=hex(self.changelog.node(cor+1)), source=srctype)
1668
1668
1669 tr.close()
1669 tr.close()
1670
1670
1671 if changesets > 0:
1671 if changesets > 0:
1672 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1672 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1673 source=srctype)
1673 source=srctype)
1674
1674
1675 for i in range(cor + 1, cnr + 1):
1675 for i in range(cor + 1, cnr + 1):
1676 self.hook("incoming", node=hex(self.changelog.node(i)),
1676 self.hook("incoming", node=hex(self.changelog.node(i)),
1677 source=srctype)
1677 source=srctype)
1678
1678
1679 return newheads - oldheads + 1
1679 return newheads - oldheads + 1
1680
1680
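Taken together, changegroup() on one repository and addchangegroup() on another are the two ends of push and pull: one side serializes everything the other is missing, the other replays it inside a transaction and reports how its heads changed. A rough usage sketch, assuming both repositories are local, that every head of the destination is known to the source, and using hg.repository() simply as the obvious way to open them:

from mercurial import hg, ui

u = ui.ui()
src = hg.repository(u, "/path/to/source")
dst = hg.repository(u, "/path/to/dest")

# src sends everything dst lacks beyond dst's current heads.
cg = src.changegroup(dst.heads(), "push")
modheads = dst.addchangegroup(cg, "push")

# Per the docstring above, the return value is heads modified or added, plus one.
if modheads > 1:
    u.status("destination grew new heads, a merge may be needed\n")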
1681 def update(self, node, allow=False, force=False, choose=None,
1681 def update(self, node, allow=False, force=False, choose=None,
1682 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1682 moddirstate=True, forcemerge=False, wlock=None, show_stats=True):
1683 pl = self.dirstate.parents()
1683 pl = self.dirstate.parents()
1684 if not force and pl[1] != nullid:
1684 if not force and pl[1] != nullid:
1685 raise util.Abort(_("outstanding uncommitted merges"))
1685 raise util.Abort(_("outstanding uncommitted merges"))
1686
1686
1687 err = False
1687 err = False
1688
1688
1689 p1, p2 = pl[0], node
1689 p1, p2 = pl[0], node
1690 pa = self.changelog.ancestor(p1, p2)
1690 pa = self.changelog.ancestor(p1, p2)
1691 m1n = self.changelog.read(p1)[0]
1691 m1n = self.changelog.read(p1)[0]
1692 m2n = self.changelog.read(p2)[0]
1692 m2n = self.changelog.read(p2)[0]
1693 man = self.manifest.ancestor(m1n, m2n)
1693 man = self.manifest.ancestor(m1n, m2n)
1694 m1 = self.manifest.read(m1n)
1694 m1 = self.manifest.read(m1n)
1695 mf1 = self.manifest.readflags(m1n)
1695 mf1 = self.manifest.readflags(m1n)
1696 m2 = self.manifest.read(m2n).copy()
1696 m2 = self.manifest.read(m2n).copy()
1697 mf2 = self.manifest.readflags(m2n)
1697 mf2 = self.manifest.readflags(m2n)
1698 ma = self.manifest.read(man)
1698 ma = self.manifest.read(man)
1699 mfa = self.manifest.readflags(man)
1699 mfa = self.manifest.readflags(man)
1700
1700
1701 modified, added, removed, deleted, unknown = self.changes()
1701 modified, added, removed, deleted, unknown = self.changes()
1702
1702
1703 # is this a jump, or a merge? i.e. is there a linear path
1703 # is this a jump, or a merge? i.e. is there a linear path
1704 # from p1 to p2?
1704 # from p1 to p2?
1705 linear_path = (pa == p1 or pa == p2)
1705 linear_path = (pa == p1 or pa == p2)
1706
1706
1707 if allow and linear_path:
1707 if allow and linear_path:
1708 raise util.Abort(_("there is nothing to merge, just use "
1708 raise util.Abort(_("there is nothing to merge, just use "
1709 "'hg update' or look at 'hg heads'"))
1709 "'hg update' or look at 'hg heads'"))
1710 if allow and not forcemerge:
1710 if allow and not forcemerge:
1711 if modified or added or removed:
1711 if modified or added or removed:
1712 raise util.Abort(_("outstanding uncommitted changes"))
1712 raise util.Abort(_("outstanding uncommitted changes"))
1713
1713
1714 if not forcemerge and not force:
1714 if not forcemerge and not force:
1715 for f in unknown:
1715 for f in unknown:
1716 if f in m2:
1716 if f in m2:
1717 t1 = self.wread(f)
1717 t1 = self.wread(f)
1718 t2 = self.file(f).read(m2[f])
1718 t2 = self.file(f).read(m2[f])
1719 if cmp(t1, t2) != 0:
1719 if cmp(t1, t2) != 0:
1720 raise util.Abort(_("'%s' already exists in the working"
1720 raise util.Abort(_("'%s' already exists in the working"
1721 " dir and differs from remote") % f)
1721 " dir and differs from remote") % f)
1722
1722
1723 # resolve the manifest to determine which files
1723 # resolve the manifest to determine which files
1724 # we care about merging
1724 # we care about merging
1725 self.ui.note(_("resolving manifests\n"))
1725 self.ui.note(_("resolving manifests\n"))
1726 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1726 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1727 (force, allow, moddirstate, linear_path))
1727 (force, allow, moddirstate, linear_path))
1728 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1728 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1729 (short(man), short(m1n), short(m2n)))
1729 (short(man), short(m1n), short(m2n)))
1730
1730
1731 merge = {}
1731 merge = {}
1732 get = {}
1732 get = {}
1733 remove = []
1733 remove = []
1734
1734
1735 # construct a working dir manifest
1735 # construct a working dir manifest
1736 mw = m1.copy()
1736 mw = m1.copy()
1737 mfw = mf1.copy()
1737 mfw = mf1.copy()
1738 umap = dict.fromkeys(unknown)
1738 umap = dict.fromkeys(unknown)
1739
1739
1740 for f in added + modified + unknown:
1740 for f in added + modified + unknown:
1741 mw[f] = ""
1741 mw[f] = ""
1742 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1742 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1743
1743
1744 if moddirstate and not wlock:
1744 if moddirstate and not wlock:
1745 wlock = self.wlock()
1745 wlock = self.wlock()
1746
1746
1747 for f in deleted + removed:
1747 for f in deleted + removed:
1748 if f in mw:
1748 if f in mw:
1749 del mw[f]
1749 del mw[f]
1750
1750
1751 # If we're jumping between revisions (as opposed to merging),
1751 # If we're jumping between revisions (as opposed to merging),
1752 # and if neither the working directory nor the target rev has
1752 # and if neither the working directory nor the target rev has
1753 # the file, then we need to remove it from the dirstate, to
1753 # the file, then we need to remove it from the dirstate, to
1754 # prevent the dirstate from listing the file when it is no
1754 # prevent the dirstate from listing the file when it is no
1755 # longer in the manifest.
1755 # longer in the manifest.
1756 if moddirstate and linear_path and f not in m2:
1756 if moddirstate and linear_path and f not in m2:
1757 self.dirstate.forget((f,))
1757 self.dirstate.forget((f,))
1758
1758
1759 # Compare manifests
1759 # Compare manifests
1760 for f, n in mw.iteritems():
1760 for f, n in mw.iteritems():
1761 if choose and not choose(f):
1761 if choose and not choose(f):
1762 continue
1762 continue
1763 if f in m2:
1763 if f in m2:
1764 s = 0
1764 s = 0
1765
1765
1766 # is the wfile new since m1, and match m2?
1766 # is the wfile new since m1, and match m2?
1767 if f not in m1:
1767 if f not in m1:
1768 t1 = self.wread(f)
1768 t1 = self.wread(f)
1769 t2 = self.file(f).read(m2[f])
1769 t2 = self.file(f).read(m2[f])
1770 if cmp(t1, t2) == 0:
1770 if cmp(t1, t2) == 0:
1771 n = m2[f]
1771 n = m2[f]
1772 del t1, t2
1772 del t1, t2
1773
1773
1774 # are files different?
1774 # are files different?
1775 if n != m2[f]:
1775 if n != m2[f]:
1776 a = ma.get(f, nullid)
1776 a = ma.get(f, nullid)
1777 # are both different from the ancestor?
1777 # are both different from the ancestor?
1778 if n != a and m2[f] != a:
1778 if n != a and m2[f] != a:
1779 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1779 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1780 # merge executable bits
1780 # merge executable bits
1781 # "if we changed or they changed, change in merge"
1781 # "if we changed or they changed, change in merge"
1782 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1782 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1783 mode = ((a^b) | (a^c)) ^ a
1783 mode = ((a^b) | (a^c)) ^ a
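# A worked illustration of the exec-bit rule above: with a = ancestor's bit,
# b = working copy's and c = remote's, ((a^b) | (a^c)) ^ a flips the ancestor
# bit exactly when at least one side changed it.  For example a=0,b=0,c=1
# gives 1 (they set it), a=1,b=1,c=1 gives 1 (nobody cleared it), and
# a=1,b=0,c=1 gives 0 (we cleared it).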
1784 merge[f] = (m1.get(f, nullid), m2[f], mode)
1784 merge[f] = (m1.get(f, nullid), m2[f], mode)
1785 s = 1
1785 s = 1
1786 # are we clobbering?
1786 # are we clobbering?
1787 # is remote's version newer?
1787 # is remote's version newer?
1788 # or are we going back in time?
1788 # or are we going back in time?
1789 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1789 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1790 self.ui.debug(_(" remote %s is newer, get\n") % f)
1790 self.ui.debug(_(" remote %s is newer, get\n") % f)
1791 get[f] = m2[f]
1791 get[f] = m2[f]
1792 s = 1
1792 s = 1
1793 elif f in umap or f in added:
1793 elif f in umap or f in added:
1794 # this unknown file is the same as the checkout
1794 # this unknown file is the same as the checkout
1795 # we need to reset the dirstate if the file was added
1795 # we need to reset the dirstate if the file was added
1796 get[f] = m2[f]
1796 get[f] = m2[f]
1797
1797
1798 if not s and mfw[f] != mf2[f]:
1798 if not s and mfw[f] != mf2[f]:
1799 if force:
1799 if force:
1800 self.ui.debug(_(" updating permissions for %s\n") % f)
1800 self.ui.debug(_(" updating permissions for %s\n") % f)
1801 util.set_exec(self.wjoin(f), mf2[f])
1801 util.set_exec(self.wjoin(f), mf2[f])
1802 else:
1802 else:
1803 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1803 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1804 mode = ((a^b) | (a^c)) ^ a
1804 mode = ((a^b) | (a^c)) ^ a
1805 if mode != b:
1805 if mode != b:
1806 self.ui.debug(_(" updating permissions for %s\n")
1806 self.ui.debug(_(" updating permissions for %s\n")
1807 % f)
1807 % f)
1808 util.set_exec(self.wjoin(f), mode)
1808 util.set_exec(self.wjoin(f), mode)
1809 del m2[f]
1809 del m2[f]
1810 elif f in ma:
1810 elif f in ma:
1811 if n != ma[f]:
1811 if n != ma[f]:
1812 r = _("d")
1812 r = _("d")
1813 if not force and (linear_path or allow):
1813 if not force and (linear_path or allow):
1814 r = self.ui.prompt(
1814 r = self.ui.prompt(
1815 (_(" local changed %s which remote deleted\n") % f) +
1815 (_(" local changed %s which remote deleted\n") % f) +
1816 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1816 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1817 if r == _("d"):
1817 if r == _("d"):
1818 remove.append(f)
1818 remove.append(f)
1819 else:
1819 else:
1820 self.ui.debug(_("other deleted %s\n") % f)
1820 self.ui.debug(_("other deleted %s\n") % f)
1821 remove.append(f) # other deleted it
1821 remove.append(f) # other deleted it
1822 else:
1822 else:
1823 # file is created on branch or in working directory
1823 # file is created on branch or in working directory
1824 if force and f not in umap:
1824 if force and f not in umap:
1825 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1825 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1826 remove.append(f)
1826 remove.append(f)
1827 elif n == m1.get(f, nullid): # same as parent
1827 elif n == m1.get(f, nullid): # same as parent
1828 if p2 == pa: # going backwards?
1828 if p2 == pa: # going backwards?
1829 self.ui.debug(_("remote deleted %s\n") % f)
1829 self.ui.debug(_("remote deleted %s\n") % f)
1830 remove.append(f)
1830 remove.append(f)
1831 else:
1831 else:
1832 self.ui.debug(_("local modified %s, keeping\n") % f)
1832 self.ui.debug(_("local modified %s, keeping\n") % f)
1833 else:
1833 else:
1834 self.ui.debug(_("working dir created %s, keeping\n") % f)
1834 self.ui.debug(_("working dir created %s, keeping\n") % f)
1835
1835
1836 for f, n in m2.iteritems():
1836 for f, n in m2.iteritems():
1837 if choose and not choose(f):
1837 if choose and not choose(f):
1838 continue
1838 continue
1839 if f[0] == "/":
1839 if f[0] == "/":
1840 continue
1840 continue
1841 if f in ma and n != ma[f]:
1841 if f in ma and n != ma[f]:
1842 r = _("k")
1842 r = _("k")
1843 if not force and (linear_path or allow):
1843 if not force and (linear_path or allow):
1844 r = self.ui.prompt(
1844 r = self.ui.prompt(
1845 (_("remote changed %s which local deleted\n") % f) +
1845 (_("remote changed %s which local deleted\n") % f) +
1846 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1846 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1847 if r == _("k"):
1847 if r == _("k"):
1848 get[f] = n
1848 get[f] = n
1849 elif f not in ma:
1849 elif f not in ma:
1850 self.ui.debug(_("remote created %s\n") % f)
1850 self.ui.debug(_("remote created %s\n") % f)
1851 get[f] = n
1851 get[f] = n
1852 else:
1852 else:
1853 if force or p2 == pa: # going backwards?
1853 if force or p2 == pa: # going backwards?
1854 self.ui.debug(_("local deleted %s, recreating\n") % f)
1854 self.ui.debug(_("local deleted %s, recreating\n") % f)
1855 get[f] = n
1855 get[f] = n
1856 else:
1856 else:
1857 self.ui.debug(_("local deleted %s\n") % f)
1857 self.ui.debug(_("local deleted %s\n") % f)
1858
1858
1859 del mw, m1, m2, ma
1859 del mw, m1, m2, ma
1860
1860
1861 if force:
1861 if force:
1862 for f in merge:
1862 for f in merge:
1863 get[f] = merge[f][1]
1863 get[f] = merge[f][1]
1864 merge = {}
1864 merge = {}
1865
1865
1866 if linear_path or force:
1866 if linear_path or force:
1867 # we don't need to do any magic, just jump to the new rev
1867 # we don't need to do any magic, just jump to the new rev
1868 branch_merge = False
1868 branch_merge = False
1869 p1, p2 = p2, nullid
1869 p1, p2 = p2, nullid
1870 else:
1870 else:
1871 if not allow:
1871 if not allow:
1872 self.ui.status(_("this update spans a branch"
1872 self.ui.status(_("this update spans a branch"
1873 " affecting the following files:\n"))
1873 " affecting the following files:\n"))
1874 fl = merge.keys() + get.keys()
1874 fl = merge.keys() + get.keys()
1875 fl.sort()
1875 fl.sort()
1876 for f in fl:
1876 for f in fl:
1877 cf = ""
1877 cf = ""
1878 if f in merge:
1878 if f in merge:
1879 cf = _(" (resolve)")
1879 cf = _(" (resolve)")
1880 self.ui.status(" %s%s\n" % (f, cf))
1880 self.ui.status(" %s%s\n" % (f, cf))
1881 self.ui.warn(_("aborting update spanning branches!\n"))
1881 self.ui.warn(_("aborting update spanning branches!\n"))
1882 self.ui.status(_("(use 'hg merge' to merge across branches"
1882 self.ui.status(_("(use 'hg merge' to merge across branches"
1883 " or 'hg update -C' to lose changes)\n"))
1883 " or 'hg update -C' to lose changes)\n"))
1884 return 1
1884 return 1
1885 branch_merge = True
1885 branch_merge = True
1886
1886
1887 xp1 = hex(p1)
1887 xp1 = hex(p1)
1888 xp2 = hex(p2)
1888 xp2 = hex(p2)
1889 if p2 == nullid: xxp2 = ''
1889 if p2 == nullid: xxp2 = ''
1890 else: xxp2 = xp2
1890 else: xxp2 = xp2
1891
1891
1892 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1892 self.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
1893
1893
1894 # get the files we don't need to change
1894 # get the files we don't need to change
1895 files = get.keys()
1895 files = get.keys()
1896 files.sort()
1896 files.sort()
1897 for f in files:
1897 for f in files:
1898 if f[0] == "/":
1898 if f[0] == "/":
1899 continue
1899 continue
1900 self.ui.note(_("getting %s\n") % f)
1900 self.ui.note(_("getting %s\n") % f)
1901 t = self.file(f).read(get[f])
1901 t = self.file(f).read(get[f])
1902 self.wwrite(f, t)
1902 self.wwrite(f, t)
1903 util.set_exec(self.wjoin(f), mf2[f])
1903 util.set_exec(self.wjoin(f), mf2[f])
1904 if moddirstate:
1904 if moddirstate:
1905 if branch_merge:
1905 if branch_merge:
1906 self.dirstate.update([f], 'n', st_mtime=-1)
1906 self.dirstate.update([f], 'n', st_mtime=-1)
1907 else:
1907 else:
1908 self.dirstate.update([f], 'n')
1908 self.dirstate.update([f], 'n')
1909
1909
1910 # merge the tricky bits
1910 # merge the tricky bits
1911 failedmerge = []
1911 failedmerge = []
1912 files = merge.keys()
1912 files = merge.keys()
1913 files.sort()
1913 files.sort()
1914 for f in files:
1914 for f in files:
1915 self.ui.status(_("merging %s\n") % f)
1915 self.ui.status(_("merging %s\n") % f)
1916 my, other, flag = merge[f]
1916 my, other, flag = merge[f]
1917 ret = self.merge3(f, my, other, xp1, xp2)
1917 ret = self.merge3(f, my, other, xp1, xp2)
1918 if ret:
1918 if ret:
1919 err = True
1919 err = True
1920 failedmerge.append(f)
1920 failedmerge.append(f)
1921 util.set_exec(self.wjoin(f), flag)
1921 util.set_exec(self.wjoin(f), flag)
1922 if moddirstate:
1922 if moddirstate:
1923 if branch_merge:
1923 if branch_merge:
1924 # We've done a branch merge, mark this file as merged
1924 # We've done a branch merge, mark this file as merged
1925 # so that we properly record the merge later
1925 # so that we properly record the merge later
1926 self.dirstate.update([f], 'm')
1926 self.dirstate.update([f], 'm')
1927 else:
1927 else:
1928 # We've update-merged a locally modified file, so
1928 # We've update-merged a locally modified file, so
1929 # we set the dirstate to emulate a normal checkout
1929 # we set the dirstate to emulate a normal checkout
1930 # of that file some time in the past. Thus our
1930 # of that file some time in the past. Thus our
1931 # merge will appear as a normal local file
1931 # merge will appear as a normal local file
1932 # modification.
1932 # modification.
1933 f_len = len(self.file(f).read(other))
1933 f_len = len(self.file(f).read(other))
1934 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1934 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1935
1935
1936 remove.sort()
1936 remove.sort()
1937 for f in remove:
1937 for f in remove:
1938 self.ui.note(_("removing %s\n") % f)
1938 self.ui.note(_("removing %s\n") % f)
1939 util.audit_path(f)
1939 util.audit_path(f)
1940 try:
1940 try:
1941 util.unlink(self.wjoin(f))
1941 util.unlink(self.wjoin(f))
1942 except OSError, inst:
1942 except OSError, inst:
1943 if inst.errno != errno.ENOENT:
1943 if inst.errno != errno.ENOENT:
1944 self.ui.warn(_("update failed to remove %s: %s!\n") %
1944 self.ui.warn(_("update failed to remove %s: %s!\n") %
1945 (f, inst.strerror))
1945 (f, inst.strerror))
1946 if moddirstate:
1946 if moddirstate:
1947 if branch_merge:
1947 if branch_merge:
1948 self.dirstate.update(remove, 'r')
1948 self.dirstate.update(remove, 'r')
1949 else:
1949 else:
1950 self.dirstate.forget(remove)
1950 self.dirstate.forget(remove)
1951
1951
1952 if moddirstate:
1952 if moddirstate:
1953 self.dirstate.setparents(p1, p2)
1953 self.dirstate.setparents(p1, p2)
1954
1954
1955 if show_stats:
1955 if show_stats:
1956 stats = ((len(get), _("updated")),
1956 stats = ((len(get), _("updated")),
1957 (len(merge) - len(failedmerge), _("merged")),
1957 (len(merge) - len(failedmerge), _("merged")),
1958 (len(remove), _("removed")),
1958 (len(remove), _("removed")),
1959 (len(failedmerge), _("unresolved")))
1959 (len(failedmerge), _("unresolved")))
1960 note = ", ".join([_("%d files %s") % s for s in stats])
1960 note = ", ".join([_("%d files %s") % s for s in stats])
1961 self.ui.status("%s\n" % note)
1961 self.ui.status("%s\n" % note)
1962 if moddirstate:
1962 if moddirstate:
1963 if branch_merge:
1963 if branch_merge:
1964 if failedmerge:
1964 if failedmerge:
1965 self.ui.status(_("There are unresolved merges,"
1965 self.ui.status(_("There are unresolved merges,"
1966 " you can redo the full merge using:\n"
1966 " you can redo the full merge using:\n"
1967 " hg update -C %s\n"
1967 " hg update -C %s\n"
1968 " hg merge %s\n"
1968 " hg merge %s\n"
1969 % (self.changelog.rev(p1),
1969 % (self.changelog.rev(p1),
1970 self.changelog.rev(p2))))
1970 self.changelog.rev(p2))))
1971 else:
1971 else:
1972 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1972 self.ui.status(_("(branch merge, don't forget to commit)\n"))
1973 elif failedmerge:
1973 elif failedmerge:
1974 self.ui.status(_("There are unresolved merges with"
1974 self.ui.status(_("There are unresolved merges with"
1975 " locally modified files.\n"))
1975 " locally modified files.\n"))
1976
1976
1977 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1977 self.hook('update', parent1=xp1, parent2=xxp2, error=int(err))
1978 return err
1978 return err
1979
1979
1980 def merge3(self, fn, my, other, p1, p2):
1980 def merge3(self, fn, my, other, p1, p2):
1981 """perform a 3-way merge in the working directory"""
1981 """perform a 3-way merge in the working directory"""
1982
1982
1983 def temp(prefix, node):
1983 def temp(prefix, node):
1984 pre = "%s~%s." % (os.path.basename(fn), prefix)
1984 pre = "%s~%s." % (os.path.basename(fn), prefix)
1985 (fd, name) = tempfile.mkstemp(prefix=pre)
1985 (fd, name) = tempfile.mkstemp(prefix=pre)
1986 f = os.fdopen(fd, "wb")
1986 f = os.fdopen(fd, "wb")
1987 self.wwrite(fn, fl.read(node), f)
1987 self.wwrite(fn, fl.read(node), f)
1988 f.close()
1988 f.close()
1989 return name
1989 return name
1990
1990
1991 fl = self.file(fn)
1991 fl = self.file(fn)
1992 base = fl.ancestor(my, other)
1992 base = fl.ancestor(my, other)
1993 a = self.wjoin(fn)
1993 a = self.wjoin(fn)
1994 b = temp("base", base)
1994 b = temp("base", base)
1995 c = temp("other", other)
1995 c = temp("other", other)
1996
1996
1997 self.ui.note(_("resolving %s\n") % fn)
1997 self.ui.note(_("resolving %s\n") % fn)
1998 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1998 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1999 (fn, short(my), short(other), short(base)))
1999 (fn, short(my), short(other), short(base)))
2000
2000
2001 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
2001 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
2002 or "hgmerge")
2002 or "hgmerge")
2003 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
2003 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
2004 environ={'HG_FILE': fn,
2004 environ={'HG_FILE': fn,
2005 'HG_MY_NODE': p1,
2005 'HG_MY_NODE': p1,
2006 'HG_OTHER_NODE': p2,
2006 'HG_OTHER_NODE': p2,
2007 'HG_FILE_MY_NODE': hex(my),
2007 'HG_FILE_MY_NODE': hex(my),
2008 'HG_FILE_OTHER_NODE': hex(other),
2008 'HG_FILE_OTHER_NODE': hex(other),
2009 'HG_FILE_BASE_NODE': hex(base)})
2009 'HG_FILE_BASE_NODE': hex(base)})
2010 if r:
2010 if r:
2011 self.ui.warn(_("merging %s failed!\n") % fn)
2011 self.ui.warn(_("merging %s failed!\n") % fn)
2012
2012
2013 os.unlink(b)
2013 os.unlink(b)
2014 os.unlink(c)
2014 os.unlink(c)
2015 return r
2015 return r
2016
2016
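merge3() above delegates the conflict to an external program, handing it the working copy, a temporary copy of the common ancestor and a temporary copy of the other revision, in that order, and treating a nonzero exit status as a failed merge. A toy stand-in that resolves only the trivial cases (the name mymerge.py and its logic are invented for illustration; it could be wired in via HGMERGE or the ui.merge setting consulted above):

#!/usr/bin/env python
# mymerge.py LOCAL BASE OTHER: exit 0 on success, nonzero on conflict.
import sys

def read(path):
    f = open(path, 'rb')
    try:
        return f.read()
    finally:
        f.close()

local, base, other = [read(p) for p in sys.argv[1:4]]
if other == base or other == local:
    sys.exit(0)                   # nothing new on the other side
if local == base:
    f = open(sys.argv[1], 'wb')   # only the other side changed: take it
    f.write(other)
    f.close()
    sys.exit(0)
sys.exit(1)                       # both sides changed differently: real conflict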
2017 def verify(self):
2017 def verify(self):
2018 filelinkrevs = {}
2018 filelinkrevs = {}
2019 filenodes = {}
2019 filenodes = {}
2020 changesets = revisions = files = 0
2020 changesets = revisions = files = 0
2021 errors = [0]
2021 errors = [0]
2022 warnings = [0]
2022 warnings = [0]
2023 neededmanifests = {}
2023 neededmanifests = {}
2024
2024
2025 def err(msg):
2025 def err(msg):
2026 self.ui.warn(msg + "\n")
2026 self.ui.warn(msg + "\n")
2027 errors[0] += 1
2027 errors[0] += 1
2028
2028
2029 def warn(msg):
2029 def warn(msg):
2030 self.ui.warn(msg + "\n")
2030 self.ui.warn(msg + "\n")
2031 warnings[0] += 1
2031 warnings[0] += 1
2032
2032
2033 def checksize(obj, name):
2033 def checksize(obj, name):
2034 d = obj.checksize()
2034 d = obj.checksize()
2035 if d[0]:
2035 if d[0]:
2036 err(_("%s data length off by %d bytes") % (name, d[0]))
2036 err(_("%s data length off by %d bytes") % (name, d[0]))
2037 if d[1]:
2037 if d[1]:
2038 err(_("%s index contains %d extra bytes") % (name, d[1]))
2038 err(_("%s index contains %d extra bytes") % (name, d[1]))
2039
2039
2040 def checkversion(obj, name):
2040 def checkversion(obj, name):
2041 if obj.version != revlog.REVLOGV0:
2041 if obj.version != revlog.REVLOGV0:
2042 if not revlogv1:
2042 if not revlogv1:
2043 warn(_("warning: `%s' uses revlog format 1") % name)
2043 warn(_("warning: `%s' uses revlog format 1") % name)
2044 elif revlogv1:
2044 elif revlogv1:
2045 warn(_("warning: `%s' uses revlog format 0") % name)
2045 warn(_("warning: `%s' uses revlog format 0") % name)
2046
2046
2047 revlogv1 = self.revlogversion != revlog.REVLOGV0
2047 revlogv1 = self.revlogversion != revlog.REVLOGV0
2048 if self.ui.verbose or revlogv1 != self.revlogv1:
2048 if self.ui.verbose or revlogv1 != self.revlogv1:
2049 self.ui.status(_("repository uses revlog format %d\n") %
2049 self.ui.status(_("repository uses revlog format %d\n") %
2050 (revlogv1 and 1 or 0))
2050 (revlogv1 and 1 or 0))
2051
2051
2052 seen = {}
2052 seen = {}
2053 self.ui.status(_("checking changesets\n"))
2053 self.ui.status(_("checking changesets\n"))
2054 checksize(self.changelog, "changelog")
2054 checksize(self.changelog, "changelog")
2055
2055
2056 for i in range(self.changelog.count()):
2056 for i in range(self.changelog.count()):
2057 changesets += 1
2057 changesets += 1
2058 n = self.changelog.node(i)
2058 n = self.changelog.node(i)
2059 l = self.changelog.linkrev(n)
2059 l = self.changelog.linkrev(n)
2060 if l != i:
2060 if l != i:
2061 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2061 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
2062 if n in seen:
2062 if n in seen:
2063 err(_("duplicate changeset at revision %d") % i)
2063 err(_("duplicate changeset at revision %d") % i)
2064 seen[n] = 1
2064 seen[n] = 1
2065
2065
2066 for p in self.changelog.parents(n):
2066 for p in self.changelog.parents(n):
2067 if p not in self.changelog.nodemap:
2067 if p not in self.changelog.nodemap:
2068 err(_("changeset %s has unknown parent %s") %
2068 err(_("changeset %s has unknown parent %s") %
2069 (short(n), short(p)))
2069 (short(n), short(p)))
2070 try:
2070 try:
2071 changes = self.changelog.read(n)
2071 changes = self.changelog.read(n)
2072 except KeyboardInterrupt:
2072 except KeyboardInterrupt:
2073 self.ui.warn(_("interrupted"))
2073 self.ui.warn(_("interrupted"))
2074 raise
2074 raise
2075 except Exception, inst:
2075 except Exception, inst:
2076 err(_("unpacking changeset %s: %s") % (short(n), inst))
2076 err(_("unpacking changeset %s: %s") % (short(n), inst))
2077 continue
2077 continue
2078
2078
2079 neededmanifests[changes[0]] = n
2079 neededmanifests[changes[0]] = n
2080
2080
2081 for f in changes[3]:
2081 for f in changes[3]:
2082 filelinkrevs.setdefault(f, []).append(i)
2082 filelinkrevs.setdefault(f, []).append(i)
2083
2083
2084 seen = {}
2084 seen = {}
2085 self.ui.status(_("checking manifests\n"))
2085 self.ui.status(_("checking manifests\n"))
2086 checkversion(self.manifest, "manifest")
2086 checkversion(self.manifest, "manifest")
2087 checksize(self.manifest, "manifest")
2087 checksize(self.manifest, "manifest")
2088
2088
2089 for i in range(self.manifest.count()):
2089 for i in range(self.manifest.count()):
2090 n = self.manifest.node(i)
2090 n = self.manifest.node(i)
2091 l = self.manifest.linkrev(n)
2091 l = self.manifest.linkrev(n)
2092
2092
2093 if l < 0 or l >= self.changelog.count():
2093 if l < 0 or l >= self.changelog.count():
2094 err(_("bad manifest link (%d) at revision %d") % (l, i))
2094 err(_("bad manifest link (%d) at revision %d") % (l, i))
2095
2095
2096 if n in neededmanifests:
2096 if n in neededmanifests:
2097 del neededmanifests[n]
2097 del neededmanifests[n]
2098
2098
2099 if n in seen:
2099 if n in seen:
2100 err(_("duplicate manifest at revision %d") % i)
2100 err(_("duplicate manifest at revision %d") % i)
2101
2101
2102 seen[n] = 1
2102 seen[n] = 1
2103
2103
2104 for p in self.manifest.parents(n):
2104 for p in self.manifest.parents(n):
2105 if p not in self.manifest.nodemap:
2105 if p not in self.manifest.nodemap:
2106 err(_("manifest %s has unknown parent %s") %
2106 err(_("manifest %s has unknown parent %s") %
2107 (short(n), short(p)))
2107 (short(n), short(p)))
2108
2108
2109 try:
2109 try:
2110 delta = mdiff.patchtext(self.manifest.delta(n))
2110 delta = mdiff.patchtext(self.manifest.delta(n))
2111 except KeyboardInterrupt:
2111 except KeyboardInterrupt:
2112 self.ui.warn(_("interrupted"))
2112 self.ui.warn(_("interrupted"))
2113 raise
2113 raise
2114 except Exception, inst:
2114 except Exception, inst:
2115 err(_("unpacking manifest %s: %s") % (short(n), inst))
2115 err(_("unpacking manifest %s: %s") % (short(n), inst))
2116 continue
2116 continue
2117
2117
2118 try:
2118 try:
2119 ff = [ l.split('\0') for l in delta.splitlines() ]
2119 ff = [ l.split('\0') for l in delta.splitlines() ]
2120 for f, fn in ff:
2120 for f, fn in ff:
2121 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2121 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
2122 except (ValueError, TypeError), inst:
2122 except (ValueError, TypeError), inst:
2123 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2123 err(_("broken delta in manifest %s: %s") % (short(n), inst))
2124
2124
2125 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2125 self.ui.status(_("crosschecking files in changesets and manifests\n"))
2126
2126
2127 for m, c in neededmanifests.items():
2127 for m, c in neededmanifests.items():
2128 err(_("Changeset %s refers to unknown manifest %s") %
2128 err(_("Changeset %s refers to unknown manifest %s") %
2129 (short(m), short(c)))
2129 (short(m), short(c)))
2130 del neededmanifests
2130 del neededmanifests
2131
2131
2132 for f in filenodes:
2132 for f in filenodes:
2133 if f not in filelinkrevs:
2133 if f not in filelinkrevs:
2134 err(_("file %s in manifest but not in changesets") % f)
2134 err(_("file %s in manifest but not in changesets") % f)
2135
2135
2136 for f in filelinkrevs:
2136 for f in filelinkrevs:
2137 if f not in filenodes:
2137 if f not in filenodes:
2138 err(_("file %s in changeset but not in manifest") % f)
2138 err(_("file %s in changeset but not in manifest") % f)
2139
2139
2140 self.ui.status(_("checking files\n"))
2140 self.ui.status(_("checking files\n"))
2141 ff = filenodes.keys()
2141 ff = filenodes.keys()
2142 ff.sort()
2142 ff.sort()
2143 for f in ff:
2143 for f in ff:
2144 if f == "/dev/null":
2144 if f == "/dev/null":
2145 continue
2145 continue
2146 files += 1
2146 files += 1
2147 if not f:
2147 if not f:
2148 err(_("file without name in manifest %s") % short(n))
2148 err(_("file without name in manifest %s") % short(n))
2149 continue
2149 continue
2150 fl = self.file(f)
2150 fl = self.file(f)
2151 checkversion(fl, f)
2151 checkversion(fl, f)
2152 checksize(fl, f)
2152 checksize(fl, f)
2153
2153
2154 nodes = {nullid: 1}
2154 nodes = {nullid: 1}
2155 seen = {}
2155 seen = {}
2156 for i in range(fl.count()):
2156 for i in range(fl.count()):
2157 revisions += 1
2157 revisions += 1
2158 n = fl.node(i)
2158 n = fl.node(i)
2159
2159
2160 if n in seen:
2160 if n in seen:
2161 err(_("%s: duplicate revision %d") % (f, i))
2161 err(_("%s: duplicate revision %d") % (f, i))
2162 if n not in filenodes[f]:
2162 if n not in filenodes[f]:
2163 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2163 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2164 else:
2164 else:
2165 del filenodes[f][n]
2165 del filenodes[f][n]
2166
2166
2167 flr = fl.linkrev(n)
2167 flr = fl.linkrev(n)
2168 if flr not in filelinkrevs.get(f, []):
2168 if flr not in filelinkrevs.get(f, []):
2169 err(_("%s:%s points to unexpected changeset %d")
2169 err(_("%s:%s points to unexpected changeset %d")
2170 % (f, short(n), flr))
2170 % (f, short(n), flr))
2171 else:
2171 else:
2172 filelinkrevs[f].remove(flr)
2172 filelinkrevs[f].remove(flr)
2173
2173
2174 # verify contents
2174 # verify contents
2175 try:
2175 try:
2176 t = fl.read(n)
2176 t = fl.read(n)
2177 except KeyboardInterrupt:
2177 except KeyboardInterrupt:
2178 self.ui.warn(_("interrupted"))
2178 self.ui.warn(_("interrupted"))
2179 raise
2179 raise
2180 except Exception, inst:
2180 except Exception, inst:
2181 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2181 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2182
2182
2183 # verify parents
2183 # verify parents
2184 (p1, p2) = fl.parents(n)
2184 (p1, p2) = fl.parents(n)
2185 if p1 not in nodes:
2185 if p1 not in nodes:
2186 err(_("file %s:%s unknown parent 1 %s") %
2186 err(_("file %s:%s unknown parent 1 %s") %
2187 (f, short(n), short(p1)))
2187 (f, short(n), short(p1)))
2188 if p2 not in nodes:
2188 if p2 not in nodes:
2189 err(_("file %s:%s unknown parent 2 %s") %
2189 err(_("file %s:%s unknown parent 2 %s") %
2190 (f, short(n), short(p2)))
2190 (f, short(n), short(p2)))
2191 nodes[n] = 1
2191 nodes[n] = 1
2192
2192
2193 # cross-check
2193 # cross-check
2194 for node in filenodes[f]:
2194 for node in filenodes[f]:
2195 err(_("node %s in manifests not in %s") % (hex(node), f))
2195 err(_("node %s in manifests not in %s") % (hex(node), f))
2196
2196
2197 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2197 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2198 (files, changesets, revisions))
2198 (files, changesets, revisions))
2199
2199
2200 if warnings[0]:
2200 if warnings[0]:
2201 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2201 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2202 if errors[0]:
2202 if errors[0]:
2203 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2203 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2204 return 1
2204 return 1
2205
2205
2206 def stream_in(self, remote):
2206 def stream_in(self, remote):
2207 self.ui.status(_('streaming all changes\n'))
2207 self.ui.status(_('streaming all changes\n'))
2208 fp = remote.stream_out()
2208 fp = remote.stream_out()
2209 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
2209 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
2210 self.ui.status(_('%d files to transfer, %s of data\n') %
2210 self.ui.status(_('%d files to transfer, %s of data\n') %
2211 (total_files, util.bytecount(total_bytes)))
2211 (total_files, util.bytecount(total_bytes)))
2212 start = time.time()
2212 start = time.time()
2213 for i in xrange(total_files):
2213 for i in xrange(total_files):
2214 name, size = fp.readline().split('\0', 1)
2214 name, size = fp.readline().split('\0', 1)
2215 size = int(size)
2215 size = int(size)
2216 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2216 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2217 ofp = self.opener(name, 'w')
2217 ofp = self.opener(name, 'w')
2218 for chunk in util.filechunkiter(fp, limit=size):
2218 for chunk in util.filechunkiter(fp, limit=size):
2219 ofp.write(chunk)
2219 ofp.write(chunk)
2220 ofp.close()
2220 ofp.close()
2221 elapsed = time.time() - start
2221 elapsed = time.time() - start
2222 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2222 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2223 (util.bytecount(total_bytes), elapsed,
2223 (util.bytecount(total_bytes), elapsed,
2224 util.bytecount(total_bytes / elapsed)))
2224 util.bytecount(total_bytes / elapsed)))
2225 self.reload()
2225 self.reload()
2226 return len(self.heads()) + 1
2226 return len(self.heads()) + 1
2227
2227
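stream_in() above expects a very small wire format from remote.stream_out(): a header line with the file count and total byte count, then, for each file, a line holding the store-relative name, a NUL and the file's size, followed by exactly that many raw bytes. A minimal producer of that layout for in-memory data (the helper name and the sample entry are invented for illustration):

def gen_stream(entries):
    # entries: list of (store-relative name, raw data) pairs
    total = sum([len(data) for name, data in entries])
    yield '%d %d\n' % (len(entries), total)
    for name, data in entries:
        yield '%s\0%d\n' % (name, len(data))
        yield data

# ''.join(gen_stream([('00changelog.i', '...')])) yields exactly the byte
# stream that the loop in stream_in() consumes.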
2228 def clone(self, remote, heads=[], pull=False):
2228 def clone(self, remote, heads=[], stream=False):
2229 '''clone remote repository.
2229 '''clone remote repository.
2230 if possible, changes are streamed from remote server.
2231
2230
2232 keyword arguments:
2231 keyword arguments:
2233 heads: list of revs to clone (forces use of pull)
2232 heads: list of revs to clone (forces use of pull)
2234 pull: force use of pull, even if remote can stream'''
2233 stream: use streaming clone if the remote supports it'''
2235
2234
2236 # now, all clients that can stream can read repo formats
2235 # now, all clients that can stream can read repo formats
2237 # supported by all servers that can stream.
2236 # supported by all servers that can stream.
2238
2237
2239 # if revlog format changes, client will have to check version
2238 # if revlog format changes, client will have to check version
2240 # and format flags on "stream" capability, and stream only if
2239 # and format flags on "stream" capability, and stream only if
2241 # compatible.
2240 # compatible.
2242
2241
2243 if not pull and not heads and remote.capable('stream'):
2242 if stream and not heads and remote.capable('stream'):
2244 return self.stream_in(remote)
2243 return self.stream_in(remote)
2245 return self.pull(remote, heads)
2244 return self.pull(remote, heads)
2246
2245
2247 # used to avoid circular references so destructors work
2246 # used to avoid circular references so destructors work
2248 def aftertrans(base):
2247 def aftertrans(base):
2249 p = base
2248 p = base
2250 def a():
2249 def a():
2251 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2250 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2252 util.rename(os.path.join(p, "journal.dirstate"),
2251 util.rename(os.path.join(p, "journal.dirstate"),
2253 os.path.join(p, "undo.dirstate"))
2252 os.path.join(p, "undo.dirstate"))
2254 return a
2253 return a
2255
2254
@@ -1,25 +1,25 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 mkdir test
3 mkdir test
4 cd test
4 cd test
5 echo foo>foo
5 echo foo>foo
6 hg init
6 hg init
7 hg addremove
7 hg addremove
8 hg commit -m 1
8 hg commit -m 1
9 hg verify
9 hg verify
10 hg serve -p 20059 -d --pid-file=hg.pid
10 hg serve -p 20059 -d --pid-file=hg.pid
11 cat hg.pid >> $DAEMON_PIDS
11 cat hg.pid >> $DAEMON_PIDS
12 cd ..
12 cd ..
13
13
14 echo % clone via stream
14 echo % clone via stream
15 http_proxy= hg clone http://localhost:20059/ copy 2>&1 | \
15 http_proxy= hg clone --stream http://localhost:20059/ copy 2>&1 | \
16 sed -e 's/[0-9][0-9.]*/XXX/g'
16 sed -e 's/[0-9][0-9.]*/XXX/g'
17 cd copy
17 cd copy
18 hg verify
18 hg verify
19
19
20 cd ..
20 cd ..
21
21
22 echo % clone via pull
22 echo % clone via pull
23 http_proxy= hg clone --pull http://localhost:20059/ copy-pull
23 http_proxy= hg clone http://localhost:20059/ copy-pull
24 cd copy-pull
24 cd copy-pull
25 hg verify
25 hg verify
@@ -1,41 +1,41 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init a
3 hg init a
4 cd a
4 cd a
5 echo a > a
5 echo a > a
6 hg ci -Ama -d '1123456789 0'
6 hg ci -Ama -d '1123456789 0'
7 hg serve -p 20059 -d --pid-file=hg.pid
7 hg serve -p 20059 -d --pid-file=hg.pid
8 cat hg.pid >> $DAEMON_PIDS
8 cat hg.pid >> $DAEMON_PIDS
9
9
10 cd ..
10 cd ..
11 ("$TESTDIR/tinyproxy.py" 20060 localhost >proxy.log 2>&1 </dev/null &
11 ("$TESTDIR/tinyproxy.py" 20060 localhost >proxy.log 2>&1 </dev/null &
12 echo $! > proxy.pid)
12 echo $! > proxy.pid)
13 cat proxy.pid >> $DAEMON_PIDS
13 cat proxy.pid >> $DAEMON_PIDS
14 sleep 2
14 sleep 2
15
15
16 echo %% url for proxy, stream
16 echo %% url for proxy, stream
17 http_proxy=http://localhost:20060/ hg --config http_proxy.always=True clone http://localhost:20059/ b | \
17 http_proxy=http://localhost:20060/ hg --config http_proxy.always=True clone --stream http://localhost:20059/ b | \
18 sed -e 's/[0-9][0-9.]*/XXX/g'
18 sed -e 's/[0-9][0-9.]*/XXX/g'
19 cd b
19 cd b
20 hg verify
20 hg verify
21 cd ..
21 cd ..
22
22
23 echo %% url for proxy, pull
23 echo %% url for proxy, pull
24 http_proxy=http://localhost:20060/ hg --config http_proxy.always=True clone --pull http://localhost:20059/ b-pull
24 http_proxy=http://localhost:20060/ hg --config http_proxy.always=True clone http://localhost:20059/ b-pull
25 cd b-pull
25 cd b-pull
26 hg verify
26 hg verify
27 cd ..
27 cd ..
28
28
29 echo %% host:port for proxy
29 echo %% host:port for proxy
30 http_proxy=localhost:20060 hg clone --pull --config http_proxy.always=True http://localhost:20059/ c
30 http_proxy=localhost:20060 hg clone --config http_proxy.always=True http://localhost:20059/ c
31
31
32 echo %% proxy url with user name and password
32 echo %% proxy url with user name and password
33 http_proxy=http://user:passwd@localhost:20060 hg clone --pull --config http_proxy.always=True http://localhost:20059/ d
33 http_proxy=http://user:passwd@localhost:20060 hg clone --config http_proxy.always=True http://localhost:20059/ d
34
34
35 echo %% url with user name and password
35 echo %% url with user name and password
36 http_proxy=http://user:passwd@localhost:20060 hg clone --pull --config http_proxy.always=True http://user:passwd@localhost:20059/ e
36 http_proxy=http://user:passwd@localhost:20060 hg clone --config http_proxy.always=True http://user:passwd@localhost:20059/ e
37
37
38 echo %% bad host:port for proxy
38 echo %% bad host:port for proxy
39 http_proxy=localhost:20061 hg clone --config http_proxy.always=True http://localhost:20059/ f
39 http_proxy=localhost:20061 hg clone --config http_proxy.always=True http://localhost:20059/ f
40
40
41 exit 0
41 exit 0
@@ -1,90 +1,90 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 # This test tries to exercise the ssh functionality with a dummy script
3 # This test tries to exercise the ssh functionality with a dummy script
4
4
5 cat <<'EOF' > dummyssh
5 cat <<'EOF' > dummyssh
6 #!/bin/sh
6 #!/bin/sh
7 # this attempts to deal with relative pathnames
7 # this attempts to deal with relative pathnames
8 cd `dirname $0`
8 cd `dirname $0`
9
9
10 # check for proper args
10 # check for proper args
11 if [ $1 != "user@dummy" ] ; then
11 if [ $1 != "user@dummy" ] ; then
12 exit 1
12 exit 1
13 fi
13 fi
14
14
15 # check that we're in the right directory
15 # check that we're in the right directory
16 if [ ! -x dummyssh ] ; then
16 if [ ! -x dummyssh ] ; then
17 exit 1
17 exit 1
18 fi
18 fi
19
19
20 echo Got arguments 1:$1 2:$2 3:$3 4:$4 5:$5 >> dummylog
20 echo Got arguments 1:$1 2:$2 3:$3 4:$4 5:$5 >> dummylog
21 $2
21 $2
22 EOF
22 EOF
23 chmod +x dummyssh
23 chmod +x dummyssh
24
24
25 echo "# creating 'remote'"
25 echo "# creating 'remote'"
26 hg init remote
26 hg init remote
27 cd remote
27 cd remote
28 echo this > foo
28 echo this > foo
29 hg ci -A -m "init" -d "1000000 0" foo
29 hg ci -A -m "init" -d "1000000 0" foo
30
30
31 cd ..
31 cd ..
32
32
33 echo "# clone remote via stream"
33 echo "# clone remote via stream"
34 hg clone -e ./dummyssh ssh://user@dummy/remote local-stream 2>&1 | \
34 hg clone -e ./dummyssh --stream ssh://user@dummy/remote local-stream 2>&1 | \
35 sed -e 's/[0-9][0-9.]*/XXX/g'
35 sed -e 's/[0-9][0-9.]*/XXX/g'
36 cd local-stream
36 cd local-stream
37 hg verify
37 hg verify
38 cd ..
38 cd ..
39
39
40 echo "# clone remote via pull"
40 echo "# clone remote via pull"
41 hg clone -e ./dummyssh --pull ssh://user@dummy/remote local
41 hg clone -e ./dummyssh ssh://user@dummy/remote local
42
42
43 echo "# verify"
43 echo "# verify"
44 cd local
44 cd local
45 hg verify
45 hg verify
46
46
47 echo "# empty default pull"
47 echo "# empty default pull"
48 hg paths
48 hg paths
49 hg pull -e ../dummyssh
49 hg pull -e ../dummyssh
50
50
51 echo "# local change"
51 echo "# local change"
52 echo bleah > foo
52 echo bleah > foo
53 hg ci -m "add" -d "1000000 0"
53 hg ci -m "add" -d "1000000 0"
54
54
55 echo "# updating rc"
55 echo "# updating rc"
56 echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
56 echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
57 echo "[ui]" >> .hg/hgrc
57 echo "[ui]" >> .hg/hgrc
58 echo "ssh = ../dummyssh" >> .hg/hgrc
58 echo "ssh = ../dummyssh" >> .hg/hgrc
59
59
60 echo "# find outgoing"
60 echo "# find outgoing"
61 hg out ssh://user@dummy/remote
61 hg out ssh://user@dummy/remote
62
62
63 echo "# find incoming on the remote side"
63 echo "# find incoming on the remote side"
64 hg incoming -R ../remote -e ../dummyssh ssh://user@dummy/local
64 hg incoming -R ../remote -e ../dummyssh ssh://user@dummy/local
65
65
66 echo "# push"
66 echo "# push"
67 hg push
67 hg push
68
68
69 cd ../remote
69 cd ../remote
70
70
71 echo "# check remote tip"
71 echo "# check remote tip"
72 hg tip
72 hg tip
73 hg verify
73 hg verify
74 hg cat foo
74 hg cat foo
75
75
76 echo z > z
76 echo z > z
77 hg ci -A -m z -d '1000001 0' z
77 hg ci -A -m z -d '1000001 0' z
78
78
79 cd ../local
79 cd ../local
80 echo r > r
80 echo r > r
81 hg ci -A -m z -d '1000002 0' r
81 hg ci -A -m z -d '1000002 0' r
82
82
83 echo "# push should fail"
83 echo "# push should fail"
84 hg push
84 hg push
85
85
86 echo "# push should succeed"
86 echo "# push should succeed"
87 hg push -f
87 hg push -f
88
88
89 cd ..
89 cd ..
90 cat dummylog
90 cat dummylog