##// END OF EJS Templates
Implement revlogng....
mason@suse.com -
r2072:74d3f533 default
parent child Browse files
Show More
@@ -1,57 +1,58
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import demandload
10 from demandload import demandload
11 demandload(globals(), "os time util")
11 demandload(globals(), "os time util")
12
12
class changelog(revlog):
    """Revlog-backed store for changeset (commit) metadata.

    Each entry is a text blob of the form:

        <manifest hex>\\n<user>\\n<time> <tz offset>\\n<file>...\\n\\n<description>
    """

    def __init__(self, opener, defversion=0):
        # the changelog always lives in 00changelog.i / 00changelog.d;
        # defversion selects the revlog format used when creating it
        revlog.__init__(self, opener, "00changelog.i", "00changelog.d",
                        defversion)

    def extract(self, text):
        """Parse changeset text into a
        (manifest, user, (time, timezone), files, description) tuple.

        Empty text yields the null entry."""
        if not text:
            return (nullid, "", (0, 0), [], "")
        last = text.index("\n\n")
        desc = text[last + 2:]
        l = text[:last].splitlines()
        manifest = bin(l[0])
        user = l[1]
        date = l[2].split(' ')
        time = float(date.pop(0))
        try:
            # various tools did silly things with the time zone field.
            timezone = int(date[0])
        except (ValueError, IndexError):
            # missing or malformed zone: fall back to UTC.  Catch only
            # the two failures above rather than a bare except, which
            # would also swallow KeyboardInterrupt/SystemExit.
            timezone = 0
        files = l[3:]
        return (manifest, user, (time, timezone), files, desc)

    def read(self, node):
        """Return the parsed changeset for node (see extract)."""
        return self.extract(self.revision(node))

    def add(self, manifest, list, desc, transaction, p1=None, p2=None,
            user=None, date=None):
        """Create a new changeset revision and return its node.

        'list' (name kept for interface compatibility although it
        shadows the builtin) is the list of changed files; 'date' is a
        "<seconds> <tz offset>" string, defaulting to the current time.

        Raises ValueError for malformed or out-of-range dates.
        """
        if date:
            # validate explicit (probably user-specified) date and
            # time zone offset. values must fit in signed 32 bits for
            # current 32-bit linux runtimes.
            try:
                when, offset = map(int, date.split(' '))
            except ValueError:
                raise ValueError(_('invalid date: %r') % date)
            if abs(when) > 0x7fffffff:
                raise ValueError(_('date exceeds 32 bits: %d') % when)
            if abs(offset) >= 43200:
                raise ValueError(_('impossible time zone offset: %d') % offset)
        else:
            date = "%d %d" % util.makedate()
        list.sort()
        l = [hex(manifest), user, date] + list + ["", desc]
        text = "\n".join(l)
        return self.addrevision(text, transaction, self.count(), p1, p2)
@@ -1,3468 +1,3469
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "changegroup")
15 demandload(globals(), "changegroup")
16
16
class UnknownCommand(Exception):
    """Exception raised if a command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if a command shortcut matches more than one command."""
21
21
def filterfiles(filters, files):
    """Return the members of files selected by filters.

    A file is selected when it exactly equals a filter name or lies
    beneath one (the filter is treated as a directory prefix).  The
    result keeps the original per-filter ordering and may contain
    duplicates, matching the historical behaviour.
    """
    selected = [name for name in files if name in filters]

    for prefix in filters:
        # normalize non-empty filters into directory prefixes
        if prefix and not prefix.endswith("/"):
            prefix += "/"
        selected.extend(name for name in files if name.startswith(prefix))
    return selected
30
30
def relpath(repo, args):
    """Rebase each path in args onto the repository's current working
    directory, returning normalized paths.  If the repo has no cwd the
    arguments are returned untouched."""
    cwd = repo.getcwd()
    if not cwd:
        return args
    return [util.normpath(os.path.join(cwd, arg)) for arg in args]
36
36
def matchpats(repo, pats=[], opts={}, head=''):
    """Build a command matcher for pats relative to the repo root.

    NOTE: when no patterns are given and a cwd exists, the
    'include'/'exclude' lists in opts are rewritten IN PLACE to be
    cwd-relative — callers observe this mutation (historical contract).
    """
    cwd = repo.getcwd()
    if cwd and not pats:
        opts['include'] = [os.path.join(cwd, pat) for pat in opts['include']]
        opts['exclude'] = [os.path.join(cwd, pat) for pat in opts['exclude']]
        cwd = ''
    return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
                           opts.get('exclude'), head)
45
45
def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
    """Prepare a repository walk for pats.

    Returns (files, matchfn, iterator); the iterator lazily yields
    (src, filename, cwd-relative path, exact-match?) tuples.
    """
    files, matchfn, anypats = matchpats(repo, pats, opts, head)
    # files named explicitly on the command line count as exact matches
    exact = dict(zip(files, files))
    def results():
        for kind, name in repo.walk(node=node, files=files, match=matchfn,
                                    badmatch=badmatch):
            yield kind, name, util.pathto(repo.getcwd(), name), name in exact
    return files, matchfn, results()
54
54
def walk(repo, pats, opts, node=None, head='', badmatch=None):
    """Generator wrapper around makewalk: yields its walk results
    directly, discarding the files/matchfn bookkeeping.  Kept as a
    generator so makewalk only runs on first iteration."""
    _files, _matchfn, results = makewalk(repo, pats, opts, node, head,
                                         badmatch)
    for item in results:
        yield item
59
59
def walkchangerevs(ui, repo, pats, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    it is interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, getchange, matchfn) tuple. The
    getchange function returns the changelog entry for a numeric
    revision. The iterator yields 3-tuples. They will be of one of
    the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        # yield (position, size) pairs stepping from start towards end,
        # doubling the window each step up to sizelimit; works in either
        # direction depending on whether start < end
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2


    files, matchfn, anypats = matchpats(repo, pats, opts)

    # empty repository: nothing to iterate
    if repo.changelog.count() == 0:
        return [], False, matchfn

    revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
    wanted = {}
    # patterns force the slow path (must inspect every changeset)
    slowpath = anypats
    fncache = {}

    # memoized changelog reads keyed by numeric revision
    chcache = {}
    def getchange(rev):
        ch = chcache.get(rev)
        if ch is None:
            chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
        return ch

    if not slowpath and not files:
        # No files, no patterns.  Display all revs.
        wanted = dict(zip(revs, revs))
    if not slowpath:
        # Only files, no patterns.  Check the history of each file.
        def filerevgen(filelog):
            # yield the changelog revs touching this file, newest first,
            # windowed to avoid quadratic behaviour
            for i, window in increasing_windows(filelog.count()-1, -1):
                revs = []
                for j in xrange(i - window, i + 1):
                    revs.append(filelog.linkrev(filelog.node(j)))
                revs.reverse()
                for rev in revs:
                    yield rev

        minrev, maxrev = min(revs), max(revs)
        for file_ in files:
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev in filerevgen(filelog):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
    if slowpath:
        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1, -1):
                for j in xrange(i - window, i + 1):
                    # element 3 of a changelog entry is its file list
                    yield j, getchange(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    def iterate():
        # emit the window/add/iter protocol documented above
        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window]
                     if rev in wanted]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), getchange, matchfn
174
174
# separator between the two ends of a revision range spec, e.g. "2:5"
revrangesep = ':'

def revrange(ui, repo, revs, revlog=None):
    """Yield revisions as strings from a list of revision specifications.

    Each spec is either a single revision (number, negative index from
    the end, or symbolic name) or a "start:end" range, walked in either
    direction.  Duplicates across specs are suppressed.  Raises
    util.Abort for unresolvable identifiers.
    """
    if revlog is None:
        revlog = repo.changelog
    revcount = revlog.count()
    def fix(val, defval):
        # resolve one endpoint: empty -> defval; a pure numeric string
        # is bounds-checked (negatives count back from the end);
        # anything else is looked up by name/id in the changelog, then
        # in the given revlog
        if not val:
            return defval
        try:
            num = int(val)
            if str(num) != val:
                raise ValueError
            if num < 0:
                num += revcount
            if num < 0:
                num = 0
            elif num >= revcount:
                raise ValueError
        except ValueError:
            try:
                num = repo.changelog.rev(repo.lookup(val))
            except KeyError:
                try:
                    num = revlog.rev(revlog.lookup(val))
                except KeyError:
                    raise util.Abort(_('invalid revision identifier %s'), val)
        return num
    seen = {}
    for spec in revs:
        if spec.find(revrangesep) >= 0:
            start, end = spec.split(revrangesep, 1)
            start = fix(start, 0)
            end = fix(end, revcount - 1)
            # walk backwards when the range is inverted
            step = start > end and -1 or 1
            for rev in xrange(start, end+step, step):
                if rev in seen:
                    continue
                seen[rev] = 1
                yield str(rev)
        else:
            rev = fix(spec, None)
            if rev in seen:
                continue
            seen[rev] = 1
            yield str(rev)
222
222
def make_filename(repo, r, pat, node=None,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in the output file name pattern pat.

    Supported escapes (enabled only when the matching argument is
    given): %% literal, %b repo basename, %H/%h/%R node hex/short/rev,
    %r zero-padded rev, %N total, %n (padded) sequence number,
    %s/%d/%p basename/dirname/full pathname.  Raises util.Abort on an
    escape that is unknown or not enabled.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(r.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        # enable escapes progressively, depending on which optional
        # arguments the caller supplied
        if node:
            expander.update(node_expander)
        if node and revwidth is not None:
            expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # both known: pad the sequence number to the total's width
            expander['n'] = lambda:str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # consume the escape character and expand it
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        # unknown or disabled escape letter
        raise util.Abort(_("invalid format spec '%%%s' in output file name"),
                         inst.args[0])
266
266
def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return an open file-like object for the destination pat.

    '-' or an empty pat maps to stdout (write modes) or stdin (read
    modes); an object that already supports the requested direction is
    passed through; otherwise pat is expanded via make_filename and
    opened with the given mode.
    """
    writing = 'w' in mode
    if not pat or pat == '-':
        if writing:
            return sys.stdout
        return sys.stdin
    if writing and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    expanded = make_filename(repo, r, pat, node, total, seqno, revwidth,
                             pathname)
    return open(expanded, mode)
278
278
def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        # drop-in stand-in for bz2.BZ2Compressor that passes data through
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists"), filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(suffix=".hg", prefix="hg-bundle-")
            fh = os.fdopen(fd, "wb")
        # until the bundle is completely written, arm deletion on error
        cleanup = filename

        if compress:
            # "HG10" magic marks a bz2-compressed changegroup bundle
            fh.write("HG10")
            z = bz2.BZ2Compressor(9)
        else:
            # "HG10UN" marks an uncompressed bundle
            fh.write("HG10UN")
            z = nocompress()
        # parse the changegroup data, otherwise we will block
        # in case of sshrepo because we don't know the end of the stream

        # an empty chunkiter is the end of the changegroup
        empty = False
        while not empty:
            empty = True
            for chunk in changegroup.chunkiter(cg):
                empty = False
                fh.write(z.compress(changegroup.genchunk(chunk)))
            # terminate each group with a close chunk
            fh.write(z.compress(changegroup.closechunk()))
        fh.write(z.flush())
        # success: disarm cleanup so the finished bundle survives
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)
330
330
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts={}):
    """Write a unified diff between node1 and node2 to fp.

    node1 defaults to the working directory's first parent; a false
    node2 means "diff against the working directory".  files/match
    restrict which files are diffed; a precomputed changes tuple may be
    passed to avoid recomputing repo.changes.
    """
    if not node1:
        node1 = repo.dirstate.parents()[0]
    # reading the data for node1 early allows it to play nicely
    # with repo.changes and the revlog cache.
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        # narrow each status list down to the requested files
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        # no second revision: compare against working directory files
        date2 = util.datestr()
        def read(f):
            return repo.wread(f)

    if ui.quiet:
        r = None
    else:
        # revision labels for the diff header: full hex when verbose
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    for f in modified:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
    for f in added:
        # added files have no old content
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
    for f in removed:
        # removed files have no new content
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
388
388
def trimuser(ui, name, rev, revcache):
    """Return the shortened name of the user who committed rev,
    memoizing the result in revcache (keyed by rev)."""
    cached = revcache.get(rev)
    if cached is not None:
        return cached
    shortened = ui.shortuser(name)
    revcache[rev] = shortened
    return shortened
395
395
396 class changeset_templater(object):
396 class changeset_templater(object):
397 '''use templater module to format changeset information.'''
397 '''use templater module to format changeset information.'''
398
398
399 def __init__(self, ui, repo, mapfile):
399 def __init__(self, ui, repo, mapfile):
400 self.t = templater.templater(mapfile, templater.common_filters,
400 self.t = templater.templater(mapfile, templater.common_filters,
401 cache={'parent': '{rev}:{node|short} ',
401 cache={'parent': '{rev}:{node|short} ',
402 'manifest': '{rev}:{node|short}'})
402 'manifest': '{rev}:{node|short}'})
403 self.ui = ui
403 self.ui = ui
404 self.repo = repo
404 self.repo = repo
405
405
    def use_template(self, t):
        '''set the template string used to render each changeset'''
        self.t.cache['changeset'] = t
409
409
410 def write(self, thing, header=False):
410 def write(self, thing, header=False):
411 '''write expanded template.
411 '''write expanded template.
412 uses in-order recursive traverse of iterators.'''
412 uses in-order recursive traverse of iterators.'''
413 for t in thing:
413 for t in thing:
414 if hasattr(t, '__iter__'):
414 if hasattr(t, '__iter__'):
415 self.write(t, header=header)
415 self.write(t, header=header)
416 elif header:
416 elif header:
417 self.ui.write_header(t)
417 self.ui.write_header(t)
418 else:
418 else:
419 self.ui.write(t)
419 self.ui.write(t)
420
420
    def write_header(self, thing):
        '''expand a template as in write(), but route all leaf output
        through ui.write_header'''
        self.write(thing, header=True)
423
423
424 def show(self, rev=0, changenode=None, brinfo=None):
def show(self, rev=0, changenode=None, brinfo=None):
    '''show a single changeset or file revision'''
    log = self.repo.changelog
    # resolve whichever of rev/changenode the caller did not supply
    if changenode is None:
        changenode = log.node(rev)
    elif not rev:
        rev = log.rev(changenode)

    changes = log.read(changenode)

    def showlist(name, values, plural=None, **args):
        '''expand set of values.

        name is name of key in template map.
        values is list of strings or dicts.
        plural is plural of name, if not simply name + 's'.

        expansion works like this, given name 'foo'.

        if values is empty, expand 'no_foos'.

        if 'foo' not in template map, return values as a string,
        joined by space.

        expand 'start_foos'.

        for each value, expand 'foo'. if 'last_foo' in template
        map, expand it instead of 'foo' for last key.

        expand 'end_foos'.
        '''
        names = plural or name + 's'
        if not values:
            noname = 'no_' + names
            if noname in self.t:
                yield self.t(noname, **args)
            return
        if name not in self.t:
            # no per-item template: emit the raw values
            if isinstance(values[0], str):
                yield ' '.join(values)
            else:
                for item in values:
                    yield dict(item, **args)
            return
        startname = 'start_' + names
        if startname in self.t:
            yield self.t(startname, **args)
        vargs = args.copy()
        def one(item, tag=name):
            # an item may be a dict, a pair list, or a plain value
            try:
                vargs.update(item)
            except (AttributeError, ValueError):
                try:
                    for a, b in item:
                        vargs[a] = b
                except ValueError:
                    vargs[name] = item
            return self.t(tag, **vargs)
        lastname = 'last_' + name
        if lastname in self.t:
            last = values.pop()
        else:
            last = None
        for item in values:
            yield one(item)
        if last is not None:
            yield one(last, tag=lastname)
        endname = 'end_' + names
        if endname in self.t:
            yield self.t(endname, **args)

    if brinfo:
        def showbranches(**args):
            if changenode in brinfo:
                for chunk in showlist('branch', brinfo[changenode],
                                      plural='branches', **args):
                    yield chunk
    else:
        showbranches = ''

    if self.ui.debugflag:
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                             node=hex(changes[0])))
            yield self.t('manifest', **args)
    else:
        showmanifest = ''

    def showparents(**args):
        parents = [[('rev', log.rev(p)), ('node', hex(p))]
                   for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        # suppress the sole parent when it is just the previous rev
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0][1] == rev - 1):
            return
        for chunk in showlist('parent', parents, **args):
            yield chunk

    def showtags(**args):
        for chunk in showlist('tag', self.repo.nodetags(changenode), **args):
            yield chunk

    if self.ui.debugflag:
        files = self.repo.changes(log.parents(changenode)[0], changenode)
        def showfiles(**args):
            for chunk in showlist('file', files[0], **args):
                yield chunk
        def showadds(**args):
            for chunk in showlist('file_add', files[1], **args):
                yield chunk
        def showdels(**args):
            for chunk in showlist('file_del', files[2], **args):
                yield chunk
    else:
        def showfiles(**args):
            for chunk in showlist('file', changes[3], **args):
                yield chunk
        showadds = ''
        showdels = ''

    props = {
        'author': changes[1],
        'branches': showbranches,
        'date': changes[2],
        'desc': changes[4],
        'file_adds': showadds,
        'file_dels': showdels,
        'files': showfiles,
        'manifest': showmanifest,
        'node': hex(changenode),
        'parents': showparents,
        'rev': rev,
        'tags': showtags,
        }

    try:
        # pick the most specific header template available
        if self.ui.debugflag and 'header_debug' in self.t:
            key = 'header_debug'
        elif self.ui.quiet and 'header_quiet' in self.t:
            key = 'header_quiet'
        elif self.ui.verbose and 'header_verbose' in self.t:
            key = 'header_verbose'
        elif 'header' in self.t:
            key = 'header'
        else:
            key = ''
        if key:
            self.write_header(self.t(key, **props))
        # pick the most specific changeset template available
        if self.ui.debugflag and 'changeset_debug' in self.t:
            key = 'changeset_debug'
        elif self.ui.quiet and 'changeset_quiet' in self.t:
            key = 'changeset_quiet'
        elif self.ui.verbose and 'changeset_verbose' in self.t:
            key = 'changeset_verbose'
        else:
            key = 'changeset'
        self.write(self.t(key, **props))
    except KeyError as inst:
        raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                       inst.args[0]))
    except SyntaxError as inst:
        raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
583
583
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # resolve whichever of rev/changenode the caller did not supply
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        # quiet mode: one line, nothing else
        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])

        hexfn = hex if self.ui.verbose else short
        parents = [(log.rev(p), hexfn(p))
                   for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        # hide the sole parent when it is just the previous rev
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfn(changenode)))

        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            # debug mode: break the file list into kept/added/removed
            files = self.repo.changes(log.parents(changenode)[0], changenode)
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")
651
651
def show_changeset(ui, repo, opts):
    '''show one changeset. uses template or regular display. caller
    can pass in 'style' and 'template' options in opts.'''

    # command-line template wins; fall back to the configured one
    tmpl = opts.get('template')
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            tmpl = templater.parsestring(tmpl)
    mapfile = opts.get('style') or ui.config('ui', 'style')
    if not tmpl and not mapfile:
        # no templating requested: plain display
        return changeset_printer(ui, repo)
    if mapfile:
        # a style name may refer to a shipped map-cmdline.* file
        if not os.path.isfile(mapfile):
            mapname = templater.templatepath('map-cmdline.' + mapfile)
            if not mapname:
                mapname = templater.templatepath(mapfile)
            if mapname:
                mapfile = mapname
    try:
        t = changeset_templater(ui, repo, mapfile)
    except SyntaxError as inst:
        raise util.Abort(inst.args[0])
    if tmpl:
        t.use_template(tmpl)
    return t
676
676
def show_version(ui):
    """output version and copyright information"""
    banner = _("Mercurial Distributed SCM (version %s)\n")
    ui.write(banner % version.get_version())
    # copyright notice is status-only so --quiet suppresses it
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
687
687
def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands"""
    # (title, options) pairs rendered at the end
    option_lists = []
    if cmd and cmd != 'shortlist':
        # help for one specific command
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # quiet: first line of the docstring only
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        # h maps command name -> first doc line; cmds maps name -> aliases
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|")[0]
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = sorted(h.keys())
        # guard the empty case: max() on an empty sequence raises ValueError
        if fns:
            m = max(map(len, fns))
            for f in fns:
                if ui.verbose:
                    commands = cmds[f].replace("|",", ")
                    ui.write(" %s:\n %s\n"%(commands, h[f]))
                else:
                    ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists; (text, None) entries are section titles
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            left = "%2s%s" % (shortopt and "-%s" % shortopt,
                              longopt and " --%s" % longopt)
            right = "%s%s" % (desc,
                              default
                              and _(" (default: %s)") % default
                              or "")
            opt_output.append((left, right))

    if opt_output:
        # align descriptions on the widest option column
        opts_len = max([len(first) for first, second in opt_output if second])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
782
782
783 # Commands start here, listed alphabetically
783 # Commands start here, listed alphabetically
784
784
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    to_add = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            # explicitly named: always add, announce only when verbose
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            to_add.append(abs)
        elif repo.dirstate.state(abs) == '?':
            # pattern match: add only untracked files, always announce
            ui.status(_('adding %s\n') % rel)
            to_add.append(abs)
    repo.add(to_add)
805
805
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.
    """
    # thin wrapper: the real work happens in addremove_lock
    return addremove_lock(ui, repo, pats, opts)
815
815
def addremove_lock(ui, repo, pats, opts, wlock=None):
    # collect untracked files to add and vanished files to remove,
    # then apply both under the (optional) working-dir lock
    added, removed = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if src == 'f' and repo.dirstate.state(abs) == '?':
            added.append(abs)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            removed.append(abs)
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    repo.add(added, wlock=wlock)
    repo.remove(removed, wlock=wlock)
829
829
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    def getnode(rev):
        return short(repo.changelog.node(rev))

    # cache of trimmed user names, shared across lines
    ucache = {}
    def getname(rev):
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    # cache of formatted dates, keyed by rev
    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            cl = repo.changelog.read(repo.changelog.node(rev))
            datestr = dcache[rev] = util.datestr(cl[2])
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, per-rev formatter) pairs, in output-column order
    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        # nothing requested: default to rev numbers
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts, node=node):
        flog = repo.file(abs)
        if not opts['text'] and util.binary(flog.read(mmap[abs])):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = flog.annotate(mmap[abs])
        pieces = []

        for opt, formatter in opmap:
            if opts[opt]:
                column = [formatter(n) for n, dummy in lines]
                if column:
                    # right-align each column to its widest entry
                    width = max(map(len, column))
                    pieces.append(["%*s" % (width, cell) for cell in column])

        if pieces:
            for annot, line in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(annot), line[1]))
892
892
def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    dest = ui.expandpath(dest)
    other = hg.repository(ui, dest)
    # changesets we have that the destination lacks
    outgoing = repo.findoutgoing(other, force=opts['force'])
    group = repo.changegroup(outgoing, 'bundle')
    write_bundle(group, fname)
913
913
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    # resolve the requested revision (default: tip)
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = repo.changelog.tip()
    change = repo.changelog.read(node)
    # dead `mf = {}` initialization removed: the manifest read below
    # always rebinds it before first use
    mf = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
        r = repo.file(abs)
        n = mf[abs]
        # make_file interprets the %-escapes in opts['output']
        fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
        fp.write(r.read(n))
941
941
942 def clone(ui, source, dest=None, **opts):
942 def clone(ui, source, dest=None, **opts):
943 """make a copy of an existing repository
943 """make a copy of an existing repository
944
944
945 Create a copy of an existing repository in a new directory.
945 Create a copy of an existing repository in a new directory.
946
946
947 If no destination directory name is specified, it defaults to the
947 If no destination directory name is specified, it defaults to the
948 basename of the source.
948 basename of the source.
949
949
950 The location of the source is added to the new repository's
950 The location of the source is added to the new repository's
951 .hg/hgrc file, as the default to be used for future pulls.
951 .hg/hgrc file, as the default to be used for future pulls.
952
952
953 For efficiency, hardlinks are used for cloning whenever the source
953 For efficiency, hardlinks are used for cloning whenever the source
954 and destination are on the same filesystem. Some filesystems,
954 and destination are on the same filesystem. Some filesystems,
955 such as AFS, implement hardlinking incorrectly, but do not report
955 such as AFS, implement hardlinking incorrectly, but do not report
956 errors. In these cases, use the --pull option to avoid
956 errors. In these cases, use the --pull option to avoid
957 hardlinking.
957 hardlinking.
958
958
959 See pull for valid source format details.
959 See pull for valid source format details.
960 """
960 """
961 if dest is None:
961 if dest is None:
962 dest = os.path.basename(os.path.normpath(source))
962 dest = os.path.basename(os.path.normpath(source))
963
963
964 if os.path.exists(dest):
964 if os.path.exists(dest):
965 raise util.Abort(_("destination '%s' already exists"), dest)
965 raise util.Abort(_("destination '%s' already exists"), dest)
966
966
967 dest = os.path.realpath(dest)
967 dest = os.path.realpath(dest)
968
968
969 class Dircleanup(object):
969 class Dircleanup(object):
970 def __init__(self, dir_):
970 def __init__(self, dir_):
971 self.rmtree = shutil.rmtree
971 self.rmtree = shutil.rmtree
972 self.dir_ = dir_
972 self.dir_ = dir_
973 os.mkdir(dir_)
973 os.mkdir(dir_)
974 def close(self):
974 def close(self):
975 self.dir_ = None
975 self.dir_ = None
976 def __del__(self):
976 def __del__(self):
977 if self.dir_:
977 if self.dir_:
978 self.rmtree(self.dir_, True)
978 self.rmtree(self.dir_, True)
979
979
980 if opts['ssh']:
980 if opts['ssh']:
981 ui.setconfig("ui", "ssh", opts['ssh'])
981 ui.setconfig("ui", "ssh", opts['ssh'])
982 if opts['remotecmd']:
982 if opts['remotecmd']:
983 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
983 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
984
984
985 source = ui.expandpath(source)
985 source = ui.expandpath(source)
986
986
987 d = Dircleanup(dest)
987 d = Dircleanup(dest)
988 abspath = source
988 abspath = source
989 other = hg.repository(ui, source)
989 other = hg.repository(ui, source)
990
990
991 copy = False
991 copy = False
992 if other.dev() != -1:
992 if other.dev() != -1:
993 abspath = os.path.abspath(source)
993 abspath = os.path.abspath(source)
994 if not opts['pull'] and not opts['rev']:
994 if not opts['pull'] and not opts['rev']:
995 copy = True
995 copy = True
996
996
997 if copy:
997 if copy:
998 try:
998 try:
999 # we use a lock here because if we race with commit, we
999 # we use a lock here because if we race with commit, we
1000 # can end up with extra data in the cloned revlogs that's
1000 # can end up with extra data in the cloned revlogs that's
1001 # not pointed to by changesets, thus causing verify to
1001 # not pointed to by changesets, thus causing verify to
1002 # fail
1002 # fail
1003 l1 = other.lock()
1003 l1 = other.lock()
1004 except lock.LockException:
1004 except lock.LockException:
1005 copy = False
1005 copy = False
1006
1006
1007 if copy:
1007 if copy:
1008 # we lock here to avoid premature writing to the target
1008 # we lock here to avoid premature writing to the target
1009 os.mkdir(os.path.join(dest, ".hg"))
1009 os.mkdir(os.path.join(dest, ".hg"))
1010 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
1010 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
1011
1011
1012 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
1012 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
1013 for f in files.split():
1013 for f in files.split():
1014 src = os.path.join(source, ".hg", f)
1014 src = os.path.join(source, ".hg", f)
1015 dst = os.path.join(dest, ".hg", f)
1015 dst = os.path.join(dest, ".hg", f)
1016 try:
1016 try:
1017 util.copyfiles(src, dst)
1017 util.copyfiles(src, dst)
1018 except OSError, inst:
1018 except OSError, inst:
1019 if inst.errno != errno.ENOENT:
1019 if inst.errno != errno.ENOENT:
1020 raise
1020 raise
1021
1021
1022 repo = hg.repository(ui, dest)
1022 repo = hg.repository(ui, dest)
1023
1023
1024 else:
1024 else:
1025 revs = None
1025 revs = None
1026 if opts['rev']:
1026 if opts['rev']:
1027 if not other.local():
1027 if not other.local():
1028 error = _("clone -r not supported yet for remote repositories.")
1028 error = _("clone -r not supported yet for remote repositories.")
1029 raise util.Abort(error)
1029 raise util.Abort(error)
1030 else:
1030 else:
1031 revs = [other.lookup(rev) for rev in opts['rev']]
1031 revs = [other.lookup(rev) for rev in opts['rev']]
1032 repo = hg.repository(ui, dest, create=1)
1032 repo = hg.repository(ui, dest, create=1)
1033 repo.pull(other, heads = revs)
1033 repo.pull(other, heads = revs)
1034
1034
1035 f = repo.opener("hgrc", "w", text=True)
1035 f = repo.opener("hgrc", "w", text=True)
1036 f.write("[paths]\n")
1036 f.write("[paths]\n")
1037 f.write("default = %s\n" % abspath)
1037 f.write("default = %s\n" % abspath)
1038 f.close()
1038 f.close()
1039
1039
1040 if not opts['noupdate']:
1040 if not opts['noupdate']:
1041 update(repo.ui, repo)
1041 update(repo.ui, repo)
1042
1042
1043 d.close()
1043 d.close()
1044
1044
1045 def commit(ui, repo, *pats, **opts):
1045 def commit(ui, repo, *pats, **opts):
1046 """commit the specified files or all outstanding changes
1046 """commit the specified files or all outstanding changes
1047
1047
1048 Commit changes to the given files into the repository.
1048 Commit changes to the given files into the repository.
1049
1049
1050 If a list of files is omitted, all changes reported by "hg status"
1050 If a list of files is omitted, all changes reported by "hg status"
1051 will be committed.
1051 will be committed.
1052
1052
1053 If no commit message is specified, the editor configured in your hgrc
1053 If no commit message is specified, the editor configured in your hgrc
1054 or in the EDITOR environment variable is started to enter a message.
1054 or in the EDITOR environment variable is started to enter a message.
1055 """
1055 """
1056 message = opts['message']
1056 message = opts['message']
1057 logfile = opts['logfile']
1057 logfile = opts['logfile']
1058
1058
1059 if message and logfile:
1059 if message and logfile:
1060 raise util.Abort(_('options --message and --logfile are mutually '
1060 raise util.Abort(_('options --message and --logfile are mutually '
1061 'exclusive'))
1061 'exclusive'))
1062 if not message and logfile:
1062 if not message and logfile:
1063 try:
1063 try:
1064 if logfile == '-':
1064 if logfile == '-':
1065 message = sys.stdin.read()
1065 message = sys.stdin.read()
1066 else:
1066 else:
1067 message = open(logfile).read()
1067 message = open(logfile).read()
1068 except IOError, inst:
1068 except IOError, inst:
1069 raise util.Abort(_("can't read commit message '%s': %s") %
1069 raise util.Abort(_("can't read commit message '%s': %s") %
1070 (logfile, inst.strerror))
1070 (logfile, inst.strerror))
1071
1071
1072 if opts['addremove']:
1072 if opts['addremove']:
1073 addremove(ui, repo, *pats, **opts)
1073 addremove(ui, repo, *pats, **opts)
1074 fns, match, anypats = matchpats(repo, pats, opts)
1074 fns, match, anypats = matchpats(repo, pats, opts)
1075 if pats:
1075 if pats:
1076 modified, added, removed, deleted, unknown = (
1076 modified, added, removed, deleted, unknown = (
1077 repo.changes(files=fns, match=match))
1077 repo.changes(files=fns, match=match))
1078 files = modified + added + removed
1078 files = modified + added + removed
1079 else:
1079 else:
1080 files = []
1080 files = []
1081 try:
1081 try:
1082 repo.commit(files, message, opts['user'], opts['date'], match)
1082 repo.commit(files, message, opts['user'], opts['date'], match)
1083 except ValueError, inst:
1083 except ValueError, inst:
1084 raise util.Abort(str(inst))
1084 raise util.Abort(str(inst))
1085
1085
1086 def docopy(ui, repo, pats, opts, wlock):
1086 def docopy(ui, repo, pats, opts, wlock):
1087 # called with the repo lock held
1087 # called with the repo lock held
1088 cwd = repo.getcwd()
1088 cwd = repo.getcwd()
1089 errors = 0
1089 errors = 0
1090 copied = []
1090 copied = []
1091 targets = {}
1091 targets = {}
1092
1092
1093 def okaytocopy(abs, rel, exact):
1093 def okaytocopy(abs, rel, exact):
1094 reasons = {'?': _('is not managed'),
1094 reasons = {'?': _('is not managed'),
1095 'a': _('has been marked for add'),
1095 'a': _('has been marked for add'),
1096 'r': _('has been marked for remove')}
1096 'r': _('has been marked for remove')}
1097 state = repo.dirstate.state(abs)
1097 state = repo.dirstate.state(abs)
1098 reason = reasons.get(state)
1098 reason = reasons.get(state)
1099 if reason:
1099 if reason:
1100 if state == 'a':
1100 if state == 'a':
1101 origsrc = repo.dirstate.copied(abs)
1101 origsrc = repo.dirstate.copied(abs)
1102 if origsrc is not None:
1102 if origsrc is not None:
1103 return origsrc
1103 return origsrc
1104 if exact:
1104 if exact:
1105 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1105 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1106 else:
1106 else:
1107 return abs
1107 return abs
1108
1108
1109 def copy(origsrc, abssrc, relsrc, target, exact):
1109 def copy(origsrc, abssrc, relsrc, target, exact):
1110 abstarget = util.canonpath(repo.root, cwd, target)
1110 abstarget = util.canonpath(repo.root, cwd, target)
1111 reltarget = util.pathto(cwd, abstarget)
1111 reltarget = util.pathto(cwd, abstarget)
1112 prevsrc = targets.get(abstarget)
1112 prevsrc = targets.get(abstarget)
1113 if prevsrc is not None:
1113 if prevsrc is not None:
1114 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1114 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1115 (reltarget, abssrc, prevsrc))
1115 (reltarget, abssrc, prevsrc))
1116 return
1116 return
1117 if (not opts['after'] and os.path.exists(reltarget) or
1117 if (not opts['after'] and os.path.exists(reltarget) or
1118 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1118 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1119 if not opts['force']:
1119 if not opts['force']:
1120 ui.warn(_('%s: not overwriting - file exists\n') %
1120 ui.warn(_('%s: not overwriting - file exists\n') %
1121 reltarget)
1121 reltarget)
1122 return
1122 return
1123 if not opts['after']:
1123 if not opts['after']:
1124 os.unlink(reltarget)
1124 os.unlink(reltarget)
1125 if opts['after']:
1125 if opts['after']:
1126 if not os.path.exists(reltarget):
1126 if not os.path.exists(reltarget):
1127 return
1127 return
1128 else:
1128 else:
1129 targetdir = os.path.dirname(reltarget) or '.'
1129 targetdir = os.path.dirname(reltarget) or '.'
1130 if not os.path.isdir(targetdir):
1130 if not os.path.isdir(targetdir):
1131 os.makedirs(targetdir)
1131 os.makedirs(targetdir)
1132 try:
1132 try:
1133 restore = repo.dirstate.state(abstarget) == 'r'
1133 restore = repo.dirstate.state(abstarget) == 'r'
1134 if restore:
1134 if restore:
1135 repo.undelete([abstarget], wlock)
1135 repo.undelete([abstarget], wlock)
1136 try:
1136 try:
1137 shutil.copyfile(relsrc, reltarget)
1137 shutil.copyfile(relsrc, reltarget)
1138 shutil.copymode(relsrc, reltarget)
1138 shutil.copymode(relsrc, reltarget)
1139 restore = False
1139 restore = False
1140 finally:
1140 finally:
1141 if restore:
1141 if restore:
1142 repo.remove([abstarget], wlock)
1142 repo.remove([abstarget], wlock)
1143 except shutil.Error, inst:
1143 except shutil.Error, inst:
1144 raise util.Abort(str(inst))
1144 raise util.Abort(str(inst))
1145 except IOError, inst:
1145 except IOError, inst:
1146 if inst.errno == errno.ENOENT:
1146 if inst.errno == errno.ENOENT:
1147 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1147 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1148 else:
1148 else:
1149 ui.warn(_('%s: cannot copy - %s\n') %
1149 ui.warn(_('%s: cannot copy - %s\n') %
1150 (relsrc, inst.strerror))
1150 (relsrc, inst.strerror))
1151 errors += 1
1151 errors += 1
1152 return
1152 return
1153 if ui.verbose or not exact:
1153 if ui.verbose or not exact:
1154 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1154 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1155 targets[abstarget] = abssrc
1155 targets[abstarget] = abssrc
1156 if abstarget != origsrc:
1156 if abstarget != origsrc:
1157 repo.copy(origsrc, abstarget, wlock)
1157 repo.copy(origsrc, abstarget, wlock)
1158 copied.append((abssrc, relsrc, exact))
1158 copied.append((abssrc, relsrc, exact))
1159
1159
1160 def targetpathfn(pat, dest, srcs):
1160 def targetpathfn(pat, dest, srcs):
1161 if os.path.isdir(pat):
1161 if os.path.isdir(pat):
1162 abspfx = util.canonpath(repo.root, cwd, pat)
1162 abspfx = util.canonpath(repo.root, cwd, pat)
1163 if destdirexists:
1163 if destdirexists:
1164 striplen = len(os.path.split(abspfx)[0])
1164 striplen = len(os.path.split(abspfx)[0])
1165 else:
1165 else:
1166 striplen = len(abspfx)
1166 striplen = len(abspfx)
1167 if striplen:
1167 if striplen:
1168 striplen += len(os.sep)
1168 striplen += len(os.sep)
1169 res = lambda p: os.path.join(dest, p[striplen:])
1169 res = lambda p: os.path.join(dest, p[striplen:])
1170 elif destdirexists:
1170 elif destdirexists:
1171 res = lambda p: os.path.join(dest, os.path.basename(p))
1171 res = lambda p: os.path.join(dest, os.path.basename(p))
1172 else:
1172 else:
1173 res = lambda p: dest
1173 res = lambda p: dest
1174 return res
1174 return res
1175
1175
1176 def targetpathafterfn(pat, dest, srcs):
1176 def targetpathafterfn(pat, dest, srcs):
1177 if util.patkind(pat, None)[0]:
1177 if util.patkind(pat, None)[0]:
1178 # a mercurial pattern
1178 # a mercurial pattern
1179 res = lambda p: os.path.join(dest, os.path.basename(p))
1179 res = lambda p: os.path.join(dest, os.path.basename(p))
1180 else:
1180 else:
1181 abspfx = util.canonpath(repo.root, cwd, pat)
1181 abspfx = util.canonpath(repo.root, cwd, pat)
1182 if len(abspfx) < len(srcs[0][0]):
1182 if len(abspfx) < len(srcs[0][0]):
1183 # A directory. Either the target path contains the last
1183 # A directory. Either the target path contains the last
1184 # component of the source path or it does not.
1184 # component of the source path or it does not.
1185 def evalpath(striplen):
1185 def evalpath(striplen):
1186 score = 0
1186 score = 0
1187 for s in srcs:
1187 for s in srcs:
1188 t = os.path.join(dest, s[0][striplen:])
1188 t = os.path.join(dest, s[0][striplen:])
1189 if os.path.exists(t):
1189 if os.path.exists(t):
1190 score += 1
1190 score += 1
1191 return score
1191 return score
1192
1192
1193 striplen = len(abspfx)
1193 striplen = len(abspfx)
1194 if striplen:
1194 if striplen:
1195 striplen += len(os.sep)
1195 striplen += len(os.sep)
1196 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1196 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1197 score = evalpath(striplen)
1197 score = evalpath(striplen)
1198 striplen1 = len(os.path.split(abspfx)[0])
1198 striplen1 = len(os.path.split(abspfx)[0])
1199 if striplen1:
1199 if striplen1:
1200 striplen1 += len(os.sep)
1200 striplen1 += len(os.sep)
1201 if evalpath(striplen1) > score:
1201 if evalpath(striplen1) > score:
1202 striplen = striplen1
1202 striplen = striplen1
1203 res = lambda p: os.path.join(dest, p[striplen:])
1203 res = lambda p: os.path.join(dest, p[striplen:])
1204 else:
1204 else:
1205 # a file
1205 # a file
1206 if destdirexists:
1206 if destdirexists:
1207 res = lambda p: os.path.join(dest, os.path.basename(p))
1207 res = lambda p: os.path.join(dest, os.path.basename(p))
1208 else:
1208 else:
1209 res = lambda p: dest
1209 res = lambda p: dest
1210 return res
1210 return res
1211
1211
1212
1212
1213 pats = list(pats)
1213 pats = list(pats)
1214 if not pats:
1214 if not pats:
1215 raise util.Abort(_('no source or destination specified'))
1215 raise util.Abort(_('no source or destination specified'))
1216 if len(pats) == 1:
1216 if len(pats) == 1:
1217 raise util.Abort(_('no destination specified'))
1217 raise util.Abort(_('no destination specified'))
1218 dest = pats.pop()
1218 dest = pats.pop()
1219 destdirexists = os.path.isdir(dest)
1219 destdirexists = os.path.isdir(dest)
1220 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1220 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1221 raise util.Abort(_('with multiple sources, destination must be an '
1221 raise util.Abort(_('with multiple sources, destination must be an '
1222 'existing directory'))
1222 'existing directory'))
1223 if opts['after']:
1223 if opts['after']:
1224 tfn = targetpathafterfn
1224 tfn = targetpathafterfn
1225 else:
1225 else:
1226 tfn = targetpathfn
1226 tfn = targetpathfn
1227 copylist = []
1227 copylist = []
1228 for pat in pats:
1228 for pat in pats:
1229 srcs = []
1229 srcs = []
1230 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1230 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1231 origsrc = okaytocopy(abssrc, relsrc, exact)
1231 origsrc = okaytocopy(abssrc, relsrc, exact)
1232 if origsrc:
1232 if origsrc:
1233 srcs.append((origsrc, abssrc, relsrc, exact))
1233 srcs.append((origsrc, abssrc, relsrc, exact))
1234 if not srcs:
1234 if not srcs:
1235 continue
1235 continue
1236 copylist.append((tfn(pat, dest, srcs), srcs))
1236 copylist.append((tfn(pat, dest, srcs), srcs))
1237 if not copylist:
1237 if not copylist:
1238 raise util.Abort(_('no files to copy'))
1238 raise util.Abort(_('no files to copy'))
1239
1239
1240 for targetpath, srcs in copylist:
1240 for targetpath, srcs in copylist:
1241 for origsrc, abssrc, relsrc, exact in srcs:
1241 for origsrc, abssrc, relsrc, exact in srcs:
1242 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1242 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1243
1243
1244 if errors:
1244 if errors:
1245 ui.warn(_('(consider using --after)\n'))
1245 ui.warn(_('(consider using --after)\n'))
1246 return errors, copied
1246 return errors, copied
1247
1247
1248 def copy(ui, repo, *pats, **opts):
1248 def copy(ui, repo, *pats, **opts):
1249 """mark files as copied for the next commit
1249 """mark files as copied for the next commit
1250
1250
1251 Mark dest as having copies of source files. If dest is a
1251 Mark dest as having copies of source files. If dest is a
1252 directory, copies are put in that directory. If dest is a file,
1252 directory, copies are put in that directory. If dest is a file,
1253 there can only be one source.
1253 there can only be one source.
1254
1254
1255 By default, this command copies the contents of files as they
1255 By default, this command copies the contents of files as they
1256 stand in the working directory. If invoked with --after, the
1256 stand in the working directory. If invoked with --after, the
1257 operation is recorded, but no copying is performed.
1257 operation is recorded, but no copying is performed.
1258
1258
1259 This command takes effect in the next commit.
1259 This command takes effect in the next commit.
1260
1260
1261 NOTE: This command should be treated as experimental. While it
1261 NOTE: This command should be treated as experimental. While it
1262 should properly record copied files, this information is not yet
1262 should properly record copied files, this information is not yet
1263 fully used by merge, nor fully reported by log.
1263 fully used by merge, nor fully reported by log.
1264 """
1264 """
1265 wlock = repo.wlock(0)
1265 wlock = repo.wlock(0)
1266 errs, copied = docopy(ui, repo, pats, opts, wlock)
1266 errs, copied = docopy(ui, repo, pats, opts, wlock)
1267 return errs
1267 return errs
1268
1268
1269 def debugancestor(ui, index, rev1, rev2):
1269 def debugancestor(ui, index, rev1, rev2):
1270 """find the ancestor revision of two revisions in a given index"""
1270 """find the ancestor revision of two revisions in a given index"""
1271 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "")
1271 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1272 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1272 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1273 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1273 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1274
1274
1275 def debugcomplete(ui, cmd='', **opts):
1275 def debugcomplete(ui, cmd='', **opts):
1276 """returns the completion list associated with the given command"""
1276 """returns the completion list associated with the given command"""
1277
1277
1278 if opts['options']:
1278 if opts['options']:
1279 options = []
1279 options = []
1280 otables = [globalopts]
1280 otables = [globalopts]
1281 if cmd:
1281 if cmd:
1282 aliases, entry = find(cmd)
1282 aliases, entry = find(cmd)
1283 otables.append(entry[1])
1283 otables.append(entry[1])
1284 for t in otables:
1284 for t in otables:
1285 for o in t:
1285 for o in t:
1286 if o[0]:
1286 if o[0]:
1287 options.append('-%s' % o[0])
1287 options.append('-%s' % o[0])
1288 options.append('--%s' % o[1])
1288 options.append('--%s' % o[1])
1289 ui.write("%s\n" % "\n".join(options))
1289 ui.write("%s\n" % "\n".join(options))
1290 return
1290 return
1291
1291
1292 clist = findpossible(cmd).keys()
1292 clist = findpossible(cmd).keys()
1293 clist.sort()
1293 clist.sort()
1294 ui.write("%s\n" % "\n".join(clist))
1294 ui.write("%s\n" % "\n".join(clist))
1295
1295
1296 def debugrebuildstate(ui, repo, rev=None):
1296 def debugrebuildstate(ui, repo, rev=None):
1297 """rebuild the dirstate as it would look like for the given revision"""
1297 """rebuild the dirstate as it would look like for the given revision"""
1298 if not rev:
1298 if not rev:
1299 rev = repo.changelog.tip()
1299 rev = repo.changelog.tip()
1300 else:
1300 else:
1301 rev = repo.lookup(rev)
1301 rev = repo.lookup(rev)
1302 change = repo.changelog.read(rev)
1302 change = repo.changelog.read(rev)
1303 n = change[0]
1303 n = change[0]
1304 files = repo.manifest.readflags(n)
1304 files = repo.manifest.readflags(n)
1305 wlock = repo.wlock()
1305 wlock = repo.wlock()
1306 repo.dirstate.rebuild(rev, files.iteritems())
1306 repo.dirstate.rebuild(rev, files.iteritems())
1307
1307
1308 def debugcheckstate(ui, repo):
1308 def debugcheckstate(ui, repo):
1309 """validate the correctness of the current dirstate"""
1309 """validate the correctness of the current dirstate"""
1310 parent1, parent2 = repo.dirstate.parents()
1310 parent1, parent2 = repo.dirstate.parents()
1311 repo.dirstate.read()
1311 repo.dirstate.read()
1312 dc = repo.dirstate.map
1312 dc = repo.dirstate.map
1313 keys = dc.keys()
1313 keys = dc.keys()
1314 keys.sort()
1314 keys.sort()
1315 m1n = repo.changelog.read(parent1)[0]
1315 m1n = repo.changelog.read(parent1)[0]
1316 m2n = repo.changelog.read(parent2)[0]
1316 m2n = repo.changelog.read(parent2)[0]
1317 m1 = repo.manifest.read(m1n)
1317 m1 = repo.manifest.read(m1n)
1318 m2 = repo.manifest.read(m2n)
1318 m2 = repo.manifest.read(m2n)
1319 errors = 0
1319 errors = 0
1320 for f in dc:
1320 for f in dc:
1321 state = repo.dirstate.state(f)
1321 state = repo.dirstate.state(f)
1322 if state in "nr" and f not in m1:
1322 if state in "nr" and f not in m1:
1323 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1323 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1324 errors += 1
1324 errors += 1
1325 if state in "a" and f in m1:
1325 if state in "a" and f in m1:
1326 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1326 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1327 errors += 1
1327 errors += 1
1328 if state in "m" and f not in m1 and f not in m2:
1328 if state in "m" and f not in m1 and f not in m2:
1329 ui.warn(_("%s in state %s, but not in either manifest\n") %
1329 ui.warn(_("%s in state %s, but not in either manifest\n") %
1330 (f, state))
1330 (f, state))
1331 errors += 1
1331 errors += 1
1332 for f in m1:
1332 for f in m1:
1333 state = repo.dirstate.state(f)
1333 state = repo.dirstate.state(f)
1334 if state not in "nrm":
1334 if state not in "nrm":
1335 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1335 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1336 errors += 1
1336 errors += 1
1337 if errors:
1337 if errors:
1338 error = _(".hg/dirstate inconsistent with current parent's manifest")
1338 error = _(".hg/dirstate inconsistent with current parent's manifest")
1339 raise util.Abort(error)
1339 raise util.Abort(error)
1340
1340
1341 def debugconfig(ui, repo):
1341 def debugconfig(ui, repo):
1342 """show combined config settings from all hgrc files"""
1342 """show combined config settings from all hgrc files"""
1343 for section, name, value in ui.walkconfig():
1343 for section, name, value in ui.walkconfig():
1344 ui.write('%s.%s=%s\n' % (section, name, value))
1344 ui.write('%s.%s=%s\n' % (section, name, value))
1345
1345
1346 def debugsetparents(ui, repo, rev1, rev2=None):
1346 def debugsetparents(ui, repo, rev1, rev2=None):
1347 """manually set the parents of the current working directory
1347 """manually set the parents of the current working directory
1348
1348
1349 This is useful for writing repository conversion tools, but should
1349 This is useful for writing repository conversion tools, but should
1350 be used with care.
1350 be used with care.
1351 """
1351 """
1352
1352
1353 if not rev2:
1353 if not rev2:
1354 rev2 = hex(nullid)
1354 rev2 = hex(nullid)
1355
1355
1356 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1356 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1357
1357
1358 def debugstate(ui, repo):
1358 def debugstate(ui, repo):
1359 """show the contents of the current dirstate"""
1359 """show the contents of the current dirstate"""
1360 repo.dirstate.read()
1360 repo.dirstate.read()
1361 dc = repo.dirstate.map
1361 dc = repo.dirstate.map
1362 keys = dc.keys()
1362 keys = dc.keys()
1363 keys.sort()
1363 keys.sort()
1364 for file_ in keys:
1364 for file_ in keys:
1365 ui.write("%c %3o %10d %s %s\n"
1365 ui.write("%c %3o %10d %s %s\n"
1366 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1366 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1367 time.strftime("%x %X",
1367 time.strftime("%x %X",
1368 time.localtime(dc[file_][3])), file_))
1368 time.localtime(dc[file_][3])), file_))
1369 for f in repo.dirstate.copies:
1369 for f in repo.dirstate.copies:
1370 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1370 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1371
1371
1372 def debugdata(ui, file_, rev):
1372 def debugdata(ui, file_, rev):
1373 """dump the contents of an data file revision"""
1373 """dump the contents of an data file revision"""
1374 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1374 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1375 file_[:-2] + ".i", file_)
1375 file_[:-2] + ".i", file_, 0)
1376 try:
1376 try:
1377 ui.write(r.revision(r.lookup(rev)))
1377 ui.write(r.revision(r.lookup(rev)))
1378 except KeyError:
1378 except KeyError:
1379 raise util.Abort(_('invalid revision identifier %s'), rev)
1379 raise util.Abort(_('invalid revision identifier %s'), rev)
1380
1380
1381 def debugindex(ui, file_):
1381 def debugindex(ui, file_):
1382 """dump the contents of an index file"""
1382 """dump the contents of an index file"""
1383 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1383 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1384 ui.write(" rev offset length base linkrev" +
1384 ui.write(" rev offset length base linkrev" +
1385 " nodeid p1 p2\n")
1385 " nodeid p1 p2\n")
1386 for i in range(r.count()):
1386 for i in range(r.count()):
1387 e = r.index[i]
1387 node = r.node(i)
1388 pp = r.parents(node)
1388 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1389 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1389 i, e[0], e[1], e[2], e[3],
1390 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1390 short(e[6]), short(e[4]), short(e[5])))
1391 short(node), short(pp[0]), short(pp[1])))
1391
1392
1392 def debugindexdot(ui, file_):
1393 def debugindexdot(ui, file_):
1393 """dump an index DAG as a .dot file"""
1394 """dump an index DAG as a .dot file"""
1394 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1395 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1395 ui.write("digraph G {\n")
1396 ui.write("digraph G {\n")
1396 for i in range(r.count()):
1397 for i in range(r.count()):
1397 e = r.index[i]
1398 e = r.index[i]
1398 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1399 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1399 if e[5] != nullid:
1400 if e[5] != nullid:
1400 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1401 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1401 ui.write("}\n")
1402 ui.write("}\n")
1402
1403
1403 def debugrename(ui, repo, file, rev=None):
1404 def debugrename(ui, repo, file, rev=None):
1404 """dump rename information"""
1405 """dump rename information"""
1405 r = repo.file(relpath(repo, [file])[0])
1406 r = repo.file(relpath(repo, [file])[0])
1406 if rev:
1407 if rev:
1407 try:
1408 try:
1408 # assume all revision numbers are for changesets
1409 # assume all revision numbers are for changesets
1409 n = repo.lookup(rev)
1410 n = repo.lookup(rev)
1410 change = repo.changelog.read(n)
1411 change = repo.changelog.read(n)
1411 m = repo.manifest.read(change[0])
1412 m = repo.manifest.read(change[0])
1412 n = m[relpath(repo, [file])[0]]
1413 n = m[relpath(repo, [file])[0]]
1413 except (hg.RepoError, KeyError):
1414 except (hg.RepoError, KeyError):
1414 n = r.lookup(rev)
1415 n = r.lookup(rev)
1415 else:
1416 else:
1416 n = r.tip()
1417 n = r.tip()
1417 m = r.renamed(n)
1418 m = r.renamed(n)
1418 if m:
1419 if m:
1419 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1420 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1420 else:
1421 else:
1421 ui.write(_("not renamed\n"))
1422 ui.write(_("not renamed\n"))
1422
1423
1423 def debugwalk(ui, repo, *pats, **opts):
1424 def debugwalk(ui, repo, *pats, **opts):
1424 """show how files match on given patterns"""
1425 """show how files match on given patterns"""
1425 items = list(walk(repo, pats, opts))
1426 items = list(walk(repo, pats, opts))
1426 if not items:
1427 if not items:
1427 return
1428 return
1428 fmt = '%%s %%-%ds %%-%ds %%s' % (
1429 fmt = '%%s %%-%ds %%-%ds %%s' % (
1429 max([len(abs) for (src, abs, rel, exact) in items]),
1430 max([len(abs) for (src, abs, rel, exact) in items]),
1430 max([len(rel) for (src, abs, rel, exact) in items]))
1431 max([len(rel) for (src, abs, rel, exact) in items]))
1431 for src, abs, rel, exact in items:
1432 for src, abs, rel, exact in items:
1432 line = fmt % (src, abs, rel, exact and 'exact' or '')
1433 line = fmt % (src, abs, rel, exact and 'exact' or '')
1433 ui.write("%s\n" % line.rstrip())
1434 ui.write("%s\n" % line.rstrip())
1434
1435
1435 def diff(ui, repo, *pats, **opts):
1436 def diff(ui, repo, *pats, **opts):
1436 """diff repository (or selected files)
1437 """diff repository (or selected files)
1437
1438
1438 Show differences between revisions for the specified files.
1439 Show differences between revisions for the specified files.
1439
1440
1440 Differences between files are shown using the unified diff format.
1441 Differences between files are shown using the unified diff format.
1441
1442
1442 When two revision arguments are given, then changes are shown
1443 When two revision arguments are given, then changes are shown
1443 between those revisions. If only one revision is specified then
1444 between those revisions. If only one revision is specified then
1444 that revision is compared to the working directory, and, when no
1445 that revision is compared to the working directory, and, when no
1445 revisions are specified, the working directory files are compared
1446 revisions are specified, the working directory files are compared
1446 to its parent.
1447 to its parent.
1447
1448
1448 Without the -a option, diff will avoid generating diffs of files
1449 Without the -a option, diff will avoid generating diffs of files
1449 it detects as binary. With -a, diff will generate a diff anyway,
1450 it detects as binary. With -a, diff will generate a diff anyway,
1450 probably with undesirable results.
1451 probably with undesirable results.
1451 """
1452 """
1452 node1, node2 = None, None
1453 node1, node2 = None, None
1453 revs = [repo.lookup(x) for x in opts['rev']]
1454 revs = [repo.lookup(x) for x in opts['rev']]
1454
1455
1455 if len(revs) > 0:
1456 if len(revs) > 0:
1456 node1 = revs[0]
1457 node1 = revs[0]
1457 if len(revs) > 1:
1458 if len(revs) > 1:
1458 node2 = revs[1]
1459 node2 = revs[1]
1459 if len(revs) > 2:
1460 if len(revs) > 2:
1460 raise util.Abort(_("too many revisions to diff"))
1461 raise util.Abort(_("too many revisions to diff"))
1461
1462
1462 fns, matchfn, anypats = matchpats(repo, pats, opts)
1463 fns, matchfn, anypats = matchpats(repo, pats, opts)
1463
1464
1464 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1465 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1465 text=opts['text'], opts=opts)
1466 text=opts['text'], opts=opts)
1466
1467
1467 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1468 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1468 node = repo.lookup(changeset)
1469 node = repo.lookup(changeset)
1469 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1470 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1470 if opts['switch_parent']:
1471 if opts['switch_parent']:
1471 parents.reverse()
1472 parents.reverse()
1472 prev = (parents and parents[0]) or nullid
1473 prev = (parents and parents[0]) or nullid
1473 change = repo.changelog.read(node)
1474 change = repo.changelog.read(node)
1474
1475
1475 fp = make_file(repo, repo.changelog, opts['output'],
1476 fp = make_file(repo, repo.changelog, opts['output'],
1476 node=node, total=total, seqno=seqno,
1477 node=node, total=total, seqno=seqno,
1477 revwidth=revwidth)
1478 revwidth=revwidth)
1478 if fp != sys.stdout:
1479 if fp != sys.stdout:
1479 ui.note("%s\n" % fp.name)
1480 ui.note("%s\n" % fp.name)
1480
1481
1481 fp.write("# HG changeset patch\n")
1482 fp.write("# HG changeset patch\n")
1482 fp.write("# User %s\n" % change[1])
1483 fp.write("# User %s\n" % change[1])
1483 fp.write("# Node ID %s\n" % hex(node))
1484 fp.write("# Node ID %s\n" % hex(node))
1484 fp.write("# Parent %s\n" % hex(prev))
1485 fp.write("# Parent %s\n" % hex(prev))
1485 if len(parents) > 1:
1486 if len(parents) > 1:
1486 fp.write("# Parent %s\n" % hex(parents[1]))
1487 fp.write("# Parent %s\n" % hex(parents[1]))
1487 fp.write(change[4].rstrip())
1488 fp.write(change[4].rstrip())
1488 fp.write("\n\n")
1489 fp.write("\n\n")
1489
1490
1490 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1491 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1491 if fp != sys.stdout:
1492 if fp != sys.stdout:
1492 fp.close()
1493 fp.close()
1493
1494
1494 def export(ui, repo, *changesets, **opts):
1495 def export(ui, repo, *changesets, **opts):
1495 """dump the header and diffs for one or more changesets
1496 """dump the header and diffs for one or more changesets
1496
1497
1497 Print the changeset header and diffs for one or more revisions.
1498 Print the changeset header and diffs for one or more revisions.
1498
1499
1499 The information shown in the changeset header is: author,
1500 The information shown in the changeset header is: author,
1500 changeset hash, parent and commit comment.
1501 changeset hash, parent and commit comment.
1501
1502
1502 Output may be to a file, in which case the name of the file is
1503 Output may be to a file, in which case the name of the file is
1503 given using a format string. The formatting rules are as follows:
1504 given using a format string. The formatting rules are as follows:
1504
1505
1505 %% literal "%" character
1506 %% literal "%" character
1506 %H changeset hash (40 bytes of hexadecimal)
1507 %H changeset hash (40 bytes of hexadecimal)
1507 %N number of patches being generated
1508 %N number of patches being generated
1508 %R changeset revision number
1509 %R changeset revision number
1509 %b basename of the exporting repository
1510 %b basename of the exporting repository
1510 %h short-form changeset hash (12 bytes of hexadecimal)
1511 %h short-form changeset hash (12 bytes of hexadecimal)
1511 %n zero-padded sequence number, starting at 1
1512 %n zero-padded sequence number, starting at 1
1512 %r zero-padded changeset revision number
1513 %r zero-padded changeset revision number
1513
1514
1514 Without the -a option, export will avoid generating diffs of files
1515 Without the -a option, export will avoid generating diffs of files
1515 it detects as binary. With -a, export will generate a diff anyway,
1516 it detects as binary. With -a, export will generate a diff anyway,
1516 probably with undesirable results.
1517 probably with undesirable results.
1517
1518
1518 With the --switch-parent option, the diff will be against the second
1519 With the --switch-parent option, the diff will be against the second
1519 parent. It can be useful to review a merge.
1520 parent. It can be useful to review a merge.
1520 """
1521 """
1521 if not changesets:
1522 if not changesets:
1522 raise util.Abort(_("export requires at least one changeset"))
1523 raise util.Abort(_("export requires at least one changeset"))
1523 seqno = 0
1524 seqno = 0
1524 revs = list(revrange(ui, repo, changesets))
1525 revs = list(revrange(ui, repo, changesets))
1525 total = len(revs)
1526 total = len(revs)
1526 revwidth = max(map(len, revs))
1527 revwidth = max(map(len, revs))
1527 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1528 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1528 ui.note(msg)
1529 ui.note(msg)
1529 for cset in revs:
1530 for cset in revs:
1530 seqno += 1
1531 seqno += 1
1531 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1532 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1532
1533
1533 def forget(ui, repo, *pats, **opts):
1534 def forget(ui, repo, *pats, **opts):
1534 """don't add the specified files on the next commit
1535 """don't add the specified files on the next commit
1535
1536
1536 Undo an 'hg add' scheduled for the next commit.
1537 Undo an 'hg add' scheduled for the next commit.
1537 """
1538 """
1538 forget = []
1539 forget = []
1539 for src, abs, rel, exact in walk(repo, pats, opts):
1540 for src, abs, rel, exact in walk(repo, pats, opts):
1540 if repo.dirstate.state(abs) == 'a':
1541 if repo.dirstate.state(abs) == 'a':
1541 forget.append(abs)
1542 forget.append(abs)
1542 if ui.verbose or not exact:
1543 if ui.verbose or not exact:
1543 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1544 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1544 repo.forget(forget)
1545 repo.forget(forget)
1545
1546
1546 def grep(ui, repo, pattern, *pats, **opts):
1547 def grep(ui, repo, pattern, *pats, **opts):
1547 """search for a pattern in specified files and revisions
1548 """search for a pattern in specified files and revisions
1548
1549
1549 Search revisions of files for a regular expression.
1550 Search revisions of files for a regular expression.
1550
1551
1551 This command behaves differently than Unix grep. It only accepts
1552 This command behaves differently than Unix grep. It only accepts
1552 Python/Perl regexps. It searches repository history, not the
1553 Python/Perl regexps. It searches repository history, not the
1553 working directory. It always prints the revision number in which
1554 working directory. It always prints the revision number in which
1554 a match appears.
1555 a match appears.
1555
1556
1556 By default, grep only prints output for the first revision of a
1557 By default, grep only prints output for the first revision of a
1557 file in which it finds a match. To get it to print every revision
1558 file in which it finds a match. To get it to print every revision
1558 that contains a change in match status ("-" for a match that
1559 that contains a change in match status ("-" for a match that
1559 becomes a non-match, or "+" for a non-match that becomes a match),
1560 becomes a non-match, or "+" for a non-match that becomes a match),
1560 use the --all flag.
1561 use the --all flag.
1561 """
1562 """
1562 reflags = 0
1563 reflags = 0
1563 if opts['ignore_case']:
1564 if opts['ignore_case']:
1564 reflags |= re.I
1565 reflags |= re.I
1565 regexp = re.compile(pattern, reflags)
1566 regexp = re.compile(pattern, reflags)
1566 sep, eol = ':', '\n'
1567 sep, eol = ':', '\n'
1567 if opts['print0']:
1568 if opts['print0']:
1568 sep = eol = '\0'
1569 sep = eol = '\0'
1569
1570
1570 fcache = {}
1571 fcache = {}
1571 def getfile(fn):
1572 def getfile(fn):
1572 if fn not in fcache:
1573 if fn not in fcache:
1573 fcache[fn] = repo.file(fn)
1574 fcache[fn] = repo.file(fn)
1574 return fcache[fn]
1575 return fcache[fn]
1575
1576
1576 def matchlines(body):
1577 def matchlines(body):
1577 begin = 0
1578 begin = 0
1578 linenum = 0
1579 linenum = 0
1579 while True:
1580 while True:
1580 match = regexp.search(body, begin)
1581 match = regexp.search(body, begin)
1581 if not match:
1582 if not match:
1582 break
1583 break
1583 mstart, mend = match.span()
1584 mstart, mend = match.span()
1584 linenum += body.count('\n', begin, mstart) + 1
1585 linenum += body.count('\n', begin, mstart) + 1
1585 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1586 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1586 lend = body.find('\n', mend)
1587 lend = body.find('\n', mend)
1587 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1588 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1588 begin = lend + 1
1589 begin = lend + 1
1589
1590
1590 class linestate(object):
1591 class linestate(object):
1591 def __init__(self, line, linenum, colstart, colend):
1592 def __init__(self, line, linenum, colstart, colend):
1592 self.line = line
1593 self.line = line
1593 self.linenum = linenum
1594 self.linenum = linenum
1594 self.colstart = colstart
1595 self.colstart = colstart
1595 self.colend = colend
1596 self.colend = colend
1596 def __eq__(self, other):
1597 def __eq__(self, other):
1597 return self.line == other.line
1598 return self.line == other.line
1598 def __hash__(self):
1599 def __hash__(self):
1599 return hash(self.line)
1600 return hash(self.line)
1600
1601
1601 matches = {}
1602 matches = {}
1602 def grepbody(fn, rev, body):
1603 def grepbody(fn, rev, body):
1603 matches[rev].setdefault(fn, {})
1604 matches[rev].setdefault(fn, {})
1604 m = matches[rev][fn]
1605 m = matches[rev][fn]
1605 for lnum, cstart, cend, line in matchlines(body):
1606 for lnum, cstart, cend, line in matchlines(body):
1606 s = linestate(line, lnum, cstart, cend)
1607 s = linestate(line, lnum, cstart, cend)
1607 m[s] = s
1608 m[s] = s
1608
1609
1609 # FIXME: prev isn't used, why ?
1610 # FIXME: prev isn't used, why ?
1610 prev = {}
1611 prev = {}
1611 ucache = {}
1612 ucache = {}
1612 def display(fn, rev, states, prevstates):
1613 def display(fn, rev, states, prevstates):
1613 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1614 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1614 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1615 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1615 counts = {'-': 0, '+': 0}
1616 counts = {'-': 0, '+': 0}
1616 filerevmatches = {}
1617 filerevmatches = {}
1617 for l in diff:
1618 for l in diff:
1618 if incrementing or not opts['all']:
1619 if incrementing or not opts['all']:
1619 change = ((l in prevstates) and '-') or '+'
1620 change = ((l in prevstates) and '-') or '+'
1620 r = rev
1621 r = rev
1621 else:
1622 else:
1622 change = ((l in states) and '-') or '+'
1623 change = ((l in states) and '-') or '+'
1623 r = prev[fn]
1624 r = prev[fn]
1624 cols = [fn, str(rev)]
1625 cols = [fn, str(rev)]
1625 if opts['line_number']:
1626 if opts['line_number']:
1626 cols.append(str(l.linenum))
1627 cols.append(str(l.linenum))
1627 if opts['all']:
1628 if opts['all']:
1628 cols.append(change)
1629 cols.append(change)
1629 if opts['user']:
1630 if opts['user']:
1630 cols.append(trimuser(ui, getchange(rev)[1], rev,
1631 cols.append(trimuser(ui, getchange(rev)[1], rev,
1631 ucache))
1632 ucache))
1632 if opts['files_with_matches']:
1633 if opts['files_with_matches']:
1633 c = (fn, rev)
1634 c = (fn, rev)
1634 if c in filerevmatches:
1635 if c in filerevmatches:
1635 continue
1636 continue
1636 filerevmatches[c] = 1
1637 filerevmatches[c] = 1
1637 else:
1638 else:
1638 cols.append(l.line)
1639 cols.append(l.line)
1639 ui.write(sep.join(cols), eol)
1640 ui.write(sep.join(cols), eol)
1640 counts[change] += 1
1641 counts[change] += 1
1641 return counts['+'], counts['-']
1642 return counts['+'], counts['-']
1642
1643
1643 fstate = {}
1644 fstate = {}
1644 skip = {}
1645 skip = {}
1645 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1646 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1646 count = 0
1647 count = 0
1647 incrementing = False
1648 incrementing = False
1648 for st, rev, fns in changeiter:
1649 for st, rev, fns in changeiter:
1649 if st == 'window':
1650 if st == 'window':
1650 incrementing = rev
1651 incrementing = rev
1651 matches.clear()
1652 matches.clear()
1652 elif st == 'add':
1653 elif st == 'add':
1653 change = repo.changelog.read(repo.lookup(str(rev)))
1654 change = repo.changelog.read(repo.lookup(str(rev)))
1654 mf = repo.manifest.read(change[0])
1655 mf = repo.manifest.read(change[0])
1655 matches[rev] = {}
1656 matches[rev] = {}
1656 for fn in fns:
1657 for fn in fns:
1657 if fn in skip:
1658 if fn in skip:
1658 continue
1659 continue
1659 fstate.setdefault(fn, {})
1660 fstate.setdefault(fn, {})
1660 try:
1661 try:
1661 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1662 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1662 except KeyError:
1663 except KeyError:
1663 pass
1664 pass
1664 elif st == 'iter':
1665 elif st == 'iter':
1665 states = matches[rev].items()
1666 states = matches[rev].items()
1666 states.sort()
1667 states.sort()
1667 for fn, m in states:
1668 for fn, m in states:
1668 if fn in skip:
1669 if fn in skip:
1669 continue
1670 continue
1670 if incrementing or not opts['all'] or fstate[fn]:
1671 if incrementing or not opts['all'] or fstate[fn]:
1671 pos, neg = display(fn, rev, m, fstate[fn])
1672 pos, neg = display(fn, rev, m, fstate[fn])
1672 count += pos + neg
1673 count += pos + neg
1673 if pos and not opts['all']:
1674 if pos and not opts['all']:
1674 skip[fn] = True
1675 skip[fn] = True
1675 fstate[fn] = m
1676 fstate[fn] = m
1676 prev[fn] = rev
1677 prev[fn] = rev
1677
1678
1678 if not incrementing:
1679 if not incrementing:
1679 fstate = fstate.items()
1680 fstate = fstate.items()
1680 fstate.sort()
1681 fstate.sort()
1681 for fn, state in fstate:
1682 for fn, state in fstate:
1682 if fn in skip:
1683 if fn in skip:
1683 continue
1684 continue
1684 display(fn, rev, {}, state)
1685 display(fn, rev, {}, state)
1685 return (count == 0 and 1) or 0
1686 return (count == 0 and 1) or 0
1686
1687
1687 def heads(ui, repo, **opts):
1688 def heads(ui, repo, **opts):
1688 """show current repository heads
1689 """show current repository heads
1689
1690
1690 Show all repository head changesets.
1691 Show all repository head changesets.
1691
1692
1692 Repository "heads" are changesets that don't have children
1693 Repository "heads" are changesets that don't have children
1693 changesets. They are where development generally takes place and
1694 changesets. They are where development generally takes place and
1694 are the usual targets for update and merge operations.
1695 are the usual targets for update and merge operations.
1695 """
1696 """
1696 if opts['rev']:
1697 if opts['rev']:
1697 heads = repo.heads(repo.lookup(opts['rev']))
1698 heads = repo.heads(repo.lookup(opts['rev']))
1698 else:
1699 else:
1699 heads = repo.heads()
1700 heads = repo.heads()
1700 br = None
1701 br = None
1701 if opts['branches']:
1702 if opts['branches']:
1702 br = repo.branchlookup(heads)
1703 br = repo.branchlookup(heads)
1703 displayer = show_changeset(ui, repo, opts)
1704 displayer = show_changeset(ui, repo, opts)
1704 for n in heads:
1705 for n in heads:
1705 displayer.show(changenode=n, brinfo=br)
1706 displayer.show(changenode=n, brinfo=br)
1706
1707
1707 def identify(ui, repo):
1708 def identify(ui, repo):
1708 """print information about the working copy
1709 """print information about the working copy
1709
1710
1710 Print a short summary of the current state of the repo.
1711 Print a short summary of the current state of the repo.
1711
1712
1712 This summary identifies the repository state using one or two parent
1713 This summary identifies the repository state using one or two parent
1713 hash identifiers, followed by a "+" if there are uncommitted changes
1714 hash identifiers, followed by a "+" if there are uncommitted changes
1714 in the working directory, followed by a list of tags for this revision.
1715 in the working directory, followed by a list of tags for this revision.
1715 """
1716 """
1716 parents = [p for p in repo.dirstate.parents() if p != nullid]
1717 parents = [p for p in repo.dirstate.parents() if p != nullid]
1717 if not parents:
1718 if not parents:
1718 ui.write(_("unknown\n"))
1719 ui.write(_("unknown\n"))
1719 return
1720 return
1720
1721
1721 hexfunc = ui.verbose and hex or short
1722 hexfunc = ui.verbose and hex or short
1722 modified, added, removed, deleted, unknown = repo.changes()
1723 modified, added, removed, deleted, unknown = repo.changes()
1723 output = ["%s%s" %
1724 output = ["%s%s" %
1724 ('+'.join([hexfunc(parent) for parent in parents]),
1725 ('+'.join([hexfunc(parent) for parent in parents]),
1725 (modified or added or removed or deleted) and "+" or "")]
1726 (modified or added or removed or deleted) and "+" or "")]
1726
1727
1727 if not ui.quiet:
1728 if not ui.quiet:
1728 # multiple tags for a single parent separated by '/'
1729 # multiple tags for a single parent separated by '/'
1729 parenttags = ['/'.join(tags)
1730 parenttags = ['/'.join(tags)
1730 for tags in map(repo.nodetags, parents) if tags]
1731 for tags in map(repo.nodetags, parents) if tags]
1731 # tags for multiple parents separated by ' + '
1732 # tags for multiple parents separated by ' + '
1732 if parenttags:
1733 if parenttags:
1733 output.append(' + '.join(parenttags))
1734 output.append(' + '.join(parenttags))
1734
1735
1735 ui.write("%s\n" % ' '.join(output))
1736 ui.write("%s\n" % ' '.join(output))
1736
1737
1737 def import_(ui, repo, patch1, *patches, **opts):
1738 def import_(ui, repo, patch1, *patches, **opts):
1738 """import an ordered set of patches
1739 """import an ordered set of patches
1739
1740
1740 Import a list of patches and commit them individually.
1741 Import a list of patches and commit them individually.
1741
1742
1742 If there are outstanding changes in the working directory, import
1743 If there are outstanding changes in the working directory, import
1743 will abort unless given the -f flag.
1744 will abort unless given the -f flag.
1744
1745
1745 If a patch looks like a mail message (its first line starts with
1746 If a patch looks like a mail message (its first line starts with
1746 "From " or looks like an RFC822 header), it will not be applied
1747 "From " or looks like an RFC822 header), it will not be applied
1747 unless the -f option is used. The importer neither parses nor
1748 unless the -f option is used. The importer neither parses nor
1748 discards mail headers, so use -f only to override the "mailness"
1749 discards mail headers, so use -f only to override the "mailness"
1749 safety check, not to import a real mail message.
1750 safety check, not to import a real mail message.
1750 """
1751 """
1751 patches = (patch1,) + patches
1752 patches = (patch1,) + patches
1752
1753
1753 if not opts['force']:
1754 if not opts['force']:
1754 modified, added, removed, deleted, unknown = repo.changes()
1755 modified, added, removed, deleted, unknown = repo.changes()
1755 if modified or added or removed or deleted:
1756 if modified or added or removed or deleted:
1756 raise util.Abort(_("outstanding uncommitted changes"))
1757 raise util.Abort(_("outstanding uncommitted changes"))
1757
1758
1758 d = opts["base"]
1759 d = opts["base"]
1759 strip = opts["strip"]
1760 strip = opts["strip"]
1760
1761
1761 mailre = re.compile(r'(?:From |[\w-]+:)')
1762 mailre = re.compile(r'(?:From |[\w-]+:)')
1762
1763
1763 # attempt to detect the start of a patch
1764 # attempt to detect the start of a patch
1764 # (this heuristic is borrowed from quilt)
1765 # (this heuristic is borrowed from quilt)
1765 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1766 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1766 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1767 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1767 '(---|\*\*\*)[ \t])')
1768 '(---|\*\*\*)[ \t])')
1768
1769
1769 for patch in patches:
1770 for patch in patches:
1770 ui.status(_("applying %s\n") % patch)
1771 ui.status(_("applying %s\n") % patch)
1771 pf = os.path.join(d, patch)
1772 pf = os.path.join(d, patch)
1772
1773
1773 message = []
1774 message = []
1774 user = None
1775 user = None
1775 hgpatch = False
1776 hgpatch = False
1776 for line in file(pf):
1777 for line in file(pf):
1777 line = line.rstrip()
1778 line = line.rstrip()
1778 if (not message and not hgpatch and
1779 if (not message and not hgpatch and
1779 mailre.match(line) and not opts['force']):
1780 mailre.match(line) and not opts['force']):
1780 if len(line) > 35:
1781 if len(line) > 35:
1781 line = line[:32] + '...'
1782 line = line[:32] + '...'
1782 raise util.Abort(_('first line looks like a '
1783 raise util.Abort(_('first line looks like a '
1783 'mail header: ') + line)
1784 'mail header: ') + line)
1784 if diffre.match(line):
1785 if diffre.match(line):
1785 break
1786 break
1786 elif hgpatch:
1787 elif hgpatch:
1787 # parse values when importing the result of an hg export
1788 # parse values when importing the result of an hg export
1788 if line.startswith("# User "):
1789 if line.startswith("# User "):
1789 user = line[7:]
1790 user = line[7:]
1790 ui.debug(_('User: %s\n') % user)
1791 ui.debug(_('User: %s\n') % user)
1791 elif not line.startswith("# ") and line:
1792 elif not line.startswith("# ") and line:
1792 message.append(line)
1793 message.append(line)
1793 hgpatch = False
1794 hgpatch = False
1794 elif line == '# HG changeset patch':
1795 elif line == '# HG changeset patch':
1795 hgpatch = True
1796 hgpatch = True
1796 message = [] # We may have collected garbage
1797 message = [] # We may have collected garbage
1797 else:
1798 else:
1798 message.append(line)
1799 message.append(line)
1799
1800
1800 # make sure message isn't empty
1801 # make sure message isn't empty
1801 if not message:
1802 if not message:
1802 message = _("imported patch %s\n") % patch
1803 message = _("imported patch %s\n") % patch
1803 else:
1804 else:
1804 message = "%s\n" % '\n'.join(message)
1805 message = "%s\n" % '\n'.join(message)
1805 ui.debug(_('message:\n%s\n') % message)
1806 ui.debug(_('message:\n%s\n') % message)
1806
1807
1807 files = util.patch(strip, pf, ui)
1808 files = util.patch(strip, pf, ui)
1808
1809
1809 if len(files) > 0:
1810 if len(files) > 0:
1810 addremove(ui, repo, *files)
1811 addremove(ui, repo, *files)
1811 repo.commit(files, message, user)
1812 repo.commit(files, message, user)
1812
1813
1813 def incoming(ui, repo, source="default", **opts):
1814 def incoming(ui, repo, source="default", **opts):
1814 """show new changesets found in source
1815 """show new changesets found in source
1815
1816
1816 Show new changesets found in the specified path/URL or the default
1817 Show new changesets found in the specified path/URL or the default
1817 pull location. These are the changesets that would be pulled if a pull
1818 pull location. These are the changesets that would be pulled if a pull
1818 was requested.
1819 was requested.
1819
1820
1820 For remote repository, using --bundle avoids downloading the changesets
1821 For remote repository, using --bundle avoids downloading the changesets
1821 twice if the incoming is followed by a pull.
1822 twice if the incoming is followed by a pull.
1822
1823
1823 See pull for valid source format details.
1824 See pull for valid source format details.
1824 """
1825 """
1825 source = ui.expandpath(source)
1826 source = ui.expandpath(source)
1826 if opts['ssh']:
1827 if opts['ssh']:
1827 ui.setconfig("ui", "ssh", opts['ssh'])
1828 ui.setconfig("ui", "ssh", opts['ssh'])
1828 if opts['remotecmd']:
1829 if opts['remotecmd']:
1829 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1830 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1830
1831
1831 other = hg.repository(ui, source)
1832 other = hg.repository(ui, source)
1832 incoming = repo.findincoming(other, force=opts["force"])
1833 incoming = repo.findincoming(other, force=opts["force"])
1833 if not incoming:
1834 if not incoming:
1834 ui.status(_("no changes found\n"))
1835 ui.status(_("no changes found\n"))
1835 return
1836 return
1836
1837
1837 cleanup = None
1838 cleanup = None
1838 try:
1839 try:
1839 fname = opts["bundle"]
1840 fname = opts["bundle"]
1840 if fname or not other.local():
1841 if fname or not other.local():
1841 # create a bundle (uncompressed if other repo is not local)
1842 # create a bundle (uncompressed if other repo is not local)
1842 cg = other.changegroup(incoming, "incoming")
1843 cg = other.changegroup(incoming, "incoming")
1843 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1844 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1844 # keep written bundle?
1845 # keep written bundle?
1845 if opts["bundle"]:
1846 if opts["bundle"]:
1846 cleanup = None
1847 cleanup = None
1847 if not other.local():
1848 if not other.local():
1848 # use the created uncompressed bundlerepo
1849 # use the created uncompressed bundlerepo
1849 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1850 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1850
1851
1851 o = other.changelog.nodesbetween(incoming)[0]
1852 o = other.changelog.nodesbetween(incoming)[0]
1852 if opts['newest_first']:
1853 if opts['newest_first']:
1853 o.reverse()
1854 o.reverse()
1854 displayer = show_changeset(ui, other, opts)
1855 displayer = show_changeset(ui, other, opts)
1855 for n in o:
1856 for n in o:
1856 parents = [p for p in other.changelog.parents(n) if p != nullid]
1857 parents = [p for p in other.changelog.parents(n) if p != nullid]
1857 if opts['no_merges'] and len(parents) == 2:
1858 if opts['no_merges'] and len(parents) == 2:
1858 continue
1859 continue
1859 displayer.show(changenode=n)
1860 displayer.show(changenode=n)
1860 if opts['patch']:
1861 if opts['patch']:
1861 prev = (parents and parents[0]) or nullid
1862 prev = (parents and parents[0]) or nullid
1862 dodiff(ui, ui, other, prev, n)
1863 dodiff(ui, ui, other, prev, n)
1863 ui.write("\n")
1864 ui.write("\n")
1864 finally:
1865 finally:
1865 if hasattr(other, 'close'):
1866 if hasattr(other, 'close'):
1866 other.close()
1867 other.close()
1867 if cleanup:
1868 if cleanup:
1868 os.unlink(cleanup)
1869 os.unlink(cleanup)
1869
1870
1870 def init(ui, dest="."):
1871 def init(ui, dest="."):
1871 """create a new repository in the given directory
1872 """create a new repository in the given directory
1872
1873
1873 Initialize a new repository in the given directory. If the given
1874 Initialize a new repository in the given directory. If the given
1874 directory does not exist, it is created.
1875 directory does not exist, it is created.
1875
1876
1876 If no directory is given, the current directory is used.
1877 If no directory is given, the current directory is used.
1877 """
1878 """
1878 if not os.path.exists(dest):
1879 if not os.path.exists(dest):
1879 os.mkdir(dest)
1880 os.mkdir(dest)
1880 hg.repository(ui, dest, create=1)
1881 hg.repository(ui, dest, create=1)
1881
1882
1882 def locate(ui, repo, *pats, **opts):
1883 def locate(ui, repo, *pats, **opts):
1883 """locate files matching specific patterns
1884 """locate files matching specific patterns
1884
1885
1885 Print all files under Mercurial control whose names match the
1886 Print all files under Mercurial control whose names match the
1886 given patterns.
1887 given patterns.
1887
1888
1888 This command searches the current directory and its
1889 This command searches the current directory and its
1889 subdirectories. To search an entire repository, move to the root
1890 subdirectories. To search an entire repository, move to the root
1890 of the repository.
1891 of the repository.
1891
1892
1892 If no patterns are given to match, this command prints all file
1893 If no patterns are given to match, this command prints all file
1893 names.
1894 names.
1894
1895
1895 If you want to feed the output of this command into the "xargs"
1896 If you want to feed the output of this command into the "xargs"
1896 command, use the "-0" option to both this command and "xargs".
1897 command, use the "-0" option to both this command and "xargs".
1897 This will avoid the problem of "xargs" treating single filenames
1898 This will avoid the problem of "xargs" treating single filenames
1898 that contain white space as multiple filenames.
1899 that contain white space as multiple filenames.
1899 """
1900 """
1900 end = opts['print0'] and '\0' or '\n'
1901 end = opts['print0'] and '\0' or '\n'
1901 rev = opts['rev']
1902 rev = opts['rev']
1902 if rev:
1903 if rev:
1903 node = repo.lookup(rev)
1904 node = repo.lookup(rev)
1904 else:
1905 else:
1905 node = None
1906 node = None
1906
1907
1907 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1908 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1908 head='(?:.*/|)'):
1909 head='(?:.*/|)'):
1909 if not node and repo.dirstate.state(abs) == '?':
1910 if not node and repo.dirstate.state(abs) == '?':
1910 continue
1911 continue
1911 if opts['fullpath']:
1912 if opts['fullpath']:
1912 ui.write(os.path.join(repo.root, abs), end)
1913 ui.write(os.path.join(repo.root, abs), end)
1913 else:
1914 else:
1914 ui.write(((pats and rel) or abs), end)
1915 ui.write(((pats and rel) or abs), end)
1915
1916
1916 def log(ui, repo, *pats, **opts):
1917 def log(ui, repo, *pats, **opts):
1917 """show revision history of entire repository or files
1918 """show revision history of entire repository or files
1918
1919
1919 Print the revision history of the specified files or the entire project.
1920 Print the revision history of the specified files or the entire project.
1920
1921
1921 By default this command outputs: changeset id and hash, tags,
1922 By default this command outputs: changeset id and hash, tags,
1922 non-trivial parents, user, date and time, and a summary for each
1923 non-trivial parents, user, date and time, and a summary for each
1923 commit. When the -v/--verbose switch is used, the list of changed
1924 commit. When the -v/--verbose switch is used, the list of changed
1924 files and full commit message is shown.
1925 files and full commit message is shown.
1925 """
1926 """
1926 class dui(object):
1927 class dui(object):
1927 # Implement and delegate some ui protocol. Save hunks of
1928 # Implement and delegate some ui protocol. Save hunks of
1928 # output for later display in the desired order.
1929 # output for later display in the desired order.
1929 def __init__(self, ui):
1930 def __init__(self, ui):
1930 self.ui = ui
1931 self.ui = ui
1931 self.hunk = {}
1932 self.hunk = {}
1932 self.header = {}
1933 self.header = {}
1933 def bump(self, rev):
1934 def bump(self, rev):
1934 self.rev = rev
1935 self.rev = rev
1935 self.hunk[rev] = []
1936 self.hunk[rev] = []
1936 self.header[rev] = []
1937 self.header[rev] = []
1937 def note(self, *args):
1938 def note(self, *args):
1938 if self.verbose:
1939 if self.verbose:
1939 self.write(*args)
1940 self.write(*args)
1940 def status(self, *args):
1941 def status(self, *args):
1941 if not self.quiet:
1942 if not self.quiet:
1942 self.write(*args)
1943 self.write(*args)
1943 def write(self, *args):
1944 def write(self, *args):
1944 self.hunk[self.rev].append(args)
1945 self.hunk[self.rev].append(args)
1945 def write_header(self, *args):
1946 def write_header(self, *args):
1946 self.header[self.rev].append(args)
1947 self.header[self.rev].append(args)
1947 def debug(self, *args):
1948 def debug(self, *args):
1948 if self.debugflag:
1949 if self.debugflag:
1949 self.write(*args)
1950 self.write(*args)
1950 def __getattr__(self, key):
1951 def __getattr__(self, key):
1951 return getattr(self.ui, key)
1952 return getattr(self.ui, key)
1952
1953
1953 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1954 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1954
1955
1955 if opts['limit']:
1956 if opts['limit']:
1956 try:
1957 try:
1957 limit = int(opts['limit'])
1958 limit = int(opts['limit'])
1958 except ValueError:
1959 except ValueError:
1959 raise util.Abort(_('limit must be a positive integer'))
1960 raise util.Abort(_('limit must be a positive integer'))
1960 if limit <= 0: raise util.Abort(_('limit must be positive'))
1961 if limit <= 0: raise util.Abort(_('limit must be positive'))
1961 else:
1962 else:
1962 limit = sys.maxint
1963 limit = sys.maxint
1963 count = 0
1964 count = 0
1964
1965
1965 displayer = show_changeset(ui, repo, opts)
1966 displayer = show_changeset(ui, repo, opts)
1966 for st, rev, fns in changeiter:
1967 for st, rev, fns in changeiter:
1967 if st == 'window':
1968 if st == 'window':
1968 du = dui(ui)
1969 du = dui(ui)
1969 displayer.ui = du
1970 displayer.ui = du
1970 elif st == 'add':
1971 elif st == 'add':
1971 du.bump(rev)
1972 du.bump(rev)
1972 changenode = repo.changelog.node(rev)
1973 changenode = repo.changelog.node(rev)
1973 parents = [p for p in repo.changelog.parents(changenode)
1974 parents = [p for p in repo.changelog.parents(changenode)
1974 if p != nullid]
1975 if p != nullid]
1975 if opts['no_merges'] and len(parents) == 2:
1976 if opts['no_merges'] and len(parents) == 2:
1976 continue
1977 continue
1977 if opts['only_merges'] and len(parents) != 2:
1978 if opts['only_merges'] and len(parents) != 2:
1978 continue
1979 continue
1979
1980
1980 if opts['keyword']:
1981 if opts['keyword']:
1981 changes = getchange(rev)
1982 changes = getchange(rev)
1982 miss = 0
1983 miss = 0
1983 for k in [kw.lower() for kw in opts['keyword']]:
1984 for k in [kw.lower() for kw in opts['keyword']]:
1984 if not (k in changes[1].lower() or
1985 if not (k in changes[1].lower() or
1985 k in changes[4].lower() or
1986 k in changes[4].lower() or
1986 k in " ".join(changes[3][:20]).lower()):
1987 k in " ".join(changes[3][:20]).lower()):
1987 miss = 1
1988 miss = 1
1988 break
1989 break
1989 if miss:
1990 if miss:
1990 continue
1991 continue
1991
1992
1992 br = None
1993 br = None
1993 if opts['branches']:
1994 if opts['branches']:
1994 br = repo.branchlookup([repo.changelog.node(rev)])
1995 br = repo.branchlookup([repo.changelog.node(rev)])
1995
1996
1996 displayer.show(rev, brinfo=br)
1997 displayer.show(rev, brinfo=br)
1997 if opts['patch']:
1998 if opts['patch']:
1998 prev = (parents and parents[0]) or nullid
1999 prev = (parents and parents[0]) or nullid
1999 dodiff(du, du, repo, prev, changenode, match=matchfn)
2000 dodiff(du, du, repo, prev, changenode, match=matchfn)
2000 du.write("\n\n")
2001 du.write("\n\n")
2001 elif st == 'iter':
2002 elif st == 'iter':
2002 if count == limit: break
2003 if count == limit: break
2003 if du.header[rev]:
2004 if du.header[rev]:
2004 for args in du.header[rev]:
2005 for args in du.header[rev]:
2005 ui.write_header(*args)
2006 ui.write_header(*args)
2006 if du.hunk[rev]:
2007 if du.hunk[rev]:
2007 count += 1
2008 count += 1
2008 for args in du.hunk[rev]:
2009 for args in du.hunk[rev]:
2009 ui.write(*args)
2010 ui.write(*args)
2010
2011
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the tip is used.
    """
    if rev:
        try:
            # assume all revision numbers are for changesets
            n = repo.changelog.read(repo.lookup(rev))[0]
        except hg.RepoError:
            # fall back to treating it as a manifest revision
            n = repo.manifest.lookup(rev)
    else:
        n = repo.manifest.tip()

    m = repo.manifest.read(n)
    mf = repo.manifest.readflags(n)
    names = m.keys()
    names.sort()

    # one line per file: hash, permission bits, path
    for f in names:
        ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2036
2037
def merge(ui, repo, node=None, **opts):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.
    """
    # merge is simply update with merging forced on
    return update(ui, repo, node=node, merge=True, **opts)
2046
2047
def outgoing(ui, repo, dest="default-push", **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest)
    # propagate command-line transport overrides into the config
    for key in ('ssh', 'remotecmd'):
        if opts[key]:
            ui.setconfig("ui", key, opts[key])

    other = hg.repository(ui, dest)
    roots = repo.findoutgoing(other, force=opts['force'])
    if not roots:
        ui.status(_("no changes found\n"))
        return

    nodes = repo.changelog.nodesbetween(roots)[0]
    if opts['newest_first']:
        nodes.reverse()

    displayer = show_changeset(ui, repo, opts)
    for node in nodes:
        ps = [p for p in repo.changelog.parents(node) if p != nullid]
        if opts['no_merges'] and len(ps) == 2:
            continue
        displayer.show(changenode=node)
        if opts['patch']:
            prev = (ps and ps[0]) or nullid
            dodiff(ui, ui, repo, prev, node)
            ui.write("\n")
2080
2081
def parents(ui, repo, rev=None, branches=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    if rev:
        nodes = repo.changelog.parents(repo.lookup(rev))
    else:
        nodes = repo.dirstate.parents()

    br = None
    if branches is not None:
        br = repo.branchlookup(nodes)

    displayer = show_changeset(ui, repo, opts)
    for node in nodes:
        # a null parent means "no parent" -- don't display it
        if node == nullid:
            continue
        displayer.show(changenode=node, brinfo=br)
2098
2099
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # no name given: list every configured path
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
        return

    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
2118
2119
def postincoming(ui, repo, modheads, optupdate):
    """Tell the user what to do after changesets arrived, updating if asked."""
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return update(ui, repo)
        ui.status(_("not updating, since new heads added\n"))
    # fell through: either no -u, or the update was refused above
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2131
2132
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port][/path]
      https://[user@]host[:port][/path]
      ssh://[user@]host[:port][/path]

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - /path is relative to the remote user's home directory by default.
      Use two slashes at the start of a path to specify an absolute path.
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/ssh_config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression off
        Host *
          Compression on
    Alternatively specify "ssh -C" as your ssh command in your hgrc or
    with the --ssh command line option.
    """
    source = ui.expandpath(source)
    ui.status(_('pulling from %s\n') % (source))

    # propagate command-line transport overrides into the config
    for key in ('ssh', 'remotecmd'):
        if opts[key]:
            ui.setconfig("ui", key, opts[key])

    other = hg.repository(ui, source)
    revs = None
    if opts['rev']:
        if not other.local():
            raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
        revs = [other.lookup(rev) for rev in opts['rev']]

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
2178
2179
def push(ui, repo, dest="default-push", **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port][/path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    dest = ui.expandpath(dest)
    # wrap the message in _() so it is translatable, matching pull()
    ui.status(_('pushing to %s\n') % (dest))

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, dest)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    # repo.push returns 0 on success; convert to a boolean exit status
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
2215
2216
2216 def rawcommit(ui, repo, *flist, **rc):
2217 def rawcommit(ui, repo, *flist, **rc):
2217 """raw commit interface (DEPRECATED)
2218 """raw commit interface (DEPRECATED)
2218
2219
2219 (DEPRECATED)
2220 (DEPRECATED)
2220 Lowlevel commit, for use in helper scripts.
2221 Lowlevel commit, for use in helper scripts.
2221
2222
2222 This command is not intended to be used by normal users, as it is
2223 This command is not intended to be used by normal users, as it is
2223 primarily useful for importing from other SCMs.
2224 primarily useful for importing from other SCMs.
2224
2225
2225 This command is now deprecated and will be removed in a future
2226 This command is now deprecated and will be removed in a future
2226 release, please use debugsetparents and commit instead.
2227 release, please use debugsetparents and commit instead.
2227 """
2228 """
2228
2229
2229 ui.warn(_("(the rawcommit command is deprecated)\n"))
2230 ui.warn(_("(the rawcommit command is deprecated)\n"))
2230
2231
2231 message = rc['message']
2232 message = rc['message']
2232 if not message and rc['logfile']:
2233 if not message and rc['logfile']:
2233 try:
2234 try:
2234 message = open(rc['logfile']).read()
2235 message = open(rc['logfile']).read()
2235 except IOError:
2236 except IOError:
2236 pass
2237 pass
2237 if not message and not rc['logfile']:
2238 if not message and not rc['logfile']:
2238 raise util.Abort(_("missing commit message"))
2239 raise util.Abort(_("missing commit message"))
2239
2240
2240 files = relpath(repo, list(flist))
2241 files = relpath(repo, list(flist))
2241 if rc['files']:
2242 if rc['files']:
2242 files += open(rc['files']).read().splitlines()
2243 files += open(rc['files']).read().splitlines()
2243
2244
2244 rc['parent'] = map(repo.lookup, rc['parent'])
2245 rc['parent'] = map(repo.lookup, rc['parent'])
2245
2246
2246 try:
2247 try:
2247 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2248 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2248 except ValueError, inst:
2249 except ValueError, inst:
2249 raise util.Abort(str(inst))
2250 raise util.Abort(str(inst))
2250
2251
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if not repo.recover():
        return False
    # recovery succeeded: verify the repository before reporting success
    return repo.verify()
2262
2263
def remove(ui, repo, pat, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it.
    """
    names = []

    def okaytoremove(abs, rel, exact):
        # a file may only be removed if it is unchanged and tracked
        modified, added, removed, deleted, unknown = repo.changes(files=[abs])
        reason = None
        if modified and not opts['force']:
            reason = _('is modified')
        elif added:
            reason = _('has been marked for add')
        elif unknown:
            reason = _('is not managed')
        if not reason:
            return True
        # only complain about files the user named explicitly
        if exact:
            ui.warn(_('not removing %s: file %s\n') % (rel, reason))

    for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
        if not okaytoremove(abs, rel, exact):
            continue
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    repo.remove(names, unlink=True)
2294
2295
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record rename files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    wlock = repo.wlock(0)
    # first record the copies, then schedule the sources for removal
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    removals = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        removals.append(abs)
    repo.remove(removals, True, wlock)
    return errs
2321
2322
def revert(ui, repo, *pats, **opts):
    """revert modified files or dirs back to their unmodified states

    In its default mode, it reverts any uncommitted modifications made
    to the named files or directories. This restores the contents of
    the affected files to an unmodified state.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, it reverts the given files or directories to
    their state as of an earlier revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the current working directory.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    parent = repo.dirstate.parents()[0]
    node = opts['rev'] and repo.lookup(opts['rev']) or parent
    mf = repo.manifest.read(repo.changelog.read(node)[0])

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.changes(match=names.has_key, wlock=wlock)
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    # (file list, status message) pairs filled in by handle() below
    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, True),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        in_mf = abs in mf
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                shutil.copyfile(rel, bakname)
                shutil.copymode(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if in_mf:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                # fix: interpolate outside _() so the untranslated format
                # string is the gettext lookup key (was _('... %s' % rel))
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                # same fix as above: keep the format string translatable
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if not in_mf:
                handle(remove, False)
            update[abs] = True

    repo.dirstate.forget(forget[0])
    r = repo.update(node, False, True, update.has_key, False, wlock=wlock)
    repo.dirstate.update(add[0], 'a')
    repo.dirstate.update(undelete[0], 'n')
    repo.dirstate.update(remove[0], 'r')
    return r
2439
2440
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2446
2447
2447 def serve(ui, repo, **opts):
2448 def serve(ui, repo, **opts):
2448 """export the repository via HTTP
2449 """export the repository via HTTP
2449
2450
2450 Start a local HTTP repository browser and pull server.
2451 Start a local HTTP repository browser and pull server.
2451
2452
2452 By default, the server logs accesses to stdout and errors to
2453 By default, the server logs accesses to stdout and errors to
2453 stderr. Use the "-A" and "-E" options to log to files.
2454 stderr. Use the "-A" and "-E" options to log to files.
2454 """
2455 """
2455
2456
2456 if opts["stdio"]:
2457 if opts["stdio"]:
2457 fin, fout = sys.stdin, sys.stdout
2458 fin, fout = sys.stdin, sys.stdout
2458 sys.stdout = sys.stderr
2459 sys.stdout = sys.stderr
2459
2460
2460 # Prevent insertion/deletion of CRs
2461 # Prevent insertion/deletion of CRs
2461 util.set_binary(fin)
2462 util.set_binary(fin)
2462 util.set_binary(fout)
2463 util.set_binary(fout)
2463
2464
2464 def getarg():
2465 def getarg():
2465 argline = fin.readline()[:-1]
2466 argline = fin.readline()[:-1]
2466 arg, l = argline.split()
2467 arg, l = argline.split()
2467 val = fin.read(int(l))
2468 val = fin.read(int(l))
2468 return arg, val
2469 return arg, val
2469 def respond(v):
2470 def respond(v):
2470 fout.write("%d\n" % len(v))
2471 fout.write("%d\n" % len(v))
2471 fout.write(v)
2472 fout.write(v)
2472 fout.flush()
2473 fout.flush()
2473
2474
2474 lock = None
2475 lock = None
2475
2476
2476 while 1:
2477 while 1:
2477 cmd = fin.readline()[:-1]
2478 cmd = fin.readline()[:-1]
2478 if cmd == '':
2479 if cmd == '':
2479 return
2480 return
2480 if cmd == "heads":
2481 if cmd == "heads":
2481 h = repo.heads()
2482 h = repo.heads()
2482 respond(" ".join(map(hex, h)) + "\n")
2483 respond(" ".join(map(hex, h)) + "\n")
2483 if cmd == "lock":
2484 if cmd == "lock":
2484 lock = repo.lock()
2485 lock = repo.lock()
2485 respond("")
2486 respond("")
2486 if cmd == "unlock":
2487 if cmd == "unlock":
2487 if lock:
2488 if lock:
2488 lock.release()
2489 lock.release()
2489 lock = None
2490 lock = None
2490 respond("")
2491 respond("")
2491 elif cmd == "branches":
2492 elif cmd == "branches":
2492 arg, nodes = getarg()
2493 arg, nodes = getarg()
2493 nodes = map(bin, nodes.split(" "))
2494 nodes = map(bin, nodes.split(" "))
2494 r = []
2495 r = []
2495 for b in repo.branches(nodes):
2496 for b in repo.branches(nodes):
2496 r.append(" ".join(map(hex, b)) + "\n")
2497 r.append(" ".join(map(hex, b)) + "\n")
2497 respond("".join(r))
2498 respond("".join(r))
2498 elif cmd == "between":
2499 elif cmd == "between":
2499 arg, pairs = getarg()
2500 arg, pairs = getarg()
2500 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2501 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2501 r = []
2502 r = []
2502 for b in repo.between(pairs):
2503 for b in repo.between(pairs):
2503 r.append(" ".join(map(hex, b)) + "\n")
2504 r.append(" ".join(map(hex, b)) + "\n")
2504 respond("".join(r))
2505 respond("".join(r))
2505 elif cmd == "changegroup":
2506 elif cmd == "changegroup":
2506 nodes = []
2507 nodes = []
2507 arg, roots = getarg()
2508 arg, roots = getarg()
2508 nodes = map(bin, roots.split(" "))
2509 nodes = map(bin, roots.split(" "))
2509
2510
2510 cg = repo.changegroup(nodes, 'serve')
2511 cg = repo.changegroup(nodes, 'serve')
2511 while 1:
2512 while 1:
2512 d = cg.read(4096)
2513 d = cg.read(4096)
2513 if not d:
2514 if not d:
2514 break
2515 break
2515 fout.write(d)
2516 fout.write(d)
2516
2517
2517 fout.flush()
2518 fout.flush()
2518
2519
2519 elif cmd == "addchangegroup":
2520 elif cmd == "addchangegroup":
2520 if not lock:
2521 if not lock:
2521 respond("not locked")
2522 respond("not locked")
2522 continue
2523 continue
2523 respond("")
2524 respond("")
2524
2525
2525 r = repo.addchangegroup(fin)
2526 r = repo.addchangegroup(fin)
2526 respond(str(r))
2527 respond(str(r))
2527
2528
2528 optlist = "name templates style address port ipv6 accesslog errorlog"
2529 optlist = "name templates style address port ipv6 accesslog errorlog"
2529 for o in optlist.split():
2530 for o in optlist.split():
2530 if opts[o]:
2531 if opts[o]:
2531 ui.setconfig("web", o, opts[o])
2532 ui.setconfig("web", o, opts[o])
2532
2533
2533 if opts['daemon'] and not opts['daemon_pipefds']:
2534 if opts['daemon'] and not opts['daemon_pipefds']:
2534 rfd, wfd = os.pipe()
2535 rfd, wfd = os.pipe()
2535 args = sys.argv[:]
2536 args = sys.argv[:]
2536 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2537 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2537 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2538 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2538 args[0], args)
2539 args[0], args)
2539 os.close(wfd)
2540 os.close(wfd)
2540 os.read(rfd, 1)
2541 os.read(rfd, 1)
2541 os._exit(0)
2542 os._exit(0)
2542
2543
2543 try:
2544 try:
2544 httpd = hgweb.create_server(repo)
2545 httpd = hgweb.create_server(repo)
2545 except socket.error, inst:
2546 except socket.error, inst:
2546 raise util.Abort(_('cannot start server: ') + inst.args[1])
2547 raise util.Abort(_('cannot start server: ') + inst.args[1])
2547
2548
2548 if ui.verbose:
2549 if ui.verbose:
2549 addr, port = httpd.socket.getsockname()
2550 addr, port = httpd.socket.getsockname()
2550 if addr == '0.0.0.0':
2551 if addr == '0.0.0.0':
2551 addr = socket.gethostname()
2552 addr = socket.gethostname()
2552 else:
2553 else:
2553 try:
2554 try:
2554 addr = socket.gethostbyaddr(addr)[0]
2555 addr = socket.gethostbyaddr(addr)[0]
2555 except socket.error:
2556 except socket.error:
2556 pass
2557 pass
2557 if port != 80:
2558 if port != 80:
2558 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2559 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2559 else:
2560 else:
2560 ui.status(_('listening at http://%s/\n') % addr)
2561 ui.status(_('listening at http://%s/\n') % addr)
2561
2562
2562 if opts['pid_file']:
2563 if opts['pid_file']:
2563 fp = open(opts['pid_file'], 'w')
2564 fp = open(opts['pid_file'], 'w')
2564 fp.write(str(os.getpid()))
2565 fp.write(str(os.getpid()))
2565 fp.close()
2566 fp.close()
2566
2567
2567 if opts['daemon_pipefds']:
2568 if opts['daemon_pipefds']:
2568 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2569 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2569 os.close(rfd)
2570 os.close(rfd)
2570 os.write(wfd, 'y')
2571 os.write(wfd, 'y')
2571 os.close(wfd)
2572 os.close(wfd)
2572 sys.stdout.flush()
2573 sys.stdout.flush()
2573 sys.stderr.flush()
2574 sys.stderr.flush()
2574 fd = os.open(util.nulldev, os.O_RDWR)
2575 fd = os.open(util.nulldev, os.O_RDWR)
2575 if fd != 0: os.dup2(fd, 0)
2576 if fd != 0: os.dup2(fd, 0)
2576 if fd != 1: os.dup2(fd, 1)
2577 if fd != 1: os.dup2(fd, 1)
2577 if fd != 2: os.dup2(fd, 2)
2578 if fd != 2: os.dup2(fd, 2)
2578 if fd not in (0, 1, 2): os.close(fd)
2579 if fd not in (0, 1, 2): os.close(fd)
2579
2580
2580 httpd.serve_forever()
2581 httpd.serve_forever()
2581
2582
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show changed files in the repository. If names are
    given, only files that match are shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
    """

    show_ignored = opts['ignored'] and True or False
    files, matchfn, anypats = matchpats(repo, pats, opts)
    # paths are printed relative to cwd only when patterns were given
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.changes(files=files, match=matchfn,
                              show_ignored=show_ignored)]

    changetypes = [('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored)]

    end = opts['print0'] and '\0' or '\n'

    # if any per-type option (-m/-a/...) was given, show only those
    # types; otherwise show them all
    for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % f)
2623
2624
2624 def tag(ui, repo, name, rev_=None, **opts):
2625 def tag(ui, repo, name, rev_=None, **opts):
2625 """add a tag for the current tip or a given revision
2626 """add a tag for the current tip or a given revision
2626
2627
2627 Name a particular revision using <name>.
2628 Name a particular revision using <name>.
2628
2629
2629 Tags are used to name particular revisions of the repository and are
2630 Tags are used to name particular revisions of the repository and are
2630 very useful to compare different revision, to go back to significant
2631 very useful to compare different revision, to go back to significant
2631 earlier versions or to mark branch points as releases, etc.
2632 earlier versions or to mark branch points as releases, etc.
2632
2633
2633 If no revision is given, the tip is used.
2634 If no revision is given, the tip is used.
2634
2635
2635 To facilitate version control, distribution, and merging of tags,
2636 To facilitate version control, distribution, and merging of tags,
2636 they are stored as a file named ".hgtags" which is managed
2637 they are stored as a file named ".hgtags" which is managed
2637 similarly to other project files and can be hand-edited if
2638 similarly to other project files and can be hand-edited if
2638 necessary. The file '.hg/localtags' is used for local tags (not
2639 necessary. The file '.hg/localtags' is used for local tags (not
2639 shared among repositories).
2640 shared among repositories).
2640 """
2641 """
2641 if name == "tip":
2642 if name == "tip":
2642 raise util.Abort(_("the name 'tip' is reserved"))
2643 raise util.Abort(_("the name 'tip' is reserved"))
2643 if rev_ is not None:
2644 if rev_ is not None:
2644 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2645 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2645 "please use 'hg tag [-r REV] NAME' instead\n"))
2646 "please use 'hg tag [-r REV] NAME' instead\n"))
2646 if opts['rev']:
2647 if opts['rev']:
2647 raise util.Abort(_("use only one form to specify the revision"))
2648 raise util.Abort(_("use only one form to specify the revision"))
2648 if opts['rev']:
2649 if opts['rev']:
2649 rev_ = opts['rev']
2650 rev_ = opts['rev']
2650 if rev_:
2651 if rev_:
2651 r = hex(repo.lookup(rev_))
2652 r = hex(repo.lookup(rev_))
2652 else:
2653 else:
2653 r = hex(repo.changelog.tip())
2654 r = hex(repo.changelog.tip())
2654
2655
2655 disallowed = (revrangesep, '\r', '\n')
2656 disallowed = (revrangesep, '\r', '\n')
2656 for c in disallowed:
2657 for c in disallowed:
2657 if name.find(c) >= 0:
2658 if name.find(c) >= 0:
2658 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2659 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2659
2660
2660 repo.hook('pretag', throw=True, node=r, tag=name,
2661 repo.hook('pretag', throw=True, node=r, tag=name,
2661 local=int(not not opts['local']))
2662 local=int(not not opts['local']))
2662
2663
2663 if opts['local']:
2664 if opts['local']:
2664 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2665 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2665 repo.hook('tag', node=r, tag=name, local=1)
2666 repo.hook('tag', node=r, tag=name, local=1)
2666 return
2667 return
2667
2668
2668 for x in repo.changes():
2669 for x in repo.changes():
2669 if ".hgtags" in x:
2670 if ".hgtags" in x:
2670 raise util.Abort(_("working copy of .hgtags is changed "
2671 raise util.Abort(_("working copy of .hgtags is changed "
2671 "(please commit .hgtags manually)"))
2672 "(please commit .hgtags manually)"))
2672
2673
2673 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2674 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2674 if repo.dirstate.state(".hgtags") == '?':
2675 if repo.dirstate.state(".hgtags") == '?':
2675 repo.add([".hgtags"])
2676 repo.add([".hgtags"])
2676
2677
2677 message = (opts['message'] or
2678 message = (opts['message'] or
2678 _("Added tag %s for changeset %s") % (name, r))
2679 _("Added tag %s for changeset %s") % (name, r))
2679 try:
2680 try:
2680 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2681 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2681 repo.hook('tag', node=r, tag=name, local=0)
2682 repo.hook('tag', node=r, tag=name, local=0)
2682 except ValueError, inst:
2683 except ValueError, inst:
2683 raise util.Abort(str(inst))
2684 raise util.Abort(str(inst))
2684
2685
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    for t, n in l:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
        except KeyError:
            # tag points at a node missing from the changelog
            r = "    ?:?"
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            ui.write("%-30s %s\n" % (t, r))
2704
2705
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    n = repo.changelog.tip()
    br = None
    if opts['branches']:
        br = repo.branchlookup([n])
    show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
    if opts['patch']:
        # diff tip against its first parent
        dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2717
2718
def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    f = urllib.urlopen(fname)

    # 6-byte magic: "HG" + version "10" + compression ("BZ"/"UN")
    header = f.read(6)
    if not header.startswith("HG"):
        raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
    elif not header.startswith("HG10"):
        raise util.Abort(_("%s: unknown bundle version") % fname)
    elif header == "HG10BZ":
        def generator(f):
            zd = bz2.BZ2Decompressor()
            # the "BZ" of the bzip2 stream was consumed with the header;
            # re-feed it to the decompressor
            zd.decompress("BZ")
            for chunk in f:
                yield zd.decompress(chunk)
    elif header == "HG10UN":
        def generator(f):
            for chunk in f:
                yield chunk
    else:
        raise util.Abort(_("%s: unknown bundle compression type")
                         % fname)
    gen = generator(util.filechunkiter(f, 4096))
    modheads = repo.addchangegroup(util.chunkbuffer(gen))
    return postincoming(ui, repo, modheads, opts['update'])
2747
2748
def undo(ui, repo):
    """undo the last commit or pull

    Roll back the last pull or commit transaction on the
    repository, restoring the project to its earlier state.

    This command should be used with care. There is only one level of
    undo and there is no redo.

    This command is not intended for use on public repositories. Once
    a change is visible for pull by other users, undoing it locally is
    ineffective.
    """
    repo.undo()
2762
2763
def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None, **opts):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    Otherwise the result is a merge between the contents of the
    current working directory and the requested version. Files that
    changed between either parent are marked as changed for the next
    commit and a commit must be performed before any further updates
    are allowed.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if branch:
        # resolve a branch name to a head node; ambiguity is an error
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
            return 1
        if len(found) == 1:
            node = found[0]
            ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
        else:
            ui.warn(_("branch %s not found\n") % (branch))
            return 1
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return repo.update(node, allow=merge, force=clean, forcemerge=force)
2802
2803
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return repo.verify()
2814
2815
2815 # Command options and aliases are listed here, alphabetically
2816 # Command options and aliases are listed here, alphabetically
2816
2817
2817 table = {
2818 table = {
2818 "^add":
2819 "^add":
2819 (add,
2820 (add,
2820 [('I', 'include', [], _('include names matching the given patterns')),
2821 [('I', 'include', [], _('include names matching the given patterns')),
2821 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2822 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2822 _('hg add [OPTION]... [FILE]...')),
2823 _('hg add [OPTION]... [FILE]...')),
2823 "addremove":
2824 "addremove":
2824 (addremove,
2825 (addremove,
2825 [('I', 'include', [], _('include names matching the given patterns')),
2826 [('I', 'include', [], _('include names matching the given patterns')),
2826 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2827 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2827 _('hg addremove [OPTION]... [FILE]...')),
2828 _('hg addremove [OPTION]... [FILE]...')),
2828 "^annotate":
2829 "^annotate":
2829 (annotate,
2830 (annotate,
2830 [('r', 'rev', '', _('annotate the specified revision')),
2831 [('r', 'rev', '', _('annotate the specified revision')),
2831 ('a', 'text', None, _('treat all files as text')),
2832 ('a', 'text', None, _('treat all files as text')),
2832 ('u', 'user', None, _('list the author')),
2833 ('u', 'user', None, _('list the author')),
2833 ('d', 'date', None, _('list the date')),
2834 ('d', 'date', None, _('list the date')),
2834 ('n', 'number', None, _('list the revision number (default)')),
2835 ('n', 'number', None, _('list the revision number (default)')),
2835 ('c', 'changeset', None, _('list the changeset')),
2836 ('c', 'changeset', None, _('list the changeset')),
2836 ('I', 'include', [], _('include names matching the given patterns')),
2837 ('I', 'include', [], _('include names matching the given patterns')),
2837 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2838 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2838 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2839 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2839 "bundle":
2840 "bundle":
2840 (bundle,
2841 (bundle,
2841 [('f', 'force', None,
2842 [('f', 'force', None,
2842 _('run even when remote repository is unrelated'))],
2843 _('run even when remote repository is unrelated'))],
2843 _('hg bundle FILE DEST')),
2844 _('hg bundle FILE DEST')),
2844 "cat":
2845 "cat":
2845 (cat,
2846 (cat,
2846 [('o', 'output', '', _('print output to file with formatted name')),
2847 [('o', 'output', '', _('print output to file with formatted name')),
2847 ('r', 'rev', '', _('print the given revision')),
2848 ('r', 'rev', '', _('print the given revision')),
2848 ('I', 'include', [], _('include names matching the given patterns')),
2849 ('I', 'include', [], _('include names matching the given patterns')),
2849 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2850 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2850 _('hg cat [OPTION]... FILE...')),
2851 _('hg cat [OPTION]... FILE...')),
2851 "^clone":
2852 "^clone":
2852 (clone,
2853 (clone,
2853 [('U', 'noupdate', None, _('do not update the new working directory')),
2854 [('U', 'noupdate', None, _('do not update the new working directory')),
2854 ('r', 'rev', [],
2855 ('r', 'rev', [],
2855 _('a changeset you would like to have after cloning')),
2856 _('a changeset you would like to have after cloning')),
2856 ('', 'pull', None, _('use pull protocol to copy metadata')),
2857 ('', 'pull', None, _('use pull protocol to copy metadata')),
2857 ('e', 'ssh', '', _('specify ssh command to use')),
2858 ('e', 'ssh', '', _('specify ssh command to use')),
2858 ('', 'remotecmd', '',
2859 ('', 'remotecmd', '',
2859 _('specify hg command to run on the remote side'))],
2860 _('specify hg command to run on the remote side'))],
2860 _('hg clone [OPTION]... SOURCE [DEST]')),
2861 _('hg clone [OPTION]... SOURCE [DEST]')),
2861 "^commit|ci":
2862 "^commit|ci":
2862 (commit,
2863 (commit,
2863 [('A', 'addremove', None, _('run addremove during commit')),
2864 [('A', 'addremove', None, _('run addremove during commit')),
2864 ('m', 'message', '', _('use <text> as commit message')),
2865 ('m', 'message', '', _('use <text> as commit message')),
2865 ('l', 'logfile', '', _('read the commit message from <file>')),
2866 ('l', 'logfile', '', _('read the commit message from <file>')),
2866 ('d', 'date', '', _('record datecode as commit date')),
2867 ('d', 'date', '', _('record datecode as commit date')),
2867 ('u', 'user', '', _('record user as commiter')),
2868 ('u', 'user', '', _('record user as commiter')),
2868 ('I', 'include', [], _('include names matching the given patterns')),
2869 ('I', 'include', [], _('include names matching the given patterns')),
2869 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2870 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2870 _('hg commit [OPTION]... [FILE]...')),
2871 _('hg commit [OPTION]... [FILE]...')),
2871 "copy|cp":
2872 "copy|cp":
2872 (copy,
2873 (copy,
2873 [('A', 'after', None, _('record a copy that has already occurred')),
2874 [('A', 'after', None, _('record a copy that has already occurred')),
2874 ('f', 'force', None,
2875 ('f', 'force', None,
2875 _('forcibly copy over an existing managed file')),
2876 _('forcibly copy over an existing managed file')),
2876 ('I', 'include', [], _('include names matching the given patterns')),
2877 ('I', 'include', [], _('include names matching the given patterns')),
2877 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2878 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2878 _('hg copy [OPTION]... [SOURCE]... DEST')),
2879 _('hg copy [OPTION]... [SOURCE]... DEST')),
2879 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2880 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2880 "debugcomplete":
2881 "debugcomplete":
2881 (debugcomplete,
2882 (debugcomplete,
2882 [('o', 'options', None, _('show the command options'))],
2883 [('o', 'options', None, _('show the command options'))],
2883 _('debugcomplete [-o] CMD')),
2884 _('debugcomplete [-o] CMD')),
2884 "debugrebuildstate":
2885 "debugrebuildstate":
2885 (debugrebuildstate,
2886 (debugrebuildstate,
2886 [('r', 'rev', '', _('revision to rebuild to'))],
2887 [('r', 'rev', '', _('revision to rebuild to'))],
2887 _('debugrebuildstate [-r REV] [REV]')),
2888 _('debugrebuildstate [-r REV] [REV]')),
2888 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2889 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2889 "debugconfig": (debugconfig, [], _('debugconfig')),
2890 "debugconfig": (debugconfig, [], _('debugconfig')),
2890 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2891 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2891 "debugstate": (debugstate, [], _('debugstate')),
2892 "debugstate": (debugstate, [], _('debugstate')),
2892 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2893 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2893 "debugindex": (debugindex, [], _('debugindex FILE')),
2894 "debugindex": (debugindex, [], _('debugindex FILE')),
2894 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2895 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2895 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2896 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2896 "debugwalk":
2897 "debugwalk":
2897 (debugwalk,
2898 (debugwalk,
2898 [('I', 'include', [], _('include names matching the given patterns')),
2899 [('I', 'include', [], _('include names matching the given patterns')),
2899 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2900 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2900 _('debugwalk [OPTION]... [FILE]...')),
2901 _('debugwalk [OPTION]... [FILE]...')),
2901 "^diff":
2902 "^diff":
2902 (diff,
2903 (diff,
2903 [('r', 'rev', [], _('revision')),
2904 [('r', 'rev', [], _('revision')),
2904 ('a', 'text', None, _('treat all files as text')),
2905 ('a', 'text', None, _('treat all files as text')),
2905 ('p', 'show-function', None,
2906 ('p', 'show-function', None,
2906 _('show which function each change is in')),
2907 _('show which function each change is in')),
2907 ('w', 'ignore-all-space', None,
2908 ('w', 'ignore-all-space', None,
2908 _('ignore white space when comparing lines')),
2909 _('ignore white space when comparing lines')),
2909 ('I', 'include', [], _('include names matching the given patterns')),
2910 ('I', 'include', [], _('include names matching the given patterns')),
2910 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2911 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2911 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2912 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2912 "^export":
2913 "^export":
2913 (export,
2914 (export,
2914 [('o', 'output', '', _('print output to file with formatted name')),
2915 [('o', 'output', '', _('print output to file with formatted name')),
2915 ('a', 'text', None, _('treat all files as text')),
2916 ('a', 'text', None, _('treat all files as text')),
2916 ('', 'switch-parent', None, _('diff against the second parent'))],
2917 ('', 'switch-parent', None, _('diff against the second parent'))],
2917 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2918 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2918 "forget":
2919 "forget":
2919 (forget,
2920 (forget,
2920 [('I', 'include', [], _('include names matching the given patterns')),
2921 [('I', 'include', [], _('include names matching the given patterns')),
2921 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2922 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2922 _('hg forget [OPTION]... FILE...')),
2923 _('hg forget [OPTION]... FILE...')),
2923 "grep":
2924 "grep":
2924 (grep,
2925 (grep,
2925 [('0', 'print0', None, _('end fields with NUL')),
2926 [('0', 'print0', None, _('end fields with NUL')),
2926 ('', 'all', None, _('print all revisions that match')),
2927 ('', 'all', None, _('print all revisions that match')),
2927 ('i', 'ignore-case', None, _('ignore case when matching')),
2928 ('i', 'ignore-case', None, _('ignore case when matching')),
2928 ('l', 'files-with-matches', None,
2929 ('l', 'files-with-matches', None,
2929 _('print only filenames and revs that match')),
2930 _('print only filenames and revs that match')),
2930 ('n', 'line-number', None, _('print matching line numbers')),
2931 ('n', 'line-number', None, _('print matching line numbers')),
2931 ('r', 'rev', [], _('search in given revision range')),
2932 ('r', 'rev', [], _('search in given revision range')),
2932 ('u', 'user', None, _('print user who committed change')),
2933 ('u', 'user', None, _('print user who committed change')),
2933 ('I', 'include', [], _('include names matching the given patterns')),
2934 ('I', 'include', [], _('include names matching the given patterns')),
2934 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2935 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2935 _('hg grep [OPTION]... PATTERN [FILE]...')),
2936 _('hg grep [OPTION]... PATTERN [FILE]...')),
2936 "heads":
2937 "heads":
2937 (heads,
2938 (heads,
2938 [('b', 'branches', None, _('show branches')),
2939 [('b', 'branches', None, _('show branches')),
2939 ('', 'style', '', _('display using template map file')),
2940 ('', 'style', '', _('display using template map file')),
2940 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2941 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2941 ('', 'template', '', _('display with template'))],
2942 ('', 'template', '', _('display with template'))],
2942 _('hg heads [-b] [-r <rev>]')),
2943 _('hg heads [-b] [-r <rev>]')),
2943 "help": (help_, [], _('hg help [COMMAND]')),
2944 "help": (help_, [], _('hg help [COMMAND]')),
2944 "identify|id": (identify, [], _('hg identify')),
2945 "identify|id": (identify, [], _('hg identify')),
2945 "import|patch":
2946 "import|patch":
2946 (import_,
2947 (import_,
2947 [('p', 'strip', 1,
2948 [('p', 'strip', 1,
2948 _('directory strip option for patch. This has the same\n') +
2949 _('directory strip option for patch. This has the same\n') +
2949 _('meaning as the corresponding patch option')),
2950 _('meaning as the corresponding patch option')),
2950 ('b', 'base', '', _('base path')),
2951 ('b', 'base', '', _('base path')),
2951 ('f', 'force', None,
2952 ('f', 'force', None,
2952 _('skip check for outstanding uncommitted changes'))],
2953 _('skip check for outstanding uncommitted changes'))],
2953 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
2954 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
2954 "incoming|in": (incoming,
2955 "incoming|in": (incoming,
2955 [('M', 'no-merges', None, _('do not show merges')),
2956 [('M', 'no-merges', None, _('do not show merges')),
2956 ('f', 'force', None,
2957 ('f', 'force', None,
2957 _('run even when remote repository is unrelated')),
2958 _('run even when remote repository is unrelated')),
2958 ('', 'style', '', _('display using template map file')),
2959 ('', 'style', '', _('display using template map file')),
2959 ('n', 'newest-first', None, _('show newest record first')),
2960 ('n', 'newest-first', None, _('show newest record first')),
2960 ('', 'bundle', '', _('file to store the bundles into')),
2961 ('', 'bundle', '', _('file to store the bundles into')),
2961 ('p', 'patch', None, _('show patch')),
2962 ('p', 'patch', None, _('show patch')),
2962 ('', 'template', '', _('display with template')),
2963 ('', 'template', '', _('display with template')),
2963 ('e', 'ssh', '', _('specify ssh command to use')),
2964 ('e', 'ssh', '', _('specify ssh command to use')),
2964 ('', 'remotecmd', '',
2965 ('', 'remotecmd', '',
2965 _('specify hg command to run on the remote side'))],
2966 _('specify hg command to run on the remote side'))],
2966 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
2967 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
2967 "^init": (init, [], _('hg init [DEST]')),
2968 "^init": (init, [], _('hg init [DEST]')),
2968 "locate":
2969 "locate":
2969 (locate,
2970 (locate,
2970 [('r', 'rev', '', _('search the repository as it stood at rev')),
2971 [('r', 'rev', '', _('search the repository as it stood at rev')),
2971 ('0', 'print0', None,
2972 ('0', 'print0', None,
2972 _('end filenames with NUL, for use with xargs')),
2973 _('end filenames with NUL, for use with xargs')),
2973 ('f', 'fullpath', None,
2974 ('f', 'fullpath', None,
2974 _('print complete paths from the filesystem root')),
2975 _('print complete paths from the filesystem root')),
2975 ('I', 'include', [], _('include names matching the given patterns')),
2976 ('I', 'include', [], _('include names matching the given patterns')),
2976 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2977 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2977 _('hg locate [OPTION]... [PATTERN]...')),
2978 _('hg locate [OPTION]... [PATTERN]...')),
2978 "^log|history":
2979 "^log|history":
2979 (log,
2980 (log,
2980 [('b', 'branches', None, _('show branches')),
2981 [('b', 'branches', None, _('show branches')),
2981 ('k', 'keyword', [], _('search for a keyword')),
2982 ('k', 'keyword', [], _('search for a keyword')),
2982 ('l', 'limit', '', _('limit number of changes displayed')),
2983 ('l', 'limit', '', _('limit number of changes displayed')),
2983 ('r', 'rev', [], _('show the specified revision or range')),
2984 ('r', 'rev', [], _('show the specified revision or range')),
2984 ('M', 'no-merges', None, _('do not show merges')),
2985 ('M', 'no-merges', None, _('do not show merges')),
2985 ('', 'style', '', _('display using template map file')),
2986 ('', 'style', '', _('display using template map file')),
2986 ('m', 'only-merges', None, _('show only merges')),
2987 ('m', 'only-merges', None, _('show only merges')),
2987 ('p', 'patch', None, _('show patch')),
2988 ('p', 'patch', None, _('show patch')),
2988 ('', 'template', '', _('display with template')),
2989 ('', 'template', '', _('display with template')),
2989 ('I', 'include', [], _('include names matching the given patterns')),
2990 ('I', 'include', [], _('include names matching the given patterns')),
2990 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2991 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2991 _('hg log [OPTION]... [FILE]')),
2992 _('hg log [OPTION]... [FILE]')),
2992 "manifest": (manifest, [], _('hg manifest [REV]')),
2993 "manifest": (manifest, [], _('hg manifest [REV]')),
2993 "merge":
2994 "merge":
2994 (merge,
2995 (merge,
2995 [('b', 'branch', '', _('merge with head of a specific branch')),
2996 [('b', 'branch', '', _('merge with head of a specific branch')),
2996 ('', 'style', '', _('display using template map file')),
2997 ('', 'style', '', _('display using template map file')),
2997 ('f', 'force', None, _('force a merge with outstanding changes')),
2998 ('f', 'force', None, _('force a merge with outstanding changes')),
2998 ('', 'template', '', _('display with template'))],
2999 ('', 'template', '', _('display with template'))],
2999 _('hg merge [-b TAG] [-f] [REV]')),
3000 _('hg merge [-b TAG] [-f] [REV]')),
3000 "outgoing|out": (outgoing,
3001 "outgoing|out": (outgoing,
3001 [('M', 'no-merges', None, _('do not show merges')),
3002 [('M', 'no-merges', None, _('do not show merges')),
3002 ('f', 'force', None,
3003 ('f', 'force', None,
3003 _('run even when remote repository is unrelated')),
3004 _('run even when remote repository is unrelated')),
3004 ('p', 'patch', None, _('show patch')),
3005 ('p', 'patch', None, _('show patch')),
3005 ('', 'style', '', _('display using template map file')),
3006 ('', 'style', '', _('display using template map file')),
3006 ('n', 'newest-first', None, _('show newest record first')),
3007 ('n', 'newest-first', None, _('show newest record first')),
3007 ('', 'template', '', _('display with template')),
3008 ('', 'template', '', _('display with template')),
3008 ('e', 'ssh', '', _('specify ssh command to use')),
3009 ('e', 'ssh', '', _('specify ssh command to use')),
3009 ('', 'remotecmd', '',
3010 ('', 'remotecmd', '',
3010 _('specify hg command to run on the remote side'))],
3011 _('specify hg command to run on the remote side'))],
3011 _('hg outgoing [-M] [-p] [-n] [DEST]')),
3012 _('hg outgoing [-M] [-p] [-n] [DEST]')),
3012 "^parents":
3013 "^parents":
3013 (parents,
3014 (parents,
3014 [('b', 'branches', None, _('show branches')),
3015 [('b', 'branches', None, _('show branches')),
3015 ('', 'style', '', _('display using template map file')),
3016 ('', 'style', '', _('display using template map file')),
3016 ('', 'template', '', _('display with template'))],
3017 ('', 'template', '', _('display with template'))],
3017 _('hg parents [-b] [REV]')),
3018 _('hg parents [-b] [REV]')),
3018 "paths": (paths, [], _('hg paths [NAME]')),
3019 "paths": (paths, [], _('hg paths [NAME]')),
3019 "^pull":
3020 "^pull":
3020 (pull,
3021 (pull,
3021 [('u', 'update', None,
3022 [('u', 'update', None,
3022 _('update the working directory to tip after pull')),
3023 _('update the working directory to tip after pull')),
3023 ('e', 'ssh', '', _('specify ssh command to use')),
3024 ('e', 'ssh', '', _('specify ssh command to use')),
3024 ('f', 'force', None,
3025 ('f', 'force', None,
3025 _('run even when remote repository is unrelated')),
3026 _('run even when remote repository is unrelated')),
3026 ('r', 'rev', [], _('a specific revision you would like to pull')),
3027 ('r', 'rev', [], _('a specific revision you would like to pull')),
3027 ('', 'remotecmd', '',
3028 ('', 'remotecmd', '',
3028 _('specify hg command to run on the remote side'))],
3029 _('specify hg command to run on the remote side'))],
3029 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3030 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3030 "^push":
3031 "^push":
3031 (push,
3032 (push,
3032 [('f', 'force', None, _('force push')),
3033 [('f', 'force', None, _('force push')),
3033 ('e', 'ssh', '', _('specify ssh command to use')),
3034 ('e', 'ssh', '', _('specify ssh command to use')),
3034 ('r', 'rev', [], _('a specific revision you would like to push')),
3035 ('r', 'rev', [], _('a specific revision you would like to push')),
3035 ('', 'remotecmd', '',
3036 ('', 'remotecmd', '',
3036 _('specify hg command to run on the remote side'))],
3037 _('specify hg command to run on the remote side'))],
3037 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3038 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3038 "debugrawcommit|rawcommit":
3039 "debugrawcommit|rawcommit":
3039 (rawcommit,
3040 (rawcommit,
3040 [('p', 'parent', [], _('parent')),
3041 [('p', 'parent', [], _('parent')),
3041 ('d', 'date', '', _('date code')),
3042 ('d', 'date', '', _('date code')),
3042 ('u', 'user', '', _('user')),
3043 ('u', 'user', '', _('user')),
3043 ('F', 'files', '', _('file list')),
3044 ('F', 'files', '', _('file list')),
3044 ('m', 'message', '', _('commit message')),
3045 ('m', 'message', '', _('commit message')),
3045 ('l', 'logfile', '', _('commit message file'))],
3046 ('l', 'logfile', '', _('commit message file'))],
3046 _('hg debugrawcommit [OPTION]... [FILE]...')),
3047 _('hg debugrawcommit [OPTION]... [FILE]...')),
3047 "recover": (recover, [], _('hg recover')),
3048 "recover": (recover, [], _('hg recover')),
3048 "^remove|rm":
3049 "^remove|rm":
3049 (remove,
3050 (remove,
3050 [('f', 'force', None, _('remove file even if modified')),
3051 [('f', 'force', None, _('remove file even if modified')),
3051 ('I', 'include', [], _('include names matching the given patterns')),
3052 ('I', 'include', [], _('include names matching the given patterns')),
3052 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3053 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3053 _('hg remove [OPTION]... FILE...')),
3054 _('hg remove [OPTION]... FILE...')),
3054 "rename|mv":
3055 "rename|mv":
3055 (rename,
3056 (rename,
3056 [('A', 'after', None, _('record a rename that has already occurred')),
3057 [('A', 'after', None, _('record a rename that has already occurred')),
3057 ('f', 'force', None,
3058 ('f', 'force', None,
3058 _('forcibly copy over an existing managed file')),
3059 _('forcibly copy over an existing managed file')),
3059 ('I', 'include', [], _('include names matching the given patterns')),
3060 ('I', 'include', [], _('include names matching the given patterns')),
3060 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3061 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3061 _('hg rename [OPTION]... SOURCE... DEST')),
3062 _('hg rename [OPTION]... SOURCE... DEST')),
3062 "^revert":
3063 "^revert":
3063 (revert,
3064 (revert,
3064 [('r', 'rev', '', _('revision to revert to')),
3065 [('r', 'rev', '', _('revision to revert to')),
3065 ('', 'no-backup', None, _('do not save backup copies of files')),
3066 ('', 'no-backup', None, _('do not save backup copies of files')),
3066 ('I', 'include', [], _('include names matching given patterns')),
3067 ('I', 'include', [], _('include names matching given patterns')),
3067 ('X', 'exclude', [], _('exclude names matching given patterns'))],
3068 ('X', 'exclude', [], _('exclude names matching given patterns'))],
3068 _('hg revert [-r REV] [NAME]...')),
3069 _('hg revert [-r REV] [NAME]...')),
3069 "root": (root, [], _('hg root')),
3070 "root": (root, [], _('hg root')),
3070 "^serve":
3071 "^serve":
3071 (serve,
3072 (serve,
3072 [('A', 'accesslog', '', _('name of access log file to write to')),
3073 [('A', 'accesslog', '', _('name of access log file to write to')),
3073 ('d', 'daemon', None, _('run server in background')),
3074 ('d', 'daemon', None, _('run server in background')),
3074 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3075 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3075 ('E', 'errorlog', '', _('name of error log file to write to')),
3076 ('E', 'errorlog', '', _('name of error log file to write to')),
3076 ('p', 'port', 0, _('port to use (default: 8000)')),
3077 ('p', 'port', 0, _('port to use (default: 8000)')),
3077 ('a', 'address', '', _('address to use')),
3078 ('a', 'address', '', _('address to use')),
3078 ('n', 'name', '',
3079 ('n', 'name', '',
3079 _('name to show in web pages (default: working dir)')),
3080 _('name to show in web pages (default: working dir)')),
3080 ('', 'pid-file', '', _('name of file to write process ID to')),
3081 ('', 'pid-file', '', _('name of file to write process ID to')),
3081 ('', 'stdio', None, _('for remote clients')),
3082 ('', 'stdio', None, _('for remote clients')),
3082 ('t', 'templates', '', _('web templates to use')),
3083 ('t', 'templates', '', _('web templates to use')),
3083 ('', 'style', '', _('template style to use')),
3084 ('', 'style', '', _('template style to use')),
3084 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3085 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3085 _('hg serve [OPTION]...')),
3086 _('hg serve [OPTION]...')),
3086 "^status|st":
3087 "^status|st":
3087 (status,
3088 (status,
3088 [('m', 'modified', None, _('show only modified files')),
3089 [('m', 'modified', None, _('show only modified files')),
3089 ('a', 'added', None, _('show only added files')),
3090 ('a', 'added', None, _('show only added files')),
3090 ('r', 'removed', None, _('show only removed files')),
3091 ('r', 'removed', None, _('show only removed files')),
3091 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3092 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3092 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3093 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3093 ('i', 'ignored', None, _('show ignored files')),
3094 ('i', 'ignored', None, _('show ignored files')),
3094 ('n', 'no-status', None, _('hide status prefix')),
3095 ('n', 'no-status', None, _('hide status prefix')),
3095 ('0', 'print0', None,
3096 ('0', 'print0', None,
3096 _('end filenames with NUL, for use with xargs')),
3097 _('end filenames with NUL, for use with xargs')),
3097 ('I', 'include', [], _('include names matching the given patterns')),
3098 ('I', 'include', [], _('include names matching the given patterns')),
3098 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3099 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3099 _('hg status [OPTION]... [FILE]...')),
3100 _('hg status [OPTION]... [FILE]...')),
3100 "tag":
3101 "tag":
3101 (tag,
3102 (tag,
3102 [('l', 'local', None, _('make the tag local')),
3103 [('l', 'local', None, _('make the tag local')),
3103 ('m', 'message', '', _('message for tag commit log entry')),
3104 ('m', 'message', '', _('message for tag commit log entry')),
3104 ('d', 'date', '', _('record datecode as commit date')),
3105 ('d', 'date', '', _('record datecode as commit date')),
3105 ('u', 'user', '', _('record user as commiter')),
3106 ('u', 'user', '', _('record user as commiter')),
3106 ('r', 'rev', '', _('revision to tag'))],
3107 ('r', 'rev', '', _('revision to tag'))],
3107 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3108 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3108 "tags": (tags, [], _('hg tags')),
3109 "tags": (tags, [], _('hg tags')),
3109 "tip":
3110 "tip":
3110 (tip,
3111 (tip,
3111 [('b', 'branches', None, _('show branches')),
3112 [('b', 'branches', None, _('show branches')),
3112 ('', 'style', '', _('display using template map file')),
3113 ('', 'style', '', _('display using template map file')),
3113 ('p', 'patch', None, _('show patch')),
3114 ('p', 'patch', None, _('show patch')),
3114 ('', 'template', '', _('display with template'))],
3115 ('', 'template', '', _('display with template'))],
3115 _('hg tip [-b] [-p]')),
3116 _('hg tip [-b] [-p]')),
3116 "unbundle":
3117 "unbundle":
3117 (unbundle,
3118 (unbundle,
3118 [('u', 'update', None,
3119 [('u', 'update', None,
3119 _('update the working directory to tip after unbundle'))],
3120 _('update the working directory to tip after unbundle'))],
3120 _('hg unbundle [-u] FILE')),
3121 _('hg unbundle [-u] FILE')),
3121 "undo": (undo, [], _('hg undo')),
3122 "undo": (undo, [], _('hg undo')),
3122 "^update|up|checkout|co":
3123 "^update|up|checkout|co":
3123 (update,
3124 (update,
3124 [('b', 'branch', '', _('checkout the head of a specific branch')),
3125 [('b', 'branch', '', _('checkout the head of a specific branch')),
3125 ('', 'style', '', _('display using template map file')),
3126 ('', 'style', '', _('display using template map file')),
3126 ('m', 'merge', None, _('allow merging of branches')),
3127 ('m', 'merge', None, _('allow merging of branches')),
3127 ('C', 'clean', None, _('overwrite locally modified files')),
3128 ('C', 'clean', None, _('overwrite locally modified files')),
3128 ('f', 'force', None, _('force a merge with outstanding changes')),
3129 ('f', 'force', None, _('force a merge with outstanding changes')),
3129 ('', 'template', '', _('display with template'))],
3130 ('', 'template', '', _('display with template'))],
3130 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3131 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3131 "verify": (verify, [], _('hg verify')),
3132 "verify": (verify, [], _('hg verify')),
3132 "version": (show_version, [], _('hg version')),
3133 "version": (show_version, [], _('hg version')),
3133 }
3134 }
3134
3135
# options accepted by every command; each entry is
# (short option, long option, default value, help text)
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# space-separated names of commands that run without a repository
norepo = ("clone init version help debugancestor debugcomplete debugdata"
          " debugindex debugindexdot")
# commands that use a repository when available but do not require one
optionalrepo = ("paths debugconfig")
3155
3156
def findpossible(cmd):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command
    """
    matches = {}
    debugmatches = {}
    for key, entry in table.items():
        names = key.lstrip("^").split("|")
        if cmd in names:
            # exact name/alias hit for this entry
            matches[cmd] = (names, entry)
            continue
        for name in names:
            if not name.startswith(cmd):
                continue
            # prefix hit; keep debug commands in a separate pool
            if names[0].startswith("debug"):
                debugmatches[name] = (names, entry)
            else:
                debugmatches, matches = debugmatches, matches
                matches[name] = (names, entry)
            break

    # debug commands are offered only when nothing else matched
    if matches:
        return matches
    return debugmatches
3180
3181
def find(cmd):
    """Return (aliases, command table entry) for command string."""
    candidates = findpossible(cmd)

    # an exact name/alias match is always unambiguous
    if cmd in candidates:
        return candidates[cmd]

    if len(candidates) > 1:
        names = candidates.keys()
        names.sort()
        raise AmbiguousCommand(cmd, names)

    if candidates:
        return candidates.values()[0]

    raise UnknownCommand(cmd)
3197
3198
# raised by catchterm so termination signals unwind like KeyboardInterrupt
class SignalInterrupt(Exception):
    """Exception raised on SIGTERM and SIGHUP."""
3200
3201
def catchterm(*args):
    # signal handler: turn SIGTERM/SIGHUP into a catchable exception
    raise SignalInterrupt
3203
3204
def run():
    """Command-line entry point: dispatch argv and exit with its status."""
    sys.exit(dispatch(sys.argv[1:]))
3206
3207
class ParseError(Exception):
    """Exception raised on errors in parsing the command line."""
3209
3210
def parse(ui, args):
    """Parse a command line into (cmd, func, args, options, cmdoptions).

    cmd is the canonical command name (None when no command was given),
    func the command's function (or None), args the remaining positional
    arguments, options the global option values, and cmdoptions the
    command-specific option values.

    Raises ParseError when option parsing fails; find() may raise
    UnknownCommand or AmbiguousCommand for a bad command name.
    """
    options = {}
    cmdoptions = {}

    try:
        # first pass: global options only, up to the command name
        args = fancyopts.fancyopts(args, globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, i = find(cmd)
        cmd = aliases[0]
        # [defaults] config section may prepend default arguments
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = defaults.split() + args
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        # second pass: the command's own options plus globals after it
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3247
3248
3248 def dispatch(args):
3249 def dispatch(args):
3249 signal.signal(signal.SIGTERM, catchterm)
3250 signal.signal(signal.SIGTERM, catchterm)
3250 try:
3251 try:
3251 signal.signal(signal.SIGHUP, catchterm)
3252 signal.signal(signal.SIGHUP, catchterm)
3252 except AttributeError:
3253 except AttributeError:
3253 pass
3254 pass
3254
3255
3255 try:
3256 try:
3256 u = ui.ui()
3257 u = ui.ui()
3257 except util.Abort, inst:
3258 except util.Abort, inst:
3258 sys.stderr.write(_("abort: %s\n") % inst)
3259 sys.stderr.write(_("abort: %s\n") % inst)
3259 sys.exit(1)
3260 sys.exit(1)
3260
3261
3261 external = []
3262 external = []
3262 for x in u.extensions():
3263 for x in u.extensions():
3263 def on_exception(exc, inst):
3264 def on_exception(exc, inst):
3264 u.warn(_("*** failed to import extension %s\n") % x[1])
3265 u.warn(_("*** failed to import extension %s\n") % x[1])
3265 u.warn("%s\n" % inst)
3266 u.warn("%s\n" % inst)
3266 if "--traceback" in sys.argv[1:]:
3267 if "--traceback" in sys.argv[1:]:
3267 traceback.print_exc()
3268 traceback.print_exc()
3268 if x[1]:
3269 if x[1]:
3269 try:
3270 try:
3270 mod = imp.load_source(x[0], x[1])
3271 mod = imp.load_source(x[0], x[1])
3271 except Exception, inst:
3272 except Exception, inst:
3272 on_exception(Exception, inst)
3273 on_exception(Exception, inst)
3273 continue
3274 continue
3274 else:
3275 else:
3275 def importh(name):
3276 def importh(name):
3276 mod = __import__(name)
3277 mod = __import__(name)
3277 components = name.split('.')
3278 components = name.split('.')
3278 for comp in components[1:]:
3279 for comp in components[1:]:
3279 mod = getattr(mod, comp)
3280 mod = getattr(mod, comp)
3280 return mod
3281 return mod
3281 try:
3282 try:
3282 try:
3283 try:
3283 mod = importh("hgext." + x[0])
3284 mod = importh("hgext." + x[0])
3284 except ImportError:
3285 except ImportError:
3285 mod = importh(x[0])
3286 mod = importh(x[0])
3286 except Exception, inst:
3287 except Exception, inst:
3287 on_exception(Exception, inst)
3288 on_exception(Exception, inst)
3288 continue
3289 continue
3289
3290
3290 external.append(mod)
3291 external.append(mod)
3291 for x in external:
3292 for x in external:
3292 cmdtable = getattr(x, 'cmdtable', {})
3293 cmdtable = getattr(x, 'cmdtable', {})
3293 for t in cmdtable:
3294 for t in cmdtable:
3294 if t in table:
3295 if t in table:
3295 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3296 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3296 table.update(cmdtable)
3297 table.update(cmdtable)
3297
3298
3298 try:
3299 try:
3299 cmd, func, args, options, cmdoptions = parse(u, args)
3300 cmd, func, args, options, cmdoptions = parse(u, args)
3300 if options["time"]:
3301 if options["time"]:
3301 def get_times():
3302 def get_times():
3302 t = os.times()
3303 t = os.times()
3303 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3304 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3304 t = (t[0], t[1], t[2], t[3], time.clock())
3305 t = (t[0], t[1], t[2], t[3], time.clock())
3305 return t
3306 return t
3306 s = get_times()
3307 s = get_times()
3307 def print_time():
3308 def print_time():
3308 t = get_times()
3309 t = get_times()
3309 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3310 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3310 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3311 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3311 atexit.register(print_time)
3312 atexit.register(print_time)
3312
3313
3313 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3314 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3314 not options["noninteractive"])
3315 not options["noninteractive"])
3315
3316
3316 # enter the debugger before command execution
3317 # enter the debugger before command execution
3317 if options['debugger']:
3318 if options['debugger']:
3318 pdb.set_trace()
3319 pdb.set_trace()
3319
3320
3320 try:
3321 try:
3321 if options['cwd']:
3322 if options['cwd']:
3322 try:
3323 try:
3323 os.chdir(options['cwd'])
3324 os.chdir(options['cwd'])
3324 except OSError, inst:
3325 except OSError, inst:
3325 raise util.Abort('%s: %s' %
3326 raise util.Abort('%s: %s' %
3326 (options['cwd'], inst.strerror))
3327 (options['cwd'], inst.strerror))
3327
3328
3328 path = u.expandpath(options["repository"]) or ""
3329 path = u.expandpath(options["repository"]) or ""
3329 repo = path and hg.repository(u, path=path) or None
3330 repo = path and hg.repository(u, path=path) or None
3330
3331
3331 if options['help']:
3332 if options['help']:
3332 help_(u, cmd, options['version'])
3333 help_(u, cmd, options['version'])
3333 sys.exit(0)
3334 sys.exit(0)
3334 elif options['version']:
3335 elif options['version']:
3335 show_version(u)
3336 show_version(u)
3336 sys.exit(0)
3337 sys.exit(0)
3337 elif not cmd:
3338 elif not cmd:
3338 help_(u, 'shortlist')
3339 help_(u, 'shortlist')
3339 sys.exit(0)
3340 sys.exit(0)
3340
3341
3341 if cmd not in norepo.split():
3342 if cmd not in norepo.split():
3342 try:
3343 try:
3343 if not repo:
3344 if not repo:
3344 repo = hg.repository(u, path=path)
3345 repo = hg.repository(u, path=path)
3345 u = repo.ui
3346 u = repo.ui
3346 for x in external:
3347 for x in external:
3347 if hasattr(x, 'reposetup'):
3348 if hasattr(x, 'reposetup'):
3348 x.reposetup(u, repo)
3349 x.reposetup(u, repo)
3349 except hg.RepoError:
3350 except hg.RepoError:
3350 if cmd not in optionalrepo.split():
3351 if cmd not in optionalrepo.split():
3351 raise
3352 raise
3352 d = lambda: func(u, repo, *args, **cmdoptions)
3353 d = lambda: func(u, repo, *args, **cmdoptions)
3353 else:
3354 else:
3354 d = lambda: func(u, *args, **cmdoptions)
3355 d = lambda: func(u, *args, **cmdoptions)
3355
3356
3356 try:
3357 try:
3357 if options['profile']:
3358 if options['profile']:
3358 import hotshot, hotshot.stats
3359 import hotshot, hotshot.stats
3359 prof = hotshot.Profile("hg.prof")
3360 prof = hotshot.Profile("hg.prof")
3360 try:
3361 try:
3361 try:
3362 try:
3362 return prof.runcall(d)
3363 return prof.runcall(d)
3363 except:
3364 except:
3364 try:
3365 try:
3365 u.warn(_('exception raised - generating '
3366 u.warn(_('exception raised - generating '
3366 'profile anyway\n'))
3367 'profile anyway\n'))
3367 except:
3368 except:
3368 pass
3369 pass
3369 raise
3370 raise
3370 finally:
3371 finally:
3371 prof.close()
3372 prof.close()
3372 stats = hotshot.stats.load("hg.prof")
3373 stats = hotshot.stats.load("hg.prof")
3373 stats.strip_dirs()
3374 stats.strip_dirs()
3374 stats.sort_stats('time', 'calls')
3375 stats.sort_stats('time', 'calls')
3375 stats.print_stats(40)
3376 stats.print_stats(40)
3376 else:
3377 else:
3377 return d()
3378 return d()
3378 finally:
3379 finally:
3379 u.flush()
3380 u.flush()
3380 except:
3381 except:
3381 # enter the debugger when we hit an exception
3382 # enter the debugger when we hit an exception
3382 if options['debugger']:
3383 if options['debugger']:
3383 pdb.post_mortem(sys.exc_info()[2])
3384 pdb.post_mortem(sys.exc_info()[2])
3384 if options['traceback']:
3385 if options['traceback']:
3385 traceback.print_exc()
3386 traceback.print_exc()
3386 raise
3387 raise
3387 except ParseError, inst:
3388 except ParseError, inst:
3388 if inst.args[0]:
3389 if inst.args[0]:
3389 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3390 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3390 help_(u, inst.args[0])
3391 help_(u, inst.args[0])
3391 else:
3392 else:
3392 u.warn(_("hg: %s\n") % inst.args[1])
3393 u.warn(_("hg: %s\n") % inst.args[1])
3393 help_(u, 'shortlist')
3394 help_(u, 'shortlist')
3394 sys.exit(-1)
3395 sys.exit(-1)
3395 except AmbiguousCommand, inst:
3396 except AmbiguousCommand, inst:
3396 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3397 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3397 (inst.args[0], " ".join(inst.args[1])))
3398 (inst.args[0], " ".join(inst.args[1])))
3398 sys.exit(1)
3399 sys.exit(1)
3399 except UnknownCommand, inst:
3400 except UnknownCommand, inst:
3400 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3401 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3401 help_(u, 'shortlist')
3402 help_(u, 'shortlist')
3402 sys.exit(1)
3403 sys.exit(1)
3403 except hg.RepoError, inst:
3404 except hg.RepoError, inst:
3404 u.warn(_("abort: "), inst, "!\n")
3405 u.warn(_("abort: "), inst, "!\n")
3405 except lock.LockHeld, inst:
3406 except lock.LockHeld, inst:
3406 if inst.errno == errno.ETIMEDOUT:
3407 if inst.errno == errno.ETIMEDOUT:
3407 reason = _('timed out waiting for lock held by %s') % inst.locker
3408 reason = _('timed out waiting for lock held by %s') % inst.locker
3408 else:
3409 else:
3409 reason = _('lock held by %s') % inst.locker
3410 reason = _('lock held by %s') % inst.locker
3410 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3411 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3411 except lock.LockUnavailable, inst:
3412 except lock.LockUnavailable, inst:
3412 u.warn(_("abort: could not lock %s: %s\n") %
3413 u.warn(_("abort: could not lock %s: %s\n") %
3413 (inst.desc or inst.filename, inst.strerror))
3414 (inst.desc or inst.filename, inst.strerror))
3414 except revlog.RevlogError, inst:
3415 except revlog.RevlogError, inst:
3415 u.warn(_("abort: "), inst, "!\n")
3416 u.warn(_("abort: "), inst, "!\n")
3416 except SignalInterrupt:
3417 except SignalInterrupt:
3417 u.warn(_("killed!\n"))
3418 u.warn(_("killed!\n"))
3418 except KeyboardInterrupt:
3419 except KeyboardInterrupt:
3419 try:
3420 try:
3420 u.warn(_("interrupted!\n"))
3421 u.warn(_("interrupted!\n"))
3421 except IOError, inst:
3422 except IOError, inst:
3422 if inst.errno == errno.EPIPE:
3423 if inst.errno == errno.EPIPE:
3423 if u.debugflag:
3424 if u.debugflag:
3424 u.warn(_("\nbroken pipe\n"))
3425 u.warn(_("\nbroken pipe\n"))
3425 else:
3426 else:
3426 raise
3427 raise
3427 except IOError, inst:
3428 except IOError, inst:
3428 if hasattr(inst, "code"):
3429 if hasattr(inst, "code"):
3429 u.warn(_("abort: %s\n") % inst)
3430 u.warn(_("abort: %s\n") % inst)
3430 elif hasattr(inst, "reason"):
3431 elif hasattr(inst, "reason"):
3431 u.warn(_("abort: error: %s\n") % inst.reason[1])
3432 u.warn(_("abort: error: %s\n") % inst.reason[1])
3432 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3433 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3433 if u.debugflag:
3434 if u.debugflag:
3434 u.warn(_("broken pipe\n"))
3435 u.warn(_("broken pipe\n"))
3435 elif getattr(inst, "strerror", None):
3436 elif getattr(inst, "strerror", None):
3436 if getattr(inst, "filename", None):
3437 if getattr(inst, "filename", None):
3437 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3438 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3438 else:
3439 else:
3439 u.warn(_("abort: %s\n") % inst.strerror)
3440 u.warn(_("abort: %s\n") % inst.strerror)
3440 else:
3441 else:
3441 raise
3442 raise
3442 except OSError, inst:
3443 except OSError, inst:
3443 if hasattr(inst, "filename"):
3444 if hasattr(inst, "filename"):
3444 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3445 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3445 else:
3446 else:
3446 u.warn(_("abort: %s\n") % inst.strerror)
3447 u.warn(_("abort: %s\n") % inst.strerror)
3447 except util.Abort, inst:
3448 except util.Abort, inst:
3448 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3449 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3449 sys.exit(1)
3450 sys.exit(1)
3450 except TypeError, inst:
3451 except TypeError, inst:
3451 # was this an argument error?
3452 # was this an argument error?
3452 tb = traceback.extract_tb(sys.exc_info()[2])
3453 tb = traceback.extract_tb(sys.exc_info()[2])
3453 if len(tb) > 2: # no
3454 if len(tb) > 2: # no
3454 raise
3455 raise
3455 u.debug(inst, "\n")
3456 u.debug(inst, "\n")
3456 u.warn(_("%s: invalid arguments\n") % cmd)
3457 u.warn(_("%s: invalid arguments\n") % cmd)
3457 help_(u, cmd)
3458 help_(u, cmd)
3458 except SystemExit:
3459 except SystemExit:
3459 # don't catch this in the catch-all below
3460 # don't catch this in the catch-all below
3460 raise
3461 raise
3461 except:
3462 except:
3462 u.warn(_("** unknown exception encountered, details follow\n"))
3463 u.warn(_("** unknown exception encountered, details follow\n"))
3463 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3464 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3464 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3465 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3465 % version.get_version())
3466 % version.get_version())
3466 raise
3467 raise
3467
3468
3468 sys.exit(-1)
3469 sys.exit(-1)
@@ -1,107 +1,108
1 # filelog.py - file history class for mercurial
1 # filelog.py - file history class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os
8 import os
9 from revlog import *
9 from revlog import *
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "bdiff")
11 demandload(globals(), "bdiff")
12
12
13 class filelog(revlog):
13 class filelog(revlog):
14 def __init__(self, opener, path):
14 def __init__(self, opener, path, defversion=0):
15 revlog.__init__(self, opener,
15 revlog.__init__(self, opener,
16 os.path.join("data", self.encodedir(path + ".i")),
16 os.path.join("data", self.encodedir(path + ".i")),
17 os.path.join("data", self.encodedir(path + ".d")))
17 os.path.join("data", self.encodedir(path + ".d")),
18 defversion)
18
19
19 # This avoids a collision between a file named foo and a dir named
20 # This avoids a collision between a file named foo and a dir named
20 # foo.i or foo.d
21 # foo.i or foo.d
21 def encodedir(self, path):
22 def encodedir(self, path):
22 return (path
23 return (path
23 .replace(".hg/", ".hg.hg/")
24 .replace(".hg/", ".hg.hg/")
24 .replace(".i/", ".i.hg/")
25 .replace(".i/", ".i.hg/")
25 .replace(".d/", ".d.hg/"))
26 .replace(".d/", ".d.hg/"))
26
27
27 def decodedir(self, path):
28 def decodedir(self, path):
28 return (path
29 return (path
29 .replace(".d.hg/", ".d/")
30 .replace(".d.hg/", ".d/")
30 .replace(".i.hg/", ".i/")
31 .replace(".i.hg/", ".i/")
31 .replace(".hg.hg/", ".hg/"))
32 .replace(".hg.hg/", ".hg/"))
32
33
33 def read(self, node):
34 def read(self, node):
34 t = self.revision(node)
35 t = self.revision(node)
35 if not t.startswith('\1\n'):
36 if not t.startswith('\1\n'):
36 return t
37 return t
37 s = t.find('\1\n', 2)
38 s = t.find('\1\n', 2)
38 return t[s+2:]
39 return t[s+2:]
39
40
40 def readmeta(self, node):
41 def readmeta(self, node):
41 t = self.revision(node)
42 t = self.revision(node)
42 if not t.startswith('\1\n'):
43 if not t.startswith('\1\n'):
43 return {}
44 return {}
44 s = t.find('\1\n', 2)
45 s = t.find('\1\n', 2)
45 mt = t[2:s]
46 mt = t[2:s]
46 m = {}
47 m = {}
47 for l in mt.splitlines():
48 for l in mt.splitlines():
48 k, v = l.split(": ", 1)
49 k, v = l.split(": ", 1)
49 m[k] = v
50 m[k] = v
50 return m
51 return m
51
52
52 def add(self, text, meta, transaction, link, p1=None, p2=None):
53 def add(self, text, meta, transaction, link, p1=None, p2=None):
53 if meta or text.startswith('\1\n'):
54 if meta or text.startswith('\1\n'):
54 mt = ""
55 mt = ""
55 if meta:
56 if meta:
56 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
57 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
57 text = "\1\n%s\1\n%s" % ("".join(mt), text)
58 text = "\1\n%s\1\n%s" % ("".join(mt), text)
58 return self.addrevision(text, transaction, link, p1, p2)
59 return self.addrevision(text, transaction, link, p1, p2)
59
60
60 def renamed(self, node):
61 def renamed(self, node):
61 if self.parents(node)[0] != nullid:
62 if self.parents(node)[0] != nullid:
62 return False
63 return False
63 m = self.readmeta(node)
64 m = self.readmeta(node)
64 if m and m.has_key("copy"):
65 if m and m.has_key("copy"):
65 return (m["copy"], bin(m["copyrev"]))
66 return (m["copy"], bin(m["copyrev"]))
66 return False
67 return False
67
68
68 def annotate(self, node):
69 def annotate(self, node):
69
70
70 def decorate(text, rev):
71 def decorate(text, rev):
71 return ([rev] * len(text.splitlines()), text)
72 return ([rev] * len(text.splitlines()), text)
72
73
73 def pair(parent, child):
74 def pair(parent, child):
74 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
75 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
75 child[0][b1:b2] = parent[0][a1:a2]
76 child[0][b1:b2] = parent[0][a1:a2]
76 return child
77 return child
77
78
78 # find all ancestors
79 # find all ancestors
79 needed = {node:1}
80 needed = {node:1}
80 visit = [node]
81 visit = [node]
81 while visit:
82 while visit:
82 n = visit.pop(0)
83 n = visit.pop(0)
83 for p in self.parents(n):
84 for p in self.parents(n):
84 if p not in needed:
85 if p not in needed:
85 needed[p] = 1
86 needed[p] = 1
86 visit.append(p)
87 visit.append(p)
87 else:
88 else:
88 # count how many times we'll use this
89 # count how many times we'll use this
89 needed[p] += 1
90 needed[p] += 1
90
91
91 # sort by revision which is a topological order
92 # sort by revision which is a topological order
92 visit = [ (self.rev(n), n) for n in needed.keys() ]
93 visit = [ (self.rev(n), n) for n in needed.keys() ]
93 visit.sort()
94 visit.sort()
94 hist = {}
95 hist = {}
95
96
96 for r,n in visit:
97 for r,n in visit:
97 curr = decorate(self.read(n), self.linkrev(n))
98 curr = decorate(self.read(n), self.linkrev(n))
98 for p in self.parents(n):
99 for p in self.parents(n):
99 if p != nullid:
100 if p != nullid:
100 curr = pair(hist[p], curr)
101 curr = pair(hist[p], curr)
101 # trim the history of unneeded revs
102 # trim the history of unneeded revs
102 needed[p] -= 1
103 needed[p] -= 1
103 if not needed[p]:
104 if not needed[p]:
104 del hist[p]
105 del hist[p]
105 hist[n] = curr
106 hist[n] = curr
106
107
107 return zip(hist[n][0], hist[n][1].splitlines(1))
108 return zip(hist[n][0], hist[n][1].splitlines(1))
@@ -1,1956 +1,1963
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "appendfile changegroup")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui revlog")
15
15
16 class localrepository(object):
16 class localrepository(object):
17 def __del__(self):
17 def __del__(self):
18 self.transhandle = None
18 self.transhandle = None
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 if not path:
20 if not path:
21 p = os.getcwd()
21 p = os.getcwd()
22 while not os.path.isdir(os.path.join(p, ".hg")):
22 while not os.path.isdir(os.path.join(p, ".hg")):
23 oldp = p
23 oldp = p
24 p = os.path.dirname(p)
24 p = os.path.dirname(p)
25 if p == oldp:
25 if p == oldp:
26 raise repo.RepoError(_("no repo found"))
26 raise repo.RepoError(_("no repo found"))
27 path = p
27 path = p
28 self.path = os.path.join(path, ".hg")
28 self.path = os.path.join(path, ".hg")
29
29
30 if not create and not os.path.isdir(self.path):
30 if not create and not os.path.isdir(self.path):
31 raise repo.RepoError(_("repository %s not found") % path)
31 raise repo.RepoError(_("repository %s not found") % path)
32
32
33 self.root = os.path.abspath(path)
33 self.root = os.path.abspath(path)
34 self.origroot = path
34 self.origroot = path
35 self.ui = ui.ui(parentui=parentui)
35 self.ui = ui.ui(parentui=parentui)
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38 self.manifest = manifest.manifest(self.opener)
38
39 self.changelog = changelog.changelog(self.opener)
39 try:
40 self.ui.readconfig(self.join("hgrc"), self.root)
41 except IOError:
42 pass
43
44 v = self.ui.revlogopts
45 self.revlogversion = int(v.get('format', 0))
46 for x in v.get('flags', "").split():
47 self.revlogversion |= revlog.flagstr(x)
48
49 self.manifest = manifest.manifest(self.opener, self.revlogversion)
50 self.changelog = changelog.changelog(self.opener, self.revlogversion)
51 self.revlogversion = self.changelog.version
40 self.tagscache = None
52 self.tagscache = None
41 self.nodetagscache = None
53 self.nodetagscache = None
42 self.encodepats = None
54 self.encodepats = None
43 self.decodepats = None
55 self.decodepats = None
44 self.transhandle = None
56 self.transhandle = None
45
57
46 if create:
58 if create:
47 os.mkdir(self.path)
59 os.mkdir(self.path)
48 os.mkdir(self.join("data"))
60 os.mkdir(self.join("data"))
49
61
50 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
62 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
51 try:
52 self.ui.readconfig(self.join("hgrc"), self.root)
53 except IOError:
54 pass
55
56 def hook(self, name, throw=False, **args):
63 def hook(self, name, throw=False, **args):
57 def runhook(name, cmd):
64 def runhook(name, cmd):
58 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
65 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
59 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
66 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
60 [(k.upper(), v) for k, v in args.iteritems()])
67 [(k.upper(), v) for k, v in args.iteritems()])
61 r = util.system(cmd, environ=env, cwd=self.root)
68 r = util.system(cmd, environ=env, cwd=self.root)
62 if r:
69 if r:
63 desc, r = util.explain_exit(r)
70 desc, r = util.explain_exit(r)
64 if throw:
71 if throw:
65 raise util.Abort(_('%s hook %s') % (name, desc))
72 raise util.Abort(_('%s hook %s') % (name, desc))
66 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
73 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
67 return False
74 return False
68 return True
75 return True
69
76
70 r = True
77 r = True
71 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
78 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
72 if hname.split(".", 1)[0] == name and cmd]
79 if hname.split(".", 1)[0] == name and cmd]
73 hooks.sort()
80 hooks.sort()
74 for hname, cmd in hooks:
81 for hname, cmd in hooks:
75 r = runhook(hname, cmd) and r
82 r = runhook(hname, cmd) and r
76 return r
83 return r
77
84
78 def tags(self):
85 def tags(self):
79 '''return a mapping of tag to node'''
86 '''return a mapping of tag to node'''
80 if not self.tagscache:
87 if not self.tagscache:
81 self.tagscache = {}
88 self.tagscache = {}
82
89
83 def parsetag(line, context):
90 def parsetag(line, context):
84 if not line:
91 if not line:
85 return
92 return
86 s = l.split(" ", 1)
93 s = l.split(" ", 1)
87 if len(s) != 2:
94 if len(s) != 2:
88 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
95 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
89 return
96 return
90 node, key = s
97 node, key = s
91 try:
98 try:
92 bin_n = bin(node)
99 bin_n = bin(node)
93 except TypeError:
100 except TypeError:
94 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
101 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
95 return
102 return
96 if bin_n not in self.changelog.nodemap:
103 if bin_n not in self.changelog.nodemap:
97 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
104 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
98 return
105 return
99 self.tagscache[key.strip()] = bin_n
106 self.tagscache[key.strip()] = bin_n
100
107
101 # read each head of the tags file, ending with the tip
108 # read each head of the tags file, ending with the tip
102 # and add each tag found to the map, with "newer" ones
109 # and add each tag found to the map, with "newer" ones
103 # taking precedence
110 # taking precedence
104 fl = self.file(".hgtags")
111 fl = self.file(".hgtags")
105 h = fl.heads()
112 h = fl.heads()
106 h.reverse()
113 h.reverse()
107 for r in h:
114 for r in h:
108 count = 0
115 count = 0
109 for l in fl.read(r).splitlines():
116 for l in fl.read(r).splitlines():
110 count += 1
117 count += 1
111 parsetag(l, ".hgtags:%d" % count)
118 parsetag(l, ".hgtags:%d" % count)
112
119
113 try:
120 try:
114 f = self.opener("localtags")
121 f = self.opener("localtags")
115 count = 0
122 count = 0
116 for l in f:
123 for l in f:
117 count += 1
124 count += 1
118 parsetag(l, "localtags:%d" % count)
125 parsetag(l, "localtags:%d" % count)
119 except IOError:
126 except IOError:
120 pass
127 pass
121
128
122 self.tagscache['tip'] = self.changelog.tip()
129 self.tagscache['tip'] = self.changelog.tip()
123
130
124 return self.tagscache
131 return self.tagscache
125
132
126 def tagslist(self):
133 def tagslist(self):
127 '''return a list of tags ordered by revision'''
134 '''return a list of tags ordered by revision'''
128 l = []
135 l = []
129 for t, n in self.tags().items():
136 for t, n in self.tags().items():
130 try:
137 try:
131 r = self.changelog.rev(n)
138 r = self.changelog.rev(n)
132 except:
139 except:
133 r = -2 # sort to the beginning of the list if unknown
140 r = -2 # sort to the beginning of the list if unknown
134 l.append((r, t, n))
141 l.append((r, t, n))
135 l.sort()
142 l.sort()
136 return [(t, n) for r, t, n in l]
143 return [(t, n) for r, t, n in l]
137
144
138 def nodetags(self, node):
145 def nodetags(self, node):
139 '''return the tags associated with a node'''
146 '''return the tags associated with a node'''
140 if not self.nodetagscache:
147 if not self.nodetagscache:
141 self.nodetagscache = {}
148 self.nodetagscache = {}
142 for t, n in self.tags().items():
149 for t, n in self.tags().items():
143 self.nodetagscache.setdefault(n, []).append(t)
150 self.nodetagscache.setdefault(n, []).append(t)
144 return self.nodetagscache.get(node, [])
151 return self.nodetagscache.get(node, [])
145
152
146 def lookup(self, key):
153 def lookup(self, key):
147 try:
154 try:
148 return self.tags()[key]
155 return self.tags()[key]
149 except KeyError:
156 except KeyError:
150 try:
157 try:
151 return self.changelog.lookup(key)
158 return self.changelog.lookup(key)
152 except:
159 except:
153 raise repo.RepoError(_("unknown revision '%s'") % key)
160 raise repo.RepoError(_("unknown revision '%s'") % key)
154
161
155 def dev(self):
162 def dev(self):
156 return os.stat(self.path).st_dev
163 return os.stat(self.path).st_dev
157
164
158 def local(self):
165 def local(self):
159 return True
166 return True
160
167
161 def join(self, f):
168 def join(self, f):
162 return os.path.join(self.path, f)
169 return os.path.join(self.path, f)
163
170
164 def wjoin(self, f):
171 def wjoin(self, f):
165 return os.path.join(self.root, f)
172 return os.path.join(self.root, f)
166
173
167 def file(self, f):
174 def file(self, f):
168 if f[0] == '/':
175 if f[0] == '/':
169 f = f[1:]
176 f = f[1:]
170 return filelog.filelog(self.opener, f)
177 return filelog.filelog(self.opener, f, self.revlogversion)
171
178
172 def getcwd(self):
179 def getcwd(self):
173 return self.dirstate.getcwd()
180 return self.dirstate.getcwd()
174
181
175 def wfile(self, f, mode='r'):
182 def wfile(self, f, mode='r'):
176 return self.wopener(f, mode)
183 return self.wopener(f, mode)
177
184
178 def wread(self, filename):
185 def wread(self, filename):
179 if self.encodepats == None:
186 if self.encodepats == None:
180 l = []
187 l = []
181 for pat, cmd in self.ui.configitems("encode"):
188 for pat, cmd in self.ui.configitems("encode"):
182 mf = util.matcher(self.root, "", [pat], [], [])[1]
189 mf = util.matcher(self.root, "", [pat], [], [])[1]
183 l.append((mf, cmd))
190 l.append((mf, cmd))
184 self.encodepats = l
191 self.encodepats = l
185
192
186 data = self.wopener(filename, 'r').read()
193 data = self.wopener(filename, 'r').read()
187
194
188 for mf, cmd in self.encodepats:
195 for mf, cmd in self.encodepats:
189 if mf(filename):
196 if mf(filename):
190 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
197 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
191 data = util.filter(data, cmd)
198 data = util.filter(data, cmd)
192 break
199 break
193
200
194 return data
201 return data
195
202
196 def wwrite(self, filename, data, fd=None):
203 def wwrite(self, filename, data, fd=None):
197 if self.decodepats == None:
204 if self.decodepats == None:
198 l = []
205 l = []
199 for pat, cmd in self.ui.configitems("decode"):
206 for pat, cmd in self.ui.configitems("decode"):
200 mf = util.matcher(self.root, "", [pat], [], [])[1]
207 mf = util.matcher(self.root, "", [pat], [], [])[1]
201 l.append((mf, cmd))
208 l.append((mf, cmd))
202 self.decodepats = l
209 self.decodepats = l
203
210
204 for mf, cmd in self.decodepats:
211 for mf, cmd in self.decodepats:
205 if mf(filename):
212 if mf(filename):
206 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
213 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
207 data = util.filter(data, cmd)
214 data = util.filter(data, cmd)
208 break
215 break
209
216
210 if fd:
217 if fd:
211 return fd.write(data)
218 return fd.write(data)
212 return self.wopener(filename, 'w').write(data)
219 return self.wopener(filename, 'w').write(data)
213
220
214 def transaction(self):
221 def transaction(self):
215 tr = self.transhandle
222 tr = self.transhandle
216 if tr != None and tr.running():
223 if tr != None and tr.running():
217 return tr.nest()
224 return tr.nest()
218
225
219 # save dirstate for undo
226 # save dirstate for undo
220 try:
227 try:
221 ds = self.opener("dirstate").read()
228 ds = self.opener("dirstate").read()
222 except IOError:
229 except IOError:
223 ds = ""
230 ds = ""
224 self.opener("journal.dirstate", "w").write(ds)
231 self.opener("journal.dirstate", "w").write(ds)
225
232
226 tr = transaction.transaction(self.ui.warn, self.opener,
233 tr = transaction.transaction(self.ui.warn, self.opener,
227 self.join("journal"),
234 self.join("journal"),
228 aftertrans(self.path))
235 aftertrans(self.path))
229 self.transhandle = tr
236 self.transhandle = tr
230 return tr
237 return tr
231
238
232 def recover(self):
239 def recover(self):
233 l = self.lock()
240 l = self.lock()
234 if os.path.exists(self.join("journal")):
241 if os.path.exists(self.join("journal")):
235 self.ui.status(_("rolling back interrupted transaction\n"))
242 self.ui.status(_("rolling back interrupted transaction\n"))
236 transaction.rollback(self.opener, self.join("journal"))
243 transaction.rollback(self.opener, self.join("journal"))
237 self.reload()
244 self.reload()
238 return True
245 return True
239 else:
246 else:
240 self.ui.warn(_("no interrupted transaction available\n"))
247 self.ui.warn(_("no interrupted transaction available\n"))
241 return False
248 return False
242
249
243 def undo(self, wlock=None):
250 def undo(self, wlock=None):
244 if not wlock:
251 if not wlock:
245 wlock = self.wlock()
252 wlock = self.wlock()
246 l = self.lock()
253 l = self.lock()
247 if os.path.exists(self.join("undo")):
254 if os.path.exists(self.join("undo")):
248 self.ui.status(_("rolling back last transaction\n"))
255 self.ui.status(_("rolling back last transaction\n"))
249 transaction.rollback(self.opener, self.join("undo"))
256 transaction.rollback(self.opener, self.join("undo"))
250 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
257 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
251 self.reload()
258 self.reload()
252 self.wreload()
259 self.wreload()
253 else:
260 else:
254 self.ui.warn(_("no undo information available\n"))
261 self.ui.warn(_("no undo information available\n"))
255
262
256 def wreload(self):
263 def wreload(self):
257 self.dirstate.read()
264 self.dirstate.read()
258
265
259 def reload(self):
266 def reload(self):
260 self.changelog.load()
267 self.changelog.load()
261 self.manifest.load()
268 self.manifest.load()
262 self.tagscache = None
269 self.tagscache = None
263 self.nodetagscache = None
270 self.nodetagscache = None
264
271
265 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
272 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
266 desc=None):
273 desc=None):
267 try:
274 try:
268 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
275 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
269 except lock.LockHeld, inst:
276 except lock.LockHeld, inst:
270 if not wait:
277 if not wait:
271 raise
278 raise
272 self.ui.warn(_("waiting for lock on %s held by %s\n") %
279 self.ui.warn(_("waiting for lock on %s held by %s\n") %
273 (desc, inst.args[0]))
280 (desc, inst.args[0]))
274 # default to 600 seconds timeout
281 # default to 600 seconds timeout
275 l = lock.lock(self.join(lockname),
282 l = lock.lock(self.join(lockname),
276 int(self.ui.config("ui", "timeout") or 600),
283 int(self.ui.config("ui", "timeout") or 600),
277 releasefn, desc=desc)
284 releasefn, desc=desc)
278 if acquirefn:
285 if acquirefn:
279 acquirefn()
286 acquirefn()
280 return l
287 return l
281
288
282 def lock(self, wait=1):
289 def lock(self, wait=1):
283 return self.do_lock("lock", wait, acquirefn=self.reload,
290 return self.do_lock("lock", wait, acquirefn=self.reload,
284 desc=_('repository %s') % self.origroot)
291 desc=_('repository %s') % self.origroot)
285
292
286 def wlock(self, wait=1):
293 def wlock(self, wait=1):
287 return self.do_lock("wlock", wait, self.dirstate.write,
294 return self.do_lock("wlock", wait, self.dirstate.write,
288 self.wreload,
295 self.wreload,
289 desc=_('working directory of %s') % self.origroot)
296 desc=_('working directory of %s') % self.origroot)
290
297
291 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
298 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
292 "determine whether a new filenode is needed"
299 "determine whether a new filenode is needed"
293 fp1 = manifest1.get(filename, nullid)
300 fp1 = manifest1.get(filename, nullid)
294 fp2 = manifest2.get(filename, nullid)
301 fp2 = manifest2.get(filename, nullid)
295
302
296 if fp2 != nullid:
303 if fp2 != nullid:
297 # is one parent an ancestor of the other?
304 # is one parent an ancestor of the other?
298 fpa = filelog.ancestor(fp1, fp2)
305 fpa = filelog.ancestor(fp1, fp2)
299 if fpa == fp1:
306 if fpa == fp1:
300 fp1, fp2 = fp2, nullid
307 fp1, fp2 = fp2, nullid
301 elif fpa == fp2:
308 elif fpa == fp2:
302 fp2 = nullid
309 fp2 = nullid
303
310
304 # is the file unmodified from the parent? report existing entry
311 # is the file unmodified from the parent? report existing entry
305 if fp2 == nullid and text == filelog.read(fp1):
312 if fp2 == nullid and text == filelog.read(fp1):
306 return (fp1, None, None)
313 return (fp1, None, None)
307
314
308 return (None, fp1, fp2)
315 return (None, fp1, fp2)
309
316
310 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
317 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
311 orig_parent = self.dirstate.parents()[0] or nullid
318 orig_parent = self.dirstate.parents()[0] or nullid
312 p1 = p1 or self.dirstate.parents()[0] or nullid
319 p1 = p1 or self.dirstate.parents()[0] or nullid
313 p2 = p2 or self.dirstate.parents()[1] or nullid
320 p2 = p2 or self.dirstate.parents()[1] or nullid
314 c1 = self.changelog.read(p1)
321 c1 = self.changelog.read(p1)
315 c2 = self.changelog.read(p2)
322 c2 = self.changelog.read(p2)
316 m1 = self.manifest.read(c1[0])
323 m1 = self.manifest.read(c1[0])
317 mf1 = self.manifest.readflags(c1[0])
324 mf1 = self.manifest.readflags(c1[0])
318 m2 = self.manifest.read(c2[0])
325 m2 = self.manifest.read(c2[0])
319 changed = []
326 changed = []
320
327
321 if orig_parent == p1:
328 if orig_parent == p1:
322 update_dirstate = 1
329 update_dirstate = 1
323 else:
330 else:
324 update_dirstate = 0
331 update_dirstate = 0
325
332
326 if not wlock:
333 if not wlock:
327 wlock = self.wlock()
334 wlock = self.wlock()
328 l = self.lock()
335 l = self.lock()
329 tr = self.transaction()
336 tr = self.transaction()
330 mm = m1.copy()
337 mm = m1.copy()
331 mfm = mf1.copy()
338 mfm = mf1.copy()
332 linkrev = self.changelog.count()
339 linkrev = self.changelog.count()
333 for f in files:
340 for f in files:
334 try:
341 try:
335 t = self.wread(f)
342 t = self.wread(f)
336 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
343 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
337 r = self.file(f)
344 r = self.file(f)
338 mfm[f] = tm
345 mfm[f] = tm
339
346
340 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
347 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
341 if entry:
348 if entry:
342 mm[f] = entry
349 mm[f] = entry
343 continue
350 continue
344
351
345 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
352 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
346 changed.append(f)
353 changed.append(f)
347 if update_dirstate:
354 if update_dirstate:
348 self.dirstate.update([f], "n")
355 self.dirstate.update([f], "n")
349 except IOError:
356 except IOError:
350 try:
357 try:
351 del mm[f]
358 del mm[f]
352 del mfm[f]
359 del mfm[f]
353 if update_dirstate:
360 if update_dirstate:
354 self.dirstate.forget([f])
361 self.dirstate.forget([f])
355 except:
362 except:
356 # deleted from p2?
363 # deleted from p2?
357 pass
364 pass
358
365
359 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
366 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
360 user = user or self.ui.username()
367 user = user or self.ui.username()
361 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
368 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
362 tr.close()
369 tr.close()
363 if update_dirstate:
370 if update_dirstate:
364 self.dirstate.setparents(n, nullid)
371 self.dirstate.setparents(n, nullid)
365
372
366 def commit(self, files=None, text="", user=None, date=None,
373 def commit(self, files=None, text="", user=None, date=None,
367 match=util.always, force=False, lock=None, wlock=None):
374 match=util.always, force=False, lock=None, wlock=None):
368 commit = []
375 commit = []
369 remove = []
376 remove = []
370 changed = []
377 changed = []
371
378
372 if files:
379 if files:
373 for f in files:
380 for f in files:
374 s = self.dirstate.state(f)
381 s = self.dirstate.state(f)
375 if s in 'nmai':
382 if s in 'nmai':
376 commit.append(f)
383 commit.append(f)
377 elif s == 'r':
384 elif s == 'r':
378 remove.append(f)
385 remove.append(f)
379 else:
386 else:
380 self.ui.warn(_("%s not tracked!\n") % f)
387 self.ui.warn(_("%s not tracked!\n") % f)
381 else:
388 else:
382 modified, added, removed, deleted, unknown = self.changes(match=match)
389 modified, added, removed, deleted, unknown = self.changes(match=match)
383 commit = modified + added
390 commit = modified + added
384 remove = removed
391 remove = removed
385
392
386 p1, p2 = self.dirstate.parents()
393 p1, p2 = self.dirstate.parents()
387 c1 = self.changelog.read(p1)
394 c1 = self.changelog.read(p1)
388 c2 = self.changelog.read(p2)
395 c2 = self.changelog.read(p2)
389 m1 = self.manifest.read(c1[0])
396 m1 = self.manifest.read(c1[0])
390 mf1 = self.manifest.readflags(c1[0])
397 mf1 = self.manifest.readflags(c1[0])
391 m2 = self.manifest.read(c2[0])
398 m2 = self.manifest.read(c2[0])
392
399
393 if not commit and not remove and not force and p2 == nullid:
400 if not commit and not remove and not force and p2 == nullid:
394 self.ui.status(_("nothing changed\n"))
401 self.ui.status(_("nothing changed\n"))
395 return None
402 return None
396
403
397 xp1 = hex(p1)
404 xp1 = hex(p1)
398 if p2 == nullid: xp2 = ''
405 if p2 == nullid: xp2 = ''
399 else: xp2 = hex(p2)
406 else: xp2 = hex(p2)
400
407
401 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
408 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
402
409
403 if not wlock:
410 if not wlock:
404 wlock = self.wlock()
411 wlock = self.wlock()
405 if not lock:
412 if not lock:
406 lock = self.lock()
413 lock = self.lock()
407 tr = self.transaction()
414 tr = self.transaction()
408
415
409 # check in files
416 # check in files
410 new = {}
417 new = {}
411 linkrev = self.changelog.count()
418 linkrev = self.changelog.count()
412 commit.sort()
419 commit.sort()
413 for f in commit:
420 for f in commit:
414 self.ui.note(f + "\n")
421 self.ui.note(f + "\n")
415 try:
422 try:
416 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
423 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
417 t = self.wread(f)
424 t = self.wread(f)
418 except IOError:
425 except IOError:
419 self.ui.warn(_("trouble committing %s!\n") % f)
426 self.ui.warn(_("trouble committing %s!\n") % f)
420 raise
427 raise
421
428
422 r = self.file(f)
429 r = self.file(f)
423
430
424 meta = {}
431 meta = {}
425 cp = self.dirstate.copied(f)
432 cp = self.dirstate.copied(f)
426 if cp:
433 if cp:
427 meta["copy"] = cp
434 meta["copy"] = cp
428 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
435 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
429 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
436 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
430 fp1, fp2 = nullid, nullid
437 fp1, fp2 = nullid, nullid
431 else:
438 else:
432 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
439 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
433 if entry:
440 if entry:
434 new[f] = entry
441 new[f] = entry
435 continue
442 continue
436
443
437 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
444 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
438 # remember what we've added so that we can later calculate
445 # remember what we've added so that we can later calculate
439 # the files to pull from a set of changesets
446 # the files to pull from a set of changesets
440 changed.append(f)
447 changed.append(f)
441
448
442 # update manifest
449 # update manifest
443 m1 = m1.copy()
450 m1 = m1.copy()
444 m1.update(new)
451 m1.update(new)
445 for f in remove:
452 for f in remove:
446 if f in m1:
453 if f in m1:
447 del m1[f]
454 del m1[f]
448 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
455 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
449 (new, remove))
456 (new, remove))
450
457
451 # add changeset
458 # add changeset
452 new = new.keys()
459 new = new.keys()
453 new.sort()
460 new.sort()
454
461
455 user = user or self.ui.username()
462 user = user or self.ui.username()
456 if not text:
463 if not text:
457 edittext = [""]
464 edittext = [""]
458 if p2 != nullid:
465 if p2 != nullid:
459 edittext.append("HG: branch merge")
466 edittext.append("HG: branch merge")
460 edittext.extend(["HG: changed %s" % f for f in changed])
467 edittext.extend(["HG: changed %s" % f for f in changed])
461 edittext.extend(["HG: removed %s" % f for f in remove])
468 edittext.extend(["HG: removed %s" % f for f in remove])
462 if not changed and not remove:
469 if not changed and not remove:
463 edittext.append("HG: no files changed")
470 edittext.append("HG: no files changed")
464 edittext.append("")
471 edittext.append("")
465 # run editor in the repository root
472 # run editor in the repository root
466 olddir = os.getcwd()
473 olddir = os.getcwd()
467 os.chdir(self.root)
474 os.chdir(self.root)
468 edittext = self.ui.edit("\n".join(edittext), user)
475 edittext = self.ui.edit("\n".join(edittext), user)
469 os.chdir(olddir)
476 os.chdir(olddir)
470 if not edittext.rstrip():
477 if not edittext.rstrip():
471 return None
478 return None
472 text = edittext
479 text = edittext
473
480
474 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
481 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
475 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
482 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
476 parent2=xp2)
483 parent2=xp2)
477 tr.close()
484 tr.close()
478
485
479 self.dirstate.setparents(n)
486 self.dirstate.setparents(n)
480 self.dirstate.update(new, "n")
487 self.dirstate.update(new, "n")
481 self.dirstate.forget(remove)
488 self.dirstate.forget(remove)
482
489
483 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
490 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
484 return n
491 return n
485
492
486 def walk(self, node=None, files=[], match=util.always, badmatch=None):
493 def walk(self, node=None, files=[], match=util.always, badmatch=None):
487 if node:
494 if node:
488 fdict = dict.fromkeys(files)
495 fdict = dict.fromkeys(files)
489 for fn in self.manifest.read(self.changelog.read(node)[0]):
496 for fn in self.manifest.read(self.changelog.read(node)[0]):
490 fdict.pop(fn, None)
497 fdict.pop(fn, None)
491 if match(fn):
498 if match(fn):
492 yield 'm', fn
499 yield 'm', fn
493 for fn in fdict:
500 for fn in fdict:
494 if badmatch and badmatch(fn):
501 if badmatch and badmatch(fn):
495 if match(fn):
502 if match(fn):
496 yield 'b', fn
503 yield 'b', fn
497 else:
504 else:
498 self.ui.warn(_('%s: No such file in rev %s\n') % (
505 self.ui.warn(_('%s: No such file in rev %s\n') % (
499 util.pathto(self.getcwd(), fn), short(node)))
506 util.pathto(self.getcwd(), fn), short(node)))
500 else:
507 else:
501 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
508 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
502 yield src, fn
509 yield src, fn
503
510
504 def changes(self, node1=None, node2=None, files=[], match=util.always,
511 def changes(self, node1=None, node2=None, files=[], match=util.always,
505 wlock=None, show_ignored=None):
512 wlock=None, show_ignored=None):
506 """return changes between two nodes or node and working directory
513 """return changes between two nodes or node and working directory
507
514
508 If node1 is None, use the first dirstate parent instead.
515 If node1 is None, use the first dirstate parent instead.
509 If node2 is None, compare node1 with working directory.
516 If node2 is None, compare node1 with working directory.
510 """
517 """
511
518
512 def fcmp(fn, mf):
519 def fcmp(fn, mf):
513 t1 = self.wread(fn)
520 t1 = self.wread(fn)
514 t2 = self.file(fn).read(mf.get(fn, nullid))
521 t2 = self.file(fn).read(mf.get(fn, nullid))
515 return cmp(t1, t2)
522 return cmp(t1, t2)
516
523
517 def mfmatches(node):
524 def mfmatches(node):
518 change = self.changelog.read(node)
525 change = self.changelog.read(node)
519 mf = dict(self.manifest.read(change[0]))
526 mf = dict(self.manifest.read(change[0]))
520 for fn in mf.keys():
527 for fn in mf.keys():
521 if not match(fn):
528 if not match(fn):
522 del mf[fn]
529 del mf[fn]
523 return mf
530 return mf
524
531
525 if node1:
532 if node1:
526 # read the manifest from node1 before the manifest from node2,
533 # read the manifest from node1 before the manifest from node2,
527 # so that we'll hit the manifest cache if we're going through
534 # so that we'll hit the manifest cache if we're going through
528 # all the revisions in parent->child order.
535 # all the revisions in parent->child order.
529 mf1 = mfmatches(node1)
536 mf1 = mfmatches(node1)
530
537
531 # are we comparing the working directory?
538 # are we comparing the working directory?
532 if not node2:
539 if not node2:
533 if not wlock:
540 if not wlock:
534 try:
541 try:
535 wlock = self.wlock(wait=0)
542 wlock = self.wlock(wait=0)
536 except lock.LockException:
543 except lock.LockException:
537 wlock = None
544 wlock = None
538 lookup, modified, added, removed, deleted, unknown, ignored = (
545 lookup, modified, added, removed, deleted, unknown, ignored = (
539 self.dirstate.changes(files, match, show_ignored))
546 self.dirstate.changes(files, match, show_ignored))
540
547
541 # are we comparing working dir against its parent?
548 # are we comparing working dir against its parent?
542 if not node1:
549 if not node1:
543 if lookup:
550 if lookup:
544 # do a full compare of any files that might have changed
551 # do a full compare of any files that might have changed
545 mf2 = mfmatches(self.dirstate.parents()[0])
552 mf2 = mfmatches(self.dirstate.parents()[0])
546 for f in lookup:
553 for f in lookup:
547 if fcmp(f, mf2):
554 if fcmp(f, mf2):
548 modified.append(f)
555 modified.append(f)
549 elif wlock is not None:
556 elif wlock is not None:
550 self.dirstate.update([f], "n")
557 self.dirstate.update([f], "n")
551 else:
558 else:
552 # we are comparing working dir against non-parent
559 # we are comparing working dir against non-parent
553 # generate a pseudo-manifest for the working dir
560 # generate a pseudo-manifest for the working dir
554 mf2 = mfmatches(self.dirstate.parents()[0])
561 mf2 = mfmatches(self.dirstate.parents()[0])
555 for f in lookup + modified + added:
562 for f in lookup + modified + added:
556 mf2[f] = ""
563 mf2[f] = ""
557 for f in removed:
564 for f in removed:
558 if f in mf2:
565 if f in mf2:
559 del mf2[f]
566 del mf2[f]
560 else:
567 else:
561 # we are comparing two revisions
568 # we are comparing two revisions
562 deleted, unknown, ignored = [], [], []
569 deleted, unknown, ignored = [], [], []
563 mf2 = mfmatches(node2)
570 mf2 = mfmatches(node2)
564
571
565 if node1:
572 if node1:
566 # flush lists from dirstate before comparing manifests
573 # flush lists from dirstate before comparing manifests
567 modified, added = [], []
574 modified, added = [], []
568
575
569 for fn in mf2:
576 for fn in mf2:
570 if mf1.has_key(fn):
577 if mf1.has_key(fn):
571 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
578 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
572 modified.append(fn)
579 modified.append(fn)
573 del mf1[fn]
580 del mf1[fn]
574 else:
581 else:
575 added.append(fn)
582 added.append(fn)
576
583
577 removed = mf1.keys()
584 removed = mf1.keys()
578
585
579 # sort and return results:
586 # sort and return results:
580 for l in modified, added, removed, deleted, unknown, ignored:
587 for l in modified, added, removed, deleted, unknown, ignored:
581 l.sort()
588 l.sort()
582 if show_ignored is None:
589 if show_ignored is None:
583 return (modified, added, removed, deleted, unknown)
590 return (modified, added, removed, deleted, unknown)
584 else:
591 else:
585 return (modified, added, removed, deleted, unknown, ignored)
592 return (modified, added, removed, deleted, unknown, ignored)
586
593
587 def add(self, list, wlock=None):
594 def add(self, list, wlock=None):
588 if not wlock:
595 if not wlock:
589 wlock = self.wlock()
596 wlock = self.wlock()
590 for f in list:
597 for f in list:
591 p = self.wjoin(f)
598 p = self.wjoin(f)
592 if not os.path.exists(p):
599 if not os.path.exists(p):
593 self.ui.warn(_("%s does not exist!\n") % f)
600 self.ui.warn(_("%s does not exist!\n") % f)
594 elif not os.path.isfile(p):
601 elif not os.path.isfile(p):
595 self.ui.warn(_("%s not added: only files supported currently\n")
602 self.ui.warn(_("%s not added: only files supported currently\n")
596 % f)
603 % f)
597 elif self.dirstate.state(f) in 'an':
604 elif self.dirstate.state(f) in 'an':
598 self.ui.warn(_("%s already tracked!\n") % f)
605 self.ui.warn(_("%s already tracked!\n") % f)
599 else:
606 else:
600 self.dirstate.update([f], "a")
607 self.dirstate.update([f], "a")
601
608
602 def forget(self, list, wlock=None):
609 def forget(self, list, wlock=None):
603 if not wlock:
610 if not wlock:
604 wlock = self.wlock()
611 wlock = self.wlock()
605 for f in list:
612 for f in list:
606 if self.dirstate.state(f) not in 'ai':
613 if self.dirstate.state(f) not in 'ai':
607 self.ui.warn(_("%s not added!\n") % f)
614 self.ui.warn(_("%s not added!\n") % f)
608 else:
615 else:
609 self.dirstate.forget([f])
616 self.dirstate.forget([f])
610
617
611 def remove(self, list, unlink=False, wlock=None):
618 def remove(self, list, unlink=False, wlock=None):
612 if unlink:
619 if unlink:
613 for f in list:
620 for f in list:
614 try:
621 try:
615 util.unlink(self.wjoin(f))
622 util.unlink(self.wjoin(f))
616 except OSError, inst:
623 except OSError, inst:
617 if inst.errno != errno.ENOENT:
624 if inst.errno != errno.ENOENT:
618 raise
625 raise
619 if not wlock:
626 if not wlock:
620 wlock = self.wlock()
627 wlock = self.wlock()
621 for f in list:
628 for f in list:
622 p = self.wjoin(f)
629 p = self.wjoin(f)
623 if os.path.exists(p):
630 if os.path.exists(p):
624 self.ui.warn(_("%s still exists!\n") % f)
631 self.ui.warn(_("%s still exists!\n") % f)
625 elif self.dirstate.state(f) == 'a':
632 elif self.dirstate.state(f) == 'a':
626 self.dirstate.forget([f])
633 self.dirstate.forget([f])
627 elif f not in self.dirstate:
634 elif f not in self.dirstate:
628 self.ui.warn(_("%s not tracked!\n") % f)
635 self.ui.warn(_("%s not tracked!\n") % f)
629 else:
636 else:
630 self.dirstate.update([f], "r")
637 self.dirstate.update([f], "r")
631
638
632 def undelete(self, list, wlock=None):
639 def undelete(self, list, wlock=None):
633 p = self.dirstate.parents()[0]
640 p = self.dirstate.parents()[0]
634 mn = self.changelog.read(p)[0]
641 mn = self.changelog.read(p)[0]
635 mf = self.manifest.readflags(mn)
642 mf = self.manifest.readflags(mn)
636 m = self.manifest.read(mn)
643 m = self.manifest.read(mn)
637 if not wlock:
644 if not wlock:
638 wlock = self.wlock()
645 wlock = self.wlock()
639 for f in list:
646 for f in list:
640 if self.dirstate.state(f) not in "r":
647 if self.dirstate.state(f) not in "r":
641 self.ui.warn("%s not removed!\n" % f)
648 self.ui.warn("%s not removed!\n" % f)
642 else:
649 else:
643 t = self.file(f).read(m[f])
650 t = self.file(f).read(m[f])
644 self.wwrite(f, t)
651 self.wwrite(f, t)
645 util.set_exec(self.wjoin(f), mf[f])
652 util.set_exec(self.wjoin(f), mf[f])
646 self.dirstate.update([f], "n")
653 self.dirstate.update([f], "n")
647
654
648 def copy(self, source, dest, wlock=None):
655 def copy(self, source, dest, wlock=None):
649 p = self.wjoin(dest)
656 p = self.wjoin(dest)
650 if not os.path.exists(p):
657 if not os.path.exists(p):
651 self.ui.warn(_("%s does not exist!\n") % dest)
658 self.ui.warn(_("%s does not exist!\n") % dest)
652 elif not os.path.isfile(p):
659 elif not os.path.isfile(p):
653 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
660 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
654 else:
661 else:
655 if not wlock:
662 if not wlock:
656 wlock = self.wlock()
663 wlock = self.wlock()
657 if self.dirstate.state(dest) == '?':
664 if self.dirstate.state(dest) == '?':
658 self.dirstate.update([dest], "a")
665 self.dirstate.update([dest], "a")
659 self.dirstate.copy(source, dest)
666 self.dirstate.copy(source, dest)
660
667
661 def heads(self, start=None):
668 def heads(self, start=None):
662 heads = self.changelog.heads(start)
669 heads = self.changelog.heads(start)
663 # sort the output in rev descending order
670 # sort the output in rev descending order
664 heads = [(-self.changelog.rev(h), h) for h in heads]
671 heads = [(-self.changelog.rev(h), h) for h in heads]
665 heads.sort()
672 heads.sort()
666 return [n for (r, n) in heads]
673 return [n for (r, n) in heads]
667
674
668 # branchlookup returns a dict giving a list of branches for
675 # branchlookup returns a dict giving a list of branches for
669 # each head. A branch is defined as the tag of a node or
676 # each head. A branch is defined as the tag of a node or
670 # the branch of the node's parents. If a node has multiple
677 # the branch of the node's parents. If a node has multiple
671 # branch tags, tags are eliminated if they are visible from other
678 # branch tags, tags are eliminated if they are visible from other
672 # branch tags.
679 # branch tags.
673 #
680 #
674 # So, for this graph: a->b->c->d->e
681 # So, for this graph: a->b->c->d->e
675 # \ /
682 # \ /
676 # aa -----/
683 # aa -----/
677 # a has tag 2.6.12
684 # a has tag 2.6.12
678 # d has tag 2.6.13
685 # d has tag 2.6.13
679 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
686 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
680 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
687 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
681 # from the list.
688 # from the list.
682 #
689 #
683 # It is possible that more than one head will have the same branch tag.
690 # It is possible that more than one head will have the same branch tag.
684 # callers need to check the result for multiple heads under the same
691 # callers need to check the result for multiple heads under the same
685 # branch tag if that is a problem for them (ie checkout of a specific
692 # branch tag if that is a problem for them (ie checkout of a specific
686 # branch).
693 # branch).
687 #
694 #
688 # passing in a specific branch will limit the depth of the search
695 # passing in a specific branch will limit the depth of the search
689 # through the parents. It won't limit the branches returned in the
696 # through the parents. It won't limit the branches returned in the
690 # result though.
697 # result though.
691 def branchlookup(self, heads=None, branch=None):
698 def branchlookup(self, heads=None, branch=None):
692 if not heads:
699 if not heads:
693 heads = self.heads()
700 heads = self.heads()
694 headt = [ h for h in heads ]
701 headt = [ h for h in heads ]
695 chlog = self.changelog
702 chlog = self.changelog
696 branches = {}
703 branches = {}
697 merges = []
704 merges = []
698 seenmerge = {}
705 seenmerge = {}
699
706
700 # traverse the tree once for each head, recording in the branches
707 # traverse the tree once for each head, recording in the branches
701 # dict which tags are visible from this head. The branches
708 # dict which tags are visible from this head. The branches
702 # dict also records which tags are visible from each tag
709 # dict also records which tags are visible from each tag
703 # while we traverse.
710 # while we traverse.
704 while headt or merges:
711 while headt or merges:
705 if merges:
712 if merges:
706 n, found = merges.pop()
713 n, found = merges.pop()
707 visit = [n]
714 visit = [n]
708 else:
715 else:
709 h = headt.pop()
716 h = headt.pop()
710 visit = [h]
717 visit = [h]
711 found = [h]
718 found = [h]
712 seen = {}
719 seen = {}
713 while visit:
720 while visit:
714 n = visit.pop()
721 n = visit.pop()
715 if n in seen:
722 if n in seen:
716 continue
723 continue
717 pp = chlog.parents(n)
724 pp = chlog.parents(n)
718 tags = self.nodetags(n)
725 tags = self.nodetags(n)
719 if tags:
726 if tags:
720 for x in tags:
727 for x in tags:
721 if x == 'tip':
728 if x == 'tip':
722 continue
729 continue
723 for f in found:
730 for f in found:
724 branches.setdefault(f, {})[n] = 1
731 branches.setdefault(f, {})[n] = 1
725 branches.setdefault(n, {})[n] = 1
732 branches.setdefault(n, {})[n] = 1
726 break
733 break
727 if n not in found:
734 if n not in found:
728 found.append(n)
735 found.append(n)
729 if branch in tags:
736 if branch in tags:
730 continue
737 continue
731 seen[n] = 1
738 seen[n] = 1
732 if pp[1] != nullid and n not in seenmerge:
739 if pp[1] != nullid and n not in seenmerge:
733 merges.append((pp[1], [x for x in found]))
740 merges.append((pp[1], [x for x in found]))
734 seenmerge[n] = 1
741 seenmerge[n] = 1
735 if pp[0] != nullid:
742 if pp[0] != nullid:
736 visit.append(pp[0])
743 visit.append(pp[0])
737 # traverse the branches dict, eliminating branch tags from each
744 # traverse the branches dict, eliminating branch tags from each
738 # head that are visible from another branch tag for that head.
745 # head that are visible from another branch tag for that head.
739 out = {}
746 out = {}
740 viscache = {}
747 viscache = {}
741 for h in heads:
748 for h in heads:
742 def visible(node):
749 def visible(node):
743 if node in viscache:
750 if node in viscache:
744 return viscache[node]
751 return viscache[node]
745 ret = {}
752 ret = {}
746 visit = [node]
753 visit = [node]
747 while visit:
754 while visit:
748 x = visit.pop()
755 x = visit.pop()
749 if x in viscache:
756 if x in viscache:
750 ret.update(viscache[x])
757 ret.update(viscache[x])
751 elif x not in ret:
758 elif x not in ret:
752 ret[x] = 1
759 ret[x] = 1
753 if x in branches:
760 if x in branches:
754 visit[len(visit):] = branches[x].keys()
761 visit[len(visit):] = branches[x].keys()
755 viscache[node] = ret
762 viscache[node] = ret
756 return ret
763 return ret
757 if h not in branches:
764 if h not in branches:
758 continue
765 continue
759 # O(n^2), but somewhat limited. This only searches the
766 # O(n^2), but somewhat limited. This only searches the
760 # tags visible from a specific head, not all the tags in the
767 # tags visible from a specific head, not all the tags in the
761 # whole repo.
768 # whole repo.
762 for b in branches[h]:
769 for b in branches[h]:
763 vis = False
770 vis = False
764 for bb in branches[h].keys():
771 for bb in branches[h].keys():
765 if b != bb:
772 if b != bb:
766 if b in visible(bb):
773 if b in visible(bb):
767 vis = True
774 vis = True
768 break
775 break
769 if not vis:
776 if not vis:
770 l = out.setdefault(h, [])
777 l = out.setdefault(h, [])
771 l[len(l):] = self.nodetags(b)
778 l[len(l):] = self.nodetags(b)
772 return out
779 return out
773
780
774 def branches(self, nodes):
781 def branches(self, nodes):
775 if not nodes:
782 if not nodes:
776 nodes = [self.changelog.tip()]
783 nodes = [self.changelog.tip()]
777 b = []
784 b = []
778 for n in nodes:
785 for n in nodes:
779 t = n
786 t = n
780 while n:
787 while n:
781 p = self.changelog.parents(n)
788 p = self.changelog.parents(n)
782 if p[1] != nullid or p[0] == nullid:
789 if p[1] != nullid or p[0] == nullid:
783 b.append((t, n, p[0], p[1]))
790 b.append((t, n, p[0], p[1]))
784 break
791 break
785 n = p[0]
792 n = p[0]
786 return b
793 return b
787
794
788 def between(self, pairs):
795 def between(self, pairs):
789 r = []
796 r = []
790
797
791 for top, bottom in pairs:
798 for top, bottom in pairs:
792 n, l, i = top, [], 0
799 n, l, i = top, [], 0
793 f = 1
800 f = 1
794
801
795 while n != bottom:
802 while n != bottom:
796 p = self.changelog.parents(n)[0]
803 p = self.changelog.parents(n)[0]
797 if i == f:
804 if i == f:
798 l.append(n)
805 l.append(n)
799 f = f * 2
806 f = f * 2
800 n = p
807 n = p
801 i += 1
808 i += 1
802
809
803 r.append(l)
810 r.append(l)
804
811
805 return r
812 return r
806
813
807 def findincoming(self, remote, base=None, heads=None, force=False):
814 def findincoming(self, remote, base=None, heads=None, force=False):
808 m = self.changelog.nodemap
815 m = self.changelog.nodemap
809 search = []
816 search = []
810 fetch = {}
817 fetch = {}
811 seen = {}
818 seen = {}
812 seenbranch = {}
819 seenbranch = {}
813 if base == None:
820 if base == None:
814 base = {}
821 base = {}
815
822
816 # assume we're closer to the tip than the root
823 # assume we're closer to the tip than the root
817 # and start by examining the heads
824 # and start by examining the heads
818 self.ui.status(_("searching for changes\n"))
825 self.ui.status(_("searching for changes\n"))
819
826
820 if not heads:
827 if not heads:
821 heads = remote.heads()
828 heads = remote.heads()
822
829
823 unknown = []
830 unknown = []
824 for h in heads:
831 for h in heads:
825 if h not in m:
832 if h not in m:
826 unknown.append(h)
833 unknown.append(h)
827 else:
834 else:
828 base[h] = 1
835 base[h] = 1
829
836
830 if not unknown:
837 if not unknown:
831 return []
838 return []
832
839
833 rep = {}
840 rep = {}
834 reqcnt = 0
841 reqcnt = 0
835
842
836 # search through remote branches
843 # search through remote branches
837 # a 'branch' here is a linear segment of history, with four parts:
844 # a 'branch' here is a linear segment of history, with four parts:
838 # head, root, first parent, second parent
845 # head, root, first parent, second parent
839 # (a branch always has two parents (or none) by definition)
846 # (a branch always has two parents (or none) by definition)
840 unknown = remote.branches(unknown)
847 unknown = remote.branches(unknown)
841 while unknown:
848 while unknown:
842 r = []
849 r = []
843 while unknown:
850 while unknown:
844 n = unknown.pop(0)
851 n = unknown.pop(0)
845 if n[0] in seen:
852 if n[0] in seen:
846 continue
853 continue
847
854
848 self.ui.debug(_("examining %s:%s\n")
855 self.ui.debug(_("examining %s:%s\n")
849 % (short(n[0]), short(n[1])))
856 % (short(n[0]), short(n[1])))
850 if n[0] == nullid:
857 if n[0] == nullid:
851 break
858 break
852 if n in seenbranch:
859 if n in seenbranch:
853 self.ui.debug(_("branch already found\n"))
860 self.ui.debug(_("branch already found\n"))
854 continue
861 continue
855 if n[1] and n[1] in m: # do we know the base?
862 if n[1] and n[1] in m: # do we know the base?
856 self.ui.debug(_("found incomplete branch %s:%s\n")
863 self.ui.debug(_("found incomplete branch %s:%s\n")
857 % (short(n[0]), short(n[1])))
864 % (short(n[0]), short(n[1])))
858 search.append(n) # schedule branch range for scanning
865 search.append(n) # schedule branch range for scanning
859 seenbranch[n] = 1
866 seenbranch[n] = 1
860 else:
867 else:
861 if n[1] not in seen and n[1] not in fetch:
868 if n[1] not in seen and n[1] not in fetch:
862 if n[2] in m and n[3] in m:
869 if n[2] in m and n[3] in m:
863 self.ui.debug(_("found new changeset %s\n") %
870 self.ui.debug(_("found new changeset %s\n") %
864 short(n[1]))
871 short(n[1]))
865 fetch[n[1]] = 1 # earliest unknown
872 fetch[n[1]] = 1 # earliest unknown
866 base[n[2]] = 1 # latest known
873 base[n[2]] = 1 # latest known
867 continue
874 continue
868
875
869 for a in n[2:4]:
876 for a in n[2:4]:
870 if a not in rep:
877 if a not in rep:
871 r.append(a)
878 r.append(a)
872 rep[a] = 1
879 rep[a] = 1
873
880
874 seen[n[0]] = 1
881 seen[n[0]] = 1
875
882
876 if r:
883 if r:
877 reqcnt += 1
884 reqcnt += 1
878 self.ui.debug(_("request %d: %s\n") %
885 self.ui.debug(_("request %d: %s\n") %
879 (reqcnt, " ".join(map(short, r))))
886 (reqcnt, " ".join(map(short, r))))
880 for p in range(0, len(r), 10):
887 for p in range(0, len(r), 10):
881 for b in remote.branches(r[p:p+10]):
888 for b in remote.branches(r[p:p+10]):
882 self.ui.debug(_("received %s:%s\n") %
889 self.ui.debug(_("received %s:%s\n") %
883 (short(b[0]), short(b[1])))
890 (short(b[0]), short(b[1])))
884 if b[0] in m:
891 if b[0] in m:
885 self.ui.debug(_("found base node %s\n")
892 self.ui.debug(_("found base node %s\n")
886 % short(b[0]))
893 % short(b[0]))
887 base[b[0]] = 1
894 base[b[0]] = 1
888 elif b[0] not in seen:
895 elif b[0] not in seen:
889 unknown.append(b)
896 unknown.append(b)
890
897
891 # do binary search on the branches we found
898 # do binary search on the branches we found
892 while search:
899 while search:
893 n = search.pop(0)
900 n = search.pop(0)
894 reqcnt += 1
901 reqcnt += 1
895 l = remote.between([(n[0], n[1])])[0]
902 l = remote.between([(n[0], n[1])])[0]
896 l.append(n[1])
903 l.append(n[1])
897 p = n[0]
904 p = n[0]
898 f = 1
905 f = 1
899 for i in l:
906 for i in l:
900 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
907 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
901 if i in m:
908 if i in m:
902 if f <= 2:
909 if f <= 2:
903 self.ui.debug(_("found new branch changeset %s\n") %
910 self.ui.debug(_("found new branch changeset %s\n") %
904 short(p))
911 short(p))
905 fetch[p] = 1
912 fetch[p] = 1
906 base[i] = 1
913 base[i] = 1
907 else:
914 else:
908 self.ui.debug(_("narrowed branch search to %s:%s\n")
915 self.ui.debug(_("narrowed branch search to %s:%s\n")
909 % (short(p), short(i)))
916 % (short(p), short(i)))
910 search.append((p, i))
917 search.append((p, i))
911 break
918 break
912 p, f = i, f * 2
919 p, f = i, f * 2
913
920
914 # sanity check our fetch list
921 # sanity check our fetch list
915 for f in fetch.keys():
922 for f in fetch.keys():
916 if f in m:
923 if f in m:
917 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
924 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
918
925
919 if base.keys() == [nullid]:
926 if base.keys() == [nullid]:
920 if force:
927 if force:
921 self.ui.warn(_("warning: repository is unrelated\n"))
928 self.ui.warn(_("warning: repository is unrelated\n"))
922 else:
929 else:
923 raise util.Abort(_("repository is unrelated"))
930 raise util.Abort(_("repository is unrelated"))
924
931
925 self.ui.note(_("found new changesets starting at ") +
932 self.ui.note(_("found new changesets starting at ") +
926 " ".join([short(f) for f in fetch]) + "\n")
933 " ".join([short(f) for f in fetch]) + "\n")
927
934
928 self.ui.debug(_("%d total queries\n") % reqcnt)
935 self.ui.debug(_("%d total queries\n") % reqcnt)
929
936
930 return fetch.keys()
937 return fetch.keys()
931
938
932 def findoutgoing(self, remote, base=None, heads=None, force=False):
939 def findoutgoing(self, remote, base=None, heads=None, force=False):
933 """Return list of nodes that are roots of subsets not in remote
940 """Return list of nodes that are roots of subsets not in remote
934
941
935 If base dict is specified, assume that these nodes and their parents
942 If base dict is specified, assume that these nodes and their parents
936 exist on the remote side.
943 exist on the remote side.
937 If a list of heads is specified, return only nodes which are heads
944 If a list of heads is specified, return only nodes which are heads
938 or ancestors of these heads, and return a second element which
945 or ancestors of these heads, and return a second element which
939 contains all remote heads which get new children.
946 contains all remote heads which get new children.
940 """
947 """
941 if base == None:
948 if base == None:
942 base = {}
949 base = {}
943 self.findincoming(remote, base, heads, force=force)
950 self.findincoming(remote, base, heads, force=force)
944
951
945 self.ui.debug(_("common changesets up to ")
952 self.ui.debug(_("common changesets up to ")
946 + " ".join(map(short, base.keys())) + "\n")
953 + " ".join(map(short, base.keys())) + "\n")
947
954
948 remain = dict.fromkeys(self.changelog.nodemap)
955 remain = dict.fromkeys(self.changelog.nodemap)
949
956
950 # prune everything remote has from the tree
957 # prune everything remote has from the tree
951 del remain[nullid]
958 del remain[nullid]
952 remove = base.keys()
959 remove = base.keys()
953 while remove:
960 while remove:
954 n = remove.pop(0)
961 n = remove.pop(0)
955 if n in remain:
962 if n in remain:
956 del remain[n]
963 del remain[n]
957 for p in self.changelog.parents(n):
964 for p in self.changelog.parents(n):
958 remove.append(p)
965 remove.append(p)
959
966
960 # find every node whose parents have been pruned
967 # find every node whose parents have been pruned
961 subset = []
968 subset = []
962 # find every remote head that will get new children
969 # find every remote head that will get new children
963 updated_heads = {}
970 updated_heads = {}
964 for n in remain:
971 for n in remain:
965 p1, p2 = self.changelog.parents(n)
972 p1, p2 = self.changelog.parents(n)
966 if p1 not in remain and p2 not in remain:
973 if p1 not in remain and p2 not in remain:
967 subset.append(n)
974 subset.append(n)
968 if heads:
975 if heads:
969 if p1 in heads:
976 if p1 in heads:
970 updated_heads[p1] = True
977 updated_heads[p1] = True
971 if p2 in heads:
978 if p2 in heads:
972 updated_heads[p2] = True
979 updated_heads[p2] = True
973
980
974 # this is the set of all roots we have to push
981 # this is the set of all roots we have to push
975 if heads:
982 if heads:
976 return subset, updated_heads.keys()
983 return subset, updated_heads.keys()
977 else:
984 else:
978 return subset
985 return subset
979
986
980 def pull(self, remote, heads=None, force=False):
987 def pull(self, remote, heads=None, force=False):
981 l = self.lock()
988 l = self.lock()
982
989
983 # if we have an empty repo, fetch everything
990 # if we have an empty repo, fetch everything
984 if self.changelog.tip() == nullid:
991 if self.changelog.tip() == nullid:
985 self.ui.status(_("requesting all changes\n"))
992 self.ui.status(_("requesting all changes\n"))
986 fetch = [nullid]
993 fetch = [nullid]
987 else:
994 else:
988 fetch = self.findincoming(remote, force=force)
995 fetch = self.findincoming(remote, force=force)
989
996
990 if not fetch:
997 if not fetch:
991 self.ui.status(_("no changes found\n"))
998 self.ui.status(_("no changes found\n"))
992 return 0
999 return 0
993
1000
994 if heads is None:
1001 if heads is None:
995 cg = remote.changegroup(fetch, 'pull')
1002 cg = remote.changegroup(fetch, 'pull')
996 else:
1003 else:
997 cg = remote.changegroupsubset(fetch, heads, 'pull')
1004 cg = remote.changegroupsubset(fetch, heads, 'pull')
998 return self.addchangegroup(cg)
1005 return self.addchangegroup(cg)
999
1006
1000 def push(self, remote, force=False, revs=None):
1007 def push(self, remote, force=False, revs=None):
1001 lock = remote.lock()
1008 lock = remote.lock()
1002
1009
1003 base = {}
1010 base = {}
1004 remote_heads = remote.heads()
1011 remote_heads = remote.heads()
1005 inc = self.findincoming(remote, base, remote_heads, force=force)
1012 inc = self.findincoming(remote, base, remote_heads, force=force)
1006 if not force and inc:
1013 if not force and inc:
1007 self.ui.warn(_("abort: unsynced remote changes!\n"))
1014 self.ui.warn(_("abort: unsynced remote changes!\n"))
1008 self.ui.status(_("(did you forget to sync?"
1015 self.ui.status(_("(did you forget to sync?"
1009 " use push -f to force)\n"))
1016 " use push -f to force)\n"))
1010 return 1
1017 return 1
1011
1018
1012 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1019 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1013 if revs is not None:
1020 if revs is not None:
1014 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1021 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1015 else:
1022 else:
1016 bases, heads = update, self.changelog.heads()
1023 bases, heads = update, self.changelog.heads()
1017
1024
1018 if not bases:
1025 if not bases:
1019 self.ui.status(_("no changes found\n"))
1026 self.ui.status(_("no changes found\n"))
1020 return 1
1027 return 1
1021 elif not force:
1028 elif not force:
1022 if revs is not None:
1029 if revs is not None:
1023 updated_heads = {}
1030 updated_heads = {}
1024 for base in msng_cl:
1031 for base in msng_cl:
1025 for parent in self.changelog.parents(base):
1032 for parent in self.changelog.parents(base):
1026 if parent in remote_heads:
1033 if parent in remote_heads:
1027 updated_heads[parent] = True
1034 updated_heads[parent] = True
1028 updated_heads = updated_heads.keys()
1035 updated_heads = updated_heads.keys()
1029 if len(updated_heads) < len(heads):
1036 if len(updated_heads) < len(heads):
1030 self.ui.warn(_("abort: push creates new remote branches!\n"))
1037 self.ui.warn(_("abort: push creates new remote branches!\n"))
1031 self.ui.status(_("(did you forget to merge?"
1038 self.ui.status(_("(did you forget to merge?"
1032 " use push -f to force)\n"))
1039 " use push -f to force)\n"))
1033 return 1
1040 return 1
1034
1041
1035 if revs is None:
1042 if revs is None:
1036 cg = self.changegroup(update, 'push')
1043 cg = self.changegroup(update, 'push')
1037 else:
1044 else:
1038 cg = self.changegroupsubset(update, revs, 'push')
1045 cg = self.changegroupsubset(update, revs, 'push')
1039 return remote.addchangegroup(cg)
1046 return remote.addchangegroup(cg)
1040
1047
1041 def changegroupsubset(self, bases, heads, source):
1048 def changegroupsubset(self, bases, heads, source):
1042 """This function generates a changegroup consisting of all the nodes
1049 """This function generates a changegroup consisting of all the nodes
1043 that are descendents of any of the bases, and ancestors of any of
1050 that are descendents of any of the bases, and ancestors of any of
1044 the heads.
1051 the heads.
1045
1052
1046 It is fairly complex as determining which filenodes and which
1053 It is fairly complex as determining which filenodes and which
1047 manifest nodes need to be included for the changeset to be complete
1054 manifest nodes need to be included for the changeset to be complete
1048 is non-trivial.
1055 is non-trivial.
1049
1056
1050 Another wrinkle is doing the reverse, figuring out which changeset in
1057 Another wrinkle is doing the reverse, figuring out which changeset in
1051 the changegroup a particular filenode or manifestnode belongs to."""
1058 the changegroup a particular filenode or manifestnode belongs to."""
1052
1059
1053 self.hook('preoutgoing', throw=True, source=source)
1060 self.hook('preoutgoing', throw=True, source=source)
1054
1061
1055 # Set up some initial variables
1062 # Set up some initial variables
1056 # Make it easy to refer to self.changelog
1063 # Make it easy to refer to self.changelog
1057 cl = self.changelog
1064 cl = self.changelog
1058 # msng is short for missing - compute the list of changesets in this
1065 # msng is short for missing - compute the list of changesets in this
1059 # changegroup.
1066 # changegroup.
1060 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1067 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1061 # Some bases may turn out to be superfluous, and some heads may be
1068 # Some bases may turn out to be superfluous, and some heads may be
1062 # too. nodesbetween will return the minimal set of bases and heads
1069 # too. nodesbetween will return the minimal set of bases and heads
1063 # necessary to re-create the changegroup.
1070 # necessary to re-create the changegroup.
1064
1071
1065 # Known heads are the list of heads that it is assumed the recipient
1072 # Known heads are the list of heads that it is assumed the recipient
1066 # of this changegroup will know about.
1073 # of this changegroup will know about.
1067 knownheads = {}
1074 knownheads = {}
1068 # We assume that all parents of bases are known heads.
1075 # We assume that all parents of bases are known heads.
1069 for n in bases:
1076 for n in bases:
1070 for p in cl.parents(n):
1077 for p in cl.parents(n):
1071 if p != nullid:
1078 if p != nullid:
1072 knownheads[p] = 1
1079 knownheads[p] = 1
1073 knownheads = knownheads.keys()
1080 knownheads = knownheads.keys()
1074 if knownheads:
1081 if knownheads:
1075 # Now that we know what heads are known, we can compute which
1082 # Now that we know what heads are known, we can compute which
1076 # changesets are known. The recipient must know about all
1083 # changesets are known. The recipient must know about all
1077 # changesets required to reach the known heads from the null
1084 # changesets required to reach the known heads from the null
1078 # changeset.
1085 # changeset.
1079 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1086 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1080 junk = None
1087 junk = None
1081 # Transform the list into an ersatz set.
1088 # Transform the list into an ersatz set.
1082 has_cl_set = dict.fromkeys(has_cl_set)
1089 has_cl_set = dict.fromkeys(has_cl_set)
1083 else:
1090 else:
1084 # If there were no known heads, the recipient cannot be assumed to
1091 # If there were no known heads, the recipient cannot be assumed to
1085 # know about any changesets.
1092 # know about any changesets.
1086 has_cl_set = {}
1093 has_cl_set = {}
1087
1094
1088 # Make it easy to refer to self.manifest
1095 # Make it easy to refer to self.manifest
1089 mnfst = self.manifest
1096 mnfst = self.manifest
1090 # We don't know which manifests are missing yet
1097 # We don't know which manifests are missing yet
1091 msng_mnfst_set = {}
1098 msng_mnfst_set = {}
1092 # Nor do we know which filenodes are missing.
1099 # Nor do we know which filenodes are missing.
1093 msng_filenode_set = {}
1100 msng_filenode_set = {}
1094
1101
1095 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1102 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1096 junk = None
1103 junk = None
1097
1104
1098 # A changeset always belongs to itself, so the changenode lookup
1105 # A changeset always belongs to itself, so the changenode lookup
1099 # function for a changenode is identity.
1106 # function for a changenode is identity.
1100 def identity(x):
1107 def identity(x):
1101 return x
1108 return x
1102
1109
1103 # A function generating function. Sets up an environment for the
1110 # A function generating function. Sets up an environment for the
1104 # inner function.
1111 # inner function.
1105 def cmp_by_rev_func(revlog):
1112 def cmp_by_rev_func(revlog):
1106 # Compare two nodes by their revision number in the environment's
1113 # Compare two nodes by their revision number in the environment's
1107 # revision history. Since the revision number both represents the
1114 # revision history. Since the revision number both represents the
1108 # most efficient order to read the nodes in, and represents a
1115 # most efficient order to read the nodes in, and represents a
1109 # topological sorting of the nodes, this function is often useful.
1116 # topological sorting of the nodes, this function is often useful.
1110 def cmp_by_rev(a, b):
1117 def cmp_by_rev(a, b):
1111 return cmp(revlog.rev(a), revlog.rev(b))
1118 return cmp(revlog.rev(a), revlog.rev(b))
1112 return cmp_by_rev
1119 return cmp_by_rev
1113
1120
1114 # If we determine that a particular file or manifest node must be a
1121 # If we determine that a particular file or manifest node must be a
1115 # node that the recipient of the changegroup will already have, we can
1122 # node that the recipient of the changegroup will already have, we can
1116 # also assume the recipient will have all the parents. This function
1123 # also assume the recipient will have all the parents. This function
1117 # prunes them from the set of missing nodes.
1124 # prunes them from the set of missing nodes.
1118 def prune_parents(revlog, hasset, msngset):
1125 def prune_parents(revlog, hasset, msngset):
1119 haslst = hasset.keys()
1126 haslst = hasset.keys()
1120 haslst.sort(cmp_by_rev_func(revlog))
1127 haslst.sort(cmp_by_rev_func(revlog))
1121 for node in haslst:
1128 for node in haslst:
1122 parentlst = [p for p in revlog.parents(node) if p != nullid]
1129 parentlst = [p for p in revlog.parents(node) if p != nullid]
1123 while parentlst:
1130 while parentlst:
1124 n = parentlst.pop()
1131 n = parentlst.pop()
1125 if n not in hasset:
1132 if n not in hasset:
1126 hasset[n] = 1
1133 hasset[n] = 1
1127 p = [p for p in revlog.parents(n) if p != nullid]
1134 p = [p for p in revlog.parents(n) if p != nullid]
1128 parentlst.extend(p)
1135 parentlst.extend(p)
1129 for n in hasset:
1136 for n in hasset:
1130 msngset.pop(n, None)
1137 msngset.pop(n, None)
1131
1138
1132 # This is a function generating function used to set up an environment
1139 # This is a function generating function used to set up an environment
1133 # for the inner function to execute in.
1140 # for the inner function to execute in.
1134 def manifest_and_file_collector(changedfileset):
1141 def manifest_and_file_collector(changedfileset):
1135 # This is an information gathering function that gathers
1142 # This is an information gathering function that gathers
1136 # information from each changeset node that goes out as part of
1143 # information from each changeset node that goes out as part of
1137 # the changegroup. The information gathered is a list of which
1144 # the changegroup. The information gathered is a list of which
1138 # manifest nodes are potentially required (the recipient may
1145 # manifest nodes are potentially required (the recipient may
1139 # already have them) and total list of all files which were
1146 # already have them) and total list of all files which were
1140 # changed in any changeset in the changegroup.
1147 # changed in any changeset in the changegroup.
1141 #
1148 #
1142 # We also remember the first changenode we saw any manifest
1149 # We also remember the first changenode we saw any manifest
1143 # referenced by so we can later determine which changenode 'owns'
1150 # referenced by so we can later determine which changenode 'owns'
1144 # the manifest.
1151 # the manifest.
1145 def collect_manifests_and_files(clnode):
1152 def collect_manifests_and_files(clnode):
1146 c = cl.read(clnode)
1153 c = cl.read(clnode)
1147 for f in c[3]:
1154 for f in c[3]:
1148 # This is to make sure we only have one instance of each
1155 # This is to make sure we only have one instance of each
1149 # filename string for each filename.
1156 # filename string for each filename.
1150 changedfileset.setdefault(f, f)
1157 changedfileset.setdefault(f, f)
1151 msng_mnfst_set.setdefault(c[0], clnode)
1158 msng_mnfst_set.setdefault(c[0], clnode)
1152 return collect_manifests_and_files
1159 return collect_manifests_and_files
1153
1160
1154 # Figure out which manifest nodes (of the ones we think might be part
1161 # Figure out which manifest nodes (of the ones we think might be part
1155 # of the changegroup) the recipient must know about and remove them
1162 # of the changegroup) the recipient must know about and remove them
1156 # from the changegroup.
1163 # from the changegroup.
1157 def prune_manifests():
1164 def prune_manifests():
1158 has_mnfst_set = {}
1165 has_mnfst_set = {}
1159 for n in msng_mnfst_set:
1166 for n in msng_mnfst_set:
1160 # If a 'missing' manifest thinks it belongs to a changenode
1167 # If a 'missing' manifest thinks it belongs to a changenode
1161 # the recipient is assumed to have, obviously the recipient
1168 # the recipient is assumed to have, obviously the recipient
1162 # must have that manifest.
1169 # must have that manifest.
1163 linknode = cl.node(mnfst.linkrev(n))
1170 linknode = cl.node(mnfst.linkrev(n))
1164 if linknode in has_cl_set:
1171 if linknode in has_cl_set:
1165 has_mnfst_set[n] = 1
1172 has_mnfst_set[n] = 1
1166 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1173 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1167
1174
1168 # Use the information collected in collect_manifests_and_files to say
1175 # Use the information collected in collect_manifests_and_files to say
1169 # which changenode any manifestnode belongs to.
1176 # which changenode any manifestnode belongs to.
1170 def lookup_manifest_link(mnfstnode):
1177 def lookup_manifest_link(mnfstnode):
1171 return msng_mnfst_set[mnfstnode]
1178 return msng_mnfst_set[mnfstnode]
1172
1179
1173 # A function generating function that sets up the initial environment
1180 # A function generating function that sets up the initial environment
1174 # the inner function.
1181 # the inner function.
1175 def filenode_collector(changedfiles):
1182 def filenode_collector(changedfiles):
1176 next_rev = [0]
1183 next_rev = [0]
1177 # This gathers information from each manifestnode included in the
1184 # This gathers information from each manifestnode included in the
1178 # changegroup about which filenodes the manifest node references
1185 # changegroup about which filenodes the manifest node references
1179 # so we can include those in the changegroup too.
1186 # so we can include those in the changegroup too.
1180 #
1187 #
1181 # It also remembers which changenode each filenode belongs to. It
1188 # It also remembers which changenode each filenode belongs to. It
1182 # does this by assuming the a filenode belongs to the changenode
1189 # does this by assuming the a filenode belongs to the changenode
1183 # the first manifest that references it belongs to.
1190 # the first manifest that references it belongs to.
1184 def collect_msng_filenodes(mnfstnode):
1191 def collect_msng_filenodes(mnfstnode):
1185 r = mnfst.rev(mnfstnode)
1192 r = mnfst.rev(mnfstnode)
1186 if r == next_rev[0]:
1193 if r == next_rev[0]:
1187 # If the last rev we looked at was the one just previous,
1194 # If the last rev we looked at was the one just previous,
1188 # we only need to see a diff.
1195 # we only need to see a diff.
1189 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1196 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1190 # For each line in the delta
1197 # For each line in the delta
1191 for dline in delta.splitlines():
1198 for dline in delta.splitlines():
1192 # get the filename and filenode for that line
1199 # get the filename and filenode for that line
1193 f, fnode = dline.split('\0')
1200 f, fnode = dline.split('\0')
1194 fnode = bin(fnode[:40])
1201 fnode = bin(fnode[:40])
1195 f = changedfiles.get(f, None)
1202 f = changedfiles.get(f, None)
1196 # And if the file is in the list of files we care
1203 # And if the file is in the list of files we care
1197 # about.
1204 # about.
1198 if f is not None:
1205 if f is not None:
1199 # Get the changenode this manifest belongs to
1206 # Get the changenode this manifest belongs to
1200 clnode = msng_mnfst_set[mnfstnode]
1207 clnode = msng_mnfst_set[mnfstnode]
1201 # Create the set of filenodes for the file if
1208 # Create the set of filenodes for the file if
1202 # there isn't one already.
1209 # there isn't one already.
1203 ndset = msng_filenode_set.setdefault(f, {})
1210 ndset = msng_filenode_set.setdefault(f, {})
1204 # And set the filenode's changelog node to the
1211 # And set the filenode's changelog node to the
1205 # manifest's if it hasn't been set already.
1212 # manifest's if it hasn't been set already.
1206 ndset.setdefault(fnode, clnode)
1213 ndset.setdefault(fnode, clnode)
1207 else:
1214 else:
1208 # Otherwise we need a full manifest.
1215 # Otherwise we need a full manifest.
1209 m = mnfst.read(mnfstnode)
1216 m = mnfst.read(mnfstnode)
1210 # For every file in we care about.
1217 # For every file in we care about.
1211 for f in changedfiles:
1218 for f in changedfiles:
1212 fnode = m.get(f, None)
1219 fnode = m.get(f, None)
1213 # If it's in the manifest
1220 # If it's in the manifest
1214 if fnode is not None:
1221 if fnode is not None:
1215 # See comments above.
1222 # See comments above.
1216 clnode = msng_mnfst_set[mnfstnode]
1223 clnode = msng_mnfst_set[mnfstnode]
1217 ndset = msng_filenode_set.setdefault(f, {})
1224 ndset = msng_filenode_set.setdefault(f, {})
1218 ndset.setdefault(fnode, clnode)
1225 ndset.setdefault(fnode, clnode)
1219 # Remember the revision we hope to see next.
1226 # Remember the revision we hope to see next.
1220 next_rev[0] = r + 1
1227 next_rev[0] = r + 1
1221 return collect_msng_filenodes
1228 return collect_msng_filenodes
1222
1229
1223 # We have a list of filenodes we think we need for a file, lets remove
1230 # We have a list of filenodes we think we need for a file, lets remove
1224 # all those we now the recipient must have.
1231 # all those we now the recipient must have.
1225 def prune_filenodes(f, filerevlog):
1232 def prune_filenodes(f, filerevlog):
1226 msngset = msng_filenode_set[f]
1233 msngset = msng_filenode_set[f]
1227 hasset = {}
1234 hasset = {}
1228 # If a 'missing' filenode thinks it belongs to a changenode we
1235 # If a 'missing' filenode thinks it belongs to a changenode we
1229 # assume the recipient must have, then the recipient must have
1236 # assume the recipient must have, then the recipient must have
1230 # that filenode.
1237 # that filenode.
1231 for n in msngset:
1238 for n in msngset:
1232 clnode = cl.node(filerevlog.linkrev(n))
1239 clnode = cl.node(filerevlog.linkrev(n))
1233 if clnode in has_cl_set:
1240 if clnode in has_cl_set:
1234 hasset[n] = 1
1241 hasset[n] = 1
1235 prune_parents(filerevlog, hasset, msngset)
1242 prune_parents(filerevlog, hasset, msngset)
1236
1243
1237 # A function generator function that sets up the a context for the
1244 # A function generator function that sets up the a context for the
1238 # inner function.
1245 # inner function.
1239 def lookup_filenode_link_func(fname):
1246 def lookup_filenode_link_func(fname):
1240 msngset = msng_filenode_set[fname]
1247 msngset = msng_filenode_set[fname]
1241 # Lookup the changenode the filenode belongs to.
1248 # Lookup the changenode the filenode belongs to.
1242 def lookup_filenode_link(fnode):
1249 def lookup_filenode_link(fnode):
1243 return msngset[fnode]
1250 return msngset[fnode]
1244 return lookup_filenode_link
1251 return lookup_filenode_link
1245
1252
1246 # Now that we have all theses utility functions to help out and
1253 # Now that we have all theses utility functions to help out and
1247 # logically divide up the task, generate the group.
1254 # logically divide up the task, generate the group.
1248 def gengroup():
1255 def gengroup():
1249 # The set of changed files starts empty.
1256 # The set of changed files starts empty.
1250 changedfiles = {}
1257 changedfiles = {}
1251 # Create a changenode group generator that will call our functions
1258 # Create a changenode group generator that will call our functions
1252 # back to lookup the owning changenode and collect information.
1259 # back to lookup the owning changenode and collect information.
1253 group = cl.group(msng_cl_lst, identity,
1260 group = cl.group(msng_cl_lst, identity,
1254 manifest_and_file_collector(changedfiles))
1261 manifest_and_file_collector(changedfiles))
1255 for chnk in group:
1262 for chnk in group:
1256 yield chnk
1263 yield chnk
1257
1264
1258 # The list of manifests has been collected by the generator
1265 # The list of manifests has been collected by the generator
1259 # calling our functions back.
1266 # calling our functions back.
1260 prune_manifests()
1267 prune_manifests()
1261 msng_mnfst_lst = msng_mnfst_set.keys()
1268 msng_mnfst_lst = msng_mnfst_set.keys()
1262 # Sort the manifestnodes by revision number.
1269 # Sort the manifestnodes by revision number.
1263 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1270 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1264 # Create a generator for the manifestnodes that calls our lookup
1271 # Create a generator for the manifestnodes that calls our lookup
1265 # and data collection functions back.
1272 # and data collection functions back.
1266 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1273 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1267 filenode_collector(changedfiles))
1274 filenode_collector(changedfiles))
1268 for chnk in group:
1275 for chnk in group:
1269 yield chnk
1276 yield chnk
1270
1277
1271 # These are no longer needed, dereference and toss the memory for
1278 # These are no longer needed, dereference and toss the memory for
1272 # them.
1279 # them.
1273 msng_mnfst_lst = None
1280 msng_mnfst_lst = None
1274 msng_mnfst_set.clear()
1281 msng_mnfst_set.clear()
1275
1282
1276 changedfiles = changedfiles.keys()
1283 changedfiles = changedfiles.keys()
1277 changedfiles.sort()
1284 changedfiles.sort()
1278 # Go through all our files in order sorted by name.
1285 # Go through all our files in order sorted by name.
1279 for fname in changedfiles:
1286 for fname in changedfiles:
1280 filerevlog = self.file(fname)
1287 filerevlog = self.file(fname)
1281 # Toss out the filenodes that the recipient isn't really
1288 # Toss out the filenodes that the recipient isn't really
1282 # missing.
1289 # missing.
1283 if msng_filenode_set.has_key(fname):
1290 if msng_filenode_set.has_key(fname):
1284 prune_filenodes(fname, filerevlog)
1291 prune_filenodes(fname, filerevlog)
1285 msng_filenode_lst = msng_filenode_set[fname].keys()
1292 msng_filenode_lst = msng_filenode_set[fname].keys()
1286 else:
1293 else:
1287 msng_filenode_lst = []
1294 msng_filenode_lst = []
1288 # If any filenodes are left, generate the group for them,
1295 # If any filenodes are left, generate the group for them,
1289 # otherwise don't bother.
1296 # otherwise don't bother.
1290 if len(msng_filenode_lst) > 0:
1297 if len(msng_filenode_lst) > 0:
1291 yield changegroup.genchunk(fname)
1298 yield changegroup.genchunk(fname)
1292 # Sort the filenodes by their revision #
1299 # Sort the filenodes by their revision #
1293 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1300 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1294 # Create a group generator and only pass in a changenode
1301 # Create a group generator and only pass in a changenode
1295 # lookup function as we need to collect no information
1302 # lookup function as we need to collect no information
1296 # from filenodes.
1303 # from filenodes.
1297 group = filerevlog.group(msng_filenode_lst,
1304 group = filerevlog.group(msng_filenode_lst,
1298 lookup_filenode_link_func(fname))
1305 lookup_filenode_link_func(fname))
1299 for chnk in group:
1306 for chnk in group:
1300 yield chnk
1307 yield chnk
1301 if msng_filenode_set.has_key(fname):
1308 if msng_filenode_set.has_key(fname):
1302 # Don't need this anymore, toss it to free memory.
1309 # Don't need this anymore, toss it to free memory.
1303 del msng_filenode_set[fname]
1310 del msng_filenode_set[fname]
1304 # Signal that no more groups are left.
1311 # Signal that no more groups are left.
1305 yield changegroup.closechunk()
1312 yield changegroup.closechunk()
1306
1313
1307 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1314 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1308
1315
1309 return util.chunkbuffer(gengroup())
1316 return util.chunkbuffer(gengroup())
1310
1317
1311 def changegroup(self, basenodes, source):
1318 def changegroup(self, basenodes, source):
1312 """Generate a changegroup of all nodes that we have that a recipient
1319 """Generate a changegroup of all nodes that we have that a recipient
1313 doesn't.
1320 doesn't.
1314
1321
1315 This is much easier than the previous function as we can assume that
1322 This is much easier than the previous function as we can assume that
1316 the recipient has any changenode we aren't sending them."""
1323 the recipient has any changenode we aren't sending them."""
1317
1324
1318 self.hook('preoutgoing', throw=True, source=source)
1325 self.hook('preoutgoing', throw=True, source=source)
1319
1326
1320 cl = self.changelog
1327 cl = self.changelog
1321 nodes = cl.nodesbetween(basenodes, None)[0]
1328 nodes = cl.nodesbetween(basenodes, None)[0]
1322 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1329 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1323
1330
1324 def identity(x):
1331 def identity(x):
1325 return x
1332 return x
1326
1333
1327 def gennodelst(revlog):
1334 def gennodelst(revlog):
1328 for r in xrange(0, revlog.count()):
1335 for r in xrange(0, revlog.count()):
1329 n = revlog.node(r)
1336 n = revlog.node(r)
1330 if revlog.linkrev(n) in revset:
1337 if revlog.linkrev(n) in revset:
1331 yield n
1338 yield n
1332
1339
1333 def changed_file_collector(changedfileset):
1340 def changed_file_collector(changedfileset):
1334 def collect_changed_files(clnode):
1341 def collect_changed_files(clnode):
1335 c = cl.read(clnode)
1342 c = cl.read(clnode)
1336 for fname in c[3]:
1343 for fname in c[3]:
1337 changedfileset[fname] = 1
1344 changedfileset[fname] = 1
1338 return collect_changed_files
1345 return collect_changed_files
1339
1346
1340 def lookuprevlink_func(revlog):
1347 def lookuprevlink_func(revlog):
1341 def lookuprevlink(n):
1348 def lookuprevlink(n):
1342 return cl.node(revlog.linkrev(n))
1349 return cl.node(revlog.linkrev(n))
1343 return lookuprevlink
1350 return lookuprevlink
1344
1351
1345 def gengroup():
1352 def gengroup():
1346 # construct a list of all changed files
1353 # construct a list of all changed files
1347 changedfiles = {}
1354 changedfiles = {}
1348
1355
1349 for chnk in cl.group(nodes, identity,
1356 for chnk in cl.group(nodes, identity,
1350 changed_file_collector(changedfiles)):
1357 changed_file_collector(changedfiles)):
1351 yield chnk
1358 yield chnk
1352 changedfiles = changedfiles.keys()
1359 changedfiles = changedfiles.keys()
1353 changedfiles.sort()
1360 changedfiles.sort()
1354
1361
1355 mnfst = self.manifest
1362 mnfst = self.manifest
1356 nodeiter = gennodelst(mnfst)
1363 nodeiter = gennodelst(mnfst)
1357 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1364 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1358 yield chnk
1365 yield chnk
1359
1366
1360 for fname in changedfiles:
1367 for fname in changedfiles:
1361 filerevlog = self.file(fname)
1368 filerevlog = self.file(fname)
1362 nodeiter = gennodelst(filerevlog)
1369 nodeiter = gennodelst(filerevlog)
1363 nodeiter = list(nodeiter)
1370 nodeiter = list(nodeiter)
1364 if nodeiter:
1371 if nodeiter:
1365 yield changegroup.genchunk(fname)
1372 yield changegroup.genchunk(fname)
1366 lookup = lookuprevlink_func(filerevlog)
1373 lookup = lookuprevlink_func(filerevlog)
1367 for chnk in filerevlog.group(nodeiter, lookup):
1374 for chnk in filerevlog.group(nodeiter, lookup):
1368 yield chnk
1375 yield chnk
1369
1376
1370 yield changegroup.closechunk()
1377 yield changegroup.closechunk()
1371 self.hook('outgoing', node=hex(nodes[0]), source=source)
1378 self.hook('outgoing', node=hex(nodes[0]), source=source)
1372
1379
1373 return util.chunkbuffer(gengroup())
1380 return util.chunkbuffer(gengroup())
1374
1381
1375 def addchangegroup(self, source):
1382 def addchangegroup(self, source):
1376 """add changegroup to repo.
1383 """add changegroup to repo.
1377 returns number of heads modified or added + 1."""
1384 returns number of heads modified or added + 1."""
1378
1385
1379 def csmap(x):
1386 def csmap(x):
1380 self.ui.debug(_("add changeset %s\n") % short(x))
1387 self.ui.debug(_("add changeset %s\n") % short(x))
1381 return cl.count()
1388 return cl.count()
1382
1389
1383 def revmap(x):
1390 def revmap(x):
1384 return cl.rev(x)
1391 return cl.rev(x)
1385
1392
1386 if not source:
1393 if not source:
1387 return 0
1394 return 0
1388
1395
1389 self.hook('prechangegroup', throw=True)
1396 self.hook('prechangegroup', throw=True)
1390
1397
1391 changesets = files = revisions = 0
1398 changesets = files = revisions = 0
1392
1399
1393 tr = self.transaction()
1400 tr = self.transaction()
1394
1401
1395 # write changelog and manifest data to temp files so
1402 # write changelog and manifest data to temp files so
1396 # concurrent readers will not see inconsistent view
1403 # concurrent readers will not see inconsistent view
1397 cl = appendfile.appendchangelog(self.opener)
1404 cl = appendfile.appendchangelog(self.opener)
1398
1405
1399 oldheads = len(cl.heads())
1406 oldheads = len(cl.heads())
1400
1407
1401 # pull off the changeset group
1408 # pull off the changeset group
1402 self.ui.status(_("adding changesets\n"))
1409 self.ui.status(_("adding changesets\n"))
1403 co = cl.tip()
1410 co = cl.tip()
1404 chunkiter = changegroup.chunkiter(source)
1411 chunkiter = changegroup.chunkiter(source)
1405 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1412 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1406 cnr, cor = map(cl.rev, (cn, co))
1413 cnr, cor = map(cl.rev, (cn, co))
1407 if cn == nullid:
1414 if cn == nullid:
1408 cnr = cor
1415 cnr = cor
1409 changesets = cnr - cor
1416 changesets = cnr - cor
1410
1417
1411 mf = appendfile.appendmanifest(self.opener)
1418 mf = appendfile.appendmanifest(self.opener)
1412
1419
1413 # pull off the manifest group
1420 # pull off the manifest group
1414 self.ui.status(_("adding manifests\n"))
1421 self.ui.status(_("adding manifests\n"))
1415 mm = mf.tip()
1422 mm = mf.tip()
1416 chunkiter = changegroup.chunkiter(source)
1423 chunkiter = changegroup.chunkiter(source)
1417 mo = mf.addgroup(chunkiter, revmap, tr)
1424 mo = mf.addgroup(chunkiter, revmap, tr)
1418
1425
1419 # process the files
1426 # process the files
1420 self.ui.status(_("adding file changes\n"))
1427 self.ui.status(_("adding file changes\n"))
1421 while 1:
1428 while 1:
1422 f = changegroup.getchunk(source)
1429 f = changegroup.getchunk(source)
1423 if not f:
1430 if not f:
1424 break
1431 break
1425 self.ui.debug(_("adding %s revisions\n") % f)
1432 self.ui.debug(_("adding %s revisions\n") % f)
1426 fl = self.file(f)
1433 fl = self.file(f)
1427 o = fl.count()
1434 o = fl.count()
1428 chunkiter = changegroup.chunkiter(source)
1435 chunkiter = changegroup.chunkiter(source)
1429 n = fl.addgroup(chunkiter, revmap, tr)
1436 n = fl.addgroup(chunkiter, revmap, tr)
1430 revisions += fl.count() - o
1437 revisions += fl.count() - o
1431 files += 1
1438 files += 1
1432
1439
1433 # write order here is important so concurrent readers will see
1440 # write order here is important so concurrent readers will see
1434 # consistent view of repo
1441 # consistent view of repo
1435 mf.writedata()
1442 mf.writedata()
1436 cl.writedata()
1443 cl.writedata()
1437
1444
1438 # make changelog and manifest see real files again
1445 # make changelog and manifest see real files again
1439 self.changelog = changelog.changelog(self.opener)
1446 self.changelog = changelog.changelog(self.opener)
1440 self.manifest = manifest.manifest(self.opener)
1447 self.manifest = manifest.manifest(self.opener)
1441
1448
1442 newheads = len(self.changelog.heads())
1449 newheads = len(self.changelog.heads())
1443 heads = ""
1450 heads = ""
1444 if oldheads and newheads > oldheads:
1451 if oldheads and newheads > oldheads:
1445 heads = _(" (+%d heads)") % (newheads - oldheads)
1452 heads = _(" (+%d heads)") % (newheads - oldheads)
1446
1453
1447 self.ui.status(_("added %d changesets"
1454 self.ui.status(_("added %d changesets"
1448 " with %d changes to %d files%s\n")
1455 " with %d changes to %d files%s\n")
1449 % (changesets, revisions, files, heads))
1456 % (changesets, revisions, files, heads))
1450
1457
1451 self.hook('pretxnchangegroup', throw=True,
1458 self.hook('pretxnchangegroup', throw=True,
1452 node=hex(self.changelog.node(cor+1)))
1459 node=hex(self.changelog.node(cor+1)))
1453
1460
1454 tr.close()
1461 tr.close()
1455
1462
1456 if changesets > 0:
1463 if changesets > 0:
1457 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1464 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1458
1465
1459 for i in range(cor + 1, cnr + 1):
1466 for i in range(cor + 1, cnr + 1):
1460 self.hook("incoming", node=hex(self.changelog.node(i)))
1467 self.hook("incoming", node=hex(self.changelog.node(i)))
1461
1468
1462 return newheads - oldheads + 1
1469 return newheads - oldheads + 1
1463
1470
1464 def update(self, node, allow=False, force=False, choose=None,
1471 def update(self, node, allow=False, force=False, choose=None,
1465 moddirstate=True, forcemerge=False, wlock=None):
1472 moddirstate=True, forcemerge=False, wlock=None):
1466 pl = self.dirstate.parents()
1473 pl = self.dirstate.parents()
1467 if not force and pl[1] != nullid:
1474 if not force and pl[1] != nullid:
1468 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1475 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1469 return 1
1476 return 1
1470
1477
1471 err = False
1478 err = False
1472
1479
1473 p1, p2 = pl[0], node
1480 p1, p2 = pl[0], node
1474 pa = self.changelog.ancestor(p1, p2)
1481 pa = self.changelog.ancestor(p1, p2)
1475 m1n = self.changelog.read(p1)[0]
1482 m1n = self.changelog.read(p1)[0]
1476 m2n = self.changelog.read(p2)[0]
1483 m2n = self.changelog.read(p2)[0]
1477 man = self.manifest.ancestor(m1n, m2n)
1484 man = self.manifest.ancestor(m1n, m2n)
1478 m1 = self.manifest.read(m1n)
1485 m1 = self.manifest.read(m1n)
1479 mf1 = self.manifest.readflags(m1n)
1486 mf1 = self.manifest.readflags(m1n)
1480 m2 = self.manifest.read(m2n).copy()
1487 m2 = self.manifest.read(m2n).copy()
1481 mf2 = self.manifest.readflags(m2n)
1488 mf2 = self.manifest.readflags(m2n)
1482 ma = self.manifest.read(man)
1489 ma = self.manifest.read(man)
1483 mfa = self.manifest.readflags(man)
1490 mfa = self.manifest.readflags(man)
1484
1491
1485 modified, added, removed, deleted, unknown = self.changes()
1492 modified, added, removed, deleted, unknown = self.changes()
1486
1493
1487 # is this a jump, or a merge? i.e. is there a linear path
1494 # is this a jump, or a merge? i.e. is there a linear path
1488 # from p1 to p2?
1495 # from p1 to p2?
1489 linear_path = (pa == p1 or pa == p2)
1496 linear_path = (pa == p1 or pa == p2)
1490
1497
1491 if allow and linear_path:
1498 if allow and linear_path:
1492 raise util.Abort(_("there is nothing to merge, "
1499 raise util.Abort(_("there is nothing to merge, "
1493 "just use 'hg update'"))
1500 "just use 'hg update'"))
1494 if allow and not forcemerge:
1501 if allow and not forcemerge:
1495 if modified or added or removed:
1502 if modified or added or removed:
1496 raise util.Abort(_("outstanding uncommitted changes"))
1503 raise util.Abort(_("outstanding uncommitted changes"))
1497 if not forcemerge and not force:
1504 if not forcemerge and not force:
1498 for f in unknown:
1505 for f in unknown:
1499 if f in m2:
1506 if f in m2:
1500 t1 = self.wread(f)
1507 t1 = self.wread(f)
1501 t2 = self.file(f).read(m2[f])
1508 t2 = self.file(f).read(m2[f])
1502 if cmp(t1, t2) != 0:
1509 if cmp(t1, t2) != 0:
1503 raise util.Abort(_("'%s' already exists in the working"
1510 raise util.Abort(_("'%s' already exists in the working"
1504 " dir and differs from remote") % f)
1511 " dir and differs from remote") % f)
1505
1512
1506 # resolve the manifest to determine which files
1513 # resolve the manifest to determine which files
1507 # we care about merging
1514 # we care about merging
1508 self.ui.note(_("resolving manifests\n"))
1515 self.ui.note(_("resolving manifests\n"))
1509 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1516 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1510 (force, allow, moddirstate, linear_path))
1517 (force, allow, moddirstate, linear_path))
1511 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1518 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1512 (short(man), short(m1n), short(m2n)))
1519 (short(man), short(m1n), short(m2n)))
1513
1520
1514 merge = {}
1521 merge = {}
1515 get = {}
1522 get = {}
1516 remove = []
1523 remove = []
1517
1524
1518 # construct a working dir manifest
1525 # construct a working dir manifest
1519 mw = m1.copy()
1526 mw = m1.copy()
1520 mfw = mf1.copy()
1527 mfw = mf1.copy()
1521 umap = dict.fromkeys(unknown)
1528 umap = dict.fromkeys(unknown)
1522
1529
1523 for f in added + modified + unknown:
1530 for f in added + modified + unknown:
1524 mw[f] = ""
1531 mw[f] = ""
1525 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1532 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1526
1533
1527 if moddirstate and not wlock:
1534 if moddirstate and not wlock:
1528 wlock = self.wlock()
1535 wlock = self.wlock()
1529
1536
1530 for f in deleted + removed:
1537 for f in deleted + removed:
1531 if f in mw:
1538 if f in mw:
1532 del mw[f]
1539 del mw[f]
1533
1540
1534 # If we're jumping between revisions (as opposed to merging),
1541 # If we're jumping between revisions (as opposed to merging),
1535 # and if neither the working directory nor the target rev has
1542 # and if neither the working directory nor the target rev has
1536 # the file, then we need to remove it from the dirstate, to
1543 # the file, then we need to remove it from the dirstate, to
1537 # prevent the dirstate from listing the file when it is no
1544 # prevent the dirstate from listing the file when it is no
1538 # longer in the manifest.
1545 # longer in the manifest.
1539 if moddirstate and linear_path and f not in m2:
1546 if moddirstate and linear_path and f not in m2:
1540 self.dirstate.forget((f,))
1547 self.dirstate.forget((f,))
1541
1548
1542 # Compare manifests
1549 # Compare manifests
1543 for f, n in mw.iteritems():
1550 for f, n in mw.iteritems():
1544 if choose and not choose(f):
1551 if choose and not choose(f):
1545 continue
1552 continue
1546 if f in m2:
1553 if f in m2:
1547 s = 0
1554 s = 0
1548
1555
1549 # is the wfile new since m1, and match m2?
1556 # is the wfile new since m1, and match m2?
1550 if f not in m1:
1557 if f not in m1:
1551 t1 = self.wread(f)
1558 t1 = self.wread(f)
1552 t2 = self.file(f).read(m2[f])
1559 t2 = self.file(f).read(m2[f])
1553 if cmp(t1, t2) == 0:
1560 if cmp(t1, t2) == 0:
1554 n = m2[f]
1561 n = m2[f]
1555 del t1, t2
1562 del t1, t2
1556
1563
1557 # are files different?
1564 # are files different?
1558 if n != m2[f]:
1565 if n != m2[f]:
1559 a = ma.get(f, nullid)
1566 a = ma.get(f, nullid)
1560 # are both different from the ancestor?
1567 # are both different from the ancestor?
1561 if n != a and m2[f] != a:
1568 if n != a and m2[f] != a:
1562 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1569 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1563 # merge executable bits
1570 # merge executable bits
1564 # "if we changed or they changed, change in merge"
1571 # "if we changed or they changed, change in merge"
1565 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1572 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1566 mode = ((a^b) | (a^c)) ^ a
1573 mode = ((a^b) | (a^c)) ^ a
1567 merge[f] = (m1.get(f, nullid), m2[f], mode)
1574 merge[f] = (m1.get(f, nullid), m2[f], mode)
1568 s = 1
1575 s = 1
1569 # are we clobbering?
1576 # are we clobbering?
1570 # is remote's version newer?
1577 # is remote's version newer?
1571 # or are we going back in time?
1578 # or are we going back in time?
1572 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1579 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1573 self.ui.debug(_(" remote %s is newer, get\n") % f)
1580 self.ui.debug(_(" remote %s is newer, get\n") % f)
1574 get[f] = m2[f]
1581 get[f] = m2[f]
1575 s = 1
1582 s = 1
1576 elif f in umap:
1583 elif f in umap:
1577 # this unknown file is the same as the checkout
1584 # this unknown file is the same as the checkout
1578 get[f] = m2[f]
1585 get[f] = m2[f]
1579
1586
1580 if not s and mfw[f] != mf2[f]:
1587 if not s and mfw[f] != mf2[f]:
1581 if force:
1588 if force:
1582 self.ui.debug(_(" updating permissions for %s\n") % f)
1589 self.ui.debug(_(" updating permissions for %s\n") % f)
1583 util.set_exec(self.wjoin(f), mf2[f])
1590 util.set_exec(self.wjoin(f), mf2[f])
1584 else:
1591 else:
1585 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1592 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1586 mode = ((a^b) | (a^c)) ^ a
1593 mode = ((a^b) | (a^c)) ^ a
1587 if mode != b:
1594 if mode != b:
1588 self.ui.debug(_(" updating permissions for %s\n")
1595 self.ui.debug(_(" updating permissions for %s\n")
1589 % f)
1596 % f)
1590 util.set_exec(self.wjoin(f), mode)
1597 util.set_exec(self.wjoin(f), mode)
1591 del m2[f]
1598 del m2[f]
1592 elif f in ma:
1599 elif f in ma:
1593 if n != ma[f]:
1600 if n != ma[f]:
1594 r = _("d")
1601 r = _("d")
1595 if not force and (linear_path or allow):
1602 if not force and (linear_path or allow):
1596 r = self.ui.prompt(
1603 r = self.ui.prompt(
1597 (_(" local changed %s which remote deleted\n") % f) +
1604 (_(" local changed %s which remote deleted\n") % f) +
1598 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1605 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1599 if r == _("d"):
1606 if r == _("d"):
1600 remove.append(f)
1607 remove.append(f)
1601 else:
1608 else:
1602 self.ui.debug(_("other deleted %s\n") % f)
1609 self.ui.debug(_("other deleted %s\n") % f)
1603 remove.append(f) # other deleted it
1610 remove.append(f) # other deleted it
1604 else:
1611 else:
1605 # file is created on branch or in working directory
1612 # file is created on branch or in working directory
1606 if force and f not in umap:
1613 if force and f not in umap:
1607 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1614 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1608 remove.append(f)
1615 remove.append(f)
1609 elif n == m1.get(f, nullid): # same as parent
1616 elif n == m1.get(f, nullid): # same as parent
1610 if p2 == pa: # going backwards?
1617 if p2 == pa: # going backwards?
1611 self.ui.debug(_("remote deleted %s\n") % f)
1618 self.ui.debug(_("remote deleted %s\n") % f)
1612 remove.append(f)
1619 remove.append(f)
1613 else:
1620 else:
1614 self.ui.debug(_("local modified %s, keeping\n") % f)
1621 self.ui.debug(_("local modified %s, keeping\n") % f)
1615 else:
1622 else:
1616 self.ui.debug(_("working dir created %s, keeping\n") % f)
1623 self.ui.debug(_("working dir created %s, keeping\n") % f)
1617
1624
1618 for f, n in m2.iteritems():
1625 for f, n in m2.iteritems():
1619 if choose and not choose(f):
1626 if choose and not choose(f):
1620 continue
1627 continue
1621 if f[0] == "/":
1628 if f[0] == "/":
1622 continue
1629 continue
1623 if f in ma and n != ma[f]:
1630 if f in ma and n != ma[f]:
1624 r = _("k")
1631 r = _("k")
1625 if not force and (linear_path or allow):
1632 if not force and (linear_path or allow):
1626 r = self.ui.prompt(
1633 r = self.ui.prompt(
1627 (_("remote changed %s which local deleted\n") % f) +
1634 (_("remote changed %s which local deleted\n") % f) +
1628 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1635 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1629 if r == _("k"):
1636 if r == _("k"):
1630 get[f] = n
1637 get[f] = n
1631 elif f not in ma:
1638 elif f not in ma:
1632 self.ui.debug(_("remote created %s\n") % f)
1639 self.ui.debug(_("remote created %s\n") % f)
1633 get[f] = n
1640 get[f] = n
1634 else:
1641 else:
1635 if force or p2 == pa: # going backwards?
1642 if force or p2 == pa: # going backwards?
1636 self.ui.debug(_("local deleted %s, recreating\n") % f)
1643 self.ui.debug(_("local deleted %s, recreating\n") % f)
1637 get[f] = n
1644 get[f] = n
1638 else:
1645 else:
1639 self.ui.debug(_("local deleted %s\n") % f)
1646 self.ui.debug(_("local deleted %s\n") % f)
1640
1647
1641 del mw, m1, m2, ma
1648 del mw, m1, m2, ma
1642
1649
1643 if force:
1650 if force:
1644 for f in merge:
1651 for f in merge:
1645 get[f] = merge[f][1]
1652 get[f] = merge[f][1]
1646 merge = {}
1653 merge = {}
1647
1654
1648 if linear_path or force:
1655 if linear_path or force:
1649 # we don't need to do any magic, just jump to the new rev
1656 # we don't need to do any magic, just jump to the new rev
1650 branch_merge = False
1657 branch_merge = False
1651 p1, p2 = p2, nullid
1658 p1, p2 = p2, nullid
1652 else:
1659 else:
1653 if not allow:
1660 if not allow:
1654 self.ui.status(_("this update spans a branch"
1661 self.ui.status(_("this update spans a branch"
1655 " affecting the following files:\n"))
1662 " affecting the following files:\n"))
1656 fl = merge.keys() + get.keys()
1663 fl = merge.keys() + get.keys()
1657 fl.sort()
1664 fl.sort()
1658 for f in fl:
1665 for f in fl:
1659 cf = ""
1666 cf = ""
1660 if f in merge:
1667 if f in merge:
1661 cf = _(" (resolve)")
1668 cf = _(" (resolve)")
1662 self.ui.status(" %s%s\n" % (f, cf))
1669 self.ui.status(" %s%s\n" % (f, cf))
1663 self.ui.warn(_("aborting update spanning branches!\n"))
1670 self.ui.warn(_("aborting update spanning branches!\n"))
1664 self.ui.status(_("(use 'hg merge' to merge across branches"
1671 self.ui.status(_("(use 'hg merge' to merge across branches"
1665 " or 'hg update -C' to lose changes)\n"))
1672 " or 'hg update -C' to lose changes)\n"))
1666 return 1
1673 return 1
1667 branch_merge = True
1674 branch_merge = True
1668
1675
1669 # get the files we don't need to change
1676 # get the files we don't need to change
1670 files = get.keys()
1677 files = get.keys()
1671 files.sort()
1678 files.sort()
1672 for f in files:
1679 for f in files:
1673 if f[0] == "/":
1680 if f[0] == "/":
1674 continue
1681 continue
1675 self.ui.note(_("getting %s\n") % f)
1682 self.ui.note(_("getting %s\n") % f)
1676 t = self.file(f).read(get[f])
1683 t = self.file(f).read(get[f])
1677 self.wwrite(f, t)
1684 self.wwrite(f, t)
1678 util.set_exec(self.wjoin(f), mf2[f])
1685 util.set_exec(self.wjoin(f), mf2[f])
1679 if moddirstate:
1686 if moddirstate:
1680 if branch_merge:
1687 if branch_merge:
1681 self.dirstate.update([f], 'n', st_mtime=-1)
1688 self.dirstate.update([f], 'n', st_mtime=-1)
1682 else:
1689 else:
1683 self.dirstate.update([f], 'n')
1690 self.dirstate.update([f], 'n')
1684
1691
1685 # merge the tricky bits
1692 # merge the tricky bits
1686 failedmerge = []
1693 failedmerge = []
1687 files = merge.keys()
1694 files = merge.keys()
1688 files.sort()
1695 files.sort()
1689 xp1 = hex(p1)
1696 xp1 = hex(p1)
1690 xp2 = hex(p2)
1697 xp2 = hex(p2)
1691 for f in files:
1698 for f in files:
1692 self.ui.status(_("merging %s\n") % f)
1699 self.ui.status(_("merging %s\n") % f)
1693 my, other, flag = merge[f]
1700 my, other, flag = merge[f]
1694 ret = self.merge3(f, my, other, xp1, xp2)
1701 ret = self.merge3(f, my, other, xp1, xp2)
1695 if ret:
1702 if ret:
1696 err = True
1703 err = True
1697 failedmerge.append(f)
1704 failedmerge.append(f)
1698 util.set_exec(self.wjoin(f), flag)
1705 util.set_exec(self.wjoin(f), flag)
1699 if moddirstate:
1706 if moddirstate:
1700 if branch_merge:
1707 if branch_merge:
1701 # We've done a branch merge, mark this file as merged
1708 # We've done a branch merge, mark this file as merged
1702 # so that we properly record the merger later
1709 # so that we properly record the merger later
1703 self.dirstate.update([f], 'm')
1710 self.dirstate.update([f], 'm')
1704 else:
1711 else:
1705 # We've update-merged a locally modified file, so
1712 # We've update-merged a locally modified file, so
1706 # we set the dirstate to emulate a normal checkout
1713 # we set the dirstate to emulate a normal checkout
1707 # of that file some time in the past. Thus our
1714 # of that file some time in the past. Thus our
1708 # merge will appear as a normal local file
1715 # merge will appear as a normal local file
1709 # modification.
1716 # modification.
1710 f_len = len(self.file(f).read(other))
1717 f_len = len(self.file(f).read(other))
1711 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1718 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1712
1719
1713 remove.sort()
1720 remove.sort()
1714 for f in remove:
1721 for f in remove:
1715 self.ui.note(_("removing %s\n") % f)
1722 self.ui.note(_("removing %s\n") % f)
1716 util.audit_path(f)
1723 util.audit_path(f)
1717 try:
1724 try:
1718 util.unlink(self.wjoin(f))
1725 util.unlink(self.wjoin(f))
1719 except OSError, inst:
1726 except OSError, inst:
1720 if inst.errno != errno.ENOENT:
1727 if inst.errno != errno.ENOENT:
1721 self.ui.warn(_("update failed to remove %s: %s!\n") %
1728 self.ui.warn(_("update failed to remove %s: %s!\n") %
1722 (f, inst.strerror))
1729 (f, inst.strerror))
1723 if moddirstate:
1730 if moddirstate:
1724 if branch_merge:
1731 if branch_merge:
1725 self.dirstate.update(remove, 'r')
1732 self.dirstate.update(remove, 'r')
1726 else:
1733 else:
1727 self.dirstate.forget(remove)
1734 self.dirstate.forget(remove)
1728
1735
1729 if moddirstate:
1736 if moddirstate:
1730 self.dirstate.setparents(p1, p2)
1737 self.dirstate.setparents(p1, p2)
1731
1738
1732 stat = ((len(get), _("updated")),
1739 stat = ((len(get), _("updated")),
1733 (len(merge) - len(failedmerge), _("merged")),
1740 (len(merge) - len(failedmerge), _("merged")),
1734 (len(remove), _("removed")),
1741 (len(remove), _("removed")),
1735 (len(failedmerge), _("unresolved")))
1742 (len(failedmerge), _("unresolved")))
1736 note = ", ".join([_("%d files %s") % s for s in stat])
1743 note = ", ".join([_("%d files %s") % s for s in stat])
1737 self.ui.note("%s\n" % note)
1744 self.ui.note("%s\n" % note)
1738 if moddirstate and branch_merge:
1745 if moddirstate and branch_merge:
1739 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1746 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1740
1747
1741 return err
1748 return err
1742
1749
1743 def merge3(self, fn, my, other, p1, p2):
1750 def merge3(self, fn, my, other, p1, p2):
1744 """perform a 3-way merge in the working directory"""
1751 """perform a 3-way merge in the working directory"""
1745
1752
1746 def temp(prefix, node):
1753 def temp(prefix, node):
1747 pre = "%s~%s." % (os.path.basename(fn), prefix)
1754 pre = "%s~%s." % (os.path.basename(fn), prefix)
1748 (fd, name) = tempfile.mkstemp("", pre)
1755 (fd, name) = tempfile.mkstemp("", pre)
1749 f = os.fdopen(fd, "wb")
1756 f = os.fdopen(fd, "wb")
1750 self.wwrite(fn, fl.read(node), f)
1757 self.wwrite(fn, fl.read(node), f)
1751 f.close()
1758 f.close()
1752 return name
1759 return name
1753
1760
1754 fl = self.file(fn)
1761 fl = self.file(fn)
1755 base = fl.ancestor(my, other)
1762 base = fl.ancestor(my, other)
1756 a = self.wjoin(fn)
1763 a = self.wjoin(fn)
1757 b = temp("base", base)
1764 b = temp("base", base)
1758 c = temp("other", other)
1765 c = temp("other", other)
1759
1766
1760 self.ui.note(_("resolving %s\n") % fn)
1767 self.ui.note(_("resolving %s\n") % fn)
1761 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1768 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1762 (fn, short(my), short(other), short(base)))
1769 (fn, short(my), short(other), short(base)))
1763
1770
1764 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1771 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1765 or "hgmerge")
1772 or "hgmerge")
1766 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1773 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1767 environ={'HG_FILE': fn,
1774 environ={'HG_FILE': fn,
1768 'HG_MY_NODE': p1,
1775 'HG_MY_NODE': p1,
1769 'HG_OTHER_NODE': p2,
1776 'HG_OTHER_NODE': p2,
1770 'HG_FILE_MY_NODE': hex(my),
1777 'HG_FILE_MY_NODE': hex(my),
1771 'HG_FILE_OTHER_NODE': hex(other),
1778 'HG_FILE_OTHER_NODE': hex(other),
1772 'HG_FILE_BASE_NODE': hex(base)})
1779 'HG_FILE_BASE_NODE': hex(base)})
1773 if r:
1780 if r:
1774 self.ui.warn(_("merging %s failed!\n") % fn)
1781 self.ui.warn(_("merging %s failed!\n") % fn)
1775
1782
1776 os.unlink(b)
1783 os.unlink(b)
1777 os.unlink(c)
1784 os.unlink(c)
1778 return r
1785 return r
1779
1786
1780 def verify(self):
1787 def verify(self):
1781 filelinkrevs = {}
1788 filelinkrevs = {}
1782 filenodes = {}
1789 filenodes = {}
1783 changesets = revisions = files = 0
1790 changesets = revisions = files = 0
1784 errors = [0]
1791 errors = [0]
1785 neededmanifests = {}
1792 neededmanifests = {}
1786
1793
1787 def err(msg):
1794 def err(msg):
1788 self.ui.warn(msg + "\n")
1795 self.ui.warn(msg + "\n")
1789 errors[0] += 1
1796 errors[0] += 1
1790
1797
1791 def checksize(obj, name):
1798 def checksize(obj, name):
1792 d = obj.checksize()
1799 d = obj.checksize()
1793 if d[0]:
1800 if d[0]:
1794 err(_("%s data length off by %d bytes") % (name, d[0]))
1801 err(_("%s data length off by %d bytes") % (name, d[0]))
1795 if d[1]:
1802 if d[1]:
1796 err(_("%s index contains %d extra bytes") % (name, d[1]))
1803 err(_("%s index contains %d extra bytes") % (name, d[1]))
1797
1804
1798 seen = {}
1805 seen = {}
1799 self.ui.status(_("checking changesets\n"))
1806 self.ui.status(_("checking changesets\n"))
1800 checksize(self.changelog, "changelog")
1807 checksize(self.changelog, "changelog")
1801
1808
1802 for i in range(self.changelog.count()):
1809 for i in range(self.changelog.count()):
1803 changesets += 1
1810 changesets += 1
1804 n = self.changelog.node(i)
1811 n = self.changelog.node(i)
1805 l = self.changelog.linkrev(n)
1812 l = self.changelog.linkrev(n)
1806 if l != i:
1813 if l != i:
1807 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1814 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1808 if n in seen:
1815 if n in seen:
1809 err(_("duplicate changeset at revision %d") % i)
1816 err(_("duplicate changeset at revision %d") % i)
1810 seen[n] = 1
1817 seen[n] = 1
1811
1818
1812 for p in self.changelog.parents(n):
1819 for p in self.changelog.parents(n):
1813 if p not in self.changelog.nodemap:
1820 if p not in self.changelog.nodemap:
1814 err(_("changeset %s has unknown parent %s") %
1821 err(_("changeset %s has unknown parent %s") %
1815 (short(n), short(p)))
1822 (short(n), short(p)))
1816 try:
1823 try:
1817 changes = self.changelog.read(n)
1824 changes = self.changelog.read(n)
1818 except KeyboardInterrupt:
1825 except KeyboardInterrupt:
1819 self.ui.warn(_("interrupted"))
1826 self.ui.warn(_("interrupted"))
1820 raise
1827 raise
1821 except Exception, inst:
1828 except Exception, inst:
1822 err(_("unpacking changeset %s: %s") % (short(n), inst))
1829 err(_("unpacking changeset %s: %s") % (short(n), inst))
1823 continue
1830 continue
1824
1831
1825 neededmanifests[changes[0]] = n
1832 neededmanifests[changes[0]] = n
1826
1833
1827 for f in changes[3]:
1834 for f in changes[3]:
1828 filelinkrevs.setdefault(f, []).append(i)
1835 filelinkrevs.setdefault(f, []).append(i)
1829
1836
1830 seen = {}
1837 seen = {}
1831 self.ui.status(_("checking manifests\n"))
1838 self.ui.status(_("checking manifests\n"))
1832 checksize(self.manifest, "manifest")
1839 checksize(self.manifest, "manifest")
1833
1840
1834 for i in range(self.manifest.count()):
1841 for i in range(self.manifest.count()):
1835 n = self.manifest.node(i)
1842 n = self.manifest.node(i)
1836 l = self.manifest.linkrev(n)
1843 l = self.manifest.linkrev(n)
1837
1844
1838 if l < 0 or l >= self.changelog.count():
1845 if l < 0 or l >= self.changelog.count():
1839 err(_("bad manifest link (%d) at revision %d") % (l, i))
1846 err(_("bad manifest link (%d) at revision %d") % (l, i))
1840
1847
1841 if n in neededmanifests:
1848 if n in neededmanifests:
1842 del neededmanifests[n]
1849 del neededmanifests[n]
1843
1850
1844 if n in seen:
1851 if n in seen:
1845 err(_("duplicate manifest at revision %d") % i)
1852 err(_("duplicate manifest at revision %d") % i)
1846
1853
1847 seen[n] = 1
1854 seen[n] = 1
1848
1855
1849 for p in self.manifest.parents(n):
1856 for p in self.manifest.parents(n):
1850 if p not in self.manifest.nodemap:
1857 if p not in self.manifest.nodemap:
1851 err(_("manifest %s has unknown parent %s") %
1858 err(_("manifest %s has unknown parent %s") %
1852 (short(n), short(p)))
1859 (short(n), short(p)))
1853
1860
1854 try:
1861 try:
1855 delta = mdiff.patchtext(self.manifest.delta(n))
1862 delta = mdiff.patchtext(self.manifest.delta(n))
1856 except KeyboardInterrupt:
1863 except KeyboardInterrupt:
1857 self.ui.warn(_("interrupted"))
1864 self.ui.warn(_("interrupted"))
1858 raise
1865 raise
1859 except Exception, inst:
1866 except Exception, inst:
1860 err(_("unpacking manifest %s: %s") % (short(n), inst))
1867 err(_("unpacking manifest %s: %s") % (short(n), inst))
1861 continue
1868 continue
1862
1869
1863 try:
1870 try:
1864 ff = [ l.split('\0') for l in delta.splitlines() ]
1871 ff = [ l.split('\0') for l in delta.splitlines() ]
1865 for f, fn in ff:
1872 for f, fn in ff:
1866 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1873 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1867 except (ValueError, TypeError), inst:
1874 except (ValueError, TypeError), inst:
1868 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1875 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1869
1876
1870 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1877 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1871
1878
1872 for m, c in neededmanifests.items():
1879 for m, c in neededmanifests.items():
1873 err(_("Changeset %s refers to unknown manifest %s") %
1880 err(_("Changeset %s refers to unknown manifest %s") %
1874 (short(m), short(c)))
1881 (short(m), short(c)))
1875 del neededmanifests
1882 del neededmanifests
1876
1883
1877 for f in filenodes:
1884 for f in filenodes:
1878 if f not in filelinkrevs:
1885 if f not in filelinkrevs:
1879 err(_("file %s in manifest but not in changesets") % f)
1886 err(_("file %s in manifest but not in changesets") % f)
1880
1887
1881 for f in filelinkrevs:
1888 for f in filelinkrevs:
1882 if f not in filenodes:
1889 if f not in filenodes:
1883 err(_("file %s in changeset but not in manifest") % f)
1890 err(_("file %s in changeset but not in manifest") % f)
1884
1891
1885 self.ui.status(_("checking files\n"))
1892 self.ui.status(_("checking files\n"))
1886 ff = filenodes.keys()
1893 ff = filenodes.keys()
1887 ff.sort()
1894 ff.sort()
1888 for f in ff:
1895 for f in ff:
1889 if f == "/dev/null":
1896 if f == "/dev/null":
1890 continue
1897 continue
1891 files += 1
1898 files += 1
1892 if not f:
1899 if not f:
1893 err(_("file without name in manifest %s") % short(n))
1900 err(_("file without name in manifest %s") % short(n))
1894 continue
1901 continue
1895 fl = self.file(f)
1902 fl = self.file(f)
1896 checksize(fl, f)
1903 checksize(fl, f)
1897
1904
1898 nodes = {nullid: 1}
1905 nodes = {nullid: 1}
1899 seen = {}
1906 seen = {}
1900 for i in range(fl.count()):
1907 for i in range(fl.count()):
1901 revisions += 1
1908 revisions += 1
1902 n = fl.node(i)
1909 n = fl.node(i)
1903
1910
1904 if n in seen:
1911 if n in seen:
1905 err(_("%s: duplicate revision %d") % (f, i))
1912 err(_("%s: duplicate revision %d") % (f, i))
1906 if n not in filenodes[f]:
1913 if n not in filenodes[f]:
1907 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1914 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1908 else:
1915 else:
1909 del filenodes[f][n]
1916 del filenodes[f][n]
1910
1917
1911 flr = fl.linkrev(n)
1918 flr = fl.linkrev(n)
1912 if flr not in filelinkrevs.get(f, []):
1919 if flr not in filelinkrevs.get(f, []):
1913 err(_("%s:%s points to unexpected changeset %d")
1920 err(_("%s:%s points to unexpected changeset %d")
1914 % (f, short(n), flr))
1921 % (f, short(n), flr))
1915 else:
1922 else:
1916 filelinkrevs[f].remove(flr)
1923 filelinkrevs[f].remove(flr)
1917
1924
1918 # verify contents
1925 # verify contents
1919 try:
1926 try:
1920 t = fl.read(n)
1927 t = fl.read(n)
1921 except KeyboardInterrupt:
1928 except KeyboardInterrupt:
1922 self.ui.warn(_("interrupted"))
1929 self.ui.warn(_("interrupted"))
1923 raise
1930 raise
1924 except Exception, inst:
1931 except Exception, inst:
1925 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1932 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1926
1933
1927 # verify parents
1934 # verify parents
1928 (p1, p2) = fl.parents(n)
1935 (p1, p2) = fl.parents(n)
1929 if p1 not in nodes:
1936 if p1 not in nodes:
1930 err(_("file %s:%s unknown parent 1 %s") %
1937 err(_("file %s:%s unknown parent 1 %s") %
1931 (f, short(n), short(p1)))
1938 (f, short(n), short(p1)))
1932 if p2 not in nodes:
1939 if p2 not in nodes:
1933 err(_("file %s:%s unknown parent 2 %s") %
1940 err(_("file %s:%s unknown parent 2 %s") %
1934 (f, short(n), short(p1)))
1941 (f, short(n), short(p1)))
1935 nodes[n] = 1
1942 nodes[n] = 1
1936
1943
1937 # cross-check
1944 # cross-check
1938 for node in filenodes[f]:
1945 for node in filenodes[f]:
1939 err(_("node %s in manifests not in %s") % (hex(node), f))
1946 err(_("node %s in manifests not in %s") % (hex(node), f))
1940
1947
1941 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1948 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1942 (files, changesets, revisions))
1949 (files, changesets, revisions))
1943
1950
1944 if errors[0]:
1951 if errors[0]:
1945 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1952 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1946 return 1
1953 return 1
1947
1954
1948 # used to avoid circular references so destructors work
1955 # used to avoid circular references so destructors work
1949 def aftertrans(base):
1956 def aftertrans(base):
1950 p = base
1957 p = base
1951 def a():
1958 def a():
1952 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1959 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1953 util.rename(os.path.join(p, "journal.dirstate"),
1960 util.rename(os.path.join(p, "journal.dirstate"),
1954 os.path.join(p, "undo.dirstate"))
1961 os.path.join(p, "undo.dirstate"))
1955 return a
1962 return a
1956
1963
@@ -1,175 +1,176
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct
8 import struct
9 from revlog import *
9 from revlog import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "bisect array")
12 demandload(globals(), "bisect array")
13
13
14 class manifest(revlog):
14 class manifest(revlog):
15 def __init__(self, opener):
15 def __init__(self, opener, defversion=0):
16 self.mapcache = None
16 self.mapcache = None
17 self.listcache = None
17 self.listcache = None
18 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
18 revlog.__init__(self, opener, "00manifest.i", "00manifest.d",
19 defversion)
19
20
20 def read(self, node):
21 def read(self, node):
21 if node == nullid: return {} # don't upset local cache
22 if node == nullid: return {} # don't upset local cache
22 if self.mapcache and self.mapcache[0] == node:
23 if self.mapcache and self.mapcache[0] == node:
23 return self.mapcache[1]
24 return self.mapcache[1]
24 text = self.revision(node)
25 text = self.revision(node)
25 map = {}
26 map = {}
26 flag = {}
27 flag = {}
27 self.listcache = array.array('c', text)
28 self.listcache = array.array('c', text)
28 lines = text.splitlines(1)
29 lines = text.splitlines(1)
29 for l in lines:
30 for l in lines:
30 (f, n) = l.split('\0')
31 (f, n) = l.split('\0')
31 map[f] = bin(n[:40])
32 map[f] = bin(n[:40])
32 flag[f] = (n[40:-1] == "x")
33 flag[f] = (n[40:-1] == "x")
33 self.mapcache = (node, map, flag)
34 self.mapcache = (node, map, flag)
34 return map
35 return map
35
36
36 def readflags(self, node):
37 def readflags(self, node):
37 if node == nullid: return {} # don't upset local cache
38 if node == nullid: return {} # don't upset local cache
38 if not self.mapcache or self.mapcache[0] != node:
39 if not self.mapcache or self.mapcache[0] != node:
39 self.read(node)
40 self.read(node)
40 return self.mapcache[2]
41 return self.mapcache[2]
41
42
42 def diff(self, a, b):
43 def diff(self, a, b):
43 return mdiff.textdiff(str(a), str(b))
44 return mdiff.textdiff(str(a), str(b))
44
45
45 def add(self, map, flags, transaction, link, p1=None, p2=None,
46 def add(self, map, flags, transaction, link, p1=None, p2=None,
46 changed=None):
47 changed=None):
47
48
48 # returns a tuple (start, end). If the string is found
49 # returns a tuple (start, end). If the string is found
49 # m[start:end] are the line containing that string. If start == end
50 # m[start:end] are the line containing that string. If start == end
50 # the string was not found and they indicate the proper sorted
51 # the string was not found and they indicate the proper sorted
51 # insertion point. This was taken from bisect_left, and modified
52 # insertion point. This was taken from bisect_left, and modified
52 # to find line start/end as it goes along.
53 # to find line start/end as it goes along.
53 #
54 #
54 # m should be a buffer or a string
55 # m should be a buffer or a string
55 # s is a string
56 # s is a string
56 #
57 #
57 def manifestsearch(m, s, lo=0, hi=None):
58 def manifestsearch(m, s, lo=0, hi=None):
58 def advance(i, c):
59 def advance(i, c):
59 while i < lenm and m[i] != c:
60 while i < lenm and m[i] != c:
60 i += 1
61 i += 1
61 return i
62 return i
62 lenm = len(m)
63 lenm = len(m)
63 if not hi:
64 if not hi:
64 hi = lenm
65 hi = lenm
65 while lo < hi:
66 while lo < hi:
66 mid = (lo + hi) // 2
67 mid = (lo + hi) // 2
67 start = mid
68 start = mid
68 while start > 0 and m[start-1] != '\n':
69 while start > 0 and m[start-1] != '\n':
69 start -= 1
70 start -= 1
70 end = advance(start, '\0')
71 end = advance(start, '\0')
71 if m[start:end] < s:
72 if m[start:end] < s:
72 # we know that after the null there are 40 bytes of sha1
73 # we know that after the null there are 40 bytes of sha1
73 # this translates to the bisect lo = mid + 1
74 # this translates to the bisect lo = mid + 1
74 lo = advance(end + 40, '\n') + 1
75 lo = advance(end + 40, '\n') + 1
75 else:
76 else:
76 # this translates to the bisect hi = mid
77 # this translates to the bisect hi = mid
77 hi = start
78 hi = start
78 end = advance(lo, '\0')
79 end = advance(lo, '\0')
79 found = m[lo:end]
80 found = m[lo:end]
80 if cmp(s, found) == 0:
81 if cmp(s, found) == 0:
81 # we know that after the null there are 40 bytes of sha1
82 # we know that after the null there are 40 bytes of sha1
82 end = advance(end + 40, '\n')
83 end = advance(end + 40, '\n')
83 return (lo, end+1)
84 return (lo, end+1)
84 else:
85 else:
85 return (lo, lo)
86 return (lo, lo)
86
87
87 # apply the changes collected during the bisect loop to our addlist
88 # apply the changes collected during the bisect loop to our addlist
88 # return a delta suitable for addrevision
89 # return a delta suitable for addrevision
89 def addlistdelta(addlist, x):
90 def addlistdelta(addlist, x):
90 # start from the bottom up
91 # start from the bottom up
91 # so changes to the offsets don't mess things up.
92 # so changes to the offsets don't mess things up.
92 i = len(x)
93 i = len(x)
93 while i > 0:
94 while i > 0:
94 i -= 1
95 i -= 1
95 start = x[i][0]
96 start = x[i][0]
96 end = x[i][1]
97 end = x[i][1]
97 if x[i][2]:
98 if x[i][2]:
98 addlist[start:end] = array.array('c', x[i][2])
99 addlist[start:end] = array.array('c', x[i][2])
99 else:
100 else:
100 del addlist[start:end]
101 del addlist[start:end]
101 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
102 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2] \
102 for d in x ])
103 for d in x ])
103
104
104 # if we're using the listcache, make sure it is valid and
105 # if we're using the listcache, make sure it is valid and
105 # parented by the same node we're diffing against
106 # parented by the same node we're diffing against
106 if not changed or not self.listcache or not p1 or \
107 if not changed or not self.listcache or not p1 or \
107 self.mapcache[0] != p1:
108 self.mapcache[0] != p1:
108 files = map.keys()
109 files = map.keys()
109 files.sort()
110 files.sort()
110
111
111 # if this is changed to support newlines in filenames,
112 # if this is changed to support newlines in filenames,
112 # be sure to check the templates/ dir again (especially *-raw.tmpl)
113 # be sure to check the templates/ dir again (especially *-raw.tmpl)
113 text = ["%s\000%s%s\n" %
114 text = ["%s\000%s%s\n" %
114 (f, hex(map[f]), flags[f] and "x" or '')
115 (f, hex(map[f]), flags[f] and "x" or '')
115 for f in files]
116 for f in files]
116 self.listcache = array.array('c', "".join(text))
117 self.listcache = array.array('c', "".join(text))
117 cachedelta = None
118 cachedelta = None
118 else:
119 else:
119 addlist = self.listcache
120 addlist = self.listcache
120
121
121 # combine the changed lists into one list for sorting
122 # combine the changed lists into one list for sorting
122 work = [[x, 0] for x in changed[0]]
123 work = [[x, 0] for x in changed[0]]
123 work[len(work):] = [[x, 1] for x in changed[1]]
124 work[len(work):] = [[x, 1] for x in changed[1]]
124 work.sort()
125 work.sort()
125
126
126 delta = []
127 delta = []
127 dstart = None
128 dstart = None
128 dend = None
129 dend = None
129 dline = [""]
130 dline = [""]
130 start = 0
131 start = 0
131 # zero copy representation of addlist as a buffer
132 # zero copy representation of addlist as a buffer
132 addbuf = buffer(addlist)
133 addbuf = buffer(addlist)
133
134
134 # start with a readonly loop that finds the offset of
135 # start with a readonly loop that finds the offset of
135 # each line and creates the deltas
136 # each line and creates the deltas
136 for w in work:
137 for w in work:
137 f = w[0]
138 f = w[0]
138 # bs will either be the index of the item or the insert point
139 # bs will either be the index of the item or the insert point
139 start, end = manifestsearch(addbuf, f, start)
140 start, end = manifestsearch(addbuf, f, start)
140 if w[1] == 0:
141 if w[1] == 0:
141 l = "%s\000%s%s\n" % (f, hex(map[f]),
142 l = "%s\000%s%s\n" % (f, hex(map[f]),
142 flags[f] and "x" or '')
143 flags[f] and "x" or '')
143 else:
144 else:
144 l = ""
145 l = ""
145 if start == end and w[1] == 1:
146 if start == end and w[1] == 1:
146 # item we want to delete was not found, error out
147 # item we want to delete was not found, error out
147 raise AssertionError(
148 raise AssertionError(
148 _("failed to remove %s from manifest\n") % f)
149 _("failed to remove %s from manifest\n") % f)
149 if dstart != None and dstart <= start and dend >= start:
150 if dstart != None and dstart <= start and dend >= start:
150 if dend < end:
151 if dend < end:
151 dend = end
152 dend = end
152 if l:
153 if l:
153 dline.append(l)
154 dline.append(l)
154 else:
155 else:
155 if dstart != None:
156 if dstart != None:
156 delta.append([dstart, dend, "".join(dline)])
157 delta.append([dstart, dend, "".join(dline)])
157 dstart = start
158 dstart = start
158 dend = end
159 dend = end
159 dline = [l]
160 dline = [l]
160
161
161 if dstart != None:
162 if dstart != None:
162 delta.append([dstart, dend, "".join(dline)])
163 delta.append([dstart, dend, "".join(dline)])
163 # apply the delta to the addlist, and get a delta for addrevision
164 # apply the delta to the addlist, and get a delta for addrevision
164 cachedelta = addlistdelta(addlist, delta)
165 cachedelta = addlistdelta(addlist, delta)
165
166
166 # the delta is only valid if we've been processing the tip revision
167 # the delta is only valid if we've been processing the tip revision
167 if self.mapcache[0] != self.tip():
168 if self.mapcache[0] != self.tip():
168 cachedelta = None
169 cachedelta = None
169 self.listcache = addlist
170 self.listcache = addlist
170
171
171 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
172 n = self.addrevision(buffer(self.listcache), transaction, link, p1, \
172 p2, cachedelta)
173 p2, cachedelta)
173 self.mapcache = (n, map, flags)
174 self.mapcache = (n, map, flags)
174
175
175 return n
176 return n
@@ -1,883 +1,962
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii changegroup errno heapq mdiff os")
16 demandload(globals(), "binascii changegroup errno heapq mdiff os")
17 demandload(globals(), "sha struct zlib")
17 demandload(globals(), "sha struct zlib")
18
18
19 # revlog version strings
20 REVLOGV0 = 0
21 REVLOGNG = 1
22
19 def hash(text, p1, p2):
23 def hash(text, p1, p2):
20 """generate a hash from the given text and its parent hashes
24 """generate a hash from the given text and its parent hashes
21
25
22 This hash combines both the current file contents and its history
26 This hash combines both the current file contents and its history
23 in a manner that makes it easy to distinguish nodes with the same
27 in a manner that makes it easy to distinguish nodes with the same
24 content in the revision graph.
28 content in the revision graph.
25 """
29 """
26 l = [p1, p2]
30 l = [p1, p2]
27 l.sort()
31 l.sort()
28 s = sha.new(l[0])
32 s = sha.new(l[0])
29 s.update(l[1])
33 s.update(l[1])
30 s.update(text)
34 s.update(text)
31 return s.digest()
35 return s.digest()
32
36
33 def compress(text):
37 def compress(text):
34 """ generate a possibly-compressed representation of text """
38 """ generate a possibly-compressed representation of text """
35 if not text: return ("", text)
39 if not text: return ("", text)
36 if len(text) < 44:
40 if len(text) < 44:
37 if text[0] == '\0': return ("", text)
41 if text[0] == '\0': return ("", text)
38 return ('u', text)
42 return ('u', text)
39 bin = zlib.compress(text)
43 bin = zlib.compress(text)
40 if len(bin) > len(text):
44 if len(bin) > len(text):
41 if text[0] == '\0': return ("", text)
45 if text[0] == '\0': return ("", text)
42 return ('u', text)
46 return ('u', text)
43 return ("", bin)
47 return ("", bin)
44
48
45 def decompress(bin):
49 def decompress(bin):
46 """ decompress the given input """
50 """ decompress the given input """
47 if not bin: return bin
51 if not bin: return bin
48 t = bin[0]
52 t = bin[0]
49 if t == '\0': return bin
53 if t == '\0': return bin
50 if t == 'x': return zlib.decompress(bin)
54 if t == 'x': return zlib.decompress(bin)
51 if t == 'u': return bin[1:]
55 if t == 'u': return bin[1:]
52 raise RevlogError(_("unknown compression type %r") % t)
56 raise RevlogError(_("unknown compression type %r") % t)
53
57
54 indexformat = ">4l20s20s20s"
58 indexformatv0 = ">4l20s20s20s"
59 # index ng:
60 # 6 bytes offset
61 # 2 bytes flags
62 # 4 bytes compressed length
63 # 4 bytes uncompressed length
64 # 4 bytes: base rev
65 # 4 bytes link rev
66 # 4 bytes parent 1 rev
67 # 4 bytes parent 2 rev
68 # 32 bytes: nodeid
69 indexformatng = ">Qiiiiii20s12x"
70 versionformat = ">i"
55
71
56 class lazyparser(object):
72 class lazyparser(object):
57 """
73 """
58 this class avoids the need to parse the entirety of large indices
74 this class avoids the need to parse the entirety of large indices
59
75
60 By default we parse and load 1000 entries at a time.
76 By default we parse and load 1000 entries at a time.
61
77
62 If no position is specified, we load the whole index, and replace
78 If no position is specified, we load the whole index, and replace
63 the lazy objects in revlog with the underlying objects for
79 the lazy objects in revlog with the underlying objects for
64 efficiency in cases where we look at most of the nodes.
80 efficiency in cases where we look at most of the nodes.
65 """
81 """
66 def __init__(self, data, revlog):
82 def __init__(self, data, revlog, indexformat):
67 self.data = data
83 self.data = data
68 self.s = struct.calcsize(indexformat)
84 self.s = struct.calcsize(indexformat)
85 self.indexformat = indexformat
69 self.l = len(data)/self.s
86 self.l = len(data)/self.s
70 self.index = [None] * self.l
87 self.index = [None] * self.l
71 self.map = {nullid: -1}
88 self.map = {nullid: -1}
72 self.all = 0
89 self.all = 0
73 self.revlog = revlog
90 self.revlog = revlog
74
91
75 def trunc(self, pos):
76 self.l = pos/self.s
77
78 def load(self, pos=None):
92 def load(self, pos=None):
79 if self.all: return
93 if self.all: return
80 if pos is not None:
94 if pos is not None:
81 block = pos / 1000
95 block = pos / 1000
82 i = block * 1000
96 i = block * 1000
83 end = min(self.l, i + 1000)
97 end = min(self.l, i + 1000)
84 else:
98 else:
85 self.all = 1
99 self.all = 1
86 i = 0
100 i = 0
87 end = self.l
101 end = self.l
88 self.revlog.index = self.index
102 self.revlog.index = self.index
89 self.revlog.nodemap = self.map
103 self.revlog.nodemap = self.map
90
104
91 while i < end:
105 while i < end:
92 d = self.data[i * self.s: (i + 1) * self.s]
106 if not self.index[i]:
93 e = struct.unpack(indexformat, d)
107 d = self.data[i * self.s: (i + 1) * self.s]
94 self.index[i] = e
108 e = struct.unpack(self.indexformat, d)
95 self.map[e[6]] = i
109 self.index[i] = e
110 self.map[e[-1]] = i
96 i += 1
111 i += 1
97
112
98 class lazyindex(object):
113 class lazyindex(object):
99 """a lazy version of the index array"""
114 """a lazy version of the index array"""
100 def __init__(self, parser):
115 def __init__(self, parser):
101 self.p = parser
116 self.p = parser
102 def __len__(self):
117 def __len__(self):
103 return len(self.p.index)
118 return len(self.p.index)
104 def load(self, pos):
119 def load(self, pos):
105 if pos < 0:
120 if pos < 0:
106 pos += len(self.p.index)
121 pos += len(self.p.index)
107 self.p.load(pos)
122 self.p.load(pos)
108 return self.p.index[pos]
123 return self.p.index[pos]
109 def __getitem__(self, pos):
124 def __getitem__(self, pos):
110 return self.p.index[pos] or self.load(pos)
125 return self.p.index[pos] or self.load(pos)
126 def __setitem__(self, pos, item):
127 self.p.index[pos] = item
111 def __delitem__(self, pos):
128 def __delitem__(self, pos):
112 del self.p.index[pos]
129 del self.p.index[pos]
113 def append(self, e):
130 def append(self, e):
114 self.p.index.append(e)
131 self.p.index.append(e)
115 def trunc(self, pos):
116 self.p.trunc(pos)
117
132
118 class lazymap(object):
133 class lazymap(object):
119 """a lazy version of the node map"""
134 """a lazy version of the node map"""
120 def __init__(self, parser):
135 def __init__(self, parser):
121 self.p = parser
136 self.p = parser
122 def load(self, key):
137 def load(self, key):
123 if self.p.all: return
138 if self.p.all: return
124 n = self.p.data.find(key)
139 n = self.p.data.find(key)
125 if n < 0:
140 if n < 0:
126 raise KeyError(key)
141 raise KeyError(key)
127 pos = n / self.p.s
142 pos = n / self.p.s
128 self.p.load(pos)
143 self.p.load(pos)
129 def __contains__(self, key):
144 def __contains__(self, key):
130 self.p.load()
145 self.p.load()
131 return key in self.p.map
146 return key in self.p.map
132 def __iter__(self):
147 def __iter__(self):
133 yield nullid
148 yield nullid
134 for i in xrange(self.p.l):
149 for i in xrange(self.p.l):
135 try:
150 try:
136 yield self.p.index[i][6]
151 yield self.p.index[i][-1]
137 except:
152 except:
138 self.p.load(i)
153 self.p.load(i)
139 yield self.p.index[i][6]
154 yield self.p.index[i][-1]
140 def __getitem__(self, key):
155 def __getitem__(self, key):
141 try:
156 try:
142 return self.p.map[key]
157 return self.p.map[key]
143 except KeyError:
158 except KeyError:
144 try:
159 try:
145 self.load(key)
160 self.load(key)
146 return self.p.map[key]
161 return self.p.map[key]
147 except KeyError:
162 except KeyError:
148 raise KeyError("node " + hex(key))
163 raise KeyError("node " + hex(key))
149 def __setitem__(self, key, val):
164 def __setitem__(self, key, val):
150 self.p.map[key] = val
165 self.p.map[key] = val
151 def __delitem__(self, key):
166 def __delitem__(self, key):
152 del self.p.map[key]
167 del self.p.map[key]
153
168
154 class RevlogError(Exception): pass
169 class RevlogError(Exception): pass
155
170
156 class revlog(object):
171 class revlog(object):
157 """
172 """
158 the underlying revision storage object
173 the underlying revision storage object
159
174
160 A revlog consists of two parts, an index and the revision data.
175 A revlog consists of two parts, an index and the revision data.
161
176
162 The index is a file with a fixed record size containing
177 The index is a file with a fixed record size containing
163 information on each revision, includings its nodeid (hash), the
178 information on each revision, includings its nodeid (hash), the
164 nodeids of its parents, the position and offset of its data within
179 nodeids of its parents, the position and offset of its data within
165 the data file, and the revision it's based on. Finally, each entry
180 the data file, and the revision it's based on. Finally, each entry
166 contains a linkrev entry that can serve as a pointer to external
181 contains a linkrev entry that can serve as a pointer to external
167 data.
182 data.
168
183
169 The revision data itself is a linear collection of data chunks.
184 The revision data itself is a linear collection of data chunks.
170 Each chunk represents a revision and is usually represented as a
185 Each chunk represents a revision and is usually represented as a
171 delta against the previous chunk. To bound lookup time, runs of
186 delta against the previous chunk. To bound lookup time, runs of
172 deltas are limited to about 2 times the length of the original
187 deltas are limited to about 2 times the length of the original
173 version data. This makes retrieval of a version proportional to
188 version data. This makes retrieval of a version proportional to
174 its size, or O(1) relative to the number of revisions.
189 its size, or O(1) relative to the number of revisions.
175
190
176 Both pieces of the revlog are written to in an append-only
191 Both pieces of the revlog are written to in an append-only
177 fashion, which means we never need to rewrite a file to insert or
192 fashion, which means we never need to rewrite a file to insert or
178 remove data, and can use some simple techniques to avoid the need
193 remove data, and can use some simple techniques to avoid the need
179 for locking while reading.
194 for locking while reading.
180 """
195 """
181 def __init__(self, opener, indexfile, datafile):
196 def __init__(self, opener, indexfile, datafile, defversion=0):
182 """
197 """
183 create a revlog object
198 create a revlog object
184
199
185 opener is a function that abstracts the file opening operation
200 opener is a function that abstracts the file opening operation
186 and can be used to implement COW semantics or the like.
201 and can be used to implement COW semantics or the like.
187 """
202 """
188 self.indexfile = indexfile
203 self.indexfile = indexfile
189 self.datafile = datafile
204 self.datafile = datafile
190 self.opener = opener
205 self.opener = opener
191
206
192 self.indexstat = None
207 self.indexstat = None
193 self.cache = None
208 self.cache = None
194 self.chunkcache = None
209 self.chunkcache = None
210 self.defversion = defversion
195 self.load()
211 self.load()
196
212
197 def load(self):
213 def load(self):
214 v = self.defversion
198 try:
215 try:
199 f = self.opener(self.indexfile)
216 f = self.opener(self.indexfile)
217 i = f.read()
200 except IOError, inst:
218 except IOError, inst:
201 if inst.errno != errno.ENOENT:
219 if inst.errno != errno.ENOENT:
202 raise
220 raise
203 i = ""
221 i = ""
204 else:
222 else:
205 try:
223 try:
206 st = os.fstat(f.fileno())
224 st = os.fstat(f.fileno())
207 except AttributeError, inst:
225 except AttributeError, inst:
208 st = None
226 st = None
209 else:
227 else:
210 oldst = self.indexstat
228 oldst = self.indexstat
211 if (oldst and st.st_dev == oldst.st_dev
229 if (oldst and st.st_dev == oldst.st_dev
212 and st.st_ino == oldst.st_ino
230 and st.st_ino == oldst.st_ino
213 and st.st_mtime == oldst.st_mtime
231 and st.st_mtime == oldst.st_mtime
214 and st.st_ctime == oldst.st_ctime):
232 and st.st_ctime == oldst.st_ctime):
215 return
233 return
216 self.indexstat = st
234 self.indexstat = st
217 i = f.read()
235 if len(i) > 0:
236 v = struct.unpack(versionformat, i[:4])[0]
237 if v != 0:
238 flags = v & ~0xFFFF
239 fmt = v & 0xFFFF
240 if fmt != REVLOGNG or (flags & ~(REVLOGNGINLINEDATA)):
241 raise RevlogError(
242 _("unknown version format %d or flags %x on %s") %
243 (v, flags, self.indexfile))
244 self.version = v
245 if v == 0:
246 self.indexformat = indexformatv0
247 else:
248 self.indexformat = indexformatng
218
249
219 if i and i[:4] != "\0\0\0\0":
250 if i:
220 raise RevlogError(_("incompatible revlog signature on %s") %
251 if st and st.st_size > 10000:
221 self.indexfile)
252 # big index, let's parse it on demand
222
253 parser = lazyparser(i, self, self.indexformat)
223 if len(i) > 10000:
254 self.index = lazyindex(parser)
224 # big index, let's parse it on demand
255 self.nodemap = lazymap(parser)
225 parser = lazyparser(i, self)
256 else:
226 self.index = lazyindex(parser)
257 self.parseindex(i)
227 self.nodemap = lazymap(parser)
258 if self.version != 0:
259 e = list(self.index[0])
260 type = self.ngtype(e[0])
261 e[0] = self.offset_type(0, type)
262 self.index[0] = e
228 else:
263 else:
229 s = struct.calcsize(indexformat)
264 self.nodemap = { nullid: -1}
230 l = len(i) / s
265 self.index = []
231 self.index = [None] * l
266
232 m = [None] * l
267
268 def parseindex(self, data):
269 s = struct.calcsize(self.indexformat)
270 l = len(data)
271 self.index = []
272 self.nodemap = {nullid: -1}
273 off = 0
274 n = 0
275 while off < l:
276 e = struct.unpack(self.indexformat, data[off:off + s])
277 self.index.append(e)
278 self.nodemap[e[-1]] = n
279 n += 1
280 off += s
233
281
234 n = 0
282 def ngoffset(self, q):
235 for f in xrange(0, l * s, s):
283 if q & 0xFFFF:
236 # offset, size, base, linkrev, p1, p2, nodeid
284 raise RevlogError(_('%s: incompatible revision flag %x') %
237 e = struct.unpack(indexformat, i[f:f + s])
285 (self.indexfile, type))
238 m[n] = (e[6], n)
286 return long(q >> 16)
239 self.index[n] = e
287
240 n += 1
288 def ngtype(self, q):
289 return int(q & 0xFFFF)
241
290
242 self.nodemap = dict(m)
291 def offset_type(self, offset, type):
243 self.nodemap[nullid] = -1
292 return long(long(offset) << 16 | type)
293
294 def loadindexmap(self):
295 """loads both the map and the index from the lazy parser"""
296 if isinstance(self.index, lazyindex):
297 p = self.index.p
298 p.load()
244
299
245 def tip(self): return self.node(len(self.index) - 1)
300 def tip(self): return self.node(len(self.index) - 1)
246 def count(self): return len(self.index)
301 def count(self): return len(self.index)
247 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
302 def node(self, rev):
303 return (rev < 0) and nullid or self.index[rev][-1]
248 def rev(self, node):
304 def rev(self, node):
249 try:
305 try:
250 return self.nodemap[node]
306 return self.nodemap[node]
251 except KeyError:
307 except KeyError:
252 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
308 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
253 def linkrev(self, node): return self.index[self.rev(node)][3]
309 def linkrev(self, node): return self.index[self.rev(node)][-4]
254 def parents(self, node):
310 def parents(self, node):
255 if node == nullid: return (nullid, nullid)
311 if node == nullid: return (nullid, nullid)
256 return self.index[self.rev(node)][4:6]
312 r = self.rev(node)
313 d = self.index[r][-3:-1]
314 if self.version == 0:
315 return d
316 return [ self.node(x) for x in d ]
317 def start(self, rev):
318 if rev < 0:
319 return -1
320 if self.version != 0:
321 return self.ngoffset(self.index[rev][0])
322 return self.index[rev][0]
323 def end(self, rev): return self.start(rev) + self.length(rev)
257
324
258 def start(self, rev): return (rev < 0) and -1 or self.index[rev][0]
259 def length(self, rev):
325 def length(self, rev):
260 if rev < 0:
326 if rev < 0:
261 return 0
327 return 0
262 else:
328 else:
263 return self.index[rev][1]
329 return self.index[rev][1]
264 def end(self, rev): return self.start(rev) + self.length(rev)
330 def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
265 def base(self, rev): return (rev < 0) and rev or self.index[rev][2]
266
331
267 def reachable(self, rev, stop=None):
332 def reachable(self, rev, stop=None):
268 reachable = {}
333 reachable = {}
269 visit = [rev]
334 visit = [rev]
270 reachable[rev] = 1
335 reachable[rev] = 1
271 if stop:
336 if stop:
272 stopn = self.rev(stop)
337 stopn = self.rev(stop)
273 else:
338 else:
274 stopn = 0
339 stopn = 0
275 while visit:
340 while visit:
276 n = visit.pop(0)
341 n = visit.pop(0)
277 if n == stop:
342 if n == stop:
278 continue
343 continue
279 if n == nullid:
344 if n == nullid:
280 continue
345 continue
281 for p in self.parents(n):
346 for p in self.parents(n):
282 if self.rev(p) < stopn:
347 if self.rev(p) < stopn:
283 continue
348 continue
284 if p not in reachable:
349 if p not in reachable:
285 reachable[p] = 1
350 reachable[p] = 1
286 visit.append(p)
351 visit.append(p)
287 return reachable
352 return reachable
288
353
289 def nodesbetween(self, roots=None, heads=None):
354 def nodesbetween(self, roots=None, heads=None):
290 """Return a tuple containing three elements. Elements 1 and 2 contain
355 """Return a tuple containing three elements. Elements 1 and 2 contain
291 a final list bases and heads after all the unreachable ones have been
356 a final list bases and heads after all the unreachable ones have been
292 pruned. Element 0 contains a topologically sorted list of all
357 pruned. Element 0 contains a topologically sorted list of all
293
358
294 nodes that satisfy these constraints:
359 nodes that satisfy these constraints:
295 1. All nodes must be descended from a node in roots (the nodes on
360 1. All nodes must be descended from a node in roots (the nodes on
296 roots are considered descended from themselves).
361 roots are considered descended from themselves).
297 2. All nodes must also be ancestors of a node in heads (the nodes in
362 2. All nodes must also be ancestors of a node in heads (the nodes in
298 heads are considered to be their own ancestors).
363 heads are considered to be their own ancestors).
299
364
300 If roots is unspecified, nullid is assumed as the only root.
365 If roots is unspecified, nullid is assumed as the only root.
301 If heads is unspecified, it is taken to be the output of the
366 If heads is unspecified, it is taken to be the output of the
302 heads method (i.e. a list of all nodes in the repository that
367 heads method (i.e. a list of all nodes in the repository that
303 have no children)."""
368 have no children)."""
304 nonodes = ([], [], [])
369 nonodes = ([], [], [])
305 if roots is not None:
370 if roots is not None:
306 roots = list(roots)
371 roots = list(roots)
307 if not roots:
372 if not roots:
308 return nonodes
373 return nonodes
309 lowestrev = min([self.rev(n) for n in roots])
374 lowestrev = min([self.rev(n) for n in roots])
310 else:
375 else:
311 roots = [nullid] # Everybody's a descendent of nullid
376 roots = [nullid] # Everybody's a descendent of nullid
312 lowestrev = -1
377 lowestrev = -1
313 if (lowestrev == -1) and (heads is None):
378 if (lowestrev == -1) and (heads is None):
314 # We want _all_ the nodes!
379 # We want _all_ the nodes!
315 return ([self.node(r) for r in xrange(0, self.count())],
380 return ([self.node(r) for r in xrange(0, self.count())],
316 [nullid], list(self.heads()))
381 [nullid], list(self.heads()))
317 if heads is None:
382 if heads is None:
318 # All nodes are ancestors, so the latest ancestor is the last
383 # All nodes are ancestors, so the latest ancestor is the last
319 # node.
384 # node.
320 highestrev = self.count() - 1
385 highestrev = self.count() - 1
321 # Set ancestors to None to signal that every node is an ancestor.
386 # Set ancestors to None to signal that every node is an ancestor.
322 ancestors = None
387 ancestors = None
323 # Set heads to an empty dictionary for later discovery of heads
388 # Set heads to an empty dictionary for later discovery of heads
324 heads = {}
389 heads = {}
325 else:
390 else:
326 heads = list(heads)
391 heads = list(heads)
327 if not heads:
392 if not heads:
328 return nonodes
393 return nonodes
329 ancestors = {}
394 ancestors = {}
330 # Start at the top and keep marking parents until we're done.
395 # Start at the top and keep marking parents until we're done.
331 nodestotag = heads[:]
396 nodestotag = heads[:]
332 # Turn heads into a dictionary so we can remove 'fake' heads.
397 # Turn heads into a dictionary so we can remove 'fake' heads.
333 # Also, later we will be using it to filter out the heads we can't
398 # Also, later we will be using it to filter out the heads we can't
334 # find from roots.
399 # find from roots.
335 heads = dict.fromkeys(heads, 0)
400 heads = dict.fromkeys(heads, 0)
336 # Remember where the top was so we can use it as a limit later.
401 # Remember where the top was so we can use it as a limit later.
337 highestrev = max([self.rev(n) for n in nodestotag])
402 highestrev = max([self.rev(n) for n in nodestotag])
338 while nodestotag:
403 while nodestotag:
339 # grab a node to tag
404 # grab a node to tag
340 n = nodestotag.pop()
405 n = nodestotag.pop()
341 # Never tag nullid
406 # Never tag nullid
342 if n == nullid:
407 if n == nullid:
343 continue
408 continue
344 # A node's revision number represents its place in a
409 # A node's revision number represents its place in a
345 # topologically sorted list of nodes.
410 # topologically sorted list of nodes.
346 r = self.rev(n)
411 r = self.rev(n)
347 if r >= lowestrev:
412 if r >= lowestrev:
348 if n not in ancestors:
413 if n not in ancestors:
349 # If we are possibly a descendent of one of the roots
414 # If we are possibly a descendent of one of the roots
350 # and we haven't already been marked as an ancestor
415 # and we haven't already been marked as an ancestor
351 ancestors[n] = 1 # Mark as ancestor
416 ancestors[n] = 1 # Mark as ancestor
352 # Add non-nullid parents to list of nodes to tag.
417 # Add non-nullid parents to list of nodes to tag.
353 nodestotag.extend([p for p in self.parents(n) if
418 nodestotag.extend([p for p in self.parents(n) if
354 p != nullid])
419 p != nullid])
355 elif n in heads: # We've seen it before, is it a fake head?
420 elif n in heads: # We've seen it before, is it a fake head?
356 # So it is, real heads should not be the ancestors of
421 # So it is, real heads should not be the ancestors of
357 # any other heads.
422 # any other heads.
358 heads.pop(n)
423 heads.pop(n)
359 if not ancestors:
424 if not ancestors:
360 return nonodes
425 return nonodes
361 # Now that we have our set of ancestors, we want to remove any
426 # Now that we have our set of ancestors, we want to remove any
362 # roots that are not ancestors.
427 # roots that are not ancestors.
363
428
364 # If one of the roots was nullid, everything is included anyway.
429 # If one of the roots was nullid, everything is included anyway.
365 if lowestrev > -1:
430 if lowestrev > -1:
366 # But, since we weren't, let's recompute the lowest rev to not
431 # But, since we weren't, let's recompute the lowest rev to not
367 # include roots that aren't ancestors.
432 # include roots that aren't ancestors.
368
433
369 # Filter out roots that aren't ancestors of heads
434 # Filter out roots that aren't ancestors of heads
370 roots = [n for n in roots if n in ancestors]
435 roots = [n for n in roots if n in ancestors]
371 # Recompute the lowest revision
436 # Recompute the lowest revision
372 if roots:
437 if roots:
373 lowestrev = min([self.rev(n) for n in roots])
438 lowestrev = min([self.rev(n) for n in roots])
374 else:
439 else:
375 # No more roots? Return empty list
440 # No more roots? Return empty list
376 return nonodes
441 return nonodes
377 else:
442 else:
378 # We are descending from nullid, and don't need to care about
443 # We are descending from nullid, and don't need to care about
379 # any other roots.
444 # any other roots.
380 lowestrev = -1
445 lowestrev = -1
381 roots = [nullid]
446 roots = [nullid]
382 # Transform our roots list into a 'set' (i.e. a dictionary where the
447 # Transform our roots list into a 'set' (i.e. a dictionary where the
383 # values don't matter.
448 # values don't matter.
384 descendents = dict.fromkeys(roots, 1)
449 descendents = dict.fromkeys(roots, 1)
385 # Also, keep the original roots so we can filter out roots that aren't
450 # Also, keep the original roots so we can filter out roots that aren't
386 # 'real' roots (i.e. are descended from other roots).
451 # 'real' roots (i.e. are descended from other roots).
387 roots = descendents.copy()
452 roots = descendents.copy()
388 # Our topologically sorted list of output nodes.
453 # Our topologically sorted list of output nodes.
389 orderedout = []
454 orderedout = []
390 # Don't start at nullid since we don't want nullid in our output list,
455 # Don't start at nullid since we don't want nullid in our output list,
391 # and if nullid shows up in descedents, empty parents will look like
456 # and if nullid shows up in descedents, empty parents will look like
392 # they're descendents.
457 # they're descendents.
393 for r in xrange(max(lowestrev, 0), highestrev + 1):
458 for r in xrange(max(lowestrev, 0), highestrev + 1):
394 n = self.node(r)
459 n = self.node(r)
395 isdescendent = False
460 isdescendent = False
396 if lowestrev == -1: # Everybody is a descendent of nullid
461 if lowestrev == -1: # Everybody is a descendent of nullid
397 isdescendent = True
462 isdescendent = True
398 elif n in descendents:
463 elif n in descendents:
399 # n is already a descendent
464 # n is already a descendent
400 isdescendent = True
465 isdescendent = True
401 # This check only needs to be done here because all the roots
466 # This check only needs to be done here because all the roots
402 # will start being marked is descendents before the loop.
467 # will start being marked is descendents before the loop.
403 if n in roots:
468 if n in roots:
404 # If n was a root, check if it's a 'real' root.
469 # If n was a root, check if it's a 'real' root.
405 p = tuple(self.parents(n))
470 p = tuple(self.parents(n))
406 # If any of its parents are descendents, it's not a root.
471 # If any of its parents are descendents, it's not a root.
407 if (p[0] in descendents) or (p[1] in descendents):
472 if (p[0] in descendents) or (p[1] in descendents):
408 roots.pop(n)
473 roots.pop(n)
409 else:
474 else:
410 p = tuple(self.parents(n))
475 p = tuple(self.parents(n))
411 # A node is a descendent if either of its parents are
476 # A node is a descendent if either of its parents are
412 # descendents. (We seeded the dependents list with the roots
477 # descendents. (We seeded the dependents list with the roots
413 # up there, remember?)
478 # up there, remember?)
414 if (p[0] in descendents) or (p[1] in descendents):
479 if (p[0] in descendents) or (p[1] in descendents):
415 descendents[n] = 1
480 descendents[n] = 1
416 isdescendent = True
481 isdescendent = True
417 if isdescendent and ((ancestors is None) or (n in ancestors)):
482 if isdescendent and ((ancestors is None) or (n in ancestors)):
418 # Only include nodes that are both descendents and ancestors.
483 # Only include nodes that are both descendents and ancestors.
419 orderedout.append(n)
484 orderedout.append(n)
420 if (ancestors is not None) and (n in heads):
485 if (ancestors is not None) and (n in heads):
421 # We're trying to figure out which heads are reachable
486 # We're trying to figure out which heads are reachable
422 # from roots.
487 # from roots.
423 # Mark this head as having been reached
488 # Mark this head as having been reached
424 heads[n] = 1
489 heads[n] = 1
425 elif ancestors is None:
490 elif ancestors is None:
426 # Otherwise, we're trying to discover the heads.
491 # Otherwise, we're trying to discover the heads.
427 # Assume this is a head because if it isn't, the next step
492 # Assume this is a head because if it isn't, the next step
428 # will eventually remove it.
493 # will eventually remove it.
429 heads[n] = 1
494 heads[n] = 1
430 # But, obviously its parents aren't.
495 # But, obviously its parents aren't.
431 for p in self.parents(n):
496 for p in self.parents(n):
432 heads.pop(p, None)
497 heads.pop(p, None)
433 heads = [n for n in heads.iterkeys() if heads[n] != 0]
498 heads = [n for n in heads.iterkeys() if heads[n] != 0]
434 roots = roots.keys()
499 roots = roots.keys()
435 assert orderedout
500 assert orderedout
436 assert roots
501 assert roots
437 assert heads
502 assert heads
438 return (orderedout, roots, heads)
503 return (orderedout, roots, heads)
439
504
440 def heads(self, start=None):
505 def heads(self, start=None):
441 """return the list of all nodes that have no children
506 """return the list of all nodes that have no children
442
507
443 if start is specified, only heads that are descendants of
508 if start is specified, only heads that are descendants of
444 start will be returned
509 start will be returned
445
510
446 """
511 """
447 if start is None:
512 if start is None:
448 start = nullid
513 start = nullid
449 reachable = {start: 1}
514 reachable = {start: 1}
450 heads = {start: 1}
515 heads = {start: 1}
451 startrev = self.rev(start)
516 startrev = self.rev(start)
452
517
453 for r in xrange(startrev + 1, self.count()):
518 for r in xrange(startrev + 1, self.count()):
454 n = self.node(r)
519 n = self.node(r)
455 for pn in self.parents(n):
520 for pn in self.parents(n):
456 if pn in reachable:
521 if pn in reachable:
457 reachable[n] = 1
522 reachable[n] = 1
458 heads[n] = 1
523 heads[n] = 1
459 if pn in heads:
524 if pn in heads:
460 del heads[pn]
525 del heads[pn]
461 return heads.keys()
526 return heads.keys()
462
527
463 def children(self, node):
528 def children(self, node):
464 """find the children of a given node"""
529 """find the children of a given node"""
465 c = []
530 c = []
466 p = self.rev(node)
531 p = self.rev(node)
467 for r in range(p + 1, self.count()):
532 for r in range(p + 1, self.count()):
468 n = self.node(r)
533 n = self.node(r)
469 for pn in self.parents(n):
534 for pn in self.parents(n):
470 if pn == node:
535 if pn == node:
471 c.append(n)
536 c.append(n)
472 continue
537 continue
473 elif pn == nullid:
538 elif pn == nullid:
474 continue
539 continue
475 return c
540 return c
476
541
477 def lookup(self, id):
542 def lookup(self, id):
478 """locate a node based on revision number or subset of hex nodeid"""
543 """locate a node based on revision number or subset of hex nodeid"""
479 try:
544 try:
480 rev = int(id)
545 rev = int(id)
481 if str(rev) != id: raise ValueError
546 if str(rev) != id: raise ValueError
482 if rev < 0: rev = self.count() + rev
547 if rev < 0: rev = self.count() + rev
483 if rev < 0 or rev >= self.count(): raise ValueError
548 if rev < 0 or rev >= self.count(): raise ValueError
484 return self.node(rev)
549 return self.node(rev)
485 except (ValueError, OverflowError):
550 except (ValueError, OverflowError):
486 c = []
551 c = []
487 for n in self.nodemap:
552 for n in self.nodemap:
488 if hex(n).startswith(id):
553 if hex(n).startswith(id):
489 c.append(n)
554 c.append(n)
490 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
555 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
491 if len(c) < 1: raise RevlogError(_("No match found"))
556 if len(c) < 1: raise RevlogError(_("No match found"))
492 return c[0]
557 return c[0]
493
558
494 return None
559 return None
495
560
496 def diff(self, a, b):
561 def diff(self, a, b):
497 """return a delta between two revisions"""
562 """return a delta between two revisions"""
498 return mdiff.textdiff(a, b)
563 return mdiff.textdiff(a, b)
499
564
500 def patches(self, t, pl):
565 def patches(self, t, pl):
501 """apply a list of patches to a string"""
566 """apply a list of patches to a string"""
502 return mdiff.patches(t, pl)
567 return mdiff.patches(t, pl)
503
568
504 def chunk(self, rev):
569 def chunk(self, rev, df=None, cachelen=4096):
505 start, length = self.start(rev), self.length(rev)
570 start, length = self.start(rev), self.length(rev)
506 end = start + length
571 end = start + length
507
572 def loadcache(df):
508 def loadcache():
573 cache_length = max(cachelen, length) # 4k
509 cache_length = max(4096 * 1024, length) # 4Mo
574 if not df:
510 df = self.opener(self.datafile)
575 df = self.opener(self.datafile)
511 df.seek(start)
576 df.seek(start)
512 self.chunkcache = (start, df.read(cache_length))
577 self.chunkcache = (start, df.read(cache_length))
513
578
514 if not self.chunkcache:
579 if not self.chunkcache:
515 loadcache()
580 loadcache(df)
516
581
517 cache_start = self.chunkcache[0]
582 cache_start = self.chunkcache[0]
518 cache_end = cache_start + len(self.chunkcache[1])
583 cache_end = cache_start + len(self.chunkcache[1])
519 if start >= cache_start and end <= cache_end:
584 if start >= cache_start and end <= cache_end:
520 # it is cached
585 # it is cached
521 offset = start - cache_start
586 offset = start - cache_start
522 else:
587 else:
523 loadcache()
588 loadcache(df)
524 offset = 0
589 offset = 0
525
590
526 #def checkchunk():
591 #def checkchunk():
527 # df = self.opener(self.datafile)
592 # df = self.opener(self.datafile)
528 # df.seek(start)
593 # df.seek(start)
529 # return df.read(length)
594 # return df.read(length)
530 #assert s == checkchunk()
595 #assert s == checkchunk()
531 return decompress(self.chunkcache[1][offset:offset + length])
596 return decompress(self.chunkcache[1][offset:offset + length])
532
597
533 def delta(self, node):
598 def delta(self, node):
534 """return or calculate a delta between a node and its predecessor"""
599 """return or calculate a delta between a node and its predecessor"""
535 r = self.rev(node)
600 r = self.rev(node)
536 return self.revdiff(r - 1, r)
601 return self.revdiff(r - 1, r)
537
602
538 def revdiff(self, rev1, rev2):
603 def revdiff(self, rev1, rev2):
539 """return or calculate a delta between two revisions"""
604 """return or calculate a delta between two revisions"""
540 b1 = self.base(rev1)
605 b1 = self.base(rev1)
541 b2 = self.base(rev2)
606 b2 = self.base(rev2)
542 if b1 == b2 and rev1 + 1 == rev2:
607 if b1 == b2 and rev1 + 1 == rev2:
543 return self.chunk(rev2)
608 return self.chunk(rev2)
544 else:
609 else:
545 return self.diff(self.revision(self.node(rev1)),
610 return self.diff(self.revision(self.node(rev1)),
546 self.revision(self.node(rev2)))
611 self.revision(self.node(rev2)))
547
612
548 def revision(self, node):
613 def revision(self, node):
549 """return an uncompressed revision of a given"""
614 """return an uncompressed revision of a given"""
550 if node == nullid: return ""
615 if node == nullid: return ""
551 if self.cache and self.cache[0] == node: return self.cache[2]
616 if self.cache and self.cache[0] == node: return self.cache[2]
552
617
553 # look up what we need to read
618 # look up what we need to read
554 text = None
619 text = None
555 rev = self.rev(node)
620 rev = self.rev(node)
556 base = self.base(rev)
621 base = self.base(rev)
557
622
623 df = self.opener(self.datafile)
624
558 # do we have useful data cached?
625 # do we have useful data cached?
559 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
626 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
560 base = self.cache[1]
627 base = self.cache[1]
561 text = self.cache[2]
628 text = self.cache[2]
562 else:
629 else:
563 text = self.chunk(base)
630 text = self.chunk(base, df=df)
564
631
565 bins = []
632 bins = []
566 for r in xrange(base + 1, rev + 1):
633 for r in xrange(base + 1, rev + 1):
567 bins.append(self.chunk(r))
634 bins.append(self.chunk(r, df=df))
568
635
569 text = self.patches(text, bins)
636 text = self.patches(text, bins)
570
637
571 p1, p2 = self.parents(node)
638 p1, p2 = self.parents(node)
572 if node != hash(text, p1, p2):
639 if node != hash(text, p1, p2):
573 raise RevlogError(_("integrity check failed on %s:%d")
640 raise RevlogError(_("integrity check failed on %s:%d")
574 % (self.datafile, rev))
641 % (self.datafile, rev))
575
642
576 self.cache = (node, rev, text)
643 self.cache = (node, rev, text)
577 return text
644 return text
578
645
579 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
646 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
580 """add a revision to the log
647 """add a revision to the log
581
648
582 text - the revision data to add
649 text - the revision data to add
583 transaction - the transaction object used for rollback
650 transaction - the transaction object used for rollback
584 link - the linkrev data to add
651 link - the linkrev data to add
585 p1, p2 - the parent nodeids of the revision
652 p1, p2 - the parent nodeids of the revision
586 d - an optional precomputed delta
653 d - an optional precomputed delta
587 """
654 """
588 if text is None: text = ""
655 if text is None: text = ""
589 if p1 is None: p1 = self.tip()
656 if p1 is None: p1 = self.tip()
590 if p2 is None: p2 = nullid
657 if p2 is None: p2 = nullid
591
658
592 node = hash(text, p1, p2)
659 node = hash(text, p1, p2)
593
660
594 if node in self.nodemap:
661 if node in self.nodemap:
595 return node
662 return node
596
663
597 n = self.count()
664 n = self.count()
598 t = n - 1
665 t = n - 1
599
666
600 if n:
667 if n:
601 base = self.base(t)
668 base = self.base(t)
602 start = self.start(base)
669 start = self.start(base)
603 end = self.end(t)
670 end = self.end(t)
604 if not d:
671 if not d:
605 prev = self.revision(self.tip())
672 prev = self.revision(self.tip())
606 d = self.diff(prev, str(text))
673 d = self.diff(prev, str(text))
607 data = compress(d)
674 data = compress(d)
608 l = len(data[1]) + len(data[0])
675 l = len(data[1]) + len(data[0])
609 dist = end - start + l
676 dist = end - start + l
610
677
611 # full versions are inserted when the needed deltas
678 # full versions are inserted when the needed deltas
612 # become comparable to the uncompressed text
679 # become comparable to the uncompressed text
613 if not n or dist > len(text) * 2:
680 if not n or dist > len(text) * 2:
614 data = compress(text)
681 data = compress(text)
615 l = len(data[1]) + len(data[0])
682 l = len(data[1]) + len(data[0])
616 base = n
683 base = n
617 else:
684 else:
618 base = self.base(t)
685 base = self.base(t)
619
686
620 offset = 0
687 offset = 0
621 if t >= 0:
688 if t >= 0:
622 offset = self.end(t)
689 offset = self.end(t)
623
690
624 e = (offset, l, base, link, p1, p2, node)
691 if self.version == 0:
692 e = (offset, l, base, link, p1, p2, node)
693 else:
694 e = (self.offset_type(offset, 0), l, len(text),
695 base, link, self.rev(p1), self.rev(p2), node)
625
696
626 self.index.append(e)
697 self.index.append(e)
627 self.nodemap[node] = n
698 self.nodemap[node] = n
628 entry = struct.pack(indexformat, *e)
699 entry = struct.pack(self.indexformat, *e)
629
700
630 transaction.add(self.datafile, e[0])
701 transaction.add(self.datafile, offset)
702 transaction.add(self.indexfile, n * len(entry))
631 f = self.opener(self.datafile, "a")
703 f = self.opener(self.datafile, "a")
632 if data[0]:
704 if data[0]:
633 f.write(data[0])
705 f.write(data[0])
634 f.write(data[1])
706 f.write(data[1])
635 transaction.add(self.indexfile, n * len(entry))
707 f = self.opener(self.indexfile, "a")
636 self.opener(self.indexfile, "a").write(entry)
708
709 if len(self.index) == 1 and self.version != 0:
710 l = struct.pack(versionformat, self.version)
711 f.write(l)
712 entry = entry[4:]
713
714 f.write(entry)
637
715
638 self.cache = (node, n, text)
716 self.cache = (node, n, text)
639 return node
717 return node
640
718
641 def ancestor(self, a, b):
719 def ancestor(self, a, b):
642 """calculate the least common ancestor of nodes a and b"""
720 """calculate the least common ancestor of nodes a and b"""
643 # calculate the distance of every node from root
721 # calculate the distance of every node from root
644 dist = {nullid: 0}
722 dist = {nullid: 0}
645 for i in xrange(self.count()):
723 for i in xrange(self.count()):
646 n = self.node(i)
724 n = self.node(i)
647 p1, p2 = self.parents(n)
725 p1, p2 = self.parents(n)
648 dist[n] = max(dist[p1], dist[p2]) + 1
726 dist[n] = max(dist[p1], dist[p2]) + 1
649
727
650 # traverse ancestors in order of decreasing distance from root
728 # traverse ancestors in order of decreasing distance from root
651 def ancestors(node):
729 def ancestors(node):
652 # we store negative distances because heap returns smallest member
730 # we store negative distances because heap returns smallest member
653 h = [(-dist[node], node)]
731 h = [(-dist[node], node)]
654 seen = {}
732 seen = {}
655 while h:
733 while h:
656 d, n = heapq.heappop(h)
734 d, n = heapq.heappop(h)
657 if n not in seen:
735 if n not in seen:
658 seen[n] = 1
736 seen[n] = 1
659 yield (-d, n)
737 yield (-d, n)
660 for p in self.parents(n):
738 for p in self.parents(n):
661 heapq.heappush(h, (-dist[p], p))
739 heapq.heappush(h, (-dist[p], p))
662
740
663 def generations(node):
741 def generations(node):
664 sg, s = None, {}
742 sg, s = None, {}
665 for g,n in ancestors(node):
743 for g,n in ancestors(node):
666 if g != sg:
744 if g != sg:
667 if sg:
745 if sg:
668 yield sg, s
746 yield sg, s
669 sg, s = g, {n:1}
747 sg, s = g, {n:1}
670 else:
748 else:
671 s[n] = 1
749 s[n] = 1
672 yield sg, s
750 yield sg, s
673
751
674 x = generations(a)
752 x = generations(a)
675 y = generations(b)
753 y = generations(b)
676 gx = x.next()
754 gx = x.next()
677 gy = y.next()
755 gy = y.next()
678
756
679 # increment each ancestor list until it is closer to root than
757 # increment each ancestor list until it is closer to root than
680 # the other, or they match
758 # the other, or they match
681 while 1:
759 while 1:
682 #print "ancestor gen %s %s" % (gx[0], gy[0])
760 #print "ancestor gen %s %s" % (gx[0], gy[0])
683 if gx[0] == gy[0]:
761 if gx[0] == gy[0]:
684 # find the intersection
762 # find the intersection
685 i = [ n for n in gx[1] if n in gy[1] ]
763 i = [ n for n in gx[1] if n in gy[1] ]
686 if i:
764 if i:
687 return i[0]
765 return i[0]
688 else:
766 else:
689 #print "next"
767 #print "next"
690 gy = y.next()
768 gy = y.next()
691 gx = x.next()
769 gx = x.next()
692 elif gx[0] < gy[0]:
770 elif gx[0] < gy[0]:
693 #print "next y"
771 #print "next y"
694 gy = y.next()
772 gy = y.next()
695 else:
773 else:
696 #print "next x"
774 #print "next x"
697 gx = x.next()
775 gx = x.next()
698
776
699 def group(self, nodelist, lookup, infocollect=None):
777 def group(self, nodelist, lookup, infocollect=None):
700 """calculate a delta group
778 """calculate a delta group
701
779
702 Given a list of changeset revs, return a set of deltas and
780 Given a list of changeset revs, return a set of deltas and
703 metadata corresponding to nodes. the first delta is
781 metadata corresponding to nodes. the first delta is
704 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
782 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
705 have this parent as it has all history before these
783 have this parent as it has all history before these
706 changesets. parent is parent[0]
784 changesets. parent is parent[0]
707 """
785 """
708 revs = [self.rev(n) for n in nodelist]
786 revs = [self.rev(n) for n in nodelist]
709
787
710 # if we don't have any revisions touched by these changesets, bail
788 # if we don't have any revisions touched by these changesets, bail
711 if not revs:
789 if not revs:
712 yield changegroup.closechunk()
790 yield changegroup.closechunk()
713 return
791 return
714
792
715 # add the parent of the first rev
793 # add the parent of the first rev
716 p = self.parents(self.node(revs[0]))[0]
794 p = self.parents(self.node(revs[0]))[0]
717 revs.insert(0, self.rev(p))
795 revs.insert(0, self.rev(p))
718
796
719 # build deltas
797 # build deltas
720 for d in xrange(0, len(revs) - 1):
798 for d in xrange(0, len(revs) - 1):
721 a, b = revs[d], revs[d + 1]
799 a, b = revs[d], revs[d + 1]
722 nb = self.node(b)
800 nb = self.node(b)
723
801
724 if infocollect is not None:
802 if infocollect is not None:
725 infocollect(nb)
803 infocollect(nb)
726
804
727 d = self.revdiff(a, b)
805 d = self.revdiff(a, b)
728 p = self.parents(nb)
806 p = self.parents(nb)
729 meta = nb + p[0] + p[1] + lookup(nb)
807 meta = nb + p[0] + p[1] + lookup(nb)
730 yield changegroup.genchunk("%s%s" % (meta, d))
808 yield changegroup.genchunk("%s%s" % (meta, d))
731
809
732 yield changegroup.closechunk()
810 yield changegroup.closechunk()
733
811
734 def addgroup(self, revs, linkmapper, transaction, unique=0):
812 def addgroup(self, revs, linkmapper, transaction, unique=0):
735 """
813 """
736 add a delta group
814 add a delta group
737
815
738 given a set of deltas, add them to the revision log. the
816 given a set of deltas, add them to the revision log. the
739 first delta is against its parent, which should be in our
817 first delta is against its parent, which should be in our
740 log, the rest are against the previous delta.
818 log, the rest are against the previous delta.
741 """
819 """
742
820
743 #track the base of the current delta log
821 #track the base of the current delta log
744 r = self.count()
822 r = self.count()
745 t = r - 1
823 t = r - 1
746 node = None
824 node = None
747
825
748 base = prev = -1
826 base = prev = -1
749 start = end = measure = 0
827 start = end = measure = 0
750 if r:
828 if r:
751 base = self.base(t)
752 start = self.start(base)
753 end = self.end(t)
829 end = self.end(t)
754 measure = self.length(base)
755 prev = self.tip()
756
830
831 ifh = self.opener(self.indexfile, "a+")
832 transaction.add(self.indexfile, ifh.tell())
757 transaction.add(self.datafile, end)
833 transaction.add(self.datafile, end)
758 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
759 dfh = self.opener(self.datafile, "a")
834 dfh = self.opener(self.datafile, "a")
760 ifh = self.opener(self.indexfile, "a")
761
835
762 # loop through our set of deltas
836 # loop through our set of deltas
763 chain = None
837 chain = None
764 for chunk in revs:
838 for chunk in revs:
765 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
839 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
766 link = linkmapper(cs)
840 link = linkmapper(cs)
767 if node in self.nodemap:
841 if node in self.nodemap:
768 # this can happen if two branches make the same change
842 # this can happen if two branches make the same change
769 # if unique:
843 # if unique:
770 # raise RevlogError(_("already have %s") % hex(node[:4]))
844 # raise RevlogError(_("already have %s") % hex(node[:4]))
771 chain = node
845 chain = node
772 continue
846 continue
773 delta = chunk[80:]
847 delta = chunk[80:]
774
848
775 for p in (p1, p2):
849 for p in (p1, p2):
776 if not p in self.nodemap:
850 if not p in self.nodemap:
777 raise RevlogError(_("unknown parent %s") % short(p1))
851 raise RevlogError(_("unknown parent %s") % short(p1))
778
852
779 if not chain:
853 if not chain:
780 # retrieve the parent revision of the delta chain
854 # retrieve the parent revision of the delta chain
781 chain = p1
855 chain = p1
782 if not chain in self.nodemap:
856 if not chain in self.nodemap:
783 raise RevlogError(_("unknown base %s") % short(chain[:4]))
857 raise RevlogError(_("unknown base %s") % short(chain[:4]))
784
858
785 # full versions are inserted when the needed deltas become
859 # full versions are inserted when the needed deltas become
786 # comparable to the uncompressed text or when the previous
860 # comparable to the uncompressed text or when the previous
787 # version is not the one we have a delta against. We use
861 # version is not the one we have a delta against. We use
788 # the size of the previous full rev as a proxy for the
862 # the size of the previous full rev as a proxy for the
789 # current size.
863 # current size.
790
864
791 if chain == prev:
865 if chain == prev:
792 tempd = compress(delta)
866 tempd = compress(delta)
793 cdelta = tempd[0] + tempd[1]
867 cdelta = tempd[0] + tempd[1]
794
868
795 if chain != prev or (end - start + len(cdelta)) > measure * 2:
869 if chain != prev or (end - start + len(cdelta)) > measure * 2:
796 # flush our writes here so we can read it in revision
870 # flush our writes here so we can read it in revision
797 dfh.flush()
871 if dfh:
872 dfh.flush()
798 ifh.flush()
873 ifh.flush()
799 text = self.revision(chain)
874 text = self.revision(chain)
800 text = self.patches(text, [delta])
875 text = self.patches(text, [delta])
801 chk = self.addrevision(text, transaction, link, p1, p2)
876 chk = self.addrevision(text, transaction, link, p1, p2)
802 if chk != node:
877 if chk != node:
803 raise RevlogError(_("consistency error adding group"))
878 raise RevlogError(_("consistency error adding group"))
804 measure = len(text)
879 measure = len(text)
805 else:
880 else:
806 e = (end, len(cdelta), base, link, p1, p2, node)
881 if self.version == 0:
882 e = (end, len(cdelta), base, link, p1, p2, node)
883 else:
884 e = (self.offset_type(end, 0), len(cdelta), -1, base,
885 link, self.rev(p1), self.rev(p2), node)
807 self.index.append(e)
886 self.index.append(e)
808 self.nodemap[node] = r
887 self.nodemap[node] = r
809 dfh.write(cdelta)
888 dfh.write(cdelta)
810 ifh.write(struct.pack(indexformat, *e))
889 ifh.write(struct.pack(self.indexformat, *e))
811
890
812 t, r, chain, prev = r, r + 1, node, node
891 t, r, chain, prev = r, r + 1, node, node
813 base = self.base(t)
892 base = self.base(t)
814 start = self.start(base)
893 start = self.start(base)
815 end = self.end(t)
894 end = self.end(t)
816
895
817 dfh.close()
818 ifh.close()
819 if node is None:
896 if node is None:
820 raise RevlogError(_("group to be added is empty"))
897 raise RevlogError(_("group to be added is empty"))
821 return node
898 return node
822
899
823 def strip(self, rev, minlink):
900 def strip(self, rev, minlink):
824 if self.count() == 0 or rev >= self.count():
901 if self.count() == 0 or rev >= self.count():
825 return
902 return
826
903
904 if isinstance(self.index, lazyindex):
905 self.loadindexmap()
906
827 # When stripping away a revision, we need to make sure it
907 # When stripping away a revision, we need to make sure it
828 # does not actually belong to an older changeset.
908 # does not actually belong to an older changeset.
829 # The minlink parameter defines the oldest revision
909 # The minlink parameter defines the oldest revision
830 # we're allowed to strip away.
910 # we're allowed to strip away.
831 while minlink > self.index[rev][3]:
911 while minlink > self.index[rev][-4]:
832 rev += 1
912 rev += 1
833 if rev >= self.count():
913 if rev >= self.count():
834 return
914 return
835
915
836 # first truncate the files on disk
916 # first truncate the files on disk
837 end = self.start(rev)
917 end = self.start(rev)
838 self.opener(self.datafile, "a").truncate(end)
918 df = self.opener(self.datafile, "a")
839 end = rev * struct.calcsize(indexformat)
919 df.truncate(end)
840 self.opener(self.indexfile, "a").truncate(end)
920 end = rev * struct.calcsize(self.indexformat)
921
922 indexf = self.opener(self.indexfile, "a")
923 indexf.truncate(end)
841
924
842 # then reset internal state in memory to forget those revisions
925 # then reset internal state in memory to forget those revisions
843 self.cache = None
926 self.cache = None
844 self.chunkcache = None
927 self.chunkcache = None
845 for p in self.index[rev:]:
928 for x in xrange(rev, self.count()):
846 del self.nodemap[p[6]]
929 del self.nodemap[self.node(x)]
847 del self.index[rev:]
848
930
849 # truncating the lazyindex also truncates the lazymap.
931 del self.index[rev:]
850 if isinstance(self.index, lazyindex):
851 self.index.trunc(end)
852
853
932
854 def checksize(self):
933 def checksize(self):
855 expected = 0
934 expected = 0
856 if self.count():
935 if self.count():
857 expected = self.end(self.count() - 1)
936 expected = self.end(self.count() - 1)
858
937
859 try:
938 try:
860 f = self.opener(self.datafile)
939 f = self.opener(self.datafile)
861 f.seek(0, 2)
940 f.seek(0, 2)
862 actual = f.tell()
941 actual = f.tell()
863 dd = actual - expected
942 dd = actual - expected
864 except IOError, inst:
943 except IOError, inst:
865 if inst.errno != errno.ENOENT:
944 if inst.errno != errno.ENOENT:
866 raise
945 raise
867 dd = 0
946 dd = 0
868
947
869 try:
948 try:
870 f = self.opener(self.indexfile)
949 f = self.opener(self.indexfile)
871 f.seek(0, 2)
950 f.seek(0, 2)
872 actual = f.tell()
951 actual = f.tell()
873 s = struct.calcsize(indexformat)
952 s = struct.calcsize(self.indexformat)
874 i = actual / s
953 i = actual / s
875 di = actual - (i * s)
954 di = actual - (i * s)
876 except IOError, inst:
955 except IOError, inst:
877 if inst.errno != errno.ENOENT:
956 if inst.errno != errno.ENOENT:
878 raise
957 raise
879 di = 0
958 di = 0
880
959
881 return (dd, di)
960 return (dd, di)
882
961
883
962
@@ -1,47 +1,48
1 # statichttprepo.py - simple http repository class for mercurial
1 # statichttprepo.py - simple http repository class for mercurial
2 #
2 #
3 # This provides read-only repo access to repositories exported via static http
3 # This provides read-only repo access to repositories exported via static http
4 #
4 #
5 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms
7 # This software may be used and distributed according to the terms
8 # of the GNU General Public License, incorporated herein by reference.
8 # of the GNU General Public License, incorporated herein by reference.
9
9
10 from demandload import demandload
10 from demandload import demandload
11 demandload(globals(), "changelog filelog httprangereader")
11 demandload(globals(), "changelog filelog httprangereader")
12 demandload(globals(), "localrepo manifest os urllib urllib2")
12 demandload(globals(), "localrepo manifest os urllib urllib2")
13
13
14 class rangereader(httprangereader.httprangereader):
14 class rangereader(httprangereader.httprangereader):
15 def read(self, size=None):
15 def read(self, size=None):
16 try:
16 try:
17 return httprangereader.httprangereader.read(self, size)
17 return httprangereader.httprangereader.read(self, size)
18 except urllib2.HTTPError, inst:
18 except urllib2.HTTPError, inst:
19 raise IOError(None, inst)
19 raise IOError(None, inst)
20 except urllib2.URLError, inst:
20 except urllib2.URLError, inst:
21 raise IOError(None, inst.reason[1])
21 raise IOError(None, inst.reason[1])
22
22
23 def opener(base):
23 def opener(base):
24 """return a function that opens files over http"""
24 """return a function that opens files over http"""
25 p = base
25 p = base
26 def o(path, mode="r"):
26 def o(path, mode="r"):
27 f = os.path.join(p, urllib.quote(path))
27 f = os.path.join(p, urllib.quote(path))
28 return rangereader(f)
28 return rangereader(f)
29 return o
29 return o
30
30
31 class statichttprepository(localrepo.localrepository):
31 class statichttprepository(localrepo.localrepository):
32 def __init__(self, ui, path):
32 def __init__(self, ui, path):
33 self.path = (path + "/.hg")
33 self.path = (path + "/.hg")
34 self.ui = ui
34 self.ui = ui
35 self.revlogversion = 0
35 self.opener = opener(self.path)
36 self.opener = opener(self.path)
36 self.manifest = manifest.manifest(self.opener)
37 self.manifest = manifest.manifest(self.opener)
37 self.changelog = changelog.changelog(self.opener)
38 self.changelog = changelog.changelog(self.opener)
38 self.tagscache = None
39 self.tagscache = None
39 self.nodetagscache = None
40 self.nodetagscache = None
40 self.encodepats = None
41 self.encodepats = None
41 self.decodepats = None
42 self.decodepats = None
42
43
43 def dev(self):
44 def dev(self):
44 return -1
45 return -1
45
46
46 def local(self):
47 def local(self):
47 return False
48 return False
@@ -1,257 +1,264
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import ConfigParser
8 import ConfigParser
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "errno os re socket sys tempfile util")
11 demandload(globals(), "errno os re socket sys tempfile util")
12
12
13 class ui(object):
13 class ui(object):
14 def __init__(self, verbose=False, debug=False, quiet=False,
14 def __init__(self, verbose=False, debug=False, quiet=False,
15 interactive=True, parentui=None):
15 interactive=True, parentui=None):
16 self.overlay = {}
16 self.overlay = {}
17 if parentui is None:
17 if parentui is None:
18 # this is the parent of all ui children
18 # this is the parent of all ui children
19 self.parentui = None
19 self.parentui = None
20 self.cdata = ConfigParser.SafeConfigParser()
20 self.cdata = ConfigParser.SafeConfigParser()
21 self.readconfig(util.rcpath())
21 self.readconfig(util.rcpath())
22
22
23 self.quiet = self.configbool("ui", "quiet")
23 self.quiet = self.configbool("ui", "quiet")
24 self.verbose = self.configbool("ui", "verbose")
24 self.verbose = self.configbool("ui", "verbose")
25 self.debugflag = self.configbool("ui", "debug")
25 self.debugflag = self.configbool("ui", "debug")
26 self.interactive = self.configbool("ui", "interactive", True)
26 self.interactive = self.configbool("ui", "interactive", True)
27
27
28 self.updateopts(verbose, debug, quiet, interactive)
28 self.updateopts(verbose, debug, quiet, interactive)
29 self.diffcache = None
29 self.diffcache = None
30 self.header = []
30 self.header = []
31 self.prev_header = []
31 self.prev_header = []
32 self.revlogopts = self.configrevlog()
32 else:
33 else:
33 # parentui may point to an ui object which is already a child
34 # parentui may point to an ui object which is already a child
34 self.parentui = parentui.parentui or parentui
35 self.parentui = parentui.parentui or parentui
35 parent_cdata = self.parentui.cdata
36 parent_cdata = self.parentui.cdata
36 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
37 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
37 # make interpolation work
38 # make interpolation work
38 for section in parent_cdata.sections():
39 for section in parent_cdata.sections():
39 self.cdata.add_section(section)
40 self.cdata.add_section(section)
40 for name, value in parent_cdata.items(section, raw=True):
41 for name, value in parent_cdata.items(section, raw=True):
41 self.cdata.set(section, name, value)
42 self.cdata.set(section, name, value)
42
43
43 def __getattr__(self, key):
44 def __getattr__(self, key):
44 return getattr(self.parentui, key)
45 return getattr(self.parentui, key)
45
46
46 def updateopts(self, verbose=False, debug=False, quiet=False,
47 def updateopts(self, verbose=False, debug=False, quiet=False,
47 interactive=True):
48 interactive=True):
48 self.quiet = (self.quiet or quiet) and not verbose and not debug
49 self.quiet = (self.quiet or quiet) and not verbose and not debug
49 self.verbose = (self.verbose or verbose) or debug
50 self.verbose = (self.verbose or verbose) or debug
50 self.debugflag = (self.debugflag or debug)
51 self.debugflag = (self.debugflag or debug)
51 self.interactive = (self.interactive and interactive)
52 self.interactive = (self.interactive and interactive)
52
53
53 def readconfig(self, fn, root=None):
54 def readconfig(self, fn, root=None):
54 if isinstance(fn, basestring):
55 if isinstance(fn, basestring):
55 fn = [fn]
56 fn = [fn]
56 for f in fn:
57 for f in fn:
57 try:
58 try:
58 self.cdata.read(f)
59 self.cdata.read(f)
59 except ConfigParser.ParsingError, inst:
60 except ConfigParser.ParsingError, inst:
60 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
61 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
61 # translate paths relative to root (or home) into absolute paths
62 # translate paths relative to root (or home) into absolute paths
62 if root is None:
63 if root is None:
63 root = os.path.expanduser('~')
64 root = os.path.expanduser('~')
64 for name, path in self.configitems("paths"):
65 for name, path in self.configitems("paths"):
65 if path and path.find("://") == -1 and not os.path.isabs(path):
66 if path and path.find("://") == -1 and not os.path.isabs(path):
66 self.cdata.set("paths", name, os.path.join(root, path))
67 self.cdata.set("paths", name, os.path.join(root, path))
67
68
68 def setconfig(self, section, name, val):
69 def setconfig(self, section, name, val):
69 self.overlay[(section, name)] = val
70 self.overlay[(section, name)] = val
70
71
71 def config(self, section, name, default=None):
72 def config(self, section, name, default=None):
72 if self.overlay.has_key((section, name)):
73 if self.overlay.has_key((section, name)):
73 return self.overlay[(section, name)]
74 return self.overlay[(section, name)]
74 if self.cdata.has_option(section, name):
75 if self.cdata.has_option(section, name):
75 try:
76 try:
76 return self.cdata.get(section, name)
77 return self.cdata.get(section, name)
77 except ConfigParser.InterpolationError, inst:
78 except ConfigParser.InterpolationError, inst:
78 raise util.Abort(_("Error in configuration:\n%s") % inst)
79 raise util.Abort(_("Error in configuration:\n%s") % inst)
79 if self.parentui is None:
80 if self.parentui is None:
80 return default
81 return default
81 else:
82 else:
82 return self.parentui.config(section, name, default)
83 return self.parentui.config(section, name, default)
83
84
84 def configbool(self, section, name, default=False):
85 def configbool(self, section, name, default=False):
85 if self.overlay.has_key((section, name)):
86 if self.overlay.has_key((section, name)):
86 return self.overlay[(section, name)]
87 return self.overlay[(section, name)]
87 if self.cdata.has_option(section, name):
88 if self.cdata.has_option(section, name):
88 try:
89 try:
89 return self.cdata.getboolean(section, name)
90 return self.cdata.getboolean(section, name)
90 except ConfigParser.InterpolationError, inst:
91 except ConfigParser.InterpolationError, inst:
91 raise util.Abort(_("Error in configuration:\n%s") % inst)
92 raise util.Abort(_("Error in configuration:\n%s") % inst)
92 if self.parentui is None:
93 if self.parentui is None:
93 return default
94 return default
94 else:
95 else:
95 return self.parentui.configbool(section, name, default)
96 return self.parentui.configbool(section, name, default)
96
97
97 def configitems(self, section):
98 def configitems(self, section):
98 items = {}
99 items = {}
99 if self.parentui is not None:
100 if self.parentui is not None:
100 items = dict(self.parentui.configitems(section))
101 items = dict(self.parentui.configitems(section))
101 if self.cdata.has_section(section):
102 if self.cdata.has_section(section):
102 try:
103 try:
103 items.update(dict(self.cdata.items(section)))
104 items.update(dict(self.cdata.items(section)))
104 except ConfigParser.InterpolationError, inst:
105 except ConfigParser.InterpolationError, inst:
105 raise util.Abort(_("Error in configuration:\n%s") % inst)
106 raise util.Abort(_("Error in configuration:\n%s") % inst)
106 x = items.items()
107 x = items.items()
107 x.sort()
108 x.sort()
108 return x
109 return x
109
110
110 def walkconfig(self, seen=None):
111 def walkconfig(self, seen=None):
111 if seen is None:
112 if seen is None:
112 seen = {}
113 seen = {}
113 for (section, name), value in self.overlay.iteritems():
114 for (section, name), value in self.overlay.iteritems():
114 yield section, name, value
115 yield section, name, value
115 seen[section, name] = 1
116 seen[section, name] = 1
116 for section in self.cdata.sections():
117 for section in self.cdata.sections():
117 for name, value in self.cdata.items(section):
118 for name, value in self.cdata.items(section):
118 if (section, name) in seen: continue
119 if (section, name) in seen: continue
119 yield section, name, value.replace('\n', '\\n')
120 yield section, name, value.replace('\n', '\\n')
120 seen[section, name] = 1
121 seen[section, name] = 1
121 if self.parentui is not None:
122 if self.parentui is not None:
122 for parent in self.parentui.walkconfig(seen):
123 for parent in self.parentui.walkconfig(seen):
123 yield parent
124 yield parent
124
125
125 def extensions(self):
126 def extensions(self):
126 return self.configitems("extensions")
127 return self.configitems("extensions")
127
128
128 def hgignorefiles(self):
129 def hgignorefiles(self):
129 result = []
130 result = []
130 cfgitems = self.configitems("ui")
131 cfgitems = self.configitems("ui")
131 for key, value in cfgitems:
132 for key, value in cfgitems:
132 if key == 'ignore' or key.startswith('ignore.'):
133 if key == 'ignore' or key.startswith('ignore.'):
133 path = os.path.expanduser(value)
134 path = os.path.expanduser(value)
134 result.append(path)
135 result.append(path)
135 return result
136 return result
136
137
138 def configrevlog(self):
139 ret = {}
140 for x in self.configitems("revlog"):
141 k = x[0].lower()
142 ret[k] = x[1]
143 return ret
137 def diffopts(self):
144 def diffopts(self):
138 if self.diffcache:
145 if self.diffcache:
139 return self.diffcache
146 return self.diffcache
140 ret = { 'showfunc' : True, 'ignorews' : False}
147 ret = { 'showfunc' : True, 'ignorews' : False}
141 for x in self.configitems("diff"):
148 for x in self.configitems("diff"):
142 k = x[0].lower()
149 k = x[0].lower()
143 v = x[1]
150 v = x[1]
144 if v:
151 if v:
145 v = v.lower()
152 v = v.lower()
146 if v == 'true':
153 if v == 'true':
147 value = True
154 value = True
148 else:
155 else:
149 value = False
156 value = False
150 ret[k] = value
157 ret[k] = value
151 self.diffcache = ret
158 self.diffcache = ret
152 return ret
159 return ret
153
160
154 def username(self):
161 def username(self):
155 """Return default username to be used in commits.
162 """Return default username to be used in commits.
156
163
157 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
164 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
158 and stop searching if one of these is set.
165 and stop searching if one of these is set.
159 Abort if found username is an empty string to force specifying
166 Abort if found username is an empty string to force specifying
160 the commit user elsewhere, e.g. with line option or repo hgrc.
167 the commit user elsewhere, e.g. with line option or repo hgrc.
161 If not found, use $LOGNAME or $USERNAME +"@full.hostname".
168 If not found, use $LOGNAME or $USERNAME +"@full.hostname".
162 """
169 """
163 user = os.environ.get("HGUSER")
170 user = os.environ.get("HGUSER")
164 if user is None:
171 if user is None:
165 user = self.config("ui", "username")
172 user = self.config("ui", "username")
166 if user is None:
173 if user is None:
167 user = os.environ.get("EMAIL")
174 user = os.environ.get("EMAIL")
168 if user is None:
175 if user is None:
169 user = os.environ.get("LOGNAME") or os.environ.get("USERNAME")
176 user = os.environ.get("LOGNAME") or os.environ.get("USERNAME")
170 if user:
177 if user:
171 user = "%s@%s" % (user, socket.getfqdn())
178 user = "%s@%s" % (user, socket.getfqdn())
172 if not user:
179 if not user:
173 raise util.Abort(_("Please specify a username."))
180 raise util.Abort(_("Please specify a username."))
174 return user
181 return user
175
182
176 def shortuser(self, user):
183 def shortuser(self, user):
177 """Return a short representation of a user name or email address."""
184 """Return a short representation of a user name or email address."""
178 if not self.verbose: user = util.shortuser(user)
185 if not self.verbose: user = util.shortuser(user)
179 return user
186 return user
180
187
181 def expandpath(self, loc):
188 def expandpath(self, loc):
182 """Return repository location relative to cwd or from [paths]"""
189 """Return repository location relative to cwd or from [paths]"""
183 if loc.find("://") != -1 or os.path.exists(loc):
190 if loc.find("://") != -1 or os.path.exists(loc):
184 return loc
191 return loc
185
192
186 return self.config("paths", loc, loc)
193 return self.config("paths", loc, loc)
187
194
188 def write(self, *args):
195 def write(self, *args):
189 if self.header:
196 if self.header:
190 if self.header != self.prev_header:
197 if self.header != self.prev_header:
191 self.prev_header = self.header
198 self.prev_header = self.header
192 self.write(*self.header)
199 self.write(*self.header)
193 self.header = []
200 self.header = []
194 for a in args:
201 for a in args:
195 sys.stdout.write(str(a))
202 sys.stdout.write(str(a))
196
203
197 def write_header(self, *args):
204 def write_header(self, *args):
198 for a in args:
205 for a in args:
199 self.header.append(str(a))
206 self.header.append(str(a))
200
207
201 def write_err(self, *args):
208 def write_err(self, *args):
202 try:
209 try:
203 if not sys.stdout.closed: sys.stdout.flush()
210 if not sys.stdout.closed: sys.stdout.flush()
204 for a in args:
211 for a in args:
205 sys.stderr.write(str(a))
212 sys.stderr.write(str(a))
206 except IOError, inst:
213 except IOError, inst:
207 if inst.errno != errno.EPIPE:
214 if inst.errno != errno.EPIPE:
208 raise
215 raise
209
216
210 def flush(self):
217 def flush(self):
211 try: sys.stdout.flush()
218 try: sys.stdout.flush()
212 except: pass
219 except: pass
213 try: sys.stderr.flush()
220 try: sys.stderr.flush()
214 except: pass
221 except: pass
215
222
216 def readline(self):
223 def readline(self):
217 return sys.stdin.readline()[:-1]
224 return sys.stdin.readline()[:-1]
218 def prompt(self, msg, pat, default="y"):
225 def prompt(self, msg, pat, default="y"):
219 if not self.interactive: return default
226 if not self.interactive: return default
220 while 1:
227 while 1:
221 self.write(msg, " ")
228 self.write(msg, " ")
222 r = self.readline()
229 r = self.readline()
223 if re.match(pat, r):
230 if re.match(pat, r):
224 return r
231 return r
225 else:
232 else:
226 self.write(_("unrecognized response\n"))
233 self.write(_("unrecognized response\n"))
227 def status(self, *msg):
234 def status(self, *msg):
228 if not self.quiet: self.write(*msg)
235 if not self.quiet: self.write(*msg)
229 def warn(self, *msg):
236 def warn(self, *msg):
230 self.write_err(*msg)
237 self.write_err(*msg)
231 def note(self, *msg):
238 def note(self, *msg):
232 if self.verbose: self.write(*msg)
239 if self.verbose: self.write(*msg)
233 def debug(self, *msg):
240 def debug(self, *msg):
234 if self.debugflag: self.write(*msg)
241 if self.debugflag: self.write(*msg)
235 def edit(self, text, user):
242 def edit(self, text, user):
236 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt")
243 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt")
237 try:
244 try:
238 f = os.fdopen(fd, "w")
245 f = os.fdopen(fd, "w")
239 f.write(text)
246 f.write(text)
240 f.close()
247 f.close()
241
248
242 editor = (os.environ.get("HGEDITOR") or
249 editor = (os.environ.get("HGEDITOR") or
243 self.config("ui", "editor") or
250 self.config("ui", "editor") or
244 os.environ.get("EDITOR", "vi"))
251 os.environ.get("EDITOR", "vi"))
245
252
246 util.system("%s \"%s\"" % (editor, name),
253 util.system("%s \"%s\"" % (editor, name),
247 environ={'HGUSER': user},
254 environ={'HGUSER': user},
248 onerr=util.Abort, errprefix=_("edit failed"))
255 onerr=util.Abort, errprefix=_("edit failed"))
249
256
250 f = open(name)
257 f = open(name)
251 t = f.read()
258 t = f.read()
252 f.close()
259 f.close()
253 t = re.sub("(?m)^HG:.*\n", "", t)
260 t = re.sub("(?m)^HG:.*\n", "", t)
254 finally:
261 finally:
255 os.unlink(name)
262 os.unlink(name)
256
263
257 return t
264 return t
General Comments 0
You need to be logged in to leave comments. Login now