##// END OF EJS Templates
Merge bundle -r work from Eric Hopper
Matt Mackall -
r1469:0847c45f merge default
parent child Browse files
Show More
@@ -0,0 +1,59 b''
1 #!/bin/bash
2
3 hg init test
4 cd test
5 cat >>afile <<EOF
6 0
7 EOF
8 hg add afile
9 hg commit -m "0.0"
10 cat >>afile <<EOF
11 1
12 EOF
13 hg commit -m "0.1"
14 cat >>afile <<EOF
15 2
16 EOF
17 hg commit -m "0.2"
18 cat >>afile <<EOF
19 3
20 EOF
21 hg commit -m "0.3"
22 hg update -C 0
23 cat >>afile <<EOF
24 1
25 EOF
26 hg commit -m "1.1"
27 cat >>afile <<EOF
28 2
29 EOF
30 hg commit -m "1.2"
31 cat >fred <<EOF
32 a line
33 EOF
34 cat >>afile <<EOF
35 3
36 EOF
37 hg add fred
38 hg commit -m "1.3"
39 hg mv afile adifferentfile
40 hg commit -m "1.3m"
41 hg update -C 3
42 hg mv afile anotherfile
43 hg commit -m "0.3m"
44 hg debugindex .hg/data/afile.i
45 hg debugindex .hg/data/adifferentfile.i
46 hg debugindex .hg/data/anotherfile.i
47 hg debugindex .hg/data/fred.i
48 hg debugindex .hg/00manifest.i
49 hg verify
50 cd ..
51 for i in 0 1 2 3 4 5 6 7 8; do
52 hg clone -r "$i" test test-"$i"
53 cd test-"$i"
54 hg verify
55 cd ..
56 done
57 cd test-8
58 hg pull ../test-7
59 hg verify
@@ -0,0 +1,126 b''
1 rev offset length base linkrev nodeid p1 p2
2 0 0 3 0 0 362fef284ce2 000000000000 000000000000
3 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000
4 2 8 7 2 2 4c982badb186 125144f7e028 000000000000
5 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000
6 rev offset length base linkrev nodeid p1 p2
7 0 0 75 0 7 905359268f77 000000000000 000000000000
8 rev offset length base linkrev nodeid p1 p2
9 0 0 75 0 8 905359268f77 000000000000 000000000000
10 rev offset length base linkrev nodeid p1 p2
11 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000
12 rev offset length base linkrev nodeid p1 p2
13 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000
14 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000
15 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000
16 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000
17 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000
18 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000
19 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000
20 checking changesets
21 checking manifests
22 crosschecking files in changesets and manifests
23 checking files
24 4 files, 9 changesets, 7 total revisions
25 requesting all changes
26 adding changesets
27 adding manifests
28 adding file changes
29 added 1 changesets with 1 changes to 1 files
30 checking changesets
31 checking manifests
32 crosschecking files in changesets and manifests
33 checking files
34 1 files, 1 changesets, 1 total revisions
35 requesting all changes
36 adding changesets
37 adding manifests
38 adding file changes
39 added 2 changesets with 2 changes to 1 files
40 checking changesets
41 checking manifests
42 crosschecking files in changesets and manifests
43 checking files
44 1 files, 2 changesets, 2 total revisions
45 requesting all changes
46 adding changesets
47 adding manifests
48 adding file changes
49 added 3 changesets with 3 changes to 1 files
50 checking changesets
51 checking manifests
52 crosschecking files in changesets and manifests
53 checking files
54 1 files, 3 changesets, 3 total revisions
55 requesting all changes
56 adding changesets
57 adding manifests
58 adding file changes
59 added 4 changesets with 4 changes to 1 files
60 checking changesets
61 checking manifests
62 crosschecking files in changesets and manifests
63 checking files
64 1 files, 4 changesets, 4 total revisions
65 requesting all changes
66 adding changesets
67 adding manifests
68 adding file changes
69 added 2 changesets with 2 changes to 1 files
70 checking changesets
71 checking manifests
72 crosschecking files in changesets and manifests
73 checking files
74 1 files, 2 changesets, 2 total revisions
75 requesting all changes
76 adding changesets
77 adding manifests
78 adding file changes
79 added 3 changesets with 3 changes to 1 files
80 checking changesets
81 checking manifests
82 crosschecking files in changesets and manifests
83 checking files
84 1 files, 3 changesets, 3 total revisions
85 requesting all changes
86 adding changesets
87 adding manifests
88 adding file changes
89 added 4 changesets with 5 changes to 2 files
90 checking changesets
91 checking manifests
92 crosschecking files in changesets and manifests
93 checking files
94 2 files, 4 changesets, 5 total revisions
95 requesting all changes
96 adding changesets
97 adding manifests
98 adding file changes
99 added 5 changesets with 6 changes to 3 files
100 checking changesets
101 checking manifests
102 crosschecking files in changesets and manifests
103 checking files
104 3 files, 5 changesets, 6 total revisions
105 requesting all changes
106 adding changesets
107 adding manifests
108 adding file changes
109 added 5 changesets with 5 changes to 2 files
110 checking changesets
111 checking manifests
112 crosschecking files in changesets and manifests
113 checking files
114 2 files, 5 changesets, 5 total revisions
115 pulling from ../test-7
116 searching for changes
117 adding changesets
118 adding manifests
119 adding file changes
120 added 4 changesets with 2 changes to 3 files (+1 heads)
121 (run 'hg update' to get a working copy)
122 checking changesets
123 checking manifests
124 crosschecking files in changesets and manifests
125 checking files
126 4 files, 9 changesets, 7 total revisions
@@ -1,2653 +1,2666 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18
18
19 def filterfiles(filters, files):
19 def filterfiles(filters, files):
20 l = [x for x in files if x in filters]
20 l = [x for x in files if x in filters]
21
21
22 for t in filters:
22 for t in filters:
23 if t and t[-1] != "/":
23 if t and t[-1] != "/":
24 t += "/"
24 t += "/"
25 l += [x for x in files if x.startswith(t)]
25 l += [x for x in files if x.startswith(t)]
26 return l
26 return l
27
27
28 def relpath(repo, args):
28 def relpath(repo, args):
29 cwd = repo.getcwd()
29 cwd = repo.getcwd()
30 if cwd:
30 if cwd:
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 return args
32 return args
33
33
34 def matchpats(repo, cwd, pats=[], opts={}, head=''):
34 def matchpats(repo, cwd, pats=[], opts={}, head=''):
35 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
35 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
36 opts.get('exclude'), head)
36 opts.get('exclude'), head)
37
37
38 def makewalk(repo, pats, opts, head=''):
38 def makewalk(repo, pats, opts, head=''):
39 cwd = repo.getcwd()
39 cwd = repo.getcwd()
40 files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
40 files, matchfn, anypats = matchpats(repo, cwd, pats, opts, head)
41 exact = dict(zip(files, files))
41 exact = dict(zip(files, files))
42 def walk():
42 def walk():
43 for src, fn in repo.walk(files=files, match=matchfn):
43 for src, fn in repo.walk(files=files, match=matchfn):
44 yield src, fn, util.pathto(cwd, fn), fn in exact
44 yield src, fn, util.pathto(cwd, fn), fn in exact
45 return files, matchfn, walk()
45 return files, matchfn, walk()
46
46
47 def walk(repo, pats, opts, head=''):
47 def walk(repo, pats, opts, head=''):
48 files, matchfn, results = makewalk(repo, pats, opts, head)
48 files, matchfn, results = makewalk(repo, pats, opts, head)
49 for r in results:
49 for r in results:
50 yield r
50 yield r
51
51
52 def walkchangerevs(ui, repo, cwd, pats, opts):
52 def walkchangerevs(ui, repo, cwd, pats, opts):
53 '''Iterate over files and the revs they changed in.
53 '''Iterate over files and the revs they changed in.
54
54
55 Callers most commonly need to iterate backwards over the history
55 Callers most commonly need to iterate backwards over the history
56 it is interested in. Doing so has awful (quadratic-looking)
56 it is interested in. Doing so has awful (quadratic-looking)
57 performance, so we use iterators in a "windowed" way.
57 performance, so we use iterators in a "windowed" way.
58
58
59 We walk a window of revisions in the desired order. Within the
59 We walk a window of revisions in the desired order. Within the
60 window, we first walk forwards to gather data, then in the desired
60 window, we first walk forwards to gather data, then in the desired
61 order (usually backwards) to display it.
61 order (usually backwards) to display it.
62
62
63 This function returns an (iterator, getchange) pair. The
63 This function returns an (iterator, getchange) pair. The
64 getchange function returns the changelog entry for a numeric
64 getchange function returns the changelog entry for a numeric
65 revision. The iterator yields 3-tuples. They will be of one of
65 revision. The iterator yields 3-tuples. They will be of one of
66 the following forms:
66 the following forms:
67
67
68 "window", incrementing, lastrev: stepping through a window,
68 "window", incrementing, lastrev: stepping through a window,
69 positive if walking forwards through revs, last rev in the
69 positive if walking forwards through revs, last rev in the
70 sequence iterated over - use to reset state for the current window
70 sequence iterated over - use to reset state for the current window
71
71
72 "add", rev, fns: out-of-order traversal of the given file names
72 "add", rev, fns: out-of-order traversal of the given file names
73 fns, which changed during revision rev - use to gather data for
73 fns, which changed during revision rev - use to gather data for
74 possible display
74 possible display
75
75
76 "iter", rev, None: in-order traversal of the revs earlier iterated
76 "iter", rev, None: in-order traversal of the revs earlier iterated
77 over with "add" - use to display data'''
77 over with "add" - use to display data'''
78
78
79 if repo.changelog.count() == 0:
79 if repo.changelog.count() == 0:
80 return [], False
80 return [], False
81
81
82 cwd = repo.getcwd()
82 cwd = repo.getcwd()
83 if not pats and cwd:
83 if not pats and cwd:
84 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
84 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
85 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
85 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
86 files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
86 files, matchfn, anypats = matchpats(repo, (pats and cwd) or '',
87 pats, opts)
87 pats, opts)
88 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
88 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
89 wanted = {}
89 wanted = {}
90 slowpath = anypats
90 slowpath = anypats
91 window = 300
91 window = 300
92 fncache = {}
92 fncache = {}
93
93
94 chcache = {}
94 chcache = {}
95 def getchange(rev):
95 def getchange(rev):
96 ch = chcache.get(rev)
96 ch = chcache.get(rev)
97 if ch is None:
97 if ch is None:
98 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
98 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
99 return ch
99 return ch
100
100
101 if not slowpath and not files:
101 if not slowpath and not files:
102 # No files, no patterns. Display all revs.
102 # No files, no patterns. Display all revs.
103 wanted = dict(zip(revs, revs))
103 wanted = dict(zip(revs, revs))
104 if not slowpath:
104 if not slowpath:
105 # Only files, no patterns. Check the history of each file.
105 # Only files, no patterns. Check the history of each file.
106 def filerevgen(filelog):
106 def filerevgen(filelog):
107 for i in xrange(filelog.count() - 1, -1, -window):
107 for i in xrange(filelog.count() - 1, -1, -window):
108 revs = []
108 revs = []
109 for j in xrange(max(0, i - window), i + 1):
109 for j in xrange(max(0, i - window), i + 1):
110 revs.append(filelog.linkrev(filelog.node(j)))
110 revs.append(filelog.linkrev(filelog.node(j)))
111 revs.reverse()
111 revs.reverse()
112 for rev in revs:
112 for rev in revs:
113 yield rev
113 yield rev
114
114
115 minrev, maxrev = min(revs), max(revs)
115 minrev, maxrev = min(revs), max(revs)
116 for file in files:
116 for file in files:
117 filelog = repo.file(file)
117 filelog = repo.file(file)
118 # A zero count may be a directory or deleted file, so
118 # A zero count may be a directory or deleted file, so
119 # try to find matching entries on the slow path.
119 # try to find matching entries on the slow path.
120 if filelog.count() == 0:
120 if filelog.count() == 0:
121 slowpath = True
121 slowpath = True
122 break
122 break
123 for rev in filerevgen(filelog):
123 for rev in filerevgen(filelog):
124 if rev <= maxrev:
124 if rev <= maxrev:
125 if rev < minrev:
125 if rev < minrev:
126 break
126 break
127 fncache.setdefault(rev, [])
127 fncache.setdefault(rev, [])
128 fncache[rev].append(file)
128 fncache[rev].append(file)
129 wanted[rev] = 1
129 wanted[rev] = 1
130 if slowpath:
130 if slowpath:
131 # The slow path checks files modified in every changeset.
131 # The slow path checks files modified in every changeset.
132 def changerevgen():
132 def changerevgen():
133 for i in xrange(repo.changelog.count() - 1, -1, -window):
133 for i in xrange(repo.changelog.count() - 1, -1, -window):
134 for j in xrange(max(0, i - window), i + 1):
134 for j in xrange(max(0, i - window), i + 1):
135 yield j, getchange(j)[3]
135 yield j, getchange(j)[3]
136
136
137 for rev, changefiles in changerevgen():
137 for rev, changefiles in changerevgen():
138 matches = filter(matchfn, changefiles)
138 matches = filter(matchfn, changefiles)
139 if matches:
139 if matches:
140 fncache[rev] = matches
140 fncache[rev] = matches
141 wanted[rev] = 1
141 wanted[rev] = 1
142
142
143 def iterate():
143 def iterate():
144 for i in xrange(0, len(revs), window):
144 for i in xrange(0, len(revs), window):
145 yield 'window', revs[0] < revs[-1], revs[-1]
145 yield 'window', revs[0] < revs[-1], revs[-1]
146 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
146 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
147 if rev in wanted]
147 if rev in wanted]
148 srevs = list(nrevs)
148 srevs = list(nrevs)
149 srevs.sort()
149 srevs.sort()
150 for rev in srevs:
150 for rev in srevs:
151 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
151 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
152 yield 'add', rev, fns
152 yield 'add', rev, fns
153 for rev in nrevs:
153 for rev in nrevs:
154 yield 'iter', rev, None
154 yield 'iter', rev, None
155 return iterate(), getchange
155 return iterate(), getchange
156
156
157 revrangesep = ':'
157 revrangesep = ':'
158
158
159 def revrange(ui, repo, revs, revlog=None):
159 def revrange(ui, repo, revs, revlog=None):
160 """Yield revision as strings from a list of revision specifications."""
160 """Yield revision as strings from a list of revision specifications."""
161 if revlog is None:
161 if revlog is None:
162 revlog = repo.changelog
162 revlog = repo.changelog
163 revcount = revlog.count()
163 revcount = revlog.count()
164 def fix(val, defval):
164 def fix(val, defval):
165 if not val:
165 if not val:
166 return defval
166 return defval
167 try:
167 try:
168 num = int(val)
168 num = int(val)
169 if str(num) != val:
169 if str(num) != val:
170 raise ValueError
170 raise ValueError
171 if num < 0: num += revcount
171 if num < 0: num += revcount
172 if num < 0: num = 0
172 if num < 0: num = 0
173 elif num >= revcount:
173 elif num >= revcount:
174 raise ValueError
174 raise ValueError
175 except ValueError:
175 except ValueError:
176 try:
176 try:
177 num = repo.changelog.rev(repo.lookup(val))
177 num = repo.changelog.rev(repo.lookup(val))
178 except KeyError:
178 except KeyError:
179 try:
179 try:
180 num = revlog.rev(revlog.lookup(val))
180 num = revlog.rev(revlog.lookup(val))
181 except KeyError:
181 except KeyError:
182 raise util.Abort(_('invalid revision identifier %s'), val)
182 raise util.Abort(_('invalid revision identifier %s'), val)
183 return num
183 return num
184 seen = {}
184 seen = {}
185 for spec in revs:
185 for spec in revs:
186 if spec.find(revrangesep) >= 0:
186 if spec.find(revrangesep) >= 0:
187 start, end = spec.split(revrangesep, 1)
187 start, end = spec.split(revrangesep, 1)
188 start = fix(start, 0)
188 start = fix(start, 0)
189 end = fix(end, revcount - 1)
189 end = fix(end, revcount - 1)
190 step = start > end and -1 or 1
190 step = start > end and -1 or 1
191 for rev in xrange(start, end+step, step):
191 for rev in xrange(start, end+step, step):
192 if rev in seen: continue
192 if rev in seen: continue
193 seen[rev] = 1
193 seen[rev] = 1
194 yield str(rev)
194 yield str(rev)
195 else:
195 else:
196 rev = fix(spec, None)
196 rev = fix(spec, None)
197 if rev in seen: continue
197 if rev in seen: continue
198 seen[rev] = 1
198 seen[rev] = 1
199 yield str(rev)
199 yield str(rev)
200
200
201 def make_filename(repo, r, pat, node=None,
201 def make_filename(repo, r, pat, node=None,
202 total=None, seqno=None, revwidth=None, pathname=None):
202 total=None, seqno=None, revwidth=None, pathname=None):
203 node_expander = {
203 node_expander = {
204 'H': lambda: hex(node),
204 'H': lambda: hex(node),
205 'R': lambda: str(r.rev(node)),
205 'R': lambda: str(r.rev(node)),
206 'h': lambda: short(node),
206 'h': lambda: short(node),
207 }
207 }
208 expander = {
208 expander = {
209 '%': lambda: '%',
209 '%': lambda: '%',
210 'b': lambda: os.path.basename(repo.root),
210 'b': lambda: os.path.basename(repo.root),
211 }
211 }
212
212
213 try:
213 try:
214 if node:
214 if node:
215 expander.update(node_expander)
215 expander.update(node_expander)
216 if node and revwidth is not None:
216 if node and revwidth is not None:
217 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
217 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
218 if total is not None:
218 if total is not None:
219 expander['N'] = lambda: str(total)
219 expander['N'] = lambda: str(total)
220 if seqno is not None:
220 if seqno is not None:
221 expander['n'] = lambda: str(seqno)
221 expander['n'] = lambda: str(seqno)
222 if total is not None and seqno is not None:
222 if total is not None and seqno is not None:
223 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
223 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
224 if pathname is not None:
224 if pathname is not None:
225 expander['s'] = lambda: os.path.basename(pathname)
225 expander['s'] = lambda: os.path.basename(pathname)
226 expander['d'] = lambda: os.path.dirname(pathname) or '.'
226 expander['d'] = lambda: os.path.dirname(pathname) or '.'
227 expander['p'] = lambda: pathname
227 expander['p'] = lambda: pathname
228
228
229 newname = []
229 newname = []
230 patlen = len(pat)
230 patlen = len(pat)
231 i = 0
231 i = 0
232 while i < patlen:
232 while i < patlen:
233 c = pat[i]
233 c = pat[i]
234 if c == '%':
234 if c == '%':
235 i += 1
235 i += 1
236 c = pat[i]
236 c = pat[i]
237 c = expander[c]()
237 c = expander[c]()
238 newname.append(c)
238 newname.append(c)
239 i += 1
239 i += 1
240 return ''.join(newname)
240 return ''.join(newname)
241 except KeyError, inst:
241 except KeyError, inst:
242 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
242 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
243 inst.args[0])
243 inst.args[0])
244
244
245 def make_file(repo, r, pat, node=None,
245 def make_file(repo, r, pat, node=None,
246 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
246 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
247 if not pat or pat == '-':
247 if not pat or pat == '-':
248 return 'w' in mode and sys.stdout or sys.stdin
248 return 'w' in mode and sys.stdout or sys.stdin
249 if hasattr(pat, 'write') and 'w' in mode:
249 if hasattr(pat, 'write') and 'w' in mode:
250 return pat
250 return pat
251 if hasattr(pat, 'read') and 'r' in mode:
251 if hasattr(pat, 'read') and 'r' in mode:
252 return pat
252 return pat
253 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
253 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
254 pathname),
254 pathname),
255 mode)
255 mode)
256
256
257 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
257 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
258 changes=None, text=False):
258 changes=None, text=False):
259 if not changes:
259 if not changes:
260 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
260 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
261 else:
261 else:
262 (c, a, d, u) = changes
262 (c, a, d, u) = changes
263 if files:
263 if files:
264 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
264 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
265
265
266 if not c and not a and not d:
266 if not c and not a and not d:
267 return
267 return
268
268
269 if node2:
269 if node2:
270 change = repo.changelog.read(node2)
270 change = repo.changelog.read(node2)
271 mmap2 = repo.manifest.read(change[0])
271 mmap2 = repo.manifest.read(change[0])
272 date2 = util.datestr(change[2])
272 date2 = util.datestr(change[2])
273 def read(f):
273 def read(f):
274 return repo.file(f).read(mmap2[f])
274 return repo.file(f).read(mmap2[f])
275 else:
275 else:
276 date2 = util.datestr()
276 date2 = util.datestr()
277 if not node1:
277 if not node1:
278 node1 = repo.dirstate.parents()[0]
278 node1 = repo.dirstate.parents()[0]
279 def read(f):
279 def read(f):
280 return repo.wfile(f).read()
280 return repo.wfile(f).read()
281
281
282 if ui.quiet:
282 if ui.quiet:
283 r = None
283 r = None
284 else:
284 else:
285 hexfunc = ui.verbose and hex or short
285 hexfunc = ui.verbose and hex or short
286 r = [hexfunc(node) for node in [node1, node2] if node]
286 r = [hexfunc(node) for node in [node1, node2] if node]
287
287
288 change = repo.changelog.read(node1)
288 change = repo.changelog.read(node1)
289 mmap = repo.manifest.read(change[0])
289 mmap = repo.manifest.read(change[0])
290 date1 = util.datestr(change[2])
290 date1 = util.datestr(change[2])
291
291
292 for f in c:
292 for f in c:
293 to = None
293 to = None
294 if f in mmap:
294 if f in mmap:
295 to = repo.file(f).read(mmap[f])
295 to = repo.file(f).read(mmap[f])
296 tn = read(f)
296 tn = read(f)
297 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
297 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
298 for f in a:
298 for f in a:
299 to = None
299 to = None
300 tn = read(f)
300 tn = read(f)
301 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
301 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
302 for f in d:
302 for f in d:
303 to = repo.file(f).read(mmap[f])
303 to = repo.file(f).read(mmap[f])
304 tn = None
304 tn = None
305 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
305 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
306
306
307 def trimuser(ui, name, rev, revcache):
307 def trimuser(ui, name, rev, revcache):
308 """trim the name of the user who committed a change"""
308 """trim the name of the user who committed a change"""
309 user = revcache.get(rev)
309 user = revcache.get(rev)
310 if user is None:
310 if user is None:
311 user = revcache[rev] = ui.shortuser(name)
311 user = revcache[rev] = ui.shortuser(name)
312 return user
312 return user
313
313
314 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
314 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
315 """show a single changeset or file revision"""
315 """show a single changeset or file revision"""
316 log = repo.changelog
316 log = repo.changelog
317 if changenode is None:
317 if changenode is None:
318 changenode = log.node(rev)
318 changenode = log.node(rev)
319 elif not rev:
319 elif not rev:
320 rev = log.rev(changenode)
320 rev = log.rev(changenode)
321
321
322 if ui.quiet:
322 if ui.quiet:
323 ui.write("%d:%s\n" % (rev, short(changenode)))
323 ui.write("%d:%s\n" % (rev, short(changenode)))
324 return
324 return
325
325
326 changes = log.read(changenode)
326 changes = log.read(changenode)
327 date = util.datestr(changes[2])
327 date = util.datestr(changes[2])
328
328
329 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
329 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
330 for p in log.parents(changenode)
330 for p in log.parents(changenode)
331 if ui.debugflag or p != nullid]
331 if ui.debugflag or p != nullid]
332 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
332 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
333 parents = []
333 parents = []
334
334
335 if ui.verbose:
335 if ui.verbose:
336 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
336 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
337 else:
337 else:
338 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
338 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
339
339
340 for tag in repo.nodetags(changenode):
340 for tag in repo.nodetags(changenode):
341 ui.status(_("tag: %s\n") % tag)
341 ui.status(_("tag: %s\n") % tag)
342 for parent in parents:
342 for parent in parents:
343 ui.write(_("parent: %d:%s\n") % parent)
343 ui.write(_("parent: %d:%s\n") % parent)
344
344
345 if brinfo and changenode in brinfo:
345 if brinfo and changenode in brinfo:
346 br = brinfo[changenode]
346 br = brinfo[changenode]
347 ui.write(_("branch: %s\n") % " ".join(br))
347 ui.write(_("branch: %s\n") % " ".join(br))
348
348
349 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
349 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
350 hex(changes[0])))
350 hex(changes[0])))
351 ui.status(_("user: %s\n") % changes[1])
351 ui.status(_("user: %s\n") % changes[1])
352 ui.status(_("date: %s\n") % date)
352 ui.status(_("date: %s\n") % date)
353
353
354 if ui.debugflag:
354 if ui.debugflag:
355 files = repo.changes(log.parents(changenode)[0], changenode)
355 files = repo.changes(log.parents(changenode)[0], changenode)
356 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
356 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
357 if value:
357 if value:
358 ui.note("%-12s %s\n" % (key, " ".join(value)))
358 ui.note("%-12s %s\n" % (key, " ".join(value)))
359 else:
359 else:
360 ui.note(_("files: %s\n") % " ".join(changes[3]))
360 ui.note(_("files: %s\n") % " ".join(changes[3]))
361
361
362 description = changes[4].strip()
362 description = changes[4].strip()
363 if description:
363 if description:
364 if ui.verbose:
364 if ui.verbose:
365 ui.status(_("description:\n"))
365 ui.status(_("description:\n"))
366 ui.status(description)
366 ui.status(description)
367 ui.status("\n\n")
367 ui.status("\n\n")
368 else:
368 else:
369 ui.status(_("summary: %s\n") % description.splitlines()[0])
369 ui.status(_("summary: %s\n") % description.splitlines()[0])
370 ui.status("\n")
370 ui.status("\n")
371
371
372 def show_version(ui):
372 def show_version(ui):
373 """output version and copyright information"""
373 """output version and copyright information"""
374 ui.write(_("Mercurial Distributed SCM (version %s)\n")
374 ui.write(_("Mercurial Distributed SCM (version %s)\n")
375 % version.get_version())
375 % version.get_version())
376 ui.status(_(
376 ui.status(_(
377 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
377 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
378 "This is free software; see the source for copying conditions. "
378 "This is free software; see the source for copying conditions. "
379 "There is NO\nwarranty; "
379 "There is NO\nwarranty; "
380 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
380 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
381 ))
381 ))
382
382
383 def help_(ui, cmd=None, with_version=False):
383 def help_(ui, cmd=None, with_version=False):
384 """show help for a given command or all commands"""
384 """show help for a given command or all commands"""
385 option_lists = []
385 option_lists = []
386 if cmd and cmd != 'shortlist':
386 if cmd and cmd != 'shortlist':
387 if with_version:
387 if with_version:
388 show_version(ui)
388 show_version(ui)
389 ui.write('\n')
389 ui.write('\n')
390 key, i = find(cmd)
390 key, i = find(cmd)
391 # synopsis
391 # synopsis
392 ui.write("%s\n\n" % i[2])
392 ui.write("%s\n\n" % i[2])
393
393
394 # description
394 # description
395 doc = i[0].__doc__
395 doc = i[0].__doc__
396 if ui.quiet:
396 if ui.quiet:
397 doc = doc.splitlines(0)[0]
397 doc = doc.splitlines(0)[0]
398 ui.write("%s\n" % doc.rstrip())
398 ui.write("%s\n" % doc.rstrip())
399
399
400 if not ui.quiet:
400 if not ui.quiet:
401 # aliases
401 # aliases
402 aliases = ', '.join(key.split('|')[1:])
402 aliases = ', '.join(key.split('|')[1:])
403 if aliases:
403 if aliases:
404 ui.write(_("\naliases: %s\n") % aliases)
404 ui.write(_("\naliases: %s\n") % aliases)
405
405
406 # options
406 # options
407 if i[1]:
407 if i[1]:
408 option_lists.append(("options", i[1]))
408 option_lists.append(("options", i[1]))
409
409
410 else:
410 else:
411 # program name
411 # program name
412 if ui.verbose or with_version:
412 if ui.verbose or with_version:
413 show_version(ui)
413 show_version(ui)
414 else:
414 else:
415 ui.status(_("Mercurial Distributed SCM\n"))
415 ui.status(_("Mercurial Distributed SCM\n"))
416 ui.status('\n')
416 ui.status('\n')
417
417
418 # list of commands
418 # list of commands
419 if cmd == "shortlist":
419 if cmd == "shortlist":
420 ui.status(_('basic commands (use "hg help" '
420 ui.status(_('basic commands (use "hg help" '
421 'for the full list or option "-v" for details):\n\n'))
421 'for the full list or option "-v" for details):\n\n'))
422 elif ui.verbose:
422 elif ui.verbose:
423 ui.status(_('list of commands:\n\n'))
423 ui.status(_('list of commands:\n\n'))
424 else:
424 else:
425 ui.status(_('list of commands (use "hg help -v" '
425 ui.status(_('list of commands (use "hg help -v" '
426 'to show aliases and global options):\n\n'))
426 'to show aliases and global options):\n\n'))
427
427
428 h = {}
428 h = {}
429 cmds = {}
429 cmds = {}
430 for c, e in table.items():
430 for c, e in table.items():
431 f = c.split("|")[0]
431 f = c.split("|")[0]
432 if cmd == "shortlist" and not f.startswith("^"):
432 if cmd == "shortlist" and not f.startswith("^"):
433 continue
433 continue
434 f = f.lstrip("^")
434 f = f.lstrip("^")
435 if not ui.debugflag and f.startswith("debug"):
435 if not ui.debugflag and f.startswith("debug"):
436 continue
436 continue
437 d = ""
437 d = ""
438 if e[0].__doc__:
438 if e[0].__doc__:
439 d = e[0].__doc__.splitlines(0)[0].rstrip()
439 d = e[0].__doc__.splitlines(0)[0].rstrip()
440 h[f] = d
440 h[f] = d
441 cmds[f]=c.lstrip("^")
441 cmds[f]=c.lstrip("^")
442
442
443 fns = h.keys()
443 fns = h.keys()
444 fns.sort()
444 fns.sort()
445 m = max(map(len, fns))
445 m = max(map(len, fns))
446 for f in fns:
446 for f in fns:
447 if ui.verbose:
447 if ui.verbose:
448 commands = cmds[f].replace("|",", ")
448 commands = cmds[f].replace("|",", ")
449 ui.write(" %s:\n %s\n"%(commands,h[f]))
449 ui.write(" %s:\n %s\n"%(commands,h[f]))
450 else:
450 else:
451 ui.write(' %-*s %s\n' % (m, f, h[f]))
451 ui.write(' %-*s %s\n' % (m, f, h[f]))
452
452
453 # global options
453 # global options
454 if ui.verbose:
454 if ui.verbose:
455 option_lists.append(("global options", globalopts))
455 option_lists.append(("global options", globalopts))
456
456
457 # list all option lists
457 # list all option lists
458 opt_output = []
458 opt_output = []
459 for title, options in option_lists:
459 for title, options in option_lists:
460 opt_output.append(("\n%s:\n" % title, None))
460 opt_output.append(("\n%s:\n" % title, None))
461 for shortopt, longopt, default, desc in options:
461 for shortopt, longopt, default, desc in options:
462 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
462 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
463 longopt and " --%s" % longopt),
463 longopt and " --%s" % longopt),
464 "%s%s" % (desc,
464 "%s%s" % (desc,
465 default and _(" (default: %s)") % default
465 default and _(" (default: %s)") % default
466 or "")))
466 or "")))
467
467
468 if opt_output:
468 if opt_output:
469 opts_len = max([len(line[0]) for line in opt_output if line[1]])
469 opts_len = max([len(line[0]) for line in opt_output if line[1]])
470 for first, second in opt_output:
470 for first, second in opt_output:
471 if second:
471 if second:
472 ui.write(" %-*s %s\n" % (opts_len, first, second))
472 ui.write(" %-*s %s\n" % (opts_len, first, second))
473 else:
473 else:
474 ui.write("%s\n" % first)
474 ui.write("%s\n" % first)
475
475
476 # Commands start here, listed alphabetically
476 # Commands start here, listed alphabetically
477
477
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the current directory and
    its subdirectories.
    """
    to_add = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        if exact:
            # explicitly named files are scheduled unconditionally;
            # only mention them in verbose mode
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            to_add.append(abs)
        elif repo.dirstate.state(abs) == '?':
            # pattern matches: pick up untracked files only
            ui.status(_('adding %s\n') % rel)
            to_add.append(abs)
    repo.add(to_add)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.
    """
    # renamed from 'add'/'remove' to avoid shadowing the sibling commands
    added, removed = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        chatty = ui.verbose or not exact
        if src == 'f' and repo.dirstate.state(abs) == '?':
            # untracked file present in the working directory
            added.append(abs)
            if chatty:
                ui.status(_('adding %s\n') % rel)
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            # tracked file missing from the working directory
            removed.append(abs)
            if chatty:
                ui.status(_('removing %s\n') % rel)
    repo.add(added)
    repo.remove(removed)
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # column formatters: turn a revision number into display text
    def getnode(rev):
        return short(repo.changelog.node(rev))

    ucache = {}  # username-trimming cache shared across all lines
    def getname(rev):
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', getname], ['number', str], ['changeset', getnode]]
    # default to showing revision numbers when no column was requested
    if not opts['user'] and not opts['changeset']:
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts):
        if abs not in mmap:
            ui.warn(_("warning: %s is not in the repository!\n") % rel)
            continue

        flog = repo.file(abs)
        if not opts['text'] and util.binary(flog.read(mmap[abs])):
            ui.write(_("%s: binary file\n") % rel)
            continue

        lines = flog.annotate(mmap[abs])
        pieces = []

        # build one right-aligned column per requested field
        # (loop vars renamed: the original rebound 'f', shadowing the filelog)
        for name, fmt in opmap:
            if opts[name]:
                col = [fmt(n) for n, dummy in lines]
                if col:
                    width = max(map(len, col))
                    pieces.append(["%*s" % (width, x) for x in col])

        if pieces:
            for prefix, line in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(prefix), line[1]))
def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    # Determine the outgoing changesets before touching the output file,
    # so a failure to reach the other repository does not leave a stale
    # empty file behind (the original opened the file first, and the
    # unlink-on-error below would never have cleaned it up).
    dest = ui.expandpath(dest, repo.root)
    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other)
    cg = repo.changegroup(o)

    f = open(fname, "wb")
    try:
        try:
            # "HG10" magic followed by a bzip2-compressed changegroup
            f.write("HG10")
            z = bz2.BZ2Compressor(9)
            while 1:
                chunk = cg.read(4096)
                if not chunk:
                    break
                f.write(z.compress(chunk))
            f.write(z.flush())
        except:
            # remove the partial bundle before propagating the error
            os.unlink(fname)
            raise
    finally:
        # fix: the original never closed the file handle
        f.close()
612 def cat(ui, repo, file1, *pats, **opts):
612 def cat(ui, repo, file1, *pats, **opts):
613 """output the latest or given revisions of files
613 """output the latest or given revisions of files
614
614
615 Print the specified files as they were at the given revision.
615 Print the specified files as they were at the given revision.
616 If no revision is given then the tip is used.
616 If no revision is given then the tip is used.
617
617
618 Output may be to a file, in which case the name of the file is
618 Output may be to a file, in which case the name of the file is
619 given using a format string. The formatting rules are the same as
619 given using a format string. The formatting rules are the same as
620 for the export command, with the following additions:
620 for the export command, with the following additions:
621
621
622 %s basename of file being printed
622 %s basename of file being printed
623 %d dirname of file being printed, or '.' if in repo root
623 %d dirname of file being printed, or '.' if in repo root
624 %p root-relative path name of file being printed
624 %p root-relative path name of file being printed
625 """
625 """
626 mf = {}
626 mf = {}
627 if opts['rev']:
627 if opts['rev']:
628 change = repo.changelog.read(repo.lookup(opts['rev']))
628 change = repo.changelog.read(repo.lookup(opts['rev']))
629 mf = repo.manifest.read(change[0])
629 mf = repo.manifest.read(change[0])
630 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
630 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts):
631 r = repo.file(abs)
631 r = repo.file(abs)
632 if opts['rev']:
632 if opts['rev']:
633 try:
633 try:
634 n = mf[abs]
634 n = mf[abs]
635 except (hg.RepoError, KeyError):
635 except (hg.RepoError, KeyError):
636 try:
636 try:
637 n = r.lookup(rev)
637 n = r.lookup(rev)
638 except KeyError, inst:
638 except KeyError, inst:
639 raise util.Abort(_('cannot find file %s in rev %s'), rel, rev)
639 raise util.Abort(_('cannot find file %s in rev %s'), rel, rev)
640 else:
640 else:
641 n = r.tip()
641 n = r.tip()
642 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
642 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
643 fp.write(r.read(n))
643 fp.write(r.read(n))
644
644
645 def clone(ui, source, dest=None, **opts):
645 def clone(ui, source, dest=None, **opts):
646 """make a copy of an existing repository
646 """make a copy of an existing repository
647
647
648 Create a copy of an existing repository in a new directory.
648 Create a copy of an existing repository in a new directory.
649
649
650 If no destination directory name is specified, it defaults to the
650 If no destination directory name is specified, it defaults to the
651 basename of the source.
651 basename of the source.
652
652
653 The location of the source is added to the new repository's
653 The location of the source is added to the new repository's
654 .hg/hgrc file, as the default to be used for future pulls.
654 .hg/hgrc file, as the default to be used for future pulls.
655
655
656 For efficiency, hardlinks are used for cloning whenever the source
656 For efficiency, hardlinks are used for cloning whenever the source
657 and destination are on the same filesystem. Some filesystems,
657 and destination are on the same filesystem. Some filesystems,
658 such as AFS, implement hardlinking incorrectly, but do not report
658 such as AFS, implement hardlinking incorrectly, but do not report
659 errors. In these cases, use the --pull option to avoid
659 errors. In these cases, use the --pull option to avoid
660 hardlinking.
660 hardlinking.
661 """
661 """
662 if dest is None:
662 if dest is None:
663 dest = os.path.basename(os.path.normpath(source))
663 dest = os.path.basename(os.path.normpath(source))
664
664
665 if os.path.exists(dest):
665 if os.path.exists(dest):
666 raise util.Abort(_("destination '%s' already exists"), dest)
666 raise util.Abort(_("destination '%s' already exists"), dest)
667
667
668 dest = os.path.realpath(dest)
668 dest = os.path.realpath(dest)
669
669
670 class Dircleanup:
670 class Dircleanup:
671 def __init__(self, dir_):
671 def __init__(self, dir_):
672 self.rmtree = shutil.rmtree
672 self.rmtree = shutil.rmtree
673 self.dir_ = dir_
673 self.dir_ = dir_
674 os.mkdir(dir_)
674 os.mkdir(dir_)
675 def close(self):
675 def close(self):
676 self.dir_ = None
676 self.dir_ = None
677 def __del__(self):
677 def __del__(self):
678 if self.dir_:
678 if self.dir_:
679 self.rmtree(self.dir_, True)
679 self.rmtree(self.dir_, True)
680
680
681 if opts['ssh']:
681 if opts['ssh']:
682 ui.setconfig("ui", "ssh", opts['ssh'])
682 ui.setconfig("ui", "ssh", opts['ssh'])
683 if opts['remotecmd']:
683 if opts['remotecmd']:
684 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
684 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
685
685
686 if not os.path.exists(source):
686 if not os.path.exists(source):
687 source = ui.expandpath(source)
687 source = ui.expandpath(source)
688
688
689 d = Dircleanup(dest)
689 d = Dircleanup(dest)
690 abspath = source
690 abspath = source
691 other = hg.repository(ui, source)
691 other = hg.repository(ui, source)
692
692
693 copy = False
693 copy = False
694 if other.dev() != -1:
694 if other.dev() != -1:
695 abspath = os.path.abspath(source)
695 abspath = os.path.abspath(source)
696 if not opts['pull']:
696 if not opts['pull'] and not opts['rev']:
697 copy = True
697 copy = True
698
698
699 if copy:
699 if copy:
700 try:
700 try:
701 # we use a lock here because if we race with commit, we
701 # we use a lock here because if we race with commit, we
702 # can end up with extra data in the cloned revlogs that's
702 # can end up with extra data in the cloned revlogs that's
703 # not pointed to by changesets, thus causing verify to
703 # not pointed to by changesets, thus causing verify to
704 # fail
704 # fail
705 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
705 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
706 except OSError:
706 except OSError:
707 copy = False
707 copy = False
708
708
709 if copy:
709 if copy:
710 # we lock here to avoid premature writing to the target
710 # we lock here to avoid premature writing to the target
711 os.mkdir(os.path.join(dest, ".hg"))
711 os.mkdir(os.path.join(dest, ".hg"))
712 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
712 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
713
713
714 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
714 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
715 for f in files.split():
715 for f in files.split():
716 src = os.path.join(source, ".hg", f)
716 src = os.path.join(source, ".hg", f)
717 dst = os.path.join(dest, ".hg", f)
717 dst = os.path.join(dest, ".hg", f)
718 try:
718 try:
719 util.copyfiles(src, dst)
719 util.copyfiles(src, dst)
720 except OSError, inst:
720 except OSError, inst:
721 if inst.errno != errno.ENOENT: raise
721 if inst.errno != errno.ENOENT: raise
722
722
723 repo = hg.repository(ui, dest)
723 repo = hg.repository(ui, dest)
724
724
725 else:
725 else:
726 revs = None
727 if opts['rev']:
728 if not other.local():
729 raise util.Abort("clone -r not supported yet for remote repositories.")
730 else:
731 revs = [other.lookup(rev) for rev in opts['rev']]
726 repo = hg.repository(ui, dest, create=1)
732 repo = hg.repository(ui, dest, create=1)
727 repo.pull(other)
733 repo.pull(other, heads = revs)
728
734
729 f = repo.opener("hgrc", "w", text=True)
735 f = repo.opener("hgrc", "w", text=True)
730 f.write("[paths]\n")
736 f.write("[paths]\n")
731 f.write("default = %s\n" % abspath)
737 f.write("default = %s\n" % abspath)
732
738
733 if not opts['noupdate']:
739 if not opts['noupdate']:
734 update(ui, repo)
740 update(ui, repo)
735
741
736 d.close()
742 d.close()
737
743
738 def commit(ui, repo, *pats, **opts):
744 def commit(ui, repo, *pats, **opts):
739 """commit the specified files or all outstanding changes
745 """commit the specified files or all outstanding changes
740
746
741 Commit changes to the given files into the repository.
747 Commit changes to the given files into the repository.
742
748
743 If a list of files is omitted, all changes reported by "hg status"
749 If a list of files is omitted, all changes reported by "hg status"
744 from the root of the repository will be commited.
750 from the root of the repository will be commited.
745
751
746 The HGEDITOR or EDITOR environment variables are used to start an
752 The HGEDITOR or EDITOR environment variables are used to start an
747 editor to add a commit comment.
753 editor to add a commit comment.
748 """
754 """
749 message = opts['message']
755 message = opts['message']
750 logfile = opts['logfile']
756 logfile = opts['logfile']
751
757
752 if message and logfile:
758 if message and logfile:
753 raise util.Abort(_('options --message and --logfile are mutually '
759 raise util.Abort(_('options --message and --logfile are mutually '
754 'exclusive'))
760 'exclusive'))
755 if not message and logfile:
761 if not message and logfile:
756 try:
762 try:
757 if logfile == '-':
763 if logfile == '-':
758 message = sys.stdin.read()
764 message = sys.stdin.read()
759 else:
765 else:
760 message = open(logfile).read()
766 message = open(logfile).read()
761 except IOError, inst:
767 except IOError, inst:
762 raise util.Abort(_("can't read commit message '%s': %s") %
768 raise util.Abort(_("can't read commit message '%s': %s") %
763 (logfile, inst.strerror))
769 (logfile, inst.strerror))
764
770
765 if opts['addremove']:
771 if opts['addremove']:
766 addremove(ui, repo, *pats, **opts)
772 addremove(ui, repo, *pats, **opts)
767 cwd = repo.getcwd()
773 cwd = repo.getcwd()
768 if not pats and cwd:
774 if not pats and cwd:
769 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
775 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
770 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
776 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
771 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
777 fns, match, anypats = matchpats(repo, (pats and repo.getcwd()) or '',
772 pats, opts)
778 pats, opts)
773 if pats:
779 if pats:
774 c, a, d, u = repo.changes(files=fns, match=match)
780 c, a, d, u = repo.changes(files=fns, match=match)
775 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
781 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
776 else:
782 else:
777 files = []
783 files = []
778 try:
784 try:
779 repo.commit(files, message, opts['user'], opts['date'], match)
785 repo.commit(files, message, opts['user'], opts['date'], match)
780 except ValueError, inst:
786 except ValueError, inst:
781 raise util.Abort(str(inst))
787 raise util.Abort(str(inst))
782
788
783 def docopy(ui, repo, pats, opts):
789 def docopy(ui, repo, pats, opts):
784 if not pats:
790 if not pats:
785 raise util.Abort(_('no source or destination specified'))
791 raise util.Abort(_('no source or destination specified'))
786 elif len(pats) == 1:
792 elif len(pats) == 1:
787 raise util.Abort(_('no destination specified'))
793 raise util.Abort(_('no destination specified'))
788 pats = list(pats)
794 pats = list(pats)
789 dest = pats.pop()
795 dest = pats.pop()
790 sources = []
796 sources = []
791 dir2dir = len(pats) == 1 and os.path.isdir(pats[0])
797 dir2dir = len(pats) == 1 and os.path.isdir(pats[0])
792
798
793 def okaytocopy(abs, rel, exact):
799 def okaytocopy(abs, rel, exact):
794 reasons = {'?': _('is not managed'),
800 reasons = {'?': _('is not managed'),
795 'a': _('has been marked for add')}
801 'a': _('has been marked for add')}
796 reason = reasons.get(repo.dirstate.state(abs))
802 reason = reasons.get(repo.dirstate.state(abs))
797 if reason:
803 if reason:
798 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
804 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
799 else:
805 else:
800 return True
806 return True
801
807
802 for src, abs, rel, exact in walk(repo, pats, opts):
808 for src, abs, rel, exact in walk(repo, pats, opts):
803 if okaytocopy(abs, rel, exact):
809 if okaytocopy(abs, rel, exact):
804 sources.append((abs, rel, exact))
810 sources.append((abs, rel, exact))
805 if not sources:
811 if not sources:
806 raise util.Abort(_('no files to copy'))
812 raise util.Abort(_('no files to copy'))
807
813
808 cwd = repo.getcwd()
814 cwd = repo.getcwd()
809 absdest = util.canonpath(repo.root, cwd, dest)
815 absdest = util.canonpath(repo.root, cwd, dest)
810 reldest = util.pathto(cwd, absdest)
816 reldest = util.pathto(cwd, absdest)
811 if os.path.exists(reldest):
817 if os.path.exists(reldest):
812 destisfile = not os.path.isdir(reldest)
818 destisfile = not os.path.isdir(reldest)
813 else:
819 else:
814 destisfile = not dir2dir and (len(sources) == 1
820 destisfile = not dir2dir and (len(sources) == 1
815 or repo.dirstate.state(absdest) != '?')
821 or repo.dirstate.state(absdest) != '?')
816
822
817 if destisfile and len(sources) > 1:
823 if destisfile and len(sources) > 1:
818 raise util.Abort(_('with multiple sources, destination must be a '
824 raise util.Abort(_('with multiple sources, destination must be a '
819 'directory'))
825 'directory'))
820
826
821 srcpfxlen = 0
827 srcpfxlen = 0
822 if dir2dir:
828 if dir2dir:
823 srcpfx = util.pathto(cwd, util.canonpath(repo.root, cwd, pats[0]))
829 srcpfx = util.pathto(cwd, util.canonpath(repo.root, cwd, pats[0]))
824 if os.path.exists(reldest):
830 if os.path.exists(reldest):
825 srcpfx = os.path.split(srcpfx)[0]
831 srcpfx = os.path.split(srcpfx)[0]
826 if srcpfx:
832 if srcpfx:
827 srcpfx += os.sep
833 srcpfx += os.sep
828 srcpfxlen = len(srcpfx)
834 srcpfxlen = len(srcpfx)
829
835
830 errs, copied = 0, []
836 errs, copied = 0, []
831 for abs, rel, exact in sources:
837 for abs, rel, exact in sources:
832 if destisfile:
838 if destisfile:
833 mydest = reldest
839 mydest = reldest
834 elif dir2dir:
840 elif dir2dir:
835 mydest = os.path.join(dest, rel[srcpfxlen:])
841 mydest = os.path.join(dest, rel[srcpfxlen:])
836 else:
842 else:
837 mydest = os.path.join(dest, os.path.basename(rel))
843 mydest = os.path.join(dest, os.path.basename(rel))
838 myabsdest = util.canonpath(repo.root, cwd, mydest)
844 myabsdest = util.canonpath(repo.root, cwd, mydest)
839 myreldest = util.pathto(cwd, myabsdest)
845 myreldest = util.pathto(cwd, myabsdest)
840 if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?':
846 if not opts['force'] and repo.dirstate.state(myabsdest) not in 'a?':
841 ui.warn(_('%s: not overwriting - file already managed\n') % myreldest)
847 ui.warn(_('%s: not overwriting - file already managed\n') % myreldest)
842 continue
848 continue
843 mydestdir = os.path.dirname(myreldest) or '.'
849 mydestdir = os.path.dirname(myreldest) or '.'
844 if not opts['after']:
850 if not opts['after']:
845 try:
851 try:
846 if dir2dir: os.makedirs(mydestdir)
852 if dir2dir: os.makedirs(mydestdir)
847 elif not destisfile: os.mkdir(mydestdir)
853 elif not destisfile: os.mkdir(mydestdir)
848 except OSError, inst:
854 except OSError, inst:
849 if inst.errno != errno.EEXIST: raise
855 if inst.errno != errno.EEXIST: raise
850 if ui.verbose or not exact:
856 if ui.verbose or not exact:
851 ui.status(_('copying %s to %s\n') % (rel, myreldest))
857 ui.status(_('copying %s to %s\n') % (rel, myreldest))
852 if not opts['after']:
858 if not opts['after']:
853 try:
859 try:
854 shutil.copyfile(rel, myreldest)
860 shutil.copyfile(rel, myreldest)
855 shutil.copymode(rel, myreldest)
861 shutil.copymode(rel, myreldest)
856 except shutil.Error, inst:
862 except shutil.Error, inst:
857 raise util.Abort(str(inst))
863 raise util.Abort(str(inst))
858 except IOError, inst:
864 except IOError, inst:
859 if inst.errno == errno.ENOENT:
865 if inst.errno == errno.ENOENT:
860 ui.warn(_('%s: deleted in working copy\n') % rel)
866 ui.warn(_('%s: deleted in working copy\n') % rel)
861 else:
867 else:
862 ui.warn(_('%s: cannot copy - %s\n') % (rel, inst.strerror))
868 ui.warn(_('%s: cannot copy - %s\n') % (rel, inst.strerror))
863 errs += 1
869 errs += 1
864 continue
870 continue
865 repo.copy(abs, myabsdest)
871 repo.copy(abs, myabsdest)
866 copied.append((abs, rel, exact))
872 copied.append((abs, rel, exact))
867 if errs:
873 if errs:
868 ui.warn(_('(consider using --after)\n'))
874 ui.warn(_('(consider using --after)\n'))
869 return errs, copied
875 return errs, copied
870
876
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    # delegate to the shared worker; the copied-files list is unused here
    errs, copied = docopy(ui, repo, pats, opts)
    # the error count doubles as the command's exit status
    return errs
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # open the revlog index directly, using the builtin 'file' as opener
    rlog = revlog.revlog(file, index, "")
    anc = rlog.ancestor(rlog.lookup(rev1), rlog.lookup(rev2))
    ui.write("%d:%s\n" % (rlog.rev(anc), hex(anc)))
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    # fix: dropped dead code — the original built and sorted a 'keys'
    # list from dc that was never used
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    # cross-check every dirstate entry against the parent manifests
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and every first-parent manifest entry against the dirstate
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
def debugconfig(ui):
    """show combined config settings from all hgrc files"""
    try:
        # instantiating the repository folds its .hg/hgrc settings into ui
        # as a side effect; the object itself is not used afterwards
        repo = hg.repository(ui)
    except hg.RepoError:
        # not inside a repository: show global configuration only
        pass
    for section, name, value in ui.walkconfig():
        ui.write('%s.%s=%s\n' % (section, name, value))
def debugsetparents(ui, repo, rev1, rev2=None):
    """
    manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # the second parent defaults to the null revision
    rev2 = rev2 or hex(nullid)
    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
951 def debugstate(ui, repo):
957 def debugstate(ui, repo):
952 """show the contents of the current dirstate"""
958 """show the contents of the current dirstate"""
953 repo.dirstate.read()
959 repo.dirstate.read()
954 dc = repo.dirstate.map
960 dc = repo.dirstate.map
955 keys = dc.keys()
961 keys = dc.keys()
956 keys.sort()
962 keys.sort()
957 for file_ in keys:
963 for file_ in keys:
958 ui.write("%c %3o %10d %s %s\n"
964 ui.write("%c %3o %10d %s %s\n"
959 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
965 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
960 time.strftime("%x %X",
966 time.strftime("%x %X",
961 time.localtime(dc[file_][3])), file_))
967 time.localtime(dc[file_][3])), file_))
962 for f in repo.dirstate.copies:
968 for f in repo.dirstate.copies:
963 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
969 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
964
970
965 def debugdata(ui, file_, rev):
971 def debugdata(ui, file_, rev):
966 """dump the contents of an data file revision"""
972 """dump the contents of an data file revision"""
967 r = revlog.revlog(file, file_[:-2] + ".i", file_)
973 r = revlog.revlog(file, file_[:-2] + ".i", file_)
968 try:
974 try:
969 ui.write(r.revision(r.lookup(rev)))
975 ui.write(r.revision(r.lookup(rev)))
970 except KeyError:
976 except KeyError:
971 raise util.Abort(_('invalid revision identifier %s'), rev)
977 raise util.Abort(_('invalid revision identifier %s'), rev)
972
978
973 def debugindex(ui, file_):
979 def debugindex(ui, file_):
974 """dump the contents of an index file"""
980 """dump the contents of an index file"""
975 r = revlog.revlog(file, file_, "")
981 r = revlog.revlog(file, file_, "")
976 ui.write(" rev offset length base linkrev" +
982 ui.write(" rev offset length base linkrev" +
977 " nodeid p1 p2\n")
983 " nodeid p1 p2\n")
978 for i in range(r.count()):
984 for i in range(r.count()):
979 e = r.index[i]
985 e = r.index[i]
980 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
986 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
981 i, e[0], e[1], e[2], e[3],
987 i, e[0], e[1], e[2], e[3],
982 short(e[6]), short(e[4]), short(e[5])))
988 short(e[6]), short(e[4]), short(e[5])))
983
989
984 def debugindexdot(ui, file_):
990 def debugindexdot(ui, file_):
985 """dump an index DAG as a .dot file"""
991 """dump an index DAG as a .dot file"""
986 r = revlog.revlog(file, file_, "")
992 r = revlog.revlog(file, file_, "")
987 ui.write("digraph G {\n")
993 ui.write("digraph G {\n")
988 for i in range(r.count()):
994 for i in range(r.count()):
989 e = r.index[i]
995 e = r.index[i]
990 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
996 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
991 if e[5] != nullid:
997 if e[5] != nullid:
992 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
998 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
993 ui.write("}\n")
999 ui.write("}\n")
994
1000
995 def debugrename(ui, repo, file, rev=None):
1001 def debugrename(ui, repo, file, rev=None):
996 """dump rename information"""
1002 """dump rename information"""
997 r = repo.file(relpath(repo, [file])[0])
1003 r = repo.file(relpath(repo, [file])[0])
998 if rev:
1004 if rev:
999 try:
1005 try:
1000 # assume all revision numbers are for changesets
1006 # assume all revision numbers are for changesets
1001 n = repo.lookup(rev)
1007 n = repo.lookup(rev)
1002 change = repo.changelog.read(n)
1008 change = repo.changelog.read(n)
1003 m = repo.manifest.read(change[0])
1009 m = repo.manifest.read(change[0])
1004 n = m[relpath(repo, [file])[0]]
1010 n = m[relpath(repo, [file])[0]]
1005 except hg.RepoError, KeyError:
1011 except hg.RepoError, KeyError:
1006 n = r.lookup(rev)
1012 n = r.lookup(rev)
1007 else:
1013 else:
1008 n = r.tip()
1014 n = r.tip()
1009 m = r.renamed(n)
1015 m = r.renamed(n)
1010 if m:
1016 if m:
1011 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1017 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1012 else:
1018 else:
1013 ui.write(_("not renamed\n"))
1019 ui.write(_("not renamed\n"))
1014
1020
1015 def debugwalk(ui, repo, *pats, **opts):
1021 def debugwalk(ui, repo, *pats, **opts):
1016 """show how files match on given patterns"""
1022 """show how files match on given patterns"""
1017 items = list(walk(repo, pats, opts))
1023 items = list(walk(repo, pats, opts))
1018 if not items:
1024 if not items:
1019 return
1025 return
1020 fmt = '%%s %%-%ds %%-%ds %%s' % (
1026 fmt = '%%s %%-%ds %%-%ds %%s' % (
1021 max([len(abs) for (src, abs, rel, exact) in items]),
1027 max([len(abs) for (src, abs, rel, exact) in items]),
1022 max([len(rel) for (src, abs, rel, exact) in items]))
1028 max([len(rel) for (src, abs, rel, exact) in items]))
1023 for src, abs, rel, exact in items:
1029 for src, abs, rel, exact in items:
1024 line = fmt % (src, abs, rel, exact and 'exact' or '')
1030 line = fmt % (src, abs, rel, exact and 'exact' or '')
1025 ui.write("%s\n" % line.rstrip())
1031 ui.write("%s\n" % line.rstrip())
1026
1032
1027 def diff(ui, repo, *pats, **opts):
1033 def diff(ui, repo, *pats, **opts):
1028 """diff working directory (or selected files)
1034 """diff working directory (or selected files)
1029
1035
1030 Show differences between revisions for the specified files.
1036 Show differences between revisions for the specified files.
1031
1037
1032 Differences between files are shown using the unified diff format.
1038 Differences between files are shown using the unified diff format.
1033
1039
1034 When two revision arguments are given, then changes are shown
1040 When two revision arguments are given, then changes are shown
1035 between those revisions. If only one revision is specified then
1041 between those revisions. If only one revision is specified then
1036 that revision is compared to the working directory, and, when no
1042 that revision is compared to the working directory, and, when no
1037 revisions are specified, the working directory files are compared
1043 revisions are specified, the working directory files are compared
1038 to its parent.
1044 to its parent.
1039
1045
1040 Without the -a option, diff will avoid generating diffs of files
1046 Without the -a option, diff will avoid generating diffs of files
1041 it detects as binary. With -a, diff will generate a diff anyway,
1047 it detects as binary. With -a, diff will generate a diff anyway,
1042 probably with undesirable results.
1048 probably with undesirable results.
1043 """
1049 """
1044 node1, node2 = None, None
1050 node1, node2 = None, None
1045 revs = [repo.lookup(x) for x in opts['rev']]
1051 revs = [repo.lookup(x) for x in opts['rev']]
1046
1052
1047 if len(revs) > 0:
1053 if len(revs) > 0:
1048 node1 = revs[0]
1054 node1 = revs[0]
1049 if len(revs) > 1:
1055 if len(revs) > 1:
1050 node2 = revs[1]
1056 node2 = revs[1]
1051 if len(revs) > 2:
1057 if len(revs) > 2:
1052 raise util.Abort(_("too many revisions to diff"))
1058 raise util.Abort(_("too many revisions to diff"))
1053
1059
1054 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
1060 fns, matchfn, anypats = matchpats(repo, repo.getcwd(), pats, opts)
1055
1061
1056 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1062 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1057 text=opts['text'])
1063 text=opts['text'])
1058
1064
1059 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1065 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1060 node = repo.lookup(changeset)
1066 node = repo.lookup(changeset)
1061 prev, other = repo.changelog.parents(node)
1067 prev, other = repo.changelog.parents(node)
1062 change = repo.changelog.read(node)
1068 change = repo.changelog.read(node)
1063
1069
1064 fp = make_file(repo, repo.changelog, opts['output'],
1070 fp = make_file(repo, repo.changelog, opts['output'],
1065 node=node, total=total, seqno=seqno,
1071 node=node, total=total, seqno=seqno,
1066 revwidth=revwidth)
1072 revwidth=revwidth)
1067 if fp != sys.stdout:
1073 if fp != sys.stdout:
1068 ui.note("%s\n" % fp.name)
1074 ui.note("%s\n" % fp.name)
1069
1075
1070 fp.write("# HG changeset patch\n")
1076 fp.write("# HG changeset patch\n")
1071 fp.write("# User %s\n" % change[1])
1077 fp.write("# User %s\n" % change[1])
1072 fp.write("# Node ID %s\n" % hex(node))
1078 fp.write("# Node ID %s\n" % hex(node))
1073 fp.write("# Parent %s\n" % hex(prev))
1079 fp.write("# Parent %s\n" % hex(prev))
1074 if other != nullid:
1080 if other != nullid:
1075 fp.write("# Parent %s\n" % hex(other))
1081 fp.write("# Parent %s\n" % hex(other))
1076 fp.write(change[4].rstrip())
1082 fp.write(change[4].rstrip())
1077 fp.write("\n\n")
1083 fp.write("\n\n")
1078
1084
1079 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1085 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1080 if fp != sys.stdout:
1086 if fp != sys.stdout:
1081 fp.close()
1087 fp.close()
1082
1088
1083 def export(ui, repo, *changesets, **opts):
1089 def export(ui, repo, *changesets, **opts):
1084 """dump the header and diffs for one or more changesets
1090 """dump the header and diffs for one or more changesets
1085
1091
1086 Print the changeset header and diffs for one or more revisions.
1092 Print the changeset header and diffs for one or more revisions.
1087
1093
1088 The information shown in the changeset header is: author,
1094 The information shown in the changeset header is: author,
1089 changeset hash, parent and commit comment.
1095 changeset hash, parent and commit comment.
1090
1096
1091 Output may be to a file, in which case the name of the file is
1097 Output may be to a file, in which case the name of the file is
1092 given using a format string. The formatting rules are as follows:
1098 given using a format string. The formatting rules are as follows:
1093
1099
1094 %% literal "%" character
1100 %% literal "%" character
1095 %H changeset hash (40 bytes of hexadecimal)
1101 %H changeset hash (40 bytes of hexadecimal)
1096 %N number of patches being generated
1102 %N number of patches being generated
1097 %R changeset revision number
1103 %R changeset revision number
1098 %b basename of the exporting repository
1104 %b basename of the exporting repository
1099 %h short-form changeset hash (12 bytes of hexadecimal)
1105 %h short-form changeset hash (12 bytes of hexadecimal)
1100 %n zero-padded sequence number, starting at 1
1106 %n zero-padded sequence number, starting at 1
1101 %r zero-padded changeset revision number
1107 %r zero-padded changeset revision number
1102
1108
1103 Without the -a option, export will avoid generating diffs of files
1109 Without the -a option, export will avoid generating diffs of files
1104 it detects as binary. With -a, export will generate a diff anyway,
1110 it detects as binary. With -a, export will generate a diff anyway,
1105 probably with undesirable results.
1111 probably with undesirable results.
1106 """
1112 """
1107 if not changesets:
1113 if not changesets:
1108 raise util.Abort(_("export requires at least one changeset"))
1114 raise util.Abort(_("export requires at least one changeset"))
1109 seqno = 0
1115 seqno = 0
1110 revs = list(revrange(ui, repo, changesets))
1116 revs = list(revrange(ui, repo, changesets))
1111 total = len(revs)
1117 total = len(revs)
1112 revwidth = max(map(len, revs))
1118 revwidth = max(map(len, revs))
1113 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
1119 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
1114 for cset in revs:
1120 for cset in revs:
1115 seqno += 1
1121 seqno += 1
1116 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1122 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1117
1123
1118 def forget(ui, repo, *pats, **opts):
1124 def forget(ui, repo, *pats, **opts):
1119 """don't add the specified files on the next commit
1125 """don't add the specified files on the next commit
1120
1126
1121 Undo an 'hg add' scheduled for the next commit.
1127 Undo an 'hg add' scheduled for the next commit.
1122 """
1128 """
1123 forget = []
1129 forget = []
1124 for src, abs, rel, exact in walk(repo, pats, opts):
1130 for src, abs, rel, exact in walk(repo, pats, opts):
1125 if repo.dirstate.state(abs) == 'a':
1131 if repo.dirstate.state(abs) == 'a':
1126 forget.append(abs)
1132 forget.append(abs)
1127 if ui.verbose or not exact:
1133 if ui.verbose or not exact:
1128 ui.status(_('forgetting %s\n') % rel)
1134 ui.status(_('forgetting %s\n') % rel)
1129 repo.forget(forget)
1135 repo.forget(forget)
1130
1136
1131 def grep(ui, repo, pattern, *pats, **opts):
1137 def grep(ui, repo, pattern, *pats, **opts):
1132 """search for a pattern in specified files and revisions
1138 """search for a pattern in specified files and revisions
1133
1139
1134 Search revisions of files for a regular expression.
1140 Search revisions of files for a regular expression.
1135
1141
1136 This command behaves differently than Unix grep. It only accepts
1142 This command behaves differently than Unix grep. It only accepts
1137 Python/Perl regexps. It searches repository history, not the
1143 Python/Perl regexps. It searches repository history, not the
1138 working directory. It always prints the revision number in which
1144 working directory. It always prints the revision number in which
1139 a match appears.
1145 a match appears.
1140
1146
1141 By default, grep only prints output for the first revision of a
1147 By default, grep only prints output for the first revision of a
1142 file in which it finds a match. To get it to print every revision
1148 file in which it finds a match. To get it to print every revision
1143 that contains a change in match status ("-" for a match that
1149 that contains a change in match status ("-" for a match that
1144 becomes a non-match, or "+" for a non-match that becomes a match),
1150 becomes a non-match, or "+" for a non-match that becomes a match),
1145 use the --all flag.
1151 use the --all flag.
1146 """
1152 """
1147 reflags = 0
1153 reflags = 0
1148 if opts['ignore_case']:
1154 if opts['ignore_case']:
1149 reflags |= re.I
1155 reflags |= re.I
1150 regexp = re.compile(pattern, reflags)
1156 regexp = re.compile(pattern, reflags)
1151 sep, eol = ':', '\n'
1157 sep, eol = ':', '\n'
1152 if opts['print0']:
1158 if opts['print0']:
1153 sep = eol = '\0'
1159 sep = eol = '\0'
1154
1160
1155 fcache = {}
1161 fcache = {}
1156 def getfile(fn):
1162 def getfile(fn):
1157 if fn not in fcache:
1163 if fn not in fcache:
1158 fcache[fn] = repo.file(fn)
1164 fcache[fn] = repo.file(fn)
1159 return fcache[fn]
1165 return fcache[fn]
1160
1166
1161 def matchlines(body):
1167 def matchlines(body):
1162 begin = 0
1168 begin = 0
1163 linenum = 0
1169 linenum = 0
1164 while True:
1170 while True:
1165 match = regexp.search(body, begin)
1171 match = regexp.search(body, begin)
1166 if not match:
1172 if not match:
1167 break
1173 break
1168 mstart, mend = match.span()
1174 mstart, mend = match.span()
1169 linenum += body.count('\n', begin, mstart) + 1
1175 linenum += body.count('\n', begin, mstart) + 1
1170 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1176 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1171 lend = body.find('\n', mend)
1177 lend = body.find('\n', mend)
1172 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1178 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1173 begin = lend + 1
1179 begin = lend + 1
1174
1180
1175 class linestate:
1181 class linestate:
1176 def __init__(self, line, linenum, colstart, colend):
1182 def __init__(self, line, linenum, colstart, colend):
1177 self.line = line
1183 self.line = line
1178 self.linenum = linenum
1184 self.linenum = linenum
1179 self.colstart = colstart
1185 self.colstart = colstart
1180 self.colend = colend
1186 self.colend = colend
1181 def __eq__(self, other):
1187 def __eq__(self, other):
1182 return self.line == other.line
1188 return self.line == other.line
1183 def __hash__(self):
1189 def __hash__(self):
1184 return hash(self.line)
1190 return hash(self.line)
1185
1191
1186 matches = {}
1192 matches = {}
1187 def grepbody(fn, rev, body):
1193 def grepbody(fn, rev, body):
1188 matches[rev].setdefault(fn, {})
1194 matches[rev].setdefault(fn, {})
1189 m = matches[rev][fn]
1195 m = matches[rev][fn]
1190 for lnum, cstart, cend, line in matchlines(body):
1196 for lnum, cstart, cend, line in matchlines(body):
1191 s = linestate(line, lnum, cstart, cend)
1197 s = linestate(line, lnum, cstart, cend)
1192 m[s] = s
1198 m[s] = s
1193
1199
1194 prev = {}
1200 prev = {}
1195 ucache = {}
1201 ucache = {}
1196 def display(fn, rev, states, prevstates):
1202 def display(fn, rev, states, prevstates):
1197 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1203 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1198 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1204 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1199 counts = {'-': 0, '+': 0}
1205 counts = {'-': 0, '+': 0}
1200 filerevmatches = {}
1206 filerevmatches = {}
1201 for l in diff:
1207 for l in diff:
1202 if incrementing or not opts['all']:
1208 if incrementing or not opts['all']:
1203 change = ((l in prevstates) and '-') or '+'
1209 change = ((l in prevstates) and '-') or '+'
1204 r = rev
1210 r = rev
1205 else:
1211 else:
1206 change = ((l in states) and '-') or '+'
1212 change = ((l in states) and '-') or '+'
1207 r = prev[fn]
1213 r = prev[fn]
1208 cols = [fn, str(rev)]
1214 cols = [fn, str(rev)]
1209 if opts['line_number']: cols.append(str(l.linenum))
1215 if opts['line_number']: cols.append(str(l.linenum))
1210 if opts['all']: cols.append(change)
1216 if opts['all']: cols.append(change)
1211 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1217 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1212 ucache))
1218 ucache))
1213 if opts['files_with_matches']:
1219 if opts['files_with_matches']:
1214 c = (fn, rev)
1220 c = (fn, rev)
1215 if c in filerevmatches: continue
1221 if c in filerevmatches: continue
1216 filerevmatches[c] = 1
1222 filerevmatches[c] = 1
1217 else:
1223 else:
1218 cols.append(l.line)
1224 cols.append(l.line)
1219 ui.write(sep.join(cols), eol)
1225 ui.write(sep.join(cols), eol)
1220 counts[change] += 1
1226 counts[change] += 1
1221 return counts['+'], counts['-']
1227 return counts['+'], counts['-']
1222
1228
1223 fstate = {}
1229 fstate = {}
1224 skip = {}
1230 skip = {}
1225 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1231 changeiter, getchange = walkchangerevs(ui, repo, repo.getcwd(), pats, opts)
1226 count = 0
1232 count = 0
1227 incrementing = False
1233 incrementing = False
1228 for st, rev, fns in changeiter:
1234 for st, rev, fns in changeiter:
1229 if st == 'window':
1235 if st == 'window':
1230 incrementing = rev
1236 incrementing = rev
1231 matches.clear()
1237 matches.clear()
1232 elif st == 'add':
1238 elif st == 'add':
1233 change = repo.changelog.read(repo.lookup(str(rev)))
1239 change = repo.changelog.read(repo.lookup(str(rev)))
1234 mf = repo.manifest.read(change[0])
1240 mf = repo.manifest.read(change[0])
1235 matches[rev] = {}
1241 matches[rev] = {}
1236 for fn in fns:
1242 for fn in fns:
1237 if fn in skip: continue
1243 if fn in skip: continue
1238 fstate.setdefault(fn, {})
1244 fstate.setdefault(fn, {})
1239 try:
1245 try:
1240 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1246 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1241 except KeyError:
1247 except KeyError:
1242 pass
1248 pass
1243 elif st == 'iter':
1249 elif st == 'iter':
1244 states = matches[rev].items()
1250 states = matches[rev].items()
1245 states.sort()
1251 states.sort()
1246 for fn, m in states:
1252 for fn, m in states:
1247 if fn in skip: continue
1253 if fn in skip: continue
1248 if incrementing or not opts['all'] or fstate[fn]:
1254 if incrementing or not opts['all'] or fstate[fn]:
1249 pos, neg = display(fn, rev, m, fstate[fn])
1255 pos, neg = display(fn, rev, m, fstate[fn])
1250 count += pos + neg
1256 count += pos + neg
1251 if pos and not opts['all']:
1257 if pos and not opts['all']:
1252 skip[fn] = True
1258 skip[fn] = True
1253 fstate[fn] = m
1259 fstate[fn] = m
1254 prev[fn] = rev
1260 prev[fn] = rev
1255
1261
1256 if not incrementing:
1262 if not incrementing:
1257 fstate = fstate.items()
1263 fstate = fstate.items()
1258 fstate.sort()
1264 fstate.sort()
1259 for fn, state in fstate:
1265 for fn, state in fstate:
1260 if fn in skip: continue
1266 if fn in skip: continue
1261 display(fn, rev, {}, state)
1267 display(fn, rev, {}, state)
1262 return (count == 0 and 1) or 0
1268 return (count == 0 and 1) or 0
1263
1269
1264 def heads(ui, repo, **opts):
1270 def heads(ui, repo, **opts):
1265 """show current repository heads
1271 """show current repository heads
1266
1272
1267 Show all repository head changesets.
1273 Show all repository head changesets.
1268
1274
1269 Repository "heads" are changesets that don't have children
1275 Repository "heads" are changesets that don't have children
1270 changesets. They are where development generally takes place and
1276 changesets. They are where development generally takes place and
1271 are the usual targets for update and merge operations.
1277 are the usual targets for update and merge operations.
1272 """
1278 """
1273 heads = repo.changelog.heads()
1279 heads = repo.changelog.heads()
1274 br = None
1280 br = None
1275 if opts['branches']:
1281 if opts['branches']:
1276 br = repo.branchlookup(heads)
1282 br = repo.branchlookup(heads)
1277 for n in repo.changelog.heads():
1283 for n in repo.changelog.heads():
1278 show_changeset(ui, repo, changenode=n, brinfo=br)
1284 show_changeset(ui, repo, changenode=n, brinfo=br)
1279
1285
1280 def identify(ui, repo):
1286 def identify(ui, repo):
1281 """print information about the working copy
1287 """print information about the working copy
1282 Print a short summary of the current state of the repo.
1288 Print a short summary of the current state of the repo.
1283
1289
1284 This summary identifies the repository state using one or two parent
1290 This summary identifies the repository state using one or two parent
1285 hash identifiers, followed by a "+" if there are uncommitted changes
1291 hash identifiers, followed by a "+" if there are uncommitted changes
1286 in the working directory, followed by a list of tags for this revision.
1292 in the working directory, followed by a list of tags for this revision.
1287 """
1293 """
1288 parents = [p for p in repo.dirstate.parents() if p != nullid]
1294 parents = [p for p in repo.dirstate.parents() if p != nullid]
1289 if not parents:
1295 if not parents:
1290 ui.write(_("unknown\n"))
1296 ui.write(_("unknown\n"))
1291 return
1297 return
1292
1298
1293 hexfunc = ui.verbose and hex or short
1299 hexfunc = ui.verbose and hex or short
1294 (c, a, d, u) = repo.changes()
1300 (c, a, d, u) = repo.changes()
1295 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1301 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1296 (c or a or d) and "+" or "")]
1302 (c or a or d) and "+" or "")]
1297
1303
1298 if not ui.quiet:
1304 if not ui.quiet:
1299 # multiple tags for a single parent separated by '/'
1305 # multiple tags for a single parent separated by '/'
1300 parenttags = ['/'.join(tags)
1306 parenttags = ['/'.join(tags)
1301 for tags in map(repo.nodetags, parents) if tags]
1307 for tags in map(repo.nodetags, parents) if tags]
1302 # tags for multiple parents separated by ' + '
1308 # tags for multiple parents separated by ' + '
1303 if parenttags:
1309 if parenttags:
1304 output.append(' + '.join(parenttags))
1310 output.append(' + '.join(parenttags))
1305
1311
1306 ui.write("%s\n" % ' '.join(output))
1312 ui.write("%s\n" % ' '.join(output))
1307
1313
1308 def import_(ui, repo, patch1, *patches, **opts):
1314 def import_(ui, repo, patch1, *patches, **opts):
1309 """import an ordered set of patches
1315 """import an ordered set of patches
1310
1316
1311 Import a list of patches and commit them individually.
1317 Import a list of patches and commit them individually.
1312
1318
1313 If there are outstanding changes in the working directory, import
1319 If there are outstanding changes in the working directory, import
1314 will abort unless given the -f flag.
1320 will abort unless given the -f flag.
1315
1321
1316 If a patch looks like a mail message (its first line starts with
1322 If a patch looks like a mail message (its first line starts with
1317 "From " or looks like an RFC822 header), it will not be applied
1323 "From " or looks like an RFC822 header), it will not be applied
1318 unless the -f option is used. The importer neither parses nor
1324 unless the -f option is used. The importer neither parses nor
1319 discards mail headers, so use -f only to override the "mailness"
1325 discards mail headers, so use -f only to override the "mailness"
1320 safety check, not to import a real mail message.
1326 safety check, not to import a real mail message.
1321 """
1327 """
1322 patches = (patch1,) + patches
1328 patches = (patch1,) + patches
1323
1329
1324 if not opts['force']:
1330 if not opts['force']:
1325 (c, a, d, u) = repo.changes()
1331 (c, a, d, u) = repo.changes()
1326 if c or a or d:
1332 if c or a or d:
1327 raise util.Abort(_("outstanding uncommitted changes"))
1333 raise util.Abort(_("outstanding uncommitted changes"))
1328
1334
1329 d = opts["base"]
1335 d = opts["base"]
1330 strip = opts["strip"]
1336 strip = opts["strip"]
1331
1337
1332 mailre = re.compile(r'(?:From |[\w-]+:)')
1338 mailre = re.compile(r'(?:From |[\w-]+:)')
1333
1339
1334 # attempt to detect the start of a patch
1340 # attempt to detect the start of a patch
1335 # (this heuristic is borrowed from quilt)
1341 # (this heuristic is borrowed from quilt)
1336 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1342 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1337 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1343 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1338 '(---|\*\*\*)[ \t])')
1344 '(---|\*\*\*)[ \t])')
1339
1345
1340 for patch in patches:
1346 for patch in patches:
1341 ui.status(_("applying %s\n") % patch)
1347 ui.status(_("applying %s\n") % patch)
1342 pf = os.path.join(d, patch)
1348 pf = os.path.join(d, patch)
1343
1349
1344 message = []
1350 message = []
1345 user = None
1351 user = None
1346 hgpatch = False
1352 hgpatch = False
1347 for line in file(pf):
1353 for line in file(pf):
1348 line = line.rstrip()
1354 line = line.rstrip()
1349 if (not message and not hgpatch and
1355 if (not message and not hgpatch and
1350 mailre.match(line) and not opts['force']):
1356 mailre.match(line) and not opts['force']):
1351 if len(line) > 35: line = line[:32] + '...'
1357 if len(line) > 35: line = line[:32] + '...'
1352 raise util.Abort(_('first line looks like a '
1358 raise util.Abort(_('first line looks like a '
1353 'mail header: ') + line)
1359 'mail header: ') + line)
1354 if diffre.match(line):
1360 if diffre.match(line):
1355 break
1361 break
1356 elif hgpatch:
1362 elif hgpatch:
1357 # parse values when importing the result of an hg export
1363 # parse values when importing the result of an hg export
1358 if line.startswith("# User "):
1364 if line.startswith("# User "):
1359 user = line[7:]
1365 user = line[7:]
1360 ui.debug(_('User: %s\n') % user)
1366 ui.debug(_('User: %s\n') % user)
1361 elif not line.startswith("# ") and line:
1367 elif not line.startswith("# ") and line:
1362 message.append(line)
1368 message.append(line)
1363 hgpatch = False
1369 hgpatch = False
1364 elif line == '# HG changeset patch':
1370 elif line == '# HG changeset patch':
1365 hgpatch = True
1371 hgpatch = True
1366 message = [] # We may have collected garbage
1372 message = [] # We may have collected garbage
1367 else:
1373 else:
1368 message.append(line)
1374 message.append(line)
1369
1375
1370 # make sure message isn't empty
1376 # make sure message isn't empty
1371 if not message:
1377 if not message:
1372 message = _("imported patch %s\n") % patch
1378 message = _("imported patch %s\n") % patch
1373 else:
1379 else:
1374 message = "%s\n" % '\n'.join(message)
1380 message = "%s\n" % '\n'.join(message)
1375 ui.debug(_('message:\n%s\n') % message)
1381 ui.debug(_('message:\n%s\n') % message)
1376
1382
1377 files = util.patch(strip, pf, ui)
1383 files = util.patch(strip, pf, ui)
1378
1384
1379 if len(files) > 0:
1385 if len(files) > 0:
1380 addremove(ui, repo, *files)
1386 addremove(ui, repo, *files)
1381 repo.commit(files, message, user)
1387 repo.commit(files, message, user)
1382
1388
1383 def incoming(ui, repo, source="default", **opts):
1389 def incoming(ui, repo, source="default", **opts):
1384 """show new changesets found in source
1390 """show new changesets found in source
1385
1391
1386 Show new changesets found in the specified repo or the default
1392 Show new changesets found in the specified repo or the default
1387 pull repo. These are the changesets that would be pulled if a pull
1393 pull repo. These are the changesets that would be pulled if a pull
1388 was requested.
1394 was requested.
1389
1395
1390 Currently only local repositories are supported.
1396 Currently only local repositories are supported.
1391 """
1397 """
1392 source = ui.expandpath(source, repo.root)
1398 source = ui.expandpath(source, repo.root)
1393 other = hg.repository(ui, source)
1399 other = hg.repository(ui, source)
1394 if not other.local():
1400 if not other.local():
1395 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1401 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1396 o = repo.findincoming(other)
1402 o = repo.findincoming(other)
1397 if not o:
1403 if not o:
1398 return
1404 return
1399 o = other.newer(o)
1405 o = other.changelog.nodesbetween(o)[0]
1400 if opts['newest_first']:
1406 if opts['newest_first']:
1401 o.reverse()
1407 o.reverse()
1402 for n in o:
1408 for n in o:
1403 parents = [p for p in other.changelog.parents(n) if p != nullid]
1409 parents = [p for p in other.changelog.parents(n) if p != nullid]
1404 if opts['no_merges'] and len(parents) == 2:
1410 if opts['no_merges'] and len(parents) == 2:
1405 continue
1411 continue
1406 show_changeset(ui, other, changenode=n)
1412 show_changeset(ui, other, changenode=n)
1407 if opts['patch']:
1413 if opts['patch']:
1408 prev = (parents and parents[0]) or nullid
1414 prev = (parents and parents[0]) or nullid
1409 dodiff(ui, ui, other, prev, n)
1415 dodiff(ui, ui, other, prev, n)
1410 ui.write("\n")
1416 ui.write("\n")
1411
1417
1412 def init(ui, dest="."):
1418 def init(ui, dest="."):
1413 """create a new repository in the given directory
1419 """create a new repository in the given directory
1414
1420
1415 Initialize a new repository in the given directory. If the given
1421 Initialize a new repository in the given directory. If the given
1416 directory does not exist, it is created.
1422 directory does not exist, it is created.
1417
1423
1418 If no directory is given, the current directory is used.
1424 If no directory is given, the current directory is used.
1419 """
1425 """
1420 if not os.path.exists(dest):
1426 if not os.path.exists(dest):
1421 os.mkdir(dest)
1427 os.mkdir(dest)
1422 hg.repository(ui, dest, create=1)
1428 hg.repository(ui, dest, create=1)
1423
1429
1424 def locate(ui, repo, *pats, **opts):
1430 def locate(ui, repo, *pats, **opts):
1425 """locate files matching specific patterns
1431 """locate files matching specific patterns
1426
1432
1427 Print all files under Mercurial control whose names match the
1433 Print all files under Mercurial control whose names match the
1428 given patterns.
1434 given patterns.
1429
1435
1430 This command searches the current directory and its
1436 This command searches the current directory and its
1431 subdirectories. To search an entire repository, move to the root
1437 subdirectories. To search an entire repository, move to the root
1432 of the repository.
1438 of the repository.
1433
1439
1434 If no patterns are given to match, this command prints all file
1440 If no patterns are given to match, this command prints all file
1435 names.
1441 names.
1436
1442
1437 If you want to feed the output of this command into the "xargs"
1443 If you want to feed the output of this command into the "xargs"
1438 command, use the "-0" option to both this command and "xargs".
1444 command, use the "-0" option to both this command and "xargs".
1439 This will avoid the problem of "xargs" treating single filenames
1445 This will avoid the problem of "xargs" treating single filenames
1440 that contain white space as multiple filenames.
1446 that contain white space as multiple filenames.
1441 """
1447 """
1442 end = opts['print0'] and '\0' or '\n'
1448 end = opts['print0'] and '\0' or '\n'
1443
1449
1444 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1450 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1445 if repo.dirstate.state(abs) == '?':
1451 if repo.dirstate.state(abs) == '?':
1446 continue
1452 continue
1447 if opts['fullpath']:
1453 if opts['fullpath']:
1448 ui.write(os.path.join(repo.root, abs), end)
1454 ui.write(os.path.join(repo.root, abs), end)
1449 else:
1455 else:
1450 ui.write(rel, end)
1456 ui.write(rel, end)
1451
1457
1452 def log(ui, repo, *pats, **opts):
1458 def log(ui, repo, *pats, **opts):
1453 """show revision history of entire repository or files
1459 """show revision history of entire repository or files
1454
1460
1455 Print the revision history of the specified files or the entire project.
1461 Print the revision history of the specified files or the entire project.
1456
1462
1457 By default this command outputs: changeset id and hash, tags,
1463 By default this command outputs: changeset id and hash, tags,
1458 parents, user, date and time, and a summary for each commit. The
1464 parents, user, date and time, and a summary for each commit. The
1459 -v switch adds some more detail, such as changed files, manifest
1465 -v switch adds some more detail, such as changed files, manifest
1460 hashes or message signatures.
1466 hashes or message signatures.
1461 """
1467 """
1462 class dui:
1468 class dui:
1463 # Implement and delegate some ui protocol. Save hunks of
1469 # Implement and delegate some ui protocol. Save hunks of
1464 # output for later display in the desired order.
1470 # output for later display in the desired order.
1465 def __init__(self, ui):
1471 def __init__(self, ui):
1466 self.ui = ui
1472 self.ui = ui
1467 self.hunk = {}
1473 self.hunk = {}
1468 def bump(self, rev):
1474 def bump(self, rev):
1469 self.rev = rev
1475 self.rev = rev
1470 self.hunk[rev] = []
1476 self.hunk[rev] = []
1471 def note(self, *args):
1477 def note(self, *args):
1472 if self.verbose:
1478 if self.verbose:
1473 self.write(*args)
1479 self.write(*args)
1474 def status(self, *args):
1480 def status(self, *args):
1475 if not self.quiet:
1481 if not self.quiet:
1476 self.write(*args)
1482 self.write(*args)
1477 def write(self, *args):
1483 def write(self, *args):
1478 self.hunk[self.rev].append(args)
1484 self.hunk[self.rev].append(args)
1479 def debug(self, *args):
1485 def debug(self, *args):
1480 if self.debugflag:
1486 if self.debugflag:
1481 self.write(*args)
1487 self.write(*args)
1482 def __getattr__(self, key):
1488 def __getattr__(self, key):
1483 return getattr(self.ui, key)
1489 return getattr(self.ui, key)
1484 cwd = repo.getcwd()
1490 cwd = repo.getcwd()
1485 if not pats and cwd:
1491 if not pats and cwd:
1486 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1492 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
1487 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1493 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
1488 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1494 changeiter, getchange = walkchangerevs(ui, repo, (pats and cwd) or '',
1489 pats, opts)
1495 pats, opts)
1490 for st, rev, fns in changeiter:
1496 for st, rev, fns in changeiter:
1491 if st == 'window':
1497 if st == 'window':
1492 du = dui(ui)
1498 du = dui(ui)
1493 elif st == 'add':
1499 elif st == 'add':
1494 du.bump(rev)
1500 du.bump(rev)
1495 changenode = repo.changelog.node(rev)
1501 changenode = repo.changelog.node(rev)
1496 parents = [p for p in repo.changelog.parents(changenode)
1502 parents = [p for p in repo.changelog.parents(changenode)
1497 if p != nullid]
1503 if p != nullid]
1498 if opts['no_merges'] and len(parents) == 2:
1504 if opts['no_merges'] and len(parents) == 2:
1499 continue
1505 continue
1500 if opts['only_merges'] and len(parents) != 2:
1506 if opts['only_merges'] and len(parents) != 2:
1501 continue
1507 continue
1502
1508
1503 br = None
1509 br = None
1504 if opts['keyword']:
1510 if opts['keyword']:
1505 changes = repo.changelog.read(repo.changelog.node(rev))
1511 changes = repo.changelog.read(repo.changelog.node(rev))
1506 miss = 0
1512 miss = 0
1507 for k in [kw.lower() for kw in opts['keyword']]:
1513 for k in [kw.lower() for kw in opts['keyword']]:
1508 if not (k in changes[1].lower() or
1514 if not (k in changes[1].lower() or
1509 k in changes[4].lower() or
1515 k in changes[4].lower() or
1510 k in " ".join(changes[3][:20]).lower()):
1516 k in " ".join(changes[3][:20]).lower()):
1511 miss = 1
1517 miss = 1
1512 break
1518 break
1513 if miss:
1519 if miss:
1514 continue
1520 continue
1515
1521
1516 if opts['branch']:
1522 if opts['branch']:
1517 br = repo.branchlookup([repo.changelog.node(rev)])
1523 br = repo.branchlookup([repo.changelog.node(rev)])
1518
1524
1519 show_changeset(du, repo, rev, brinfo=br)
1525 show_changeset(du, repo, rev, brinfo=br)
1520 if opts['patch']:
1526 if opts['patch']:
1521 prev = (parents and parents[0]) or nullid
1527 prev = (parents and parents[0]) or nullid
1522 dodiff(du, du, repo, prev, changenode, fns)
1528 dodiff(du, du, repo, prev, changenode, fns)
1523 du.write("\n\n")
1529 du.write("\n\n")
1524 elif st == 'iter':
1530 elif st == 'iter':
1525 for args in du.hunk[rev]:
1531 for args in du.hunk[rev]:
1526 ui.write(*args)
1532 ui.write(*args)
1527
1533
1528 def manifest(ui, repo, rev=None):
1534 def manifest(ui, repo, rev=None):
1529 """output the latest or given revision of the project manifest
1535 """output the latest or given revision of the project manifest
1530
1536
1531 Print a list of version controlled files for the given revision.
1537 Print a list of version controlled files for the given revision.
1532
1538
1533 The manifest is the list of files being version controlled. If no revision
1539 The manifest is the list of files being version controlled. If no revision
1534 is given then the tip is used.
1540 is given then the tip is used.
1535 """
1541 """
1536 if rev:
1542 if rev:
1537 try:
1543 try:
1538 # assume all revision numbers are for changesets
1544 # assume all revision numbers are for changesets
1539 n = repo.lookup(rev)
1545 n = repo.lookup(rev)
1540 change = repo.changelog.read(n)
1546 change = repo.changelog.read(n)
1541 n = change[0]
1547 n = change[0]
1542 except hg.RepoError:
1548 except hg.RepoError:
1543 n = repo.manifest.lookup(rev)
1549 n = repo.manifest.lookup(rev)
1544 else:
1550 else:
1545 n = repo.manifest.tip()
1551 n = repo.manifest.tip()
1546 m = repo.manifest.read(n)
1552 m = repo.manifest.read(n)
1547 mf = repo.manifest.readflags(n)
1553 mf = repo.manifest.readflags(n)
1548 files = m.keys()
1554 files = m.keys()
1549 files.sort()
1555 files.sort()
1550
1556
1551 for f in files:
1557 for f in files:
1552 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1558 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1553
1559
1554 def outgoing(ui, repo, dest="default-push", **opts):
1560 def outgoing(ui, repo, dest="default-push", **opts):
1555 """show changesets not found in destination
1561 """show changesets not found in destination
1556
1562
1557 Show changesets not found in the specified destination repo or the
1563 Show changesets not found in the specified destination repo or the
1558 default push repo. These are the changesets that would be pushed
1564 default push repo. These are the changesets that would be pushed
1559 if a push was requested.
1565 if a push was requested.
1560 """
1566 """
1561 dest = ui.expandpath(dest, repo.root)
1567 dest = ui.expandpath(dest, repo.root)
1562 other = hg.repository(ui, dest)
1568 other = hg.repository(ui, dest)
1563 o = repo.findoutgoing(other)
1569 o = repo.findoutgoing(other)
1564 o = repo.newer(o)
1570 o = repo.changelog.nodesbetween(o)[0]
1565 if opts['newest_first']:
1571 if opts['newest_first']:
1566 o.reverse()
1572 o.reverse()
1567 for n in o:
1573 for n in o:
1568 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1574 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1569 if opts['no_merges'] and len(parents) == 2:
1575 if opts['no_merges'] and len(parents) == 2:
1570 continue
1576 continue
1571 show_changeset(ui, repo, changenode=n)
1577 show_changeset(ui, repo, changenode=n)
1572 if opts['patch']:
1578 if opts['patch']:
1573 prev = (parents and parents[0]) or nullid
1579 prev = (parents and parents[0]) or nullid
1574 dodiff(ui, ui, repo, prev, n)
1580 dodiff(ui, ui, repo, prev, n)
1575 ui.write("\n")
1581 ui.write("\n")
1576
1582
1577 def parents(ui, repo, rev=None):
1583 def parents(ui, repo, rev=None):
1578 """show the parents of the working dir or revision
1584 """show the parents of the working dir or revision
1579
1585
1580 Print the working directory's parent revisions.
1586 Print the working directory's parent revisions.
1581 """
1587 """
1582 if rev:
1588 if rev:
1583 p = repo.changelog.parents(repo.lookup(rev))
1589 p = repo.changelog.parents(repo.lookup(rev))
1584 else:
1590 else:
1585 p = repo.dirstate.parents()
1591 p = repo.dirstate.parents()
1586
1592
1587 for n in p:
1593 for n in p:
1588 if n != nullid:
1594 if n != nullid:
1589 show_changeset(ui, repo, changenode=n)
1595 show_changeset(ui, repo, changenode=n)
1590
1596
1591 def paths(ui, search=None):
1597 def paths(ui, search=None):
1592 """show definition of symbolic path names
1598 """show definition of symbolic path names
1593
1599
1594 Show definition of symbolic path name NAME. If no name is given, show
1600 Show definition of symbolic path name NAME. If no name is given, show
1595 definition of available names.
1601 definition of available names.
1596
1602
1597 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1603 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1598 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1604 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1599 """
1605 """
1600 try:
1606 try:
1601 repo = hg.repository(ui=ui)
1607 repo = hg.repository(ui=ui)
1602 except hg.RepoError:
1608 except hg.RepoError:
1603 pass
1609 pass
1604
1610
1605 if search:
1611 if search:
1606 for name, path in ui.configitems("paths"):
1612 for name, path in ui.configitems("paths"):
1607 if name == search:
1613 if name == search:
1608 ui.write("%s\n" % path)
1614 ui.write("%s\n" % path)
1609 return
1615 return
1610 ui.warn(_("not found!\n"))
1616 ui.warn(_("not found!\n"))
1611 return 1
1617 return 1
1612 else:
1618 else:
1613 for name, path in ui.configitems("paths"):
1619 for name, path in ui.configitems("paths"):
1614 ui.write("%s = %s\n" % (name, path))
1620 ui.write("%s = %s\n" % (name, path))
1615
1621
1616 def pull(ui, repo, source="default", **opts):
1622 def pull(ui, repo, source="default", **opts):
1617 """pull changes from the specified source
1623 """pull changes from the specified source
1618
1624
1619 Pull changes from a remote repository to a local one.
1625 Pull changes from a remote repository to a local one.
1620
1626
1621 This finds all changes from the repository at the specified path
1627 This finds all changes from the repository at the specified path
1622 or URL and adds them to the local repository. By default, this
1628 or URL and adds them to the local repository. By default, this
1623 does not update the copy of the project in the working directory.
1629 does not update the copy of the project in the working directory.
1624
1630
1625 Valid URLs are of the form:
1631 Valid URLs are of the form:
1626
1632
1627 local/filesystem/path
1633 local/filesystem/path
1628 http://[user@]host[:port][/path]
1634 http://[user@]host[:port][/path]
1629 https://[user@]host[:port][/path]
1635 https://[user@]host[:port][/path]
1630 ssh://[user@]host[:port][/path]
1636 ssh://[user@]host[:port][/path]
1631
1637
1632 SSH requires an accessible shell account on the destination machine
1638 SSH requires an accessible shell account on the destination machine
1633 and a copy of hg in the remote path. With SSH, paths are relative
1639 and a copy of hg in the remote path. With SSH, paths are relative
1634 to the remote user's home directory by default; use two slashes at
1640 to the remote user's home directory by default; use two slashes at
1635 the start of a path to specify it as relative to the filesystem root.
1641 the start of a path to specify it as relative to the filesystem root.
1636 """
1642 """
1637 source = ui.expandpath(source, repo.root)
1643 source = ui.expandpath(source, repo.root)
1638 ui.status(_('pulling from %s\n') % (source))
1644 ui.status(_('pulling from %s\n') % (source))
1639
1645
1640 if opts['ssh']:
1646 if opts['ssh']:
1641 ui.setconfig("ui", "ssh", opts['ssh'])
1647 ui.setconfig("ui", "ssh", opts['ssh'])
1642 if opts['remotecmd']:
1648 if opts['remotecmd']:
1643 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1649 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1644
1650
1645 other = hg.repository(ui, source)
1651 other = hg.repository(ui, source)
1646 r = repo.pull(other)
1652 revs = None
1653 if opts['rev'] and not other.local():
1654 raise util.Abort("pull -r doesn't work for remote repositories yet")
1655 elif opts['rev']:
1656 revs = [other.lookup(rev) for rev in opts['rev']]
1657 r = repo.pull(other, heads=revs)
1647 if not r:
1658 if not r:
1648 if opts['update']:
1659 if opts['update']:
1649 return update(ui, repo)
1660 return update(ui, repo)
1650 else:
1661 else:
1651 ui.status(_("(run 'hg update' to get a working copy)\n"))
1662 ui.status(_("(run 'hg update' to get a working copy)\n"))
1652
1663
1653 return r
1664 return r
1654
1665
1655 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1666 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1656 """push changes to the specified destination
1667 """push changes to the specified destination
1657
1668
1658 Push changes from the local repository to the given destination.
1669 Push changes from the local repository to the given destination.
1659
1670
1660 This is the symmetrical operation for pull. It helps to move
1671 This is the symmetrical operation for pull. It helps to move
1661 changes from the current repository to a different one. If the
1672 changes from the current repository to a different one. If the
1662 destination is local this is identical to a pull in that directory
1673 destination is local this is identical to a pull in that directory
1663 from the current one.
1674 from the current one.
1664
1675
1665 By default, push will refuse to run if it detects the result would
1676 By default, push will refuse to run if it detects the result would
1666 increase the number of remote heads. This generally indicates the
1677 increase the number of remote heads. This generally indicates the
1667 the client has forgotten to sync and merge before pushing.
1678 the client has forgotten to sync and merge before pushing.
1668
1679
1669 Valid URLs are of the form:
1680 Valid URLs are of the form:
1670
1681
1671 local/filesystem/path
1682 local/filesystem/path
1672 ssh://[user@]host[:port][/path]
1683 ssh://[user@]host[:port][/path]
1673
1684
1674 SSH requires an accessible shell account on the destination
1685 SSH requires an accessible shell account on the destination
1675 machine and a copy of hg in the remote path.
1686 machine and a copy of hg in the remote path.
1676 """
1687 """
1677 dest = ui.expandpath(dest, repo.root)
1688 dest = ui.expandpath(dest, repo.root)
1678 ui.status('pushing to %s\n' % (dest))
1689 ui.status('pushing to %s\n' % (dest))
1679
1690
1680 if ssh:
1691 if ssh:
1681 ui.setconfig("ui", "ssh", ssh)
1692 ui.setconfig("ui", "ssh", ssh)
1682 if remotecmd:
1693 if remotecmd:
1683 ui.setconfig("ui", "remotecmd", remotecmd)
1694 ui.setconfig("ui", "remotecmd", remotecmd)
1684
1695
1685 other = hg.repository(ui, dest)
1696 other = hg.repository(ui, dest)
1686 r = repo.push(other, force)
1697 r = repo.push(other, force)
1687 return r
1698 return r
1688
1699
1689 def rawcommit(ui, repo, *flist, **rc):
1700 def rawcommit(ui, repo, *flist, **rc):
1690 """raw commit interface
1701 """raw commit interface
1691
1702
1692 Lowlevel commit, for use in helper scripts.
1703 Lowlevel commit, for use in helper scripts.
1693
1704
1694 This command is not intended to be used by normal users, as it is
1705 This command is not intended to be used by normal users, as it is
1695 primarily useful for importing from other SCMs.
1706 primarily useful for importing from other SCMs.
1696 """
1707 """
1697 if rc['text']:
1708 if rc['text']:
1698 ui.warn(_("Warning: -t and --text is deprecated,"
1709 ui.warn(_("Warning: -t and --text is deprecated,"
1699 " please use -m or --message instead.\n"))
1710 " please use -m or --message instead.\n"))
1700 message = rc['message'] or rc['text']
1711 message = rc['message'] or rc['text']
1701 if not message and rc['logfile']:
1712 if not message and rc['logfile']:
1702 try:
1713 try:
1703 message = open(rc['logfile']).read()
1714 message = open(rc['logfile']).read()
1704 except IOError:
1715 except IOError:
1705 pass
1716 pass
1706 if not message and not rc['logfile']:
1717 if not message and not rc['logfile']:
1707 raise util.Abort(_("missing commit message"))
1718 raise util.Abort(_("missing commit message"))
1708
1719
1709 files = relpath(repo, list(flist))
1720 files = relpath(repo, list(flist))
1710 if rc['files']:
1721 if rc['files']:
1711 files += open(rc['files']).read().splitlines()
1722 files += open(rc['files']).read().splitlines()
1712
1723
1713 rc['parent'] = map(repo.lookup, rc['parent'])
1724 rc['parent'] = map(repo.lookup, rc['parent'])
1714
1725
1715 try:
1726 try:
1716 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1727 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1717 except ValueError, inst:
1728 except ValueError, inst:
1718 raise util.Abort(str(inst))
1729 raise util.Abort(str(inst))
1719
1730
1720 def recover(ui, repo):
1731 def recover(ui, repo):
1721 """roll back an interrupted transaction
1732 """roll back an interrupted transaction
1722
1733
1723 Recover from an interrupted commit or pull.
1734 Recover from an interrupted commit or pull.
1724
1735
1725 This command tries to fix the repository status after an interrupted
1736 This command tries to fix the repository status after an interrupted
1726 operation. It should only be necessary when Mercurial suggests it.
1737 operation. It should only be necessary when Mercurial suggests it.
1727 """
1738 """
1728 repo.recover()
1739 repo.recover()
1729
1740
1730 def remove(ui, repo, pat, *pats, **opts):
1741 def remove(ui, repo, pat, *pats, **opts):
1731 """remove the specified files on the next commit
1742 """remove the specified files on the next commit
1732
1743
1733 Schedule the indicated files for removal from the repository.
1744 Schedule the indicated files for removal from the repository.
1734
1745
1735 This command schedules the files to be removed at the next commit.
1746 This command schedules the files to be removed at the next commit.
1736 This only removes files from the current branch, not from the
1747 This only removes files from the current branch, not from the
1737 entire project history. If the files still exist in the working
1748 entire project history. If the files still exist in the working
1738 directory, they will be deleted from it.
1749 directory, they will be deleted from it.
1739 """
1750 """
1740 names = []
1751 names = []
1741 def okaytoremove(abs, rel, exact):
1752 def okaytoremove(abs, rel, exact):
1742 c, a, d, u = repo.changes(files = [abs])
1753 c, a, d, u = repo.changes(files = [abs])
1743 reason = None
1754 reason = None
1744 if c: reason = _('is modified')
1755 if c: reason = _('is modified')
1745 elif a: reason = _('has been marked for add')
1756 elif a: reason = _('has been marked for add')
1746 elif u: reason = _('is not managed')
1757 elif u: reason = _('is not managed')
1747 if reason:
1758 if reason:
1748 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1759 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1749 else:
1760 else:
1750 return True
1761 return True
1751 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1762 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1752 if okaytoremove(abs, rel, exact):
1763 if okaytoremove(abs, rel, exact):
1753 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1764 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1754 names.append(abs)
1765 names.append(abs)
1755 repo.remove(names, unlink=True)
1766 repo.remove(names, unlink=True)
1756
1767
1757 def rename(ui, repo, *pats, **opts):
1768 def rename(ui, repo, *pats, **opts):
1758 """rename files; equivalent of copy + remove
1769 """rename files; equivalent of copy + remove
1759
1770
1760 Mark dest as copies of sources; mark sources for deletion. If
1771 Mark dest as copies of sources; mark sources for deletion. If
1761 dest is a directory, copies are put in that directory. If dest is
1772 dest is a directory, copies are put in that directory. If dest is
1762 a file, there can only be one source.
1773 a file, there can only be one source.
1763
1774
1764 By default, this command copies the contents of files as they
1775 By default, this command copies the contents of files as they
1765 stand in the working directory. If invoked with --after, the
1776 stand in the working directory. If invoked with --after, the
1766 operation is recorded, but no copying is performed.
1777 operation is recorded, but no copying is performed.
1767
1778
1768 This command takes effect in the next commit.
1779 This command takes effect in the next commit.
1769
1780
1770 NOTE: This command should be treated as experimental. While it
1781 NOTE: This command should be treated as experimental. While it
1771 should properly record rename files, this information is not yet
1782 should properly record rename files, this information is not yet
1772 fully used by merge, nor fully reported by log.
1783 fully used by merge, nor fully reported by log.
1773 """
1784 """
1774 errs, copied = docopy(ui, repo, pats, opts)
1785 errs, copied = docopy(ui, repo, pats, opts)
1775 names = []
1786 names = []
1776 for abs, rel, exact in copied:
1787 for abs, rel, exact in copied:
1777 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1788 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1778 names.append(abs)
1789 names.append(abs)
1779 repo.remove(names, unlink=True)
1790 repo.remove(names, unlink=True)
1780 return errs
1791 return errs
1781
1792
1782 def revert(ui, repo, *names, **opts):
1793 def revert(ui, repo, *names, **opts):
1783 """revert modified files or dirs back to their unmodified states
1794 """revert modified files or dirs back to their unmodified states
1784
1795
1785 Revert any uncommitted modifications made to the named files or
1796 Revert any uncommitted modifications made to the named files or
1786 directories. This restores the contents of the affected files to
1797 directories. This restores the contents of the affected files to
1787 an unmodified state.
1798 an unmodified state.
1788
1799
1789 If a file has been deleted, it is recreated. If the executable
1800 If a file has been deleted, it is recreated. If the executable
1790 mode of a file was changed, it is reset.
1801 mode of a file was changed, it is reset.
1791
1802
1792 If a directory is given, all files in that directory and its
1803 If a directory is given, all files in that directory and its
1793 subdirectories are reverted.
1804 subdirectories are reverted.
1794
1805
1795 If no arguments are given, all files in the current directory and
1806 If no arguments are given, all files in the current directory and
1796 its subdirectories are reverted.
1807 its subdirectories are reverted.
1797 """
1808 """
1798 node = opts['rev'] and repo.lookup(opts['rev']) or \
1809 node = opts['rev'] and repo.lookup(opts['rev']) or \
1799 repo.dirstate.parents()[0]
1810 repo.dirstate.parents()[0]
1800 root = os.path.realpath(repo.root)
1811 root = os.path.realpath(repo.root)
1801
1812
1802 def trimpath(p):
1813 def trimpath(p):
1803 p = os.path.realpath(p)
1814 p = os.path.realpath(p)
1804 if p.startswith(root):
1815 if p.startswith(root):
1805 rest = p[len(root):]
1816 rest = p[len(root):]
1806 if not rest:
1817 if not rest:
1807 return rest
1818 return rest
1808 if p.startswith(os.sep):
1819 if p.startswith(os.sep):
1809 return rest[1:]
1820 return rest[1:]
1810 return p
1821 return p
1811
1822
1812 relnames = map(trimpath, names or [os.getcwd()])
1823 relnames = map(trimpath, names or [os.getcwd()])
1813 chosen = {}
1824 chosen = {}
1814
1825
1815 def choose(name):
1826 def choose(name):
1816 def body(name):
1827 def body(name):
1817 for r in relnames:
1828 for r in relnames:
1818 if not name.startswith(r):
1829 if not name.startswith(r):
1819 continue
1830 continue
1820 rest = name[len(r):]
1831 rest = name[len(r):]
1821 if not rest:
1832 if not rest:
1822 return r, True
1833 return r, True
1823 depth = rest.count(os.sep)
1834 depth = rest.count(os.sep)
1824 if not r:
1835 if not r:
1825 if depth == 0 or not opts['nonrecursive']:
1836 if depth == 0 or not opts['nonrecursive']:
1826 return r, True
1837 return r, True
1827 elif rest[0] == os.sep:
1838 elif rest[0] == os.sep:
1828 if depth == 1 or not opts['nonrecursive']:
1839 if depth == 1 or not opts['nonrecursive']:
1829 return r, True
1840 return r, True
1830 return None, False
1841 return None, False
1831 relname, ret = body(name)
1842 relname, ret = body(name)
1832 if ret:
1843 if ret:
1833 chosen[relname] = 1
1844 chosen[relname] = 1
1834 return ret
1845 return ret
1835
1846
1836 (c, a, d, u) = repo.changes()
1847 (c, a, d, u) = repo.changes()
1837 repo.forget(filter(choose, a))
1848 repo.forget(filter(choose, a))
1838 repo.undelete(filter(choose, d))
1849 repo.undelete(filter(choose, d))
1839
1850
1840 r = repo.update(node, False, True, choose, False)
1851 r = repo.update(node, False, True, choose, False)
1841 for n in relnames:
1852 for n in relnames:
1842 if n not in chosen:
1853 if n not in chosen:
1843 ui.warn(_('error: no matches for %s\n') % n)
1854 ui.warn(_('error: no matches for %s\n') % n)
1844 r = 1
1855 r = 1
1845 sys.stdout.flush()
1856 sys.stdout.flush()
1846 return r
1857 return r
1847
1858
1848 def root(ui, repo):
1859 def root(ui, repo):
1849 """print the root (top) of the current working dir
1860 """print the root (top) of the current working dir
1850
1861
1851 Print the root directory of the current repository.
1862 Print the root directory of the current repository.
1852 """
1863 """
1853 ui.write(repo.root + "\n")
1864 ui.write(repo.root + "\n")
1854
1865
1855 def serve(ui, repo, **opts):
1866 def serve(ui, repo, **opts):
1856 """export the repository via HTTP
1867 """export the repository via HTTP
1857
1868
1858 Start a local HTTP repository browser and pull server.
1869 Start a local HTTP repository browser and pull server.
1859
1870
1860 By default, the server logs accesses to stdout and errors to
1871 By default, the server logs accesses to stdout and errors to
1861 stderr. Use the "-A" and "-E" options to log to files.
1872 stderr. Use the "-A" and "-E" options to log to files.
1862 """
1873 """
1863
1874
1864 if opts["stdio"]:
1875 if opts["stdio"]:
1865 fin, fout = sys.stdin, sys.stdout
1876 fin, fout = sys.stdin, sys.stdout
1866 sys.stdout = sys.stderr
1877 sys.stdout = sys.stderr
1867
1878
1868 # Prevent insertion/deletion of CRs
1879 # Prevent insertion/deletion of CRs
1869 util.set_binary(fin)
1880 util.set_binary(fin)
1870 util.set_binary(fout)
1881 util.set_binary(fout)
1871
1882
1872 def getarg():
1883 def getarg():
1873 argline = fin.readline()[:-1]
1884 argline = fin.readline()[:-1]
1874 arg, l = argline.split()
1885 arg, l = argline.split()
1875 val = fin.read(int(l))
1886 val = fin.read(int(l))
1876 return arg, val
1887 return arg, val
1877 def respond(v):
1888 def respond(v):
1878 fout.write("%d\n" % len(v))
1889 fout.write("%d\n" % len(v))
1879 fout.write(v)
1890 fout.write(v)
1880 fout.flush()
1891 fout.flush()
1881
1892
1882 lock = None
1893 lock = None
1883
1894
1884 while 1:
1895 while 1:
1885 cmd = fin.readline()[:-1]
1896 cmd = fin.readline()[:-1]
1886 if cmd == '':
1897 if cmd == '':
1887 return
1898 return
1888 if cmd == "heads":
1899 if cmd == "heads":
1889 h = repo.heads()
1900 h = repo.heads()
1890 respond(" ".join(map(hex, h)) + "\n")
1901 respond(" ".join(map(hex, h)) + "\n")
1891 if cmd == "lock":
1902 if cmd == "lock":
1892 lock = repo.lock()
1903 lock = repo.lock()
1893 respond("")
1904 respond("")
1894 if cmd == "unlock":
1905 if cmd == "unlock":
1895 if lock:
1906 if lock:
1896 lock.release()
1907 lock.release()
1897 lock = None
1908 lock = None
1898 respond("")
1909 respond("")
1899 elif cmd == "branches":
1910 elif cmd == "branches":
1900 arg, nodes = getarg()
1911 arg, nodes = getarg()
1901 nodes = map(bin, nodes.split(" "))
1912 nodes = map(bin, nodes.split(" "))
1902 r = []
1913 r = []
1903 for b in repo.branches(nodes):
1914 for b in repo.branches(nodes):
1904 r.append(" ".join(map(hex, b)) + "\n")
1915 r.append(" ".join(map(hex, b)) + "\n")
1905 respond("".join(r))
1916 respond("".join(r))
1906 elif cmd == "between":
1917 elif cmd == "between":
1907 arg, pairs = getarg()
1918 arg, pairs = getarg()
1908 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1919 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1909 r = []
1920 r = []
1910 for b in repo.between(pairs):
1921 for b in repo.between(pairs):
1911 r.append(" ".join(map(hex, b)) + "\n")
1922 r.append(" ".join(map(hex, b)) + "\n")
1912 respond("".join(r))
1923 respond("".join(r))
1913 elif cmd == "changegroup":
1924 elif cmd == "changegroup":
1914 nodes = []
1925 nodes = []
1915 arg, roots = getarg()
1926 arg, roots = getarg()
1916 nodes = map(bin, roots.split(" "))
1927 nodes = map(bin, roots.split(" "))
1917
1928
1918 cg = repo.changegroup(nodes)
1929 cg = repo.changegroup(nodes)
1919 while 1:
1930 while 1:
1920 d = cg.read(4096)
1931 d = cg.read(4096)
1921 if not d:
1932 if not d:
1922 break
1933 break
1923 fout.write(d)
1934 fout.write(d)
1924
1935
1925 fout.flush()
1936 fout.flush()
1926
1937
1927 elif cmd == "addchangegroup":
1938 elif cmd == "addchangegroup":
1928 if not lock:
1939 if not lock:
1929 respond("not locked")
1940 respond("not locked")
1930 continue
1941 continue
1931 respond("")
1942 respond("")
1932
1943
1933 r = repo.addchangegroup(fin)
1944 r = repo.addchangegroup(fin)
1934 respond("")
1945 respond("")
1935
1946
1936 optlist = "name templates style address port ipv6 accesslog errorlog"
1947 optlist = "name templates style address port ipv6 accesslog errorlog"
1937 for o in optlist.split():
1948 for o in optlist.split():
1938 if opts[o]:
1949 if opts[o]:
1939 ui.setconfig("web", o, opts[o])
1950 ui.setconfig("web", o, opts[o])
1940
1951
1941 try:
1952 try:
1942 httpd = hgweb.create_server(repo)
1953 httpd = hgweb.create_server(repo)
1943 except socket.error, inst:
1954 except socket.error, inst:
1944 raise util.Abort('cannot start server: ' + inst.args[1])
1955 raise util.Abort('cannot start server: ' + inst.args[1])
1945
1956
1946 if ui.verbose:
1957 if ui.verbose:
1947 addr, port = httpd.socket.getsockname()
1958 addr, port = httpd.socket.getsockname()
1948 if addr == '0.0.0.0':
1959 if addr == '0.0.0.0':
1949 addr = socket.gethostname()
1960 addr = socket.gethostname()
1950 else:
1961 else:
1951 try:
1962 try:
1952 addr = socket.gethostbyaddr(addr)[0]
1963 addr = socket.gethostbyaddr(addr)[0]
1953 except socket.error:
1964 except socket.error:
1954 pass
1965 pass
1955 if port != 80:
1966 if port != 80:
1956 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1967 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1957 else:
1968 else:
1958 ui.status(_('listening at http://%s/\n') % addr)
1969 ui.status(_('listening at http://%s/\n') % addr)
1959 httpd.serve_forever()
1970 httpd.serve_forever()
1960
1971
1961 def status(ui, repo, *pats, **opts):
1972 def status(ui, repo, *pats, **opts):
1962 """show changed files in the working directory
1973 """show changed files in the working directory
1963
1974
1964 Show changed files in the working directory. If no names are
1975 Show changed files in the working directory. If no names are
1965 given, all files are shown. Otherwise, only files matching the
1976 given, all files are shown. Otherwise, only files matching the
1966 given names are shown.
1977 given names are shown.
1967
1978
1968 The codes used to show the status of files are:
1979 The codes used to show the status of files are:
1969 M = modified
1980 M = modified
1970 A = added
1981 A = added
1971 R = removed
1982 R = removed
1972 ? = not tracked
1983 ? = not tracked
1973 """
1984 """
1974
1985
1975 cwd = repo.getcwd()
1986 cwd = repo.getcwd()
1976 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1987 files, matchfn, anypats = matchpats(repo, cwd, pats, opts)
1977 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1988 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
1978 for n in repo.changes(files=files, match=matchfn)]
1989 for n in repo.changes(files=files, match=matchfn)]
1979
1990
1980 changetypes = [(_('modified'), 'M', c),
1991 changetypes = [(_('modified'), 'M', c),
1981 (_('added'), 'A', a),
1992 (_('added'), 'A', a),
1982 (_('removed'), 'R', d),
1993 (_('removed'), 'R', d),
1983 (_('unknown'), '?', u)]
1994 (_('unknown'), '?', u)]
1984
1995
1985 end = opts['print0'] and '\0' or '\n'
1996 end = opts['print0'] and '\0' or '\n'
1986
1997
1987 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1998 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
1988 or changetypes):
1999 or changetypes):
1989 if opts['no_status']:
2000 if opts['no_status']:
1990 format = "%%s%s" % end
2001 format = "%%s%s" % end
1991 else:
2002 else:
1992 format = "%s %%s%s" % (char, end);
2003 format = "%s %%s%s" % (char, end);
1993
2004
1994 for f in changes:
2005 for f in changes:
1995 ui.write(format % f)
2006 ui.write(format % f)
1996
2007
1997 def tag(ui, repo, name, rev=None, **opts):
2008 def tag(ui, repo, name, rev=None, **opts):
1998 """add a tag for the current tip or a given revision
2009 """add a tag for the current tip or a given revision
1999
2010
2000 Name a particular revision using <name>.
2011 Name a particular revision using <name>.
2001
2012
2002 Tags are used to name particular revisions of the repository and are
2013 Tags are used to name particular revisions of the repository and are
2003 very useful to compare different revision, to go back to significant
2014 very useful to compare different revision, to go back to significant
2004 earlier versions or to mark branch points as releases, etc.
2015 earlier versions or to mark branch points as releases, etc.
2005
2016
2006 If no revision is given, the tip is used.
2017 If no revision is given, the tip is used.
2007
2018
2008 To facilitate version control, distribution, and merging of tags,
2019 To facilitate version control, distribution, and merging of tags,
2009 they are stored as a file named ".hgtags" which is managed
2020 they are stored as a file named ".hgtags" which is managed
2010 similarly to other project files and can be hand-edited if
2021 similarly to other project files and can be hand-edited if
2011 necessary.
2022 necessary.
2012 """
2023 """
2013 if opts['text']:
2024 if opts['text']:
2014 ui.warn(_("Warning: -t and --text is deprecated,"
2025 ui.warn(_("Warning: -t and --text is deprecated,"
2015 " please use -m or --message instead.\n"))
2026 " please use -m or --message instead.\n"))
2016 if name == "tip":
2027 if name == "tip":
2017 raise util.Abort(_("the name 'tip' is reserved"))
2028 raise util.Abort(_("the name 'tip' is reserved"))
2018 if rev:
2029 if rev:
2019 r = hex(repo.lookup(rev))
2030 r = hex(repo.lookup(rev))
2020 else:
2031 else:
2021 r = hex(repo.changelog.tip())
2032 r = hex(repo.changelog.tip())
2022
2033
2023 if name.find(revrangesep) >= 0:
2034 if name.find(revrangesep) >= 0:
2024 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
2035 raise util.Abort(_("'%s' cannot be used in a tag name") % revrangesep)
2025
2036
2026 if opts['local']:
2037 if opts['local']:
2027 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2038 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2028 return
2039 return
2029
2040
2030 (c, a, d, u) = repo.changes()
2041 (c, a, d, u) = repo.changes()
2031 for x in (c, a, d, u):
2042 for x in (c, a, d, u):
2032 if ".hgtags" in x:
2043 if ".hgtags" in x:
2033 raise util.Abort(_("working copy of .hgtags is changed "
2044 raise util.Abort(_("working copy of .hgtags is changed "
2034 "(please commit .hgtags manually)"))
2045 "(please commit .hgtags manually)"))
2035
2046
2036 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2047 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2037 if repo.dirstate.state(".hgtags") == '?':
2048 if repo.dirstate.state(".hgtags") == '?':
2038 repo.add([".hgtags"])
2049 repo.add([".hgtags"])
2039
2050
2040 message = (opts['message'] or opts['text'] or
2051 message = (opts['message'] or opts['text'] or
2041 _("Added tag %s for changeset %s") % (name, r))
2052 _("Added tag %s for changeset %s") % (name, r))
2042 try:
2053 try:
2043 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2054 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2044 except ValueError, inst:
2055 except ValueError, inst:
2045 raise util.Abort(str(inst))
2056 raise util.Abort(str(inst))
2046
2057
2047 def tags(ui, repo):
2058 def tags(ui, repo):
2048 """list repository tags
2059 """list repository tags
2049
2060
2050 List the repository tags.
2061 List the repository tags.
2051
2062
2052 This lists both regular and local tags.
2063 This lists both regular and local tags.
2053 """
2064 """
2054
2065
2055 l = repo.tagslist()
2066 l = repo.tagslist()
2056 l.reverse()
2067 l.reverse()
2057 for t, n in l:
2068 for t, n in l:
2058 try:
2069 try:
2059 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2070 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2060 except KeyError:
2071 except KeyError:
2061 r = " ?:?"
2072 r = " ?:?"
2062 ui.write("%-30s %s\n" % (t, r))
2073 ui.write("%-30s %s\n" % (t, r))
2063
2074
2064 def tip(ui, repo):
2075 def tip(ui, repo):
2065 """show the tip revision
2076 """show the tip revision
2066
2077
2067 Show the tip revision.
2078 Show the tip revision.
2068 """
2079 """
2069 n = repo.changelog.tip()
2080 n = repo.changelog.tip()
2070 show_changeset(ui, repo, changenode=n)
2081 show_changeset(ui, repo, changenode=n)
2071
2082
2072 def unbundle(ui, repo, fname):
2083 def unbundle(ui, repo, fname):
2073 """apply a changegroup file
2084 """apply a changegroup file
2074
2085
2075 Apply a compressed changegroup file generated by the bundle
2086 Apply a compressed changegroup file generated by the bundle
2076 command.
2087 command.
2077 """
2088 """
2078 f = urllib.urlopen(fname)
2089 f = urllib.urlopen(fname)
2079
2090
2080 if f.read(4) != "HG10":
2091 if f.read(4) != "HG10":
2081 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2092 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2082
2093
2083 def bzgenerator(f):
2094 def bzgenerator(f):
2084 zd = bz2.BZ2Decompressor()
2095 zd = bz2.BZ2Decompressor()
2085 for chunk in f:
2096 for chunk in f:
2086 yield zd.decompress(chunk)
2097 yield zd.decompress(chunk)
2087
2098
2088 bzgen = bzgenerator(util.filechunkiter(f, 4096))
2099 bzgen = bzgenerator(util.filechunkiter(f, 4096))
2089 repo.addchangegroup(util.chunkbuffer(bzgen))
2100 repo.addchangegroup(util.chunkbuffer(bzgen))
2090
2101
2091 def undo(ui, repo):
2102 def undo(ui, repo):
2092 """undo the last commit or pull
2103 """undo the last commit or pull
2093
2104
2094 Roll back the last pull or commit transaction on the
2105 Roll back the last pull or commit transaction on the
2095 repository, restoring the project to its earlier state.
2106 repository, restoring the project to its earlier state.
2096
2107
2097 This command should be used with care. There is only one level of
2108 This command should be used with care. There is only one level of
2098 undo and there is no redo.
2109 undo and there is no redo.
2099
2110
2100 This command is not intended for use on public repositories. Once
2111 This command is not intended for use on public repositories. Once
2101 a change is visible for pull by other users, undoing it locally is
2112 a change is visible for pull by other users, undoing it locally is
2102 ineffective.
2113 ineffective.
2103 """
2114 """
2104 repo.undo()
2115 repo.undo()
2105
2116
2106 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
2117 def update(ui, repo, node=None, merge=False, clean=False, branch=None):
2107 """update or merge working directory
2118 """update or merge working directory
2108
2119
2109 Update the working directory to the specified revision.
2120 Update the working directory to the specified revision.
2110
2121
2111 If there are no outstanding changes in the working directory and
2122 If there are no outstanding changes in the working directory and
2112 there is a linear relationship between the current version and the
2123 there is a linear relationship between the current version and the
2113 requested version, the result is the requested version.
2124 requested version, the result is the requested version.
2114
2125
2115 Otherwise the result is a merge between the contents of the
2126 Otherwise the result is a merge between the contents of the
2116 current working directory and the requested version. Files that
2127 current working directory and the requested version. Files that
2117 changed between either parent are marked as changed for the next
2128 changed between either parent are marked as changed for the next
2118 commit and a commit must be performed before any further updates
2129 commit and a commit must be performed before any further updates
2119 are allowed.
2130 are allowed.
2120
2131
2121 By default, update will refuse to run if doing so would require
2132 By default, update will refuse to run if doing so would require
2122 merging or discarding local changes.
2133 merging or discarding local changes.
2123 """
2134 """
2124 if branch:
2135 if branch:
2125 br = repo.branchlookup(branch=branch)
2136 br = repo.branchlookup(branch=branch)
2126 found = []
2137 found = []
2127 for x in br:
2138 for x in br:
2128 if branch in br[x]:
2139 if branch in br[x]:
2129 found.append(x)
2140 found.append(x)
2130 if len(found) > 1:
2141 if len(found) > 1:
2131 ui.warn(_("Found multiple heads for %s\n") % branch)
2142 ui.warn(_("Found multiple heads for %s\n") % branch)
2132 for x in found:
2143 for x in found:
2133 show_changeset(ui, repo, changenode=x, brinfo=br)
2144 show_changeset(ui, repo, changenode=x, brinfo=br)
2134 return 1
2145 return 1
2135 if len(found) == 1:
2146 if len(found) == 1:
2136 node = found[0]
2147 node = found[0]
2137 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2148 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2138 else:
2149 else:
2139 ui.warn(_("branch %s not found\n") % (branch))
2150 ui.warn(_("branch %s not found\n") % (branch))
2140 return 1
2151 return 1
2141 else:
2152 else:
2142 node = node and repo.lookup(node) or repo.changelog.tip()
2153 node = node and repo.lookup(node) or repo.changelog.tip()
2143 return repo.update(node, allow=merge, force=clean)
2154 return repo.update(node, allow=merge, force=clean)
2144
2155
2145 def verify(ui, repo):
2156 def verify(ui, repo):
2146 """verify the integrity of the repository
2157 """verify the integrity of the repository
2147
2158
2148 Verify the integrity of the current repository.
2159 Verify the integrity of the current repository.
2149
2160
2150 This will perform an extensive check of the repository's
2161 This will perform an extensive check of the repository's
2151 integrity, validating the hashes and checksums of each entry in
2162 integrity, validating the hashes and checksums of each entry in
2152 the changelog, manifest, and tracked files, as well as the
2163 the changelog, manifest, and tracked files, as well as the
2153 integrity of their crosslinks and indices.
2164 integrity of their crosslinks and indices.
2154 """
2165 """
2155 return repo.verify()
2166 return repo.verify()
2156
2167
2157 # Command options and aliases are listed here, alphabetically
2168 # Command options and aliases are listed here, alphabetically
2158
2169
2159 table = {
2170 table = {
2160 "^add":
2171 "^add":
2161 (add,
2172 (add,
2162 [('I', 'include', [], _('include names matching the given patterns')),
2173 [('I', 'include', [], _('include names matching the given patterns')),
2163 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2174 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2164 "hg add [OPTION]... [FILE]..."),
2175 "hg add [OPTION]... [FILE]..."),
2165 "addremove":
2176 "addremove":
2166 (addremove,
2177 (addremove,
2167 [('I', 'include', [], _('include names matching the given patterns')),
2178 [('I', 'include', [], _('include names matching the given patterns')),
2168 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2179 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2169 "hg addremove [OPTION]... [FILE]..."),
2180 "hg addremove [OPTION]... [FILE]..."),
2170 "^annotate":
2181 "^annotate":
2171 (annotate,
2182 (annotate,
2172 [('r', 'rev', '', _('annotate the specified revision')),
2183 [('r', 'rev', '', _('annotate the specified revision')),
2173 ('a', 'text', None, _('treat all files as text')),
2184 ('a', 'text', None, _('treat all files as text')),
2174 ('u', 'user', None, _('list the author')),
2185 ('u', 'user', None, _('list the author')),
2175 ('n', 'number', None, _('list the revision number (default)')),
2186 ('n', 'number', None, _('list the revision number (default)')),
2176 ('c', 'changeset', None, _('list the changeset')),
2187 ('c', 'changeset', None, _('list the changeset')),
2177 ('I', 'include', [], _('include names matching the given patterns')),
2188 ('I', 'include', [], _('include names matching the given patterns')),
2178 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2189 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2179 _('hg annotate [OPTION]... FILE...')),
2190 _('hg annotate [OPTION]... FILE...')),
2180 "bundle":
2191 "bundle":
2181 (bundle,
2192 (bundle,
2182 [],
2193 [],
2183 _('hg bundle FILE DEST')),
2194 _('hg bundle FILE DEST')),
2184 "cat":
2195 "cat":
2185 (cat,
2196 (cat,
2186 [('I', 'include', [], _('include names matching the given patterns')),
2197 [('I', 'include', [], _('include names matching the given patterns')),
2187 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2198 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2188 ('o', 'output', "", _('print output to file with formatted name')),
2199 ('o', 'output', "", _('print output to file with formatted name')),
2189 ('r', 'rev', '', _('print the given revision'))],
2200 ('r', 'rev', '', _('print the given revision'))],
2190 _('hg cat [OPTION]... FILE...')),
2201 _('hg cat [OPTION]... FILE...')),
2191 "^clone":
2202 "^clone":
2192 (clone,
2203 (clone,
2193 [('U', 'noupdate', None, _('do not update the new working directory')),
2204 [('U', 'noupdate', None, _('do not update the new working directory')),
2194 ('e', 'ssh', "", _('specify ssh command to use')),
2205 ('e', 'ssh', "", _('specify ssh command to use')),
2195 ('', 'pull', None, _('use pull protocol to copy metadata')),
2206 ('', 'pull', None, _('use pull protocol to copy metadata')),
2207 ('r', 'rev', [], _('a changeset you would like to have after cloning')),
2196 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2208 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2197 _('hg clone [OPTION]... SOURCE [DEST]')),
2209 _('hg clone [OPTION]... SOURCE [DEST]')),
2198 "^commit|ci":
2210 "^commit|ci":
2199 (commit,
2211 (commit,
2200 [('A', 'addremove', None, _('run addremove during commit')),
2212 [('A', 'addremove', None, _('run addremove during commit')),
2201 ('I', 'include', [], _('include names matching the given patterns')),
2213 ('I', 'include', [], _('include names matching the given patterns')),
2202 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2214 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2203 ('m', 'message', "", _('use <text> as commit message')),
2215 ('m', 'message', "", _('use <text> as commit message')),
2204 ('l', 'logfile', "", _('read the commit message from <file>')),
2216 ('l', 'logfile', "", _('read the commit message from <file>')),
2205 ('d', 'date', "", _('record datecode as commit date')),
2217 ('d', 'date', "", _('record datecode as commit date')),
2206 ('u', 'user', "", _('record user as commiter'))],
2218 ('u', 'user', "", _('record user as commiter'))],
2207 _('hg commit [OPTION]... [FILE]...')),
2219 _('hg commit [OPTION]... [FILE]...')),
2208 "copy|cp": (copy,
2220 "copy|cp": (copy,
2209 [('I', 'include', [], _('include names matching the given patterns')),
2221 [('I', 'include', [], _('include names matching the given patterns')),
2210 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2222 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2211 ('A', 'after', None, _('record a copy that has already occurred')),
2223 ('A', 'after', None, _('record a copy that has already occurred')),
2212 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2224 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2213 _('hg copy [OPTION]... [SOURCE]... DEST')),
2225 _('hg copy [OPTION]... [SOURCE]... DEST')),
2214 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2226 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2215 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2227 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2216 "debugconfig": (debugconfig, [], _('debugconfig')),
2228 "debugconfig": (debugconfig, [], _('debugconfig')),
2217 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2229 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2218 "debugstate": (debugstate, [], _('debugstate')),
2230 "debugstate": (debugstate, [], _('debugstate')),
2219 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2231 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2220 "debugindex": (debugindex, [], _('debugindex FILE')),
2232 "debugindex": (debugindex, [], _('debugindex FILE')),
2221 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2233 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2222 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2234 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2223 "debugwalk":
2235 "debugwalk":
2224 (debugwalk,
2236 (debugwalk,
2225 [('I', 'include', [], _('include names matching the given patterns')),
2237 [('I', 'include', [], _('include names matching the given patterns')),
2226 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2238 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2227 _('debugwalk [OPTION]... [FILE]...')),
2239 _('debugwalk [OPTION]... [FILE]...')),
2228 "^diff":
2240 "^diff":
2229 (diff,
2241 (diff,
2230 [('r', 'rev', [], _('revision')),
2242 [('r', 'rev', [], _('revision')),
2231 ('a', 'text', None, _('treat all files as text')),
2243 ('a', 'text', None, _('treat all files as text')),
2232 ('I', 'include', [], _('include names matching the given patterns')),
2244 ('I', 'include', [], _('include names matching the given patterns')),
2233 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2245 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2234 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2246 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2235 "^export":
2247 "^export":
2236 (export,
2248 (export,
2237 [('o', 'output', "", _('print output to file with formatted name')),
2249 [('o', 'output', "", _('print output to file with formatted name')),
2238 ('a', 'text', None, _('treat all files as text'))],
2250 ('a', 'text', None, _('treat all files as text'))],
2239 "hg export [-a] [-o OUTFILE] REV..."),
2251 "hg export [-a] [-o OUTFILE] REV..."),
2240 "forget":
2252 "forget":
2241 (forget,
2253 (forget,
2242 [('I', 'include', [], _('include names matching the given patterns')),
2254 [('I', 'include', [], _('include names matching the given patterns')),
2243 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2255 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2244 "hg forget [OPTION]... FILE..."),
2256 "hg forget [OPTION]... FILE..."),
2245 "grep":
2257 "grep":
2246 (grep,
2258 (grep,
2247 [('0', 'print0', None, _('end fields with NUL')),
2259 [('0', 'print0', None, _('end fields with NUL')),
2248 ('I', 'include', [], _('include names matching the given patterns')),
2260 ('I', 'include', [], _('include names matching the given patterns')),
2249 ('X', 'exclude', [], _('include names matching the given patterns')),
2261 ('X', 'exclude', [], _('include names matching the given patterns')),
2250 ('', 'all', None, _('print all revisions that match')),
2262 ('', 'all', None, _('print all revisions that match')),
2251 ('i', 'ignore-case', None, _('ignore case when matching')),
2263 ('i', 'ignore-case', None, _('ignore case when matching')),
2252 ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
2264 ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
2253 ('n', 'line-number', None, _('print matching line numbers')),
2265 ('n', 'line-number', None, _('print matching line numbers')),
2254 ('r', 'rev', [], _('search in given revision range')),
2266 ('r', 'rev', [], _('search in given revision range')),
2255 ('u', 'user', None, _('print user who committed change'))],
2267 ('u', 'user', None, _('print user who committed change'))],
2256 "hg grep [OPTION]... PATTERN [FILE]..."),
2268 "hg grep [OPTION]... PATTERN [FILE]..."),
2257 "heads":
2269 "heads":
2258 (heads,
2270 (heads,
2259 [('b', 'branches', None, _('find branch info'))],
2271 [('b', 'branches', None, _('find branch info'))],
2260 _('hg heads [-b]')),
2272 _('hg heads [-b]')),
2261 "help": (help_, [], _('hg help [COMMAND]')),
2273 "help": (help_, [], _('hg help [COMMAND]')),
2262 "identify|id": (identify, [], _('hg identify')),
2274 "identify|id": (identify, [], _('hg identify')),
2263 "import|patch":
2275 "import|patch":
2264 (import_,
2276 (import_,
2265 [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
2277 [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
2266 _('meaning as the corresponding patch option')),
2278 _('meaning as the corresponding patch option')),
2267 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
2279 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
2268 ('b', 'base', "", _('base path'))],
2280 ('b', 'base', "", _('base path'))],
2269 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
2281 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
2270 "incoming|in": (incoming,
2282 "incoming|in": (incoming,
2271 [('M', 'no-merges', None, _("do not show merges")),
2283 [('M', 'no-merges', None, _("do not show merges")),
2272 ('p', 'patch', None, _('show patch')),
2284 ('p', 'patch', None, _('show patch')),
2273 ('n', 'newest-first', None, _('show newest record first'))],
2285 ('n', 'newest-first', None, _('show newest record first'))],
2274 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2286 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2275 "^init": (init, [], _('hg init [DEST]')),
2287 "^init": (init, [], _('hg init [DEST]')),
2276 "locate":
2288 "locate":
2277 (locate,
2289 (locate,
2278 [('r', 'rev', '', _('search the repository as it stood at rev')),
2290 [('r', 'rev', '', _('search the repository as it stood at rev')),
2279 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2291 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2280 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
2292 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
2281 ('I', 'include', [], _('include names matching the given patterns')),
2293 ('I', 'include', [], _('include names matching the given patterns')),
2282 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2294 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2283 _('hg locate [OPTION]... [PATTERN]...')),
2295 _('hg locate [OPTION]... [PATTERN]...')),
2284 "^log|history":
2296 "^log|history":
2285 (log,
2297 (log,
2286 [('I', 'include', [], _('include names matching the given patterns')),
2298 [('I', 'include', [], _('include names matching the given patterns')),
2287 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2299 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2288 ('b', 'branch', None, _('show branches')),
2300 ('b', 'branch', None, _('show branches')),
2289 ('k', 'keyword', [], _('search for a keyword')),
2301 ('k', 'keyword', [], _('search for a keyword')),
2290 ('r', 'rev', [], _('show the specified revision or range')),
2302 ('r', 'rev', [], _('show the specified revision or range')),
2291 ('M', 'no-merges', None, _("do not show merges")),
2303 ('M', 'no-merges', None, _("do not show merges")),
2292 ('m', 'only-merges', None, _("show only merges")),
2304 ('m', 'only-merges', None, _("show only merges")),
2293 ('p', 'patch', None, _('show patch'))],
2305 ('p', 'patch', None, _('show patch'))],
2294 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2306 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2295 "manifest": (manifest, [], _('hg manifest [REV]')),
2307 "manifest": (manifest, [], _('hg manifest [REV]')),
2296 "outgoing|out": (outgoing,
2308 "outgoing|out": (outgoing,
2297 [('M', 'no-merges', None, _("do not show merges")),
2309 [('M', 'no-merges', None, _("do not show merges")),
2298 ('p', 'patch', None, _('show patch')),
2310 ('p', 'patch', None, _('show patch')),
2299 ('n', 'newest-first', None, _('show newest record first'))],
2311 ('n', 'newest-first', None, _('show newest record first'))],
2300 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2312 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2301 "parents": (parents, [], _('hg parents [REV]')),
2313 "parents": (parents, [], _('hg parents [REV]')),
2302 "paths": (paths, [], _('hg paths [NAME]')),
2314 "paths": (paths, [], _('hg paths [NAME]')),
2303 "^pull":
2315 "^pull":
2304 (pull,
2316 (pull,
2305 [('u', 'update', None, _('update the working directory to tip after pull')),
2317 [('u', 'update', None, _('update the working directory to tip after pull')),
2306 ('e', 'ssh', "", _('specify ssh command to use')),
2318 ('e', 'ssh', "", _('specify ssh command to use')),
2319 ('r', 'rev', [], _('a specific revision you would like to pull')),
2307 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2320 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2308 _('hg pull [-u] [-e FILE] [--remotecmd FILE] [SOURCE]')),
2321 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2309 "^push":
2322 "^push":
2310 (push,
2323 (push,
2311 [('f', 'force', None, _('force push')),
2324 [('f', 'force', None, _('force push')),
2312 ('e', 'ssh', "", _('specify ssh command to use')),
2325 ('e', 'ssh', "", _('specify ssh command to use')),
2313 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2326 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2314 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2327 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2315 "rawcommit":
2328 "rawcommit":
2316 (rawcommit,
2329 (rawcommit,
2317 [('p', 'parent', [], _('parent')),
2330 [('p', 'parent', [], _('parent')),
2318 ('d', 'date', "", _('date code')),
2331 ('d', 'date', "", _('date code')),
2319 ('u', 'user', "", _('user')),
2332 ('u', 'user', "", _('user')),
2320 ('F', 'files', "", _('file list')),
2333 ('F', 'files', "", _('file list')),
2321 ('m', 'message', "", _('commit message')),
2334 ('m', 'message', "", _('commit message')),
2322 ('t', 'text', "", _('commit message (deprecated: use -m)')),
2335 ('t', 'text', "", _('commit message (deprecated: use -m)')),
2323 ('l', 'logfile', "", _('commit message file'))],
2336 ('l', 'logfile', "", _('commit message file'))],
2324 _('hg rawcommit [OPTION]... [FILE]...')),
2337 _('hg rawcommit [OPTION]... [FILE]...')),
2325 "recover": (recover, [], _("hg recover")),
2338 "recover": (recover, [], _("hg recover")),
2326 "^remove|rm": (remove,
2339 "^remove|rm": (remove,
2327 [('I', 'include', [], _('include names matching the given patterns')),
2340 [('I', 'include', [], _('include names matching the given patterns')),
2328 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2341 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2329 _("hg remove [OPTION]... FILE...")),
2342 _("hg remove [OPTION]... FILE...")),
2330 "rename|mv": (rename,
2343 "rename|mv": (rename,
2331 [('I', 'include', [], _('include names matching the given patterns')),
2344 [('I', 'include', [], _('include names matching the given patterns')),
2332 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2345 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2333 ('A', 'after', None, _('record a rename that has already occurred')),
2346 ('A', 'after', None, _('record a rename that has already occurred')),
2334 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2347 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2335 _('hg rename [OPTION]... [SOURCE]... DEST')),
2348 _('hg rename [OPTION]... [SOURCE]... DEST')),
2336 "^revert":
2349 "^revert":
2337 (revert,
2350 (revert,
2338 [("n", "nonrecursive", None, _("do not recurse into subdirectories")),
2351 [("n", "nonrecursive", None, _("do not recurse into subdirectories")),
2339 ("r", "rev", "", _("revision to revert to"))],
2352 ("r", "rev", "", _("revision to revert to"))],
2340 _("hg revert [-n] [-r REV] [NAME]...")),
2353 _("hg revert [-n] [-r REV] [NAME]...")),
2341 "root": (root, [], _("hg root")),
2354 "root": (root, [], _("hg root")),
2342 "^serve":
2355 "^serve":
2343 (serve,
2356 (serve,
2344 [('A', 'accesslog', '', _('name of access log file to write to')),
2357 [('A', 'accesslog', '', _('name of access log file to write to')),
2345 ('E', 'errorlog', '', _('name of error log file to write to')),
2358 ('E', 'errorlog', '', _('name of error log file to write to')),
2346 ('p', 'port', 0, _('port to use (default: 8000)')),
2359 ('p', 'port', 0, _('port to use (default: 8000)')),
2347 ('a', 'address', '', _('address to use')),
2360 ('a', 'address', '', _('address to use')),
2348 ('n', 'name', "", _('name to show in web pages (default: working dir)')),
2361 ('n', 'name', "", _('name to show in web pages (default: working dir)')),
2349 ('', 'stdio', None, _('for remote clients')),
2362 ('', 'stdio', None, _('for remote clients')),
2350 ('t', 'templates', "", _('web templates to use')),
2363 ('t', 'templates', "", _('web templates to use')),
2351 ('', 'style', "", _('template style to use')),
2364 ('', 'style', "", _('template style to use')),
2352 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2365 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2353 _("hg serve [OPTION]...")),
2366 _("hg serve [OPTION]...")),
2354 "^status":
2367 "^status":
2355 (status,
2368 (status,
2356 [('m', 'modified', None, _('show only modified files')),
2369 [('m', 'modified', None, _('show only modified files')),
2357 ('a', 'added', None, _('show only added files')),
2370 ('a', 'added', None, _('show only added files')),
2358 ('r', 'removed', None, _('show only removed files')),
2371 ('r', 'removed', None, _('show only removed files')),
2359 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2372 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2360 ('n', 'no-status', None, _('hide status prefix')),
2373 ('n', 'no-status', None, _('hide status prefix')),
2361 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2374 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2362 ('I', 'include', [], _('include names matching the given patterns')),
2375 ('I', 'include', [], _('include names matching the given patterns')),
2363 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2376 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2364 _("hg status [OPTION]... [FILE]...")),
2377 _("hg status [OPTION]... [FILE]...")),
2365 "tag":
2378 "tag":
2366 (tag,
2379 (tag,
2367 [('l', 'local', None, _('make the tag local')),
2380 [('l', 'local', None, _('make the tag local')),
2368 ('m', 'message', "", _('message for tag commit log entry')),
2381 ('m', 'message', "", _('message for tag commit log entry')),
2369 ('t', 'text', "", _('commit message (deprecated: use -m)')),
2382 ('t', 'text', "", _('commit message (deprecated: use -m)')),
2370 ('d', 'date', "", _('record datecode as commit date')),
2383 ('d', 'date', "", _('record datecode as commit date')),
2371 ('u', 'user', "", _('record user as commiter'))],
2384 ('u', 'user', "", _('record user as commiter'))],
2372 _('hg tag [OPTION]... NAME [REV]')),
2385 _('hg tag [OPTION]... NAME [REV]')),
2373 "tags": (tags, [], _('hg tags')),
2386 "tags": (tags, [], _('hg tags')),
2374 "tip": (tip, [], _('hg tip')),
2387 "tip": (tip, [], _('hg tip')),
2375 "unbundle":
2388 "unbundle":
2376 (unbundle,
2389 (unbundle,
2377 [],
2390 [],
2378 _('hg unbundle FILE')),
2391 _('hg unbundle FILE')),
2379 "undo": (undo, [], _('hg undo')),
2392 "undo": (undo, [], _('hg undo')),
2380 "^update|up|checkout|co":
2393 "^update|up|checkout|co":
2381 (update,
2394 (update,
2382 [('b', 'branch', "", _('checkout the head of a specific branch')),
2395 [('b', 'branch', "", _('checkout the head of a specific branch')),
2383 ('m', 'merge', None, _('allow merging of branches')),
2396 ('m', 'merge', None, _('allow merging of branches')),
2384 ('C', 'clean', None, _('overwrite locally modified files'))],
2397 ('C', 'clean', None, _('overwrite locally modified files'))],
2385 _('hg update [-b TAG] [-m] [-C] [REV]')),
2398 _('hg update [-b TAG] [-m] [-C] [REV]')),
2386 "verify": (verify, [], _('hg verify')),
2399 "verify": (verify, [], _('hg verify')),
2387 "version": (show_version, [], _('hg version')),
2400 "version": (show_version, [], _('hg version')),
2388 }
2401 }
2389
2402
2390 globalopts = [
2403 globalopts = [
2391 ('R', 'repository', "", _("repository root directory")),
2404 ('R', 'repository', "", _("repository root directory")),
2392 ('', 'cwd', '', _("change working directory")),
2405 ('', 'cwd', '', _("change working directory")),
2393 ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
2406 ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
2394 ('q', 'quiet', None, _("suppress output")),
2407 ('q', 'quiet', None, _("suppress output")),
2395 ('v', 'verbose', None, _("enable additional output")),
2408 ('v', 'verbose', None, _("enable additional output")),
2396 ('', 'debug', None, _("enable debugging output")),
2409 ('', 'debug', None, _("enable debugging output")),
2397 ('', 'debugger', None, _("start debugger")),
2410 ('', 'debugger', None, _("start debugger")),
2398 ('', 'traceback', None, _("print traceback on exception")),
2411 ('', 'traceback', None, _("print traceback on exception")),
2399 ('', 'time', None, _("time how long the command takes")),
2412 ('', 'time', None, _("time how long the command takes")),
2400 ('', 'profile', None, _("print command execution profile")),
2413 ('', 'profile', None, _("print command execution profile")),
2401 ('', 'version', None, _("output version information and exit")),
2414 ('', 'version', None, _("output version information and exit")),
2402 ('h', 'help', None, _("display help and exit")),
2415 ('h', 'help', None, _("display help and exit")),
2403 ]
2416 ]
2404
2417
2405 norepo = ("clone init version help debugancestor debugconfig debugdata"
2418 norepo = ("clone init version help debugancestor debugconfig debugdata"
2406 " debugindex debugindexdot paths")
2419 " debugindex debugindexdot paths")
2407
2420
2408 def find(cmd):
2421 def find(cmd):
2409 for e in table.keys():
2422 for e in table.keys():
2410 if re.match("(%s)$" % e, cmd):
2423 if re.match("(%s)$" % e, cmd):
2411 return e, table[e]
2424 return e, table[e]
2412
2425
2413 raise UnknownCommand(cmd)
2426 raise UnknownCommand(cmd)
2414
2427
2415 class SignalInterrupt(Exception):
2428 class SignalInterrupt(Exception):
2416 """Exception raised on SIGTERM and SIGHUP."""
2429 """Exception raised on SIGTERM and SIGHUP."""
2417
2430
2418 def catchterm(*args):
2431 def catchterm(*args):
2419 raise SignalInterrupt
2432 raise SignalInterrupt
2420
2433
2421 def run():
2434 def run():
2422 sys.exit(dispatch(sys.argv[1:]))
2435 sys.exit(dispatch(sys.argv[1:]))
2423
2436
2424 class ParseError(Exception):
2437 class ParseError(Exception):
2425 """Exception raised on errors in parsing the command line."""
2438 """Exception raised on errors in parsing the command line."""
2426
2439
2427 def parse(ui, args):
2440 def parse(ui, args):
2428 options = {}
2441 options = {}
2429 cmdoptions = {}
2442 cmdoptions = {}
2430
2443
2431 try:
2444 try:
2432 args = fancyopts.fancyopts(args, globalopts, options)
2445 args = fancyopts.fancyopts(args, globalopts, options)
2433 except fancyopts.getopt.GetoptError, inst:
2446 except fancyopts.getopt.GetoptError, inst:
2434 raise ParseError(None, inst)
2447 raise ParseError(None, inst)
2435
2448
2436 if args:
2449 if args:
2437 cmd, args = args[0], args[1:]
2450 cmd, args = args[0], args[1:]
2438 defaults = ui.config("defaults", cmd)
2451 defaults = ui.config("defaults", cmd)
2439 if defaults:
2452 if defaults:
2440 # reparse with command defaults added
2453 # reparse with command defaults added
2441 args = [cmd] + defaults.split() + args
2454 args = [cmd] + defaults.split() + args
2442 try:
2455 try:
2443 args = fancyopts.fancyopts(args, globalopts, options)
2456 args = fancyopts.fancyopts(args, globalopts, options)
2444 except fancyopts.getopt.GetoptError, inst:
2457 except fancyopts.getopt.GetoptError, inst:
2445 raise ParseError(None, inst)
2458 raise ParseError(None, inst)
2446
2459
2447 cmd, args = args[0], args[1:]
2460 cmd, args = args[0], args[1:]
2448
2461
2449 i = find(cmd)[1]
2462 i = find(cmd)[1]
2450 c = list(i[1])
2463 c = list(i[1])
2451 else:
2464 else:
2452 cmd = None
2465 cmd = None
2453 c = []
2466 c = []
2454
2467
2455 # combine global options into local
2468 # combine global options into local
2456 for o in globalopts:
2469 for o in globalopts:
2457 c.append((o[0], o[1], options[o[1]], o[3]))
2470 c.append((o[0], o[1], options[o[1]], o[3]))
2458
2471
2459 try:
2472 try:
2460 args = fancyopts.fancyopts(args, c, cmdoptions)
2473 args = fancyopts.fancyopts(args, c, cmdoptions)
2461 except fancyopts.getopt.GetoptError, inst:
2474 except fancyopts.getopt.GetoptError, inst:
2462 raise ParseError(cmd, inst)
2475 raise ParseError(cmd, inst)
2463
2476
2464 # separate global options back out
2477 # separate global options back out
2465 for o in globalopts:
2478 for o in globalopts:
2466 n = o[1]
2479 n = o[1]
2467 options[n] = cmdoptions[n]
2480 options[n] = cmdoptions[n]
2468 del cmdoptions[n]
2481 del cmdoptions[n]
2469
2482
2470 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2483 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2471
2484
2472 def dispatch(args):
2485 def dispatch(args):
2473 signal.signal(signal.SIGTERM, catchterm)
2486 signal.signal(signal.SIGTERM, catchterm)
2474 try:
2487 try:
2475 signal.signal(signal.SIGHUP, catchterm)
2488 signal.signal(signal.SIGHUP, catchterm)
2476 except AttributeError:
2489 except AttributeError:
2477 pass
2490 pass
2478
2491
2479 u = ui.ui()
2492 u = ui.ui()
2480 external = []
2493 external = []
2481 for x in u.extensions():
2494 for x in u.extensions():
2482 def on_exception(Exception, inst):
2495 def on_exception(Exception, inst):
2483 u.warn(_("*** failed to import extension %s\n") % x[1])
2496 u.warn(_("*** failed to import extension %s\n") % x[1])
2484 u.warn("%s\n" % inst)
2497 u.warn("%s\n" % inst)
2485 if "--traceback" in sys.argv[1:]:
2498 if "--traceback" in sys.argv[1:]:
2486 traceback.print_exc()
2499 traceback.print_exc()
2487 if x[1]:
2500 if x[1]:
2488 try:
2501 try:
2489 mod = imp.load_source(x[0], x[1])
2502 mod = imp.load_source(x[0], x[1])
2490 except Exception, inst:
2503 except Exception, inst:
2491 on_exception(Exception, inst)
2504 on_exception(Exception, inst)
2492 continue
2505 continue
2493 else:
2506 else:
2494 def importh(name):
2507 def importh(name):
2495 mod = __import__(name)
2508 mod = __import__(name)
2496 components = name.split('.')
2509 components = name.split('.')
2497 for comp in components[1:]:
2510 for comp in components[1:]:
2498 mod = getattr(mod, comp)
2511 mod = getattr(mod, comp)
2499 return mod
2512 return mod
2500 try:
2513 try:
2501 mod = importh(x[0])
2514 mod = importh(x[0])
2502 except Exception, inst:
2515 except Exception, inst:
2503 on_exception(Exception, inst)
2516 on_exception(Exception, inst)
2504 continue
2517 continue
2505
2518
2506 external.append(mod)
2519 external.append(mod)
2507 for x in external:
2520 for x in external:
2508 cmdtable = getattr(x, 'cmdtable', {})
2521 cmdtable = getattr(x, 'cmdtable', {})
2509 for t in cmdtable:
2522 for t in cmdtable:
2510 if t in table:
2523 if t in table:
2511 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2524 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2512 table.update(cmdtable)
2525 table.update(cmdtable)
2513
2526
2514 try:
2527 try:
2515 cmd, func, args, options, cmdoptions = parse(u, args)
2528 cmd, func, args, options, cmdoptions = parse(u, args)
2516 except ParseError, inst:
2529 except ParseError, inst:
2517 if inst.args[0]:
2530 if inst.args[0]:
2518 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2531 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2519 help_(u, inst.args[0])
2532 help_(u, inst.args[0])
2520 else:
2533 else:
2521 u.warn(_("hg: %s\n") % inst.args[1])
2534 u.warn(_("hg: %s\n") % inst.args[1])
2522 help_(u, 'shortlist')
2535 help_(u, 'shortlist')
2523 sys.exit(-1)
2536 sys.exit(-1)
2524 except UnknownCommand, inst:
2537 except UnknownCommand, inst:
2525 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2538 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2526 help_(u, 'shortlist')
2539 help_(u, 'shortlist')
2527 sys.exit(1)
2540 sys.exit(1)
2528
2541
2529 if options["time"]:
2542 if options["time"]:
2530 def get_times():
2543 def get_times():
2531 t = os.times()
2544 t = os.times()
2532 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2545 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2533 t = (t[0], t[1], t[2], t[3], time.clock())
2546 t = (t[0], t[1], t[2], t[3], time.clock())
2534 return t
2547 return t
2535 s = get_times()
2548 s = get_times()
2536 def print_time():
2549 def print_time():
2537 t = get_times()
2550 t = get_times()
2538 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2551 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2539 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2552 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2540 atexit.register(print_time)
2553 atexit.register(print_time)
2541
2554
2542 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2555 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2543 not options["noninteractive"])
2556 not options["noninteractive"])
2544
2557
2545 # enter the debugger before command execution
2558 # enter the debugger before command execution
2546 if options['debugger']:
2559 if options['debugger']:
2547 pdb.set_trace()
2560 pdb.set_trace()
2548
2561
2549 try:
2562 try:
2550 try:
2563 try:
2551 if options['help']:
2564 if options['help']:
2552 help_(u, cmd, options['version'])
2565 help_(u, cmd, options['version'])
2553 sys.exit(0)
2566 sys.exit(0)
2554 elif options['version']:
2567 elif options['version']:
2555 show_version(u)
2568 show_version(u)
2556 sys.exit(0)
2569 sys.exit(0)
2557 elif not cmd:
2570 elif not cmd:
2558 help_(u, 'shortlist')
2571 help_(u, 'shortlist')
2559 sys.exit(0)
2572 sys.exit(0)
2560
2573
2561 if options['cwd']:
2574 if options['cwd']:
2562 try:
2575 try:
2563 os.chdir(options['cwd'])
2576 os.chdir(options['cwd'])
2564 except OSError, inst:
2577 except OSError, inst:
2565 raise util.Abort('%s: %s' %
2578 raise util.Abort('%s: %s' %
2566 (options['cwd'], inst.strerror))
2579 (options['cwd'], inst.strerror))
2567
2580
2568 if cmd not in norepo.split():
2581 if cmd not in norepo.split():
2569 path = options["repository"] or ""
2582 path = options["repository"] or ""
2570 repo = hg.repository(ui=u, path=path)
2583 repo = hg.repository(ui=u, path=path)
2571 for x in external:
2584 for x in external:
2572 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2585 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2573 d = lambda: func(u, repo, *args, **cmdoptions)
2586 d = lambda: func(u, repo, *args, **cmdoptions)
2574 else:
2587 else:
2575 d = lambda: func(u, *args, **cmdoptions)
2588 d = lambda: func(u, *args, **cmdoptions)
2576
2589
2577 if options['profile']:
2590 if options['profile']:
2578 import hotshot, hotshot.stats
2591 import hotshot, hotshot.stats
2579 prof = hotshot.Profile("hg.prof")
2592 prof = hotshot.Profile("hg.prof")
2580 r = prof.runcall(d)
2593 r = prof.runcall(d)
2581 prof.close()
2594 prof.close()
2582 stats = hotshot.stats.load("hg.prof")
2595 stats = hotshot.stats.load("hg.prof")
2583 stats.strip_dirs()
2596 stats.strip_dirs()
2584 stats.sort_stats('time', 'calls')
2597 stats.sort_stats('time', 'calls')
2585 stats.print_stats(40)
2598 stats.print_stats(40)
2586 return r
2599 return r
2587 else:
2600 else:
2588 return d()
2601 return d()
2589 except:
2602 except:
2590 # enter the debugger when we hit an exception
2603 # enter the debugger when we hit an exception
2591 if options['debugger']:
2604 if options['debugger']:
2592 pdb.post_mortem(sys.exc_info()[2])
2605 pdb.post_mortem(sys.exc_info()[2])
2593 if options['traceback']:
2606 if options['traceback']:
2594 traceback.print_exc()
2607 traceback.print_exc()
2595 raise
2608 raise
2596 except hg.RepoError, inst:
2609 except hg.RepoError, inst:
2597 u.warn(_("abort: "), inst, "!\n")
2610 u.warn(_("abort: "), inst, "!\n")
2598 except revlog.RevlogError, inst:
2611 except revlog.RevlogError, inst:
2599 u.warn(_("abort: "), inst, "!\n")
2612 u.warn(_("abort: "), inst, "!\n")
2600 except SignalInterrupt:
2613 except SignalInterrupt:
2601 u.warn(_("killed!\n"))
2614 u.warn(_("killed!\n"))
2602 except KeyboardInterrupt:
2615 except KeyboardInterrupt:
2603 try:
2616 try:
2604 u.warn(_("interrupted!\n"))
2617 u.warn(_("interrupted!\n"))
2605 except IOError, inst:
2618 except IOError, inst:
2606 if inst.errno == errno.EPIPE:
2619 if inst.errno == errno.EPIPE:
2607 if u.debugflag:
2620 if u.debugflag:
2608 u.warn(_("\nbroken pipe\n"))
2621 u.warn(_("\nbroken pipe\n"))
2609 else:
2622 else:
2610 raise
2623 raise
2611 except IOError, inst:
2624 except IOError, inst:
2612 if hasattr(inst, "code"):
2625 if hasattr(inst, "code"):
2613 u.warn(_("abort: %s\n") % inst)
2626 u.warn(_("abort: %s\n") % inst)
2614 elif hasattr(inst, "reason"):
2627 elif hasattr(inst, "reason"):
2615 u.warn(_("abort: error: %s\n") % inst.reason[1])
2628 u.warn(_("abort: error: %s\n") % inst.reason[1])
2616 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2629 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2617 if u.debugflag:
2630 if u.debugflag:
2618 u.warn(_("broken pipe\n"))
2631 u.warn(_("broken pipe\n"))
2619 elif getattr(inst, "strerror", None):
2632 elif getattr(inst, "strerror", None):
2620 if getattr(inst, "filename", None):
2633 if getattr(inst, "filename", None):
2621 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2634 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2622 else:
2635 else:
2623 u.warn(_("abort: %s\n") % inst.strerror)
2636 u.warn(_("abort: %s\n") % inst.strerror)
2624 else:
2637 else:
2625 raise
2638 raise
2626 except OSError, inst:
2639 except OSError, inst:
2627 if hasattr(inst, "filename"):
2640 if hasattr(inst, "filename"):
2628 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2641 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2629 else:
2642 else:
2630 u.warn(_("abort: %s\n") % inst.strerror)
2643 u.warn(_("abort: %s\n") % inst.strerror)
2631 except util.Abort, inst:
2644 except util.Abort, inst:
2632 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2645 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2633 sys.exit(1)
2646 sys.exit(1)
2634 except TypeError, inst:
2647 except TypeError, inst:
2635 # was this an argument error?
2648 # was this an argument error?
2636 tb = traceback.extract_tb(sys.exc_info()[2])
2649 tb = traceback.extract_tb(sys.exc_info()[2])
2637 if len(tb) > 2: # no
2650 if len(tb) > 2: # no
2638 raise
2651 raise
2639 u.debug(inst, "\n")
2652 u.debug(inst, "\n")
2640 u.warn(_("%s: invalid arguments\n") % cmd)
2653 u.warn(_("%s: invalid arguments\n") % cmd)
2641 help_(u, cmd)
2654 help_(u, cmd)
2642 except UnknownCommand, inst:
2655 except UnknownCommand, inst:
2643 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2656 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2644 help_(u, 'shortlist')
2657 help_(u, 'shortlist')
2645 except SystemExit:
2658 except SystemExit:
2646 # don't catch this in the catch-all below
2659 # don't catch this in the catch-all below
2647 raise
2660 raise
2648 except:
2661 except:
2649 u.warn(_("** unknown exception encountered, details follow\n"))
2662 u.warn(_("** unknown exception encountered, details follow\n"))
2650 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2663 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2651 raise
2664 raise
2652
2665
2653 sys.exit(-1)
2666 sys.exit(-1)
@@ -1,1472 +1,1736 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository:
15 class localrepository:
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp: raise repo.RepoError(_("no repo found"))
22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 path = p
23 path = p
24 self.path = os.path.join(path, ".hg")
24 self.path = os.path.join(path, ".hg")
25
25
26 if not create and not os.path.isdir(self.path):
26 if not create and not os.path.isdir(self.path):
27 raise repo.RepoError(_("repository %s not found") % self.path)
27 raise repo.RepoError(_("repository %s not found") % self.path)
28
28
29 self.root = os.path.abspath(path)
29 self.root = os.path.abspath(path)
30 self.ui = ui
30 self.ui = ui
31 self.opener = util.opener(self.path)
31 self.opener = util.opener(self.path)
32 self.wopener = util.opener(self.root)
32 self.wopener = util.opener(self.root)
33 self.manifest = manifest.manifest(self.opener)
33 self.manifest = manifest.manifest(self.opener)
34 self.changelog = changelog.changelog(self.opener)
34 self.changelog = changelog.changelog(self.opener)
35 self.tagscache = None
35 self.tagscache = None
36 self.nodetagscache = None
36 self.nodetagscache = None
37 self.encodepats = None
37 self.encodepats = None
38 self.decodepats = None
38 self.decodepats = None
39
39
40 if create:
40 if create:
41 os.mkdir(self.path)
41 os.mkdir(self.path)
42 os.mkdir(self.join("data"))
42 os.mkdir(self.join("data"))
43
43
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 try:
45 try:
46 self.ui.readconfig(self.opener("hgrc"))
46 self.ui.readconfig(self.opener("hgrc"))
47 except IOError: pass
47 except IOError: pass
48
48
49 def hook(self, name, **args):
49 def hook(self, name, **args):
50 s = self.ui.config("hooks", name)
50 s = self.ui.config("hooks", name)
51 if s:
51 if s:
52 self.ui.note(_("running hook %s: %s\n") % (name, s))
52 self.ui.note(_("running hook %s: %s\n") % (name, s))
53 old = {}
53 old = {}
54 for k, v in args.items():
54 for k, v in args.items():
55 k = k.upper()
55 k = k.upper()
56 old[k] = os.environ.get(k, None)
56 old[k] = os.environ.get(k, None)
57 os.environ[k] = v
57 os.environ[k] = v
58
58
59 # Hooks run in the repository root
59 # Hooks run in the repository root
60 olddir = os.getcwd()
60 olddir = os.getcwd()
61 os.chdir(self.root)
61 os.chdir(self.root)
62 r = os.system(s)
62 r = os.system(s)
63 os.chdir(olddir)
63 os.chdir(olddir)
64
64
65 for k, v in old.items():
65 for k, v in old.items():
66 if v != None:
66 if v != None:
67 os.environ[k] = v
67 os.environ[k] = v
68 else:
68 else:
69 del os.environ[k]
69 del os.environ[k]
70
70
71 if r:
71 if r:
72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
72 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
73 (name, r))
73 (name, r))
74 return False
74 return False
75 return True
75 return True
76
76
77 def tags(self):
77 def tags(self):
78 '''return a mapping of tag to node'''
78 '''return a mapping of tag to node'''
79 if not self.tagscache:
79 if not self.tagscache:
80 self.tagscache = {}
80 self.tagscache = {}
81 def addtag(self, k, n):
81 def addtag(self, k, n):
82 try:
82 try:
83 bin_n = bin(n)
83 bin_n = bin(n)
84 except TypeError:
84 except TypeError:
85 bin_n = ''
85 bin_n = ''
86 self.tagscache[k.strip()] = bin_n
86 self.tagscache[k.strip()] = bin_n
87
87
88 try:
88 try:
89 # read each head of the tags file, ending with the tip
89 # read each head of the tags file, ending with the tip
90 # and add each tag found to the map, with "newer" ones
90 # and add each tag found to the map, with "newer" ones
91 # taking precedence
91 # taking precedence
92 fl = self.file(".hgtags")
92 fl = self.file(".hgtags")
93 h = fl.heads()
93 h = fl.heads()
94 h.reverse()
94 h.reverse()
95 for r in h:
95 for r in h:
96 for l in fl.read(r).splitlines():
96 for l in fl.read(r).splitlines():
97 if l:
97 if l:
98 n, k = l.split(" ", 1)
98 n, k = l.split(" ", 1)
99 addtag(self, k, n)
99 addtag(self, k, n)
100 except KeyError:
100 except KeyError:
101 pass
101 pass
102
102
103 try:
103 try:
104 f = self.opener("localtags")
104 f = self.opener("localtags")
105 for l in f:
105 for l in f:
106 n, k = l.split(" ", 1)
106 n, k = l.split(" ", 1)
107 addtag(self, k, n)
107 addtag(self, k, n)
108 except IOError:
108 except IOError:
109 pass
109 pass
110
110
111 self.tagscache['tip'] = self.changelog.tip()
111 self.tagscache['tip'] = self.changelog.tip()
112
112
113 return self.tagscache
113 return self.tagscache
114
114
115 def tagslist(self):
115 def tagslist(self):
116 '''return a list of tags ordered by revision'''
116 '''return a list of tags ordered by revision'''
117 l = []
117 l = []
118 for t, n in self.tags().items():
118 for t, n in self.tags().items():
119 try:
119 try:
120 r = self.changelog.rev(n)
120 r = self.changelog.rev(n)
121 except:
121 except:
122 r = -2 # sort to the beginning of the list if unknown
122 r = -2 # sort to the beginning of the list if unknown
123 l.append((r,t,n))
123 l.append((r,t,n))
124 l.sort()
124 l.sort()
125 return [(t,n) for r,t,n in l]
125 return [(t,n) for r,t,n in l]
126
126
127 def nodetags(self, node):
127 def nodetags(self, node):
128 '''return the tags associated with a node'''
128 '''return the tags associated with a node'''
129 if not self.nodetagscache:
129 if not self.nodetagscache:
130 self.nodetagscache = {}
130 self.nodetagscache = {}
131 for t,n in self.tags().items():
131 for t,n in self.tags().items():
132 self.nodetagscache.setdefault(n,[]).append(t)
132 self.nodetagscache.setdefault(n,[]).append(t)
133 return self.nodetagscache.get(node, [])
133 return self.nodetagscache.get(node, [])
134
134
135 def lookup(self, key):
135 def lookup(self, key):
136 try:
136 try:
137 return self.tags()[key]
137 return self.tags()[key]
138 except KeyError:
138 except KeyError:
139 try:
139 try:
140 return self.changelog.lookup(key)
140 return self.changelog.lookup(key)
141 except:
141 except:
142 raise repo.RepoError(_("unknown revision '%s'") % key)
142 raise repo.RepoError(_("unknown revision '%s'") % key)
143
143
144 def dev(self):
144 def dev(self):
145 return os.stat(self.path).st_dev
145 return os.stat(self.path).st_dev
146
146
147 def local(self):
147 def local(self):
148 return True
148 return True
149
149
150 def join(self, f):
150 def join(self, f):
151 return os.path.join(self.path, f)
151 return os.path.join(self.path, f)
152
152
153 def wjoin(self, f):
153 def wjoin(self, f):
154 return os.path.join(self.root, f)
154 return os.path.join(self.root, f)
155
155
156 def file(self, f):
156 def file(self, f):
157 if f[0] == '/': f = f[1:]
157 if f[0] == '/': f = f[1:]
158 return filelog.filelog(self.opener, f)
158 return filelog.filelog(self.opener, f)
159
159
160 def getcwd(self):
160 def getcwd(self):
161 return self.dirstate.getcwd()
161 return self.dirstate.getcwd()
162
162
163 def wfile(self, f, mode='r'):
163 def wfile(self, f, mode='r'):
164 return self.wopener(f, mode)
164 return self.wopener(f, mode)
165
165
166 def wread(self, filename):
166 def wread(self, filename):
167 if self.encodepats == None:
167 if self.encodepats == None:
168 l = []
168 l = []
169 for pat, cmd in self.ui.configitems("encode"):
169 for pat, cmd in self.ui.configitems("encode"):
170 mf = util.matcher("", "/", [pat], [], [])[1]
170 mf = util.matcher("", "/", [pat], [], [])[1]
171 l.append((mf, cmd))
171 l.append((mf, cmd))
172 self.encodepats = l
172 self.encodepats = l
173
173
174 data = self.wopener(filename, 'r').read()
174 data = self.wopener(filename, 'r').read()
175
175
176 for mf, cmd in self.encodepats:
176 for mf, cmd in self.encodepats:
177 if mf(filename):
177 if mf(filename):
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 data = util.filter(data, cmd)
179 data = util.filter(data, cmd)
180 break
180 break
181
181
182 return data
182 return data
183
183
184 def wwrite(self, filename, data, fd=None):
184 def wwrite(self, filename, data, fd=None):
185 if self.decodepats == None:
185 if self.decodepats == None:
186 l = []
186 l = []
187 for pat, cmd in self.ui.configitems("decode"):
187 for pat, cmd in self.ui.configitems("decode"):
188 mf = util.matcher("", "/", [pat], [], [])[1]
188 mf = util.matcher("", "/", [pat], [], [])[1]
189 l.append((mf, cmd))
189 l.append((mf, cmd))
190 self.decodepats = l
190 self.decodepats = l
191
191
192 for mf, cmd in self.decodepats:
192 for mf, cmd in self.decodepats:
193 if mf(filename):
193 if mf(filename):
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 data = util.filter(data, cmd)
195 data = util.filter(data, cmd)
196 break
196 break
197
197
198 if fd:
198 if fd:
199 return fd.write(data)
199 return fd.write(data)
200 return self.wopener(filename, 'w').write(data)
200 return self.wopener(filename, 'w').write(data)
201
201
202 def transaction(self):
202 def transaction(self):
203 # save dirstate for undo
203 # save dirstate for undo
204 try:
204 try:
205 ds = self.opener("dirstate").read()
205 ds = self.opener("dirstate").read()
206 except IOError:
206 except IOError:
207 ds = ""
207 ds = ""
208 self.opener("journal.dirstate", "w").write(ds)
208 self.opener("journal.dirstate", "w").write(ds)
209
209
210 def after():
210 def after():
211 util.rename(self.join("journal"), self.join("undo"))
211 util.rename(self.join("journal"), self.join("undo"))
212 util.rename(self.join("journal.dirstate"),
212 util.rename(self.join("journal.dirstate"),
213 self.join("undo.dirstate"))
213 self.join("undo.dirstate"))
214
214
215 return transaction.transaction(self.ui.warn, self.opener,
215 return transaction.transaction(self.ui.warn, self.opener,
216 self.join("journal"), after)
216 self.join("journal"), after)
217
217
218 def recover(self):
218 def recover(self):
219 lock = self.lock()
219 lock = self.lock()
220 if os.path.exists(self.join("journal")):
220 if os.path.exists(self.join("journal")):
221 self.ui.status(_("rolling back interrupted transaction\n"))
221 self.ui.status(_("rolling back interrupted transaction\n"))
222 return transaction.rollback(self.opener, self.join("journal"))
222 return transaction.rollback(self.opener, self.join("journal"))
223 else:
223 else:
224 self.ui.warn(_("no interrupted transaction available\n"))
224 self.ui.warn(_("no interrupted transaction available\n"))
225
225
def undo(self):
    """Roll back the last closed transaction, restoring the dirstate."""
    repo_lock = self.lock()
    if not os.path.exists(self.join("undo")):
        self.ui.warn(_("no undo information available\n"))
        return
    self.ui.status(_("rolling back last transaction\n"))
    transaction.rollback(self.opener, self.join("undo"))
    # Drop the in-memory dirstate before swapping the on-disk file,
    # then rebuild it from the restored copy.
    self.dirstate = None
    util.rename(self.join("undo.dirstate"), self.join("dirstate"))
    self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
236
236
def lock(self, wait=1):
    # Acquire the repository lock file.  First try a non-blocking
    # acquire (second argument 0); if the lock is held and wait is
    # truthy, warn who holds it and retry, passing wait through to
    # lock.lock.  With wait false, the LockHeld exception propagates.
    try:
        return lock.lock(self.join("lock"), 0)
    except lock.LockHeld, inst:
        if wait:
            self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
            return lock.lock(self.join("lock"), wait)
        raise inst
245
245
def rawcommit(self, files, text, user, date, p1=None, p2=None):
    """Create a changeset directly from the given file list and
    metadata, bypassing the normal commit checks.

    p1/p2 default to the dirstate parents; the dirstate itself is only
    updated when the commit is built on top of the current first
    parent (update_dirstate).
    """
    orig_parent = self.dirstate.parents()[0] or nullid
    p1 = p1 or self.dirstate.parents()[0] or nullid
    p2 = p2 or self.dirstate.parents()[1] or nullid
    c1 = self.changelog.read(p1)
    c2 = self.changelog.read(p2)
    m1 = self.manifest.read(c1[0])
    mf1 = self.manifest.readflags(c1[0])
    m2 = self.manifest.read(c2[0])
    changed = []

    # Only touch the dirstate if we are committing on top of the
    # working directory's current first parent.
    if orig_parent == p1:
        update_dirstate = 1
    else:
        update_dirstate = 0

    tr = self.transaction()
    mm = m1.copy()
    mfm = mf1.copy()
    linkrev = self.changelog.count()
    for f in files:
        try:
            t = self.wread(f)
            tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
            r = self.file(f)
            mfm[f] = tm

            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)

            # is the same revision on two branches of a merge?
            if fp2 == fp1:
                fp2 = nullid

            if fp2 != nullid:
                # is one parent an ancestor of the other?
                fpa = r.ancestor(fp1, fp2)
                if fpa == fp1:
                    fp1, fp2 = fp2, nullid
                elif fpa == fp2:
                    fp2 = nullid

            # is the file unmodified from the parent?
            if t == r.read(fp1):
                # record the proper existing parent in manifest
                # no need to add a revision
                mm[f] = fp1
                continue

            mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
            changed.append(f)
            if update_dirstate:
                self.dirstate.update([f], "n")
        except IOError:
            # Unreadable working copy: treat the file as removed in
            # this commit.
            try:
                del mm[f]
                del mfm[f]
                if update_dirstate:
                    self.dirstate.forget([f])
            except:
                # deleted from p2?
                pass

    mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
    user = user or self.ui.username()
    n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
    tr.close()
    if update_dirstate:
        self.dirstate.setparents(n, nullid)
315
315
def commit(self, files = None, text = "", user = None, date = None,
           match = util.always, force=False):
    """Commit the given files (or, with no file list, everything the
    dirstate reports as changed/added/removed and matched by match).

    Returns the new changeset node, or None if nothing was committed
    (no changes, a hook vetoed it, or the editor text was empty).
    """
    commit = []
    remove = []
    changed = []

    if files:
        # Explicit file list: classify each by its dirstate state.
        for f in files:
            s = self.dirstate.state(f)
            if s in 'nmai':
                commit.append(f)
            elif s == 'r':
                remove.append(f)
            else:
                self.ui.warn(_("%s not tracked!\n") % f)
    else:
        # No list: commit whatever the dirstate says has changed.
        (c, a, d, u) = self.changes(match=match)
        commit = c + a
        remove = d

    p1, p2 = self.dirstate.parents()
    c1 = self.changelog.read(p1)
    c2 = self.changelog.read(p2)
    m1 = self.manifest.read(c1[0])
    mf1 = self.manifest.readflags(c1[0])
    m2 = self.manifest.read(c2[0])

    # A merge in progress (p2 set) may legitimately commit no files.
    if not commit and not remove and not force and p2 == nullid:
        self.ui.status(_("nothing changed\n"))
        return None

    if not self.hook("precommit"):
        return None

    lock = self.lock()
    tr = self.transaction()

    # check in files
    new = {}
    linkrev = self.changelog.count()
    commit.sort()
    for f in commit:
        self.ui.note(f + "\n")
        try:
            mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
            t = self.wread(f)
        except IOError:
            self.ui.warn(_("trouble committing %s!\n") % f)
            raise

        r = self.file(f)

        meta = {}
        cp = self.dirstate.copied(f)
        if cp:
            # Record copy source and the revision it was copied from.
            meta["copy"] = cp
            meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
            self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
            fp1, fp2 = nullid, nullid
        else:
            fp1 = m1.get(f, nullid)
            fp2 = m2.get(f, nullid)

            # is the same revision on two branches of a merge?
            if fp2 == fp1:
                fp2 = nullid

            if fp2 != nullid:
                # is one parent an ancestor of the other?
                fpa = r.ancestor(fp1, fp2)
                if fpa == fp1:
                    fp1, fp2 = fp2, nullid
                elif fpa == fp2:
                    fp2 = nullid

            # is the file unmodified from the parent?
            if not meta and t == r.read(fp1):
                # record the proper existing parent in manifest
                # no need to add a revision
                new[f] = fp1
                continue

        new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
        # remember what we've added so that we can later calculate
        # the files to pull from a set of changesets
        changed.append(f)

    # update manifest
    m1.update(new)
    for f in remove:
        if f in m1:
            del m1[f]
    mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
                           (new, remove))

    # add changeset
    new = new.keys()
    new.sort()

    if not text:
        # Build a commit-message template and hand it to the editor.
        edittext = ""
        if p2 != nullid:
            edittext += "HG: branch merge\n"
        edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
        edittext += "".join(["HG: changed %s\n" % f for f in changed])
        edittext += "".join(["HG: removed %s\n" % f for f in remove])
        if not changed and not remove:
            edittext += "HG: no files changed\n"
        edittext = self.ui.edit(edittext)
        if not edittext.rstrip():
            # Empty message aborts the commit (transaction never closed,
            # so the journal rolls it back).
            return None
        text = edittext

    user = user or self.ui.username()
    n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
    tr.close()

    self.dirstate.setparents(n)
    self.dirstate.update(new, "n")
    self.dirstate.forget(remove)

    if not self.hook("commit", node=hex(n)):
        return None
    return n
440
440
def walk(self, node=None, files=[], match=util.always):
    """Yield (source, filename) pairs for matched files.

    With a node, walk that revision's manifest (source 'm'); without
    one, walk the working directory via the dirstate.
    """
    if node:
        manifest_node = self.changelog.read(node)[0]
        for name in self.manifest.read(manifest_node):
            if match(name):
                yield 'm', name
    else:
        for origin, name in self.dirstate.walk(files, match):
            yield origin, name
448
448
def changes(self, node1 = None, node2 = None, files = [],
            match = util.always):
    """Return (changed, added, deleted, unknown) file lists between
    two revisions, or between a revision and the working directory
    when node2 is None (and its parent when node1 is also None).
    All four lists are returned sorted.
    """
    mf2, u = None, []

    def fcmp(fn, mf):
        # Compare working-copy contents against the filelog revision
        # recorded in manifest mf.  NOTE: py2-only builtin cmp().
        t1 = self.wread(fn)
        t2 = self.file(fn).read(mf.get(fn, nullid))
        return cmp(t1, t2)

    def mfmatches(node):
        # Manifest of the given node, restricted to matched files.
        mf = dict(self.manifest.read(node))
        for fn in mf.keys():
            if not match(fn):
                del mf[fn]
        return mf

    # are we comparing the working directory?
    if not node2:
        l, c, a, d, u = self.dirstate.changes(files, match)

        # are we comparing working dir against its parent?
        if not node1:
            if l:
                # do a full compare of any files that might have changed
                change = self.changelog.read(self.dirstate.parents()[0])
                mf2 = mfmatches(change[0])
                for f in l:
                    if fcmp(f, mf2):
                        c.append(f)

            for l in c, a, d, u:
                l.sort()

            return (c, a, d, u)

    # are we comparing working dir against non-tip?
    # generate a pseudo-manifest for the working dir
    if not node2:
        if not mf2:
            change = self.changelog.read(self.dirstate.parents()[0])
            mf2 = mfmatches(change[0])
        # Working-dir files get an empty-string placeholder node so the
        # comparison loop below falls back to a content compare.
        for f in a + c + l:
            mf2[f] = ""
        for f in d:
            if f in mf2: del mf2[f]
    else:
        change = self.changelog.read(node2)
        mf2 = mfmatches(change[0])

    # flush lists from dirstate before comparing manifests
    c, a = [], []

    change = self.changelog.read(node1)
    mf1 = mfmatches(change[0])

    # Files in both manifests with differing nodes are changed; files
    # only in mf2 are added; whatever is left in mf1 is deleted.
    for fn in mf2:
        if mf1.has_key(fn):
            if mf1[fn] != mf2[fn]:
                if mf2[fn] != "" or fcmp(fn, mf1):
                    c.append(fn)
            del mf1[fn]
        else:
            a.append(fn)

    d = mf1.keys()

    for l in c, a, d, u:
        l.sort()

    return (c, a, d, u)
519
519
def add(self, list):
    """Schedule the listed files for addition, warning about entries
    that are missing, not regular files, or already tracked."""
    for name in list:
        path = self.wjoin(name)
        if not os.path.exists(path):
            self.ui.warn(_("%s does not exist!\n") % name)
            continue
        if not os.path.isfile(path):
            self.ui.warn(_("%s not added: only files supported currently\n") % name)
            continue
        if self.dirstate.state(name) in 'an':
            self.ui.warn(_("%s already tracked!\n") % name)
            continue
        self.dirstate.update([name], "a")
531
531
def forget(self, list):
    """Un-schedule pending adds; files never marked 'a'/'i' get a warning."""
    for name in list:
        if self.dirstate.state(name) in 'ai':
            self.dirstate.forget([name])
        else:
            self.ui.warn(_("%s not added!\n") % name)
538
538
539 def remove(self, list, unlink=False):
539 def remove(self, list, unlink=False):
540 if unlink:
540 if unlink:
541 for f in list:
541 for f in list:
542 try:
542 try:
543 util.unlink(self.wjoin(f))
543 util.unlink(self.wjoin(f))
544 except OSError, inst:
544 except OSError, inst:
545 if inst.errno != errno.ENOENT: raise
545 if inst.errno != errno.ENOENT: raise
546 for f in list:
546 for f in list:
547 p = self.wjoin(f)
547 p = self.wjoin(f)
548 if os.path.exists(p):
548 if os.path.exists(p):
549 self.ui.warn(_("%s still exists!\n") % f)
549 self.ui.warn(_("%s still exists!\n") % f)
550 elif self.dirstate.state(f) == 'a':
550 elif self.dirstate.state(f) == 'a':
551 self.ui.warn(_("%s never committed!\n") % f)
551 self.ui.warn(_("%s never committed!\n") % f)
552 self.dirstate.forget([f])
552 self.dirstate.forget([f])
553 elif f not in self.dirstate:
553 elif f not in self.dirstate:
554 self.ui.warn(_("%s not tracked!\n") % f)
554 self.ui.warn(_("%s not tracked!\n") % f)
555 else:
555 else:
556 self.dirstate.update([f], "r")
556 self.dirstate.update([f], "r")
557
557
def undelete(self, list):
    """Restore files scheduled for removal from the first parent's
    manifest, recreating contents and exec bits in the working dir."""
    p = self.dirstate.parents()[0]
    mn = self.changelog.read(p)[0]
    mf = self.manifest.readflags(mn)
    m = self.manifest.read(mn)
    for f in list:
        if self.dirstate.state(f) not in "r":
            self.ui.warn("%s not removed!\n" % f)
        else:
            t = self.file(f).read(m[f])
            try:
                self.wwrite(f, t)
            except IOError, e:
                if e.errno != errno.ENOENT:
                    raise
                # Parent directory was removed too: recreate it and retry.
                os.makedirs(os.path.dirname(self.wjoin(f)))
                self.wwrite(f, t)
            util.set_exec(self.wjoin(f), mf[f])
            self.dirstate.update([f], "n")
577
577
def copy(self, source, dest):
    """Record dest as a copy of source in the dirstate; dest must
    already exist in the working directory as a regular file."""
    target = self.wjoin(dest)
    if not os.path.exists(target):
        self.ui.warn(_("%s does not exist!\n") % dest)
        return
    if not os.path.isfile(target):
        self.ui.warn(_("copy failed: %s is not a file\n") % dest)
        return
    # Track an untracked destination as added before recording the copy.
    if self.dirstate.state(dest) == '?':
        self.dirstate.update([dest], "a")
    self.dirstate.copy(source, dest)
588
588
def heads(self):
    # Repository heads: delegated to the changelog.
    return self.changelog.heads()
591
591
# branchlookup returns a dict giving a list of branches for
# each head. A branch is defined as the tag of a node or
# the branch of the node's parents. If a node has multiple
# branch tags, tags are eliminated if they are visible from other
# branch tags.
#
# So, for this graph: a->b->c->d->e
#                      \         /
#                       aa -----/
# a has tag 2.6.12
# d has tag 2.6.13
# e would have branch tags for 2.6.12 and 2.6.13. Because the node
# for 2.6.12 can be reached from the node 2.6.13, that is eliminated
# from the list.
#
# It is possible that more than one head will have the same branch tag.
# callers need to check the result for multiple heads under the same
# branch tag if that is a problem for them (ie checkout of a specific
# branch).
#
# passing in a specific branch will limit the depth of the search
# through the parents. It won't limit the branches returned in the
# result though.
def branchlookup(self, heads=None, branch=None):
    if not heads:
        heads = self.heads()
    headt = [ h for h in heads ]
    chlog = self.changelog
    branches = {}
    merges = []
    seenmerge = {}

    # traverse the tree once for each head, recording in the branches
    # dict which tags are visible from this head. The branches
    # dict also records which tags are visible from each tag
    # while we traverse.
    while headt or merges:
        if merges:
            # Resume a second-parent branch with the tags found so far.
            n, found = merges.pop()
            visit = [n]
        else:
            h = headt.pop()
            visit = [h]
            found = [h]
            seen = {}
        while visit:
            n = visit.pop()
            if n in seen:
                continue
            pp = chlog.parents(n)
            tags = self.nodetags(n)
            if tags:
                # Record this tagged node as visible from every node
                # on the path that led here (and from itself).
                for x in tags:
                    if x == 'tip':
                        continue
                    for f in found:
                        branches.setdefault(f, {})[n] = 1
                    branches.setdefault(n, {})[n] = 1
                    break
                if n not in found:
                    found.append(n)
                if branch in tags:
                    # Caller asked for a specific branch: stop
                    # descending past it.
                    continue
            seen[n] = 1
            if pp[1] != nullid and n not in seenmerge:
                # Queue the merge's second parent for a later pass,
                # carrying a snapshot of the tags found so far.
                merges.append((pp[1], [x for x in found]))
                seenmerge[n] = 1
            if pp[0] != nullid:
                visit.append(pp[0])
    # traverse the branches dict, eliminating branch tags from each
    # head that are visible from another branch tag for that head.
    out = {}
    viscache = {}
    for h in heads:
        def visible(node):
            # Transitive closure of tag-visibility from node, memoized
            # in viscache.
            if node in viscache:
                return viscache[node]
            ret = {}
            visit = [node]
            while visit:
                x = visit.pop()
                if x in viscache:
                    ret.update(viscache[x])
                elif x not in ret:
                    ret[x] = 1
                    if x in branches:
                        visit[len(visit):] = branches[x].keys()
            viscache[node] = ret
            return ret
        if h not in branches:
            continue
        # O(n^2), but somewhat limited. This only searches the
        # tags visible from a specific head, not all the tags in the
        # whole repo.
        for b in branches[h]:
            vis = False
            for bb in branches[h].keys():
                if b != bb:
                    if b in visible(bb):
                        vis = True
                        break
            if not vis:
                l = out.setdefault(h, [])
                l[len(l):] = self.nodetags(b)
    return out
697
697
def branches(self, nodes):
    """For each node, follow first parents until a merge or root is
    reached; return (start, stop, p1, p2) tuples describing those
    linear segments.  Defaults to the changelog tip."""
    if not nodes:
        nodes = [self.changelog.tip()]
    segments = []
    for start in nodes:
        node = start
        while node:
            parents = self.changelog.parents(node)
            # A merge (real second parent) or a root ends the segment.
            if parents[1] != nullid or parents[0] == nullid:
                segments.append((start, node, parents[0], parents[1]))
                break
            node = parents[0]
    return segments
710
710
def between(self, pairs):
    """For each (top, bottom) pair, walk the first-parent chain from
    top toward bottom and collect the nodes found at exponentially
    growing distances (1, 2, 4, ...), excluding top and bottom."""
    result = []

    for top, bottom in pairs:
        sampled = []
        node, dist, target = top, 0, 1

        while node != bottom:
            parent = self.changelog.parents(node)[0]
            if dist == target:
                sampled.append(node)
                target *= 2
            node = parent
            dist += 1

        result.append(sampled)

    return result
729
729
def newer(self, nodes):
    """Return all changelog nodes that are descendants (via any parent
    link) of the given nodes, including the nodes themselves, in
    revision order."""
    m = {}
    nl = []
    pm = {}
    cl = self.changelog
    t = l = cl.count()

    # find the lowest numbered node
    for n in nodes:
        l = min(l, cl.rev(n))
        m[n] = 1

    # Scan forward from the lowest listed revision; a node is kept if
    # it was listed or has a kept parent.  NOTE: py2-only xrange.
    for i in xrange(l, t):
        n = cl.node(i)
        if n in m: # explicitly listed
            pm[n] = 1
            nl.append(n)
            continue
        for p in cl.parents(n):
            if p in pm: # parent listed
                pm[n] = 1
                nl.append(n)
                break

    return nl
755
def findincoming(self, remote, base=None, heads=None):
    """Discover changesets present in remote but not locally.

    base, if given, is filled in with known common nodes.  Returns the
    list of earliest-unknown nodes to fetch, or None when the remote
    heads are all already known.
    """
    m = self.changelog.nodemap
    search = []
    fetch = {}
    seen = {}
    seenbranch = {}
    if base == None:
        base = {}

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    if not heads:
        heads = remote.heads()

    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        # Every remote head is already in our changelog.
        return None

    rep = {}
    reqcnt = 0

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
            if n[0] == nullid:
                break
            if n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            if n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                        base[n[2]] = 1 # latest known
                        continue

                # Both parents still unknown: ask the remote about them
                # in the next batched request (dedup via rep).
                for a in n[2:4]:
                    if a not in rep:
                        r.append(a)
                        rep[a] = 1

            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # Batch branch queries ten at a time.
            for p in range(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    if b[0] in m:
                        self.ui.debug(_("found base node %s\n") % short(b[0]))
                        base[b[0]] = 1
                    elif b[0] not in seen:
                        unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    # Adjacent known/unknown pair: boundary found.
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    # Narrow the range and search it again.
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            raise repo.RepoError(_("already have changeset ") + short(f[:4]))

    if base.keys() == [nullid]:
        self.ui.warn(_("warning: pulling from an unrelated repository!\n"))

    self.ui.note(_("found new changesets starting at ") +
                 " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
875
849
876 def findoutgoing(self, remote, base=None, heads=None):
850 def findoutgoing(self, remote, base=None, heads=None):
877 if base == None:
851 if base == None:
878 base = {}
852 base = {}
879 self.findincoming(remote, base, heads)
853 self.findincoming(remote, base, heads)
880
854
881 self.ui.debug(_("common changesets up to ")
855 self.ui.debug(_("common changesets up to ")
882 + " ".join(map(short, base.keys())) + "\n")
856 + " ".join(map(short, base.keys())) + "\n")
883
857
884 remain = dict.fromkeys(self.changelog.nodemap)
858 remain = dict.fromkeys(self.changelog.nodemap)
885
859
886 # prune everything remote has from the tree
860 # prune everything remote has from the tree
887 del remain[nullid]
861 del remain[nullid]
888 remove = base.keys()
862 remove = base.keys()
889 while remove:
863 while remove:
890 n = remove.pop(0)
864 n = remove.pop(0)
891 if n in remain:
865 if n in remain:
892 del remain[n]
866 del remain[n]
893 for p in self.changelog.parents(n):
867 for p in self.changelog.parents(n):
894 remove.append(p)
868 remove.append(p)
895
869
896 # find every node whose parents have been pruned
870 # find every node whose parents have been pruned
897 subset = []
871 subset = []
898 for n in remain:
872 for n in remain:
899 p1, p2 = self.changelog.parents(n)
873 p1, p2 = self.changelog.parents(n)
900 if p1 not in remain and p2 not in remain:
874 if p1 not in remain and p2 not in remain:
901 subset.append(n)
875 subset.append(n)
902
876
903 # this is the set of all roots we have to push
877 # this is the set of all roots we have to push
904 return subset
878 return subset
905
879
906 def pull(self, remote):
880 def pull(self, remote, heads = None):
907 lock = self.lock()
881 lock = self.lock()
908
882
909 # if we have an empty repo, fetch everything
883 # if we have an empty repo, fetch everything
910 if self.changelog.tip() == nullid:
884 if self.changelog.tip() == nullid:
911 self.ui.status(_("requesting all changes\n"))
885 self.ui.status(_("requesting all changes\n"))
912 fetch = [nullid]
886 fetch = [nullid]
913 else:
887 else:
914 fetch = self.findincoming(remote)
888 fetch = self.findincoming(remote)
915
889
916 if not fetch:
890 if not fetch:
917 self.ui.status(_("no changes found\n"))
891 self.ui.status(_("no changes found\n"))
918 return 1
892 return 1
919
893
894 if heads is None:
920 cg = remote.changegroup(fetch)
895 cg = remote.changegroup(fetch)
896 else:
897 cg = remote.changegroupsubset(fetch, heads)
921 return self.addchangegroup(cg)
898 return self.addchangegroup(cg)
922
899
923 def push(self, remote, force=False):
900 def push(self, remote, force=False):
924 lock = remote.lock()
901 lock = remote.lock()
925
902
926 base = {}
903 base = {}
927 heads = remote.heads()
904 heads = remote.heads()
928 inc = self.findincoming(remote, base, heads)
905 inc = self.findincoming(remote, base, heads)
929 if not force and inc:
906 if not force and inc:
930 self.ui.warn(_("abort: unsynced remote changes!\n"))
907 self.ui.warn(_("abort: unsynced remote changes!\n"))
931 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
908 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
932 return 1
909 return 1
933
910
934 update = self.findoutgoing(remote, base)
911 update = self.findoutgoing(remote, base)
935 if not update:
912 if not update:
936 self.ui.status(_("no changes found\n"))
913 self.ui.status(_("no changes found\n"))
937 return 1
914 return 1
938 elif not force:
915 elif not force:
939 if len(heads) < len(self.changelog.heads()):
916 if len(heads) < len(self.changelog.heads()):
940 self.ui.warn(_("abort: push creates new remote branches!\n"))
917 self.ui.warn(_("abort: push creates new remote branches!\n"))
941 self.ui.status(_("(did you forget to merge?"
918 self.ui.status(_("(did you forget to merge?"
942 " use push -f to force)\n"))
919 " use push -f to force)\n"))
943 return 1
920 return 1
944
921
945 cg = self.changegroup(update)
922 cg = self.changegroup(update)
946 return remote.addchangegroup(cg)
923 return remote.addchangegroup(cg)
947
924
925 def changegroupsubset(self, bases, heads):
926 """This function generates a changegroup consisting of all the nodes
927 that are descendents of any of the bases, and ancestors of any of
928 the heads.
929
930 It is fairly complex as determining which filenodes and which
931 manifest nodes need to be included for the changeset to be complete
932 is non-trivial.
933
934 Another wrinkle is doing the reverse, figuring out which changeset in
935 the changegroup a particular filenode or manifestnode belongs to."""
936
937 # Set up some initial variables
938 # Make it easy to refer to self.changelog
939 cl = self.changelog
940 # msng is short for missing - compute the list of changesets in this
941 # changegroup.
942 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
943 # Some bases may turn out to be superfluous, and some heads may be
944 # too. nodesbetween will return the minimal set of bases and heads
945 # necessary to re-create the changegroup.
946
947 # Known heads are the list of heads that it is assumed the recipient
948 # of this changegroup will know about.
949 knownheads = {}
950 # We assume that all parents of bases are known heads.
951 for n in bases:
952 for p in cl.parents(n):
953 if p != nullid:
954 knownheads[p] = 1
955 knownheads = knownheads.keys()
956 if knownheads:
957 # Now that we know what heads are known, we can compute which
958 # changesets are known. The recipient must know about all
959 # changesets required to reach the known heads from the null
960 # changeset.
961 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
962 junk = None
963 # Transform the list into an ersatz set.
964 has_cl_set = dict.fromkeys(has_cl_set)
965 else:
966 # If there were no known heads, the recipient cannot be assumed to
967 # know about any changesets.
968 has_cl_set = {}
969
970 # Make it easy to refer to self.manifest
971 mnfst = self.manifest
972 # We don't know which manifests are missing yet
973 msng_mnfst_set = {}
974 # Nor do we know which filenodes are missing.
975 msng_filenode_set = {}
976
977 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
978 junk = None
979
980 # A changeset always belongs to itself, so the changenode lookup
981 # function for a changenode is identity.
982 def identity(x):
983 return x
984
985 # A function generating function. Sets up an environment for the
986 # inner function.
987 def cmp_by_rev_func(revlog):
988 # Compare two nodes by their revision number in the environment's
989 # revision history. Since the revision number both represents the
990 # most efficient order to read the nodes in, and represents a
991 # topological sorting of the nodes, this function is often useful.
992 def cmp_by_rev(a, b):
993 return cmp(revlog.rev(a), revlog.rev(b))
994 return cmp_by_rev
995
996 # If we determine that a particular file or manifest node must be a
997 # node that the recipient of the changegroup will already have, we can
998 # also assume the recipient will have all the parents. This function
999 # prunes them from the set of missing nodes.
1000 def prune_parents(revlog, hasset, msngset):
1001 haslst = hasset.keys()
1002 haslst.sort(cmp_by_rev_func(revlog))
1003 for node in haslst:
1004 parentlst = [p for p in revlog.parents(node) if p != nullid]
1005 while parentlst:
1006 n = parentlst.pop()
1007 if n not in hasset:
1008 hasset[n] = 1
1009 p = [p for p in revlog.parents(n) if p != nullid]
1010 parentlst.extend(p)
1011 for n in hasset:
1012 msngset.pop(n, None)
1013
1014 # This is a function generating function used to set up an environment
1015 # for the inner function to execute in.
1016 def manifest_and_file_collector(changedfileset):
1017 # This is an information gathering function that gathers
1018 # information from each changeset node that goes out as part of
1019 # the changegroup. The information gathered is a list of which
1020 # manifest nodes are potentially required (the recipient may
1021 # already have them) and total list of all files which were
1022 # changed in any changeset in the changegroup.
1023 #
1024 # We also remember the first changenode we saw any manifest
1025 # referenced by so we can later determine which changenode 'owns'
1026 # the manifest.
1027 def collect_manifests_and_files(clnode):
1028 c = cl.read(clnode)
1029 for f in c[3]:
1030 # This is to make sure we only have one instance of each
1031 # filename string for each filename.
1032 changedfileset.setdefault(f, f)
1033 msng_mnfst_set.setdefault(c[0], clnode)
1034 return collect_manifests_and_files
1035
1036 # Figure out which manifest nodes (of the ones we think might be part
1037 # of the changegroup) the recipient must know about and remove them
1038 # from the changegroup.
1039 def prune_manifests():
1040 has_mnfst_set = {}
1041 for n in msng_mnfst_set:
1042 # If a 'missing' manifest thinks it belongs to a changenode
1043 # the recipient is assumed to have, obviously the recipient
1044 # must have that manifest.
1045 linknode = cl.node(mnfst.linkrev(n))
1046 if linknode in has_cl_set:
1047 has_mnfst_set[n] = 1
1048 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1049
1050 # Use the information collected in collect_manifests_and_files to say
1051 # which changenode any manifestnode belongs to.
1052 def lookup_manifest_link(mnfstnode):
1053 return msng_mnfst_set[mnfstnode]
1054
1055 # A function generating function that sets up the initial environment
1056 # the inner function.
1057 def filenode_collector(changedfiles):
1058 next_rev = [0]
1059 # This gathers information from each manifestnode included in the
1060 # changegroup about which filenodes the manifest node references
1061 # so we can include those in the changegroup too.
1062 #
1063 # It also remembers which changenode each filenode belongs to. It
1064 # does this by assuming the a filenode belongs to the changenode
1065 # the first manifest that references it belongs to.
1066 def collect_msng_filenodes(mnfstnode):
1067 r = mnfst.rev(mnfstnode)
1068 if r == next_rev[0]:
1069 # If the last rev we looked at was the one just previous,
1070 # we only need to see a diff.
1071 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1072 # For each line in the delta
1073 for dline in delta.splitlines():
1074 # get the filename and filenode for that line
1075 f, fnode = dline.split('\0')
1076 fnode = bin(fnode[:40])
1077 f = changedfiles.get(f, None)
1078 # And if the file is in the list of files we care
1079 # about.
1080 if f is not None:
1081 # Get the changenode this manifest belongs to
1082 clnode = msng_mnfst_set[mnfstnode]
1083 # Create the set of filenodes for the file if
1084 # there isn't one already.
1085 ndset = msng_filenode_set.setdefault(f, {})
1086 # And set the filenode's changelog node to the
1087 # manifest's if it hasn't been set already.
1088 ndset.setdefault(fnode, clnode)
1089 else:
1090 # Otherwise we need a full manifest.
1091 m = mnfst.read(mnfstnode)
1092 # For every file in we care about.
1093 for f in changedfiles:
1094 fnode = m.get(f, None)
1095 # If it's in the manifest
1096 if fnode is not None:
1097 # See comments above.
1098 clnode = msng_mnfst_set[mnfstnode]
1099 ndset = msng_filenode_set.setdefault(f, {})
1100 ndset.setdefault(fnode, clnode)
1101 # Remember the revision we hope to see next.
1102 next_rev[0] = r + 1
1103 return collect_msng_filenodes
1104
1105 # We have a list of filenodes we think we need for a file, lets remove
1106 # all those we now the recipient must have.
1107 def prune_filenodes(f, filerevlog):
1108 msngset = msng_filenode_set[f]
1109 hasset = {}
1110 # If a 'missing' filenode thinks it belongs to a changenode we
1111 # assume the recipient must have, then the recipient must have
1112 # that filenode.
1113 for n in msngset:
1114 clnode = cl.node(filerevlog.linkrev(n))
1115 if clnode in has_cl_set:
1116 hasset[n] = 1
1117 prune_parents(filerevlog, hasset, msngset)
1118
1119 # A function generator function that sets up the a context for the
1120 # inner function.
1121 def lookup_filenode_link_func(fname):
1122 msngset = msng_filenode_set[fname]
1123 # Lookup the changenode the filenode belongs to.
1124 def lookup_filenode_link(fnode):
1125 return msngset[fnode]
1126 return lookup_filenode_link
1127
1128 # Now that we have all theses utility functions to help out and
1129 # logically divide up the task, generate the group.
1130 def gengroup():
1131 # The set of changed files starts empty.
1132 changedfiles = {}
1133 # Create a changenode group generator that will call our functions
1134 # back to lookup the owning changenode and collect information.
1135 group = cl.group(msng_cl_lst, identity,
1136 manifest_and_file_collector(changedfiles))
1137 for chnk in group:
1138 yield chnk
1139
1140 # The list of manifests has been collected by the generator
1141 # calling our functions back.
1142 prune_manifests()
1143 msng_mnfst_lst = msng_mnfst_set.keys()
1144 # Sort the manifestnodes by revision number.
1145 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1146 # Create a generator for the manifestnodes that calls our lookup
1147 # and data collection functions back.
1148 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1149 filenode_collector(changedfiles))
1150 for chnk in group:
1151 yield chnk
1152
1153 # These are no longer needed, dereference and toss the memory for
1154 # them.
1155 msng_mnfst_lst = None
1156 msng_mnfst_set.clear()
1157
1158 changedfiles = changedfiles.keys()
1159 changedfiles.sort()
1160 # Go through all our files in order sorted by name.
1161 for fname in changedfiles:
1162 filerevlog = self.file(fname)
1163 # Toss out the filenodes that the recipient isn't really
1164 # missing.
1165 prune_filenodes(fname, filerevlog)
1166 msng_filenode_lst = msng_filenode_set[fname].keys()
1167 # If any filenodes are left, generate the group for them,
1168 # otherwise don't bother.
1169 if len(msng_filenode_lst) > 0:
1170 yield struct.pack(">l", len(fname) + 4) + fname
1171 # Sort the filenodes by their revision #
1172 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1173 # Create a group generator and only pass in a changenode
1174 # lookup function as we need to collect no information
1175 # from filenodes.
1176 group = filerevlog.group(msng_filenode_lst,
1177 lookup_filenode_link_func(fname))
1178 for chnk in group:
1179 yield chnk
1180 # Don't need this anymore, toss it to free memory.
1181 del msng_filenode_set[fname]
1182 # Signal that no more groups are left.
1183 yield struct.pack(">l", 0)
1184
1185 return util.chunkbuffer(gengroup())
1186
948 def changegroup(self, basenodes):
1187 def changegroup(self, basenodes):
949 genread = util.chunkbuffer
1188 """Generate a changegroup of all nodes that we have that a recipient
1189 doesn't.
1190
1191 This is much easier than the previous function as we can assume that
1192 the recipient has any changenode we aren't sending them."""
1193 cl = self.changelog
1194 nodes = cl.nodesbetween(basenodes, None)[0]
1195 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1196
1197 def identity(x):
1198 return x
1199
1200 def gennodelst(revlog):
1201 for r in xrange(0, revlog.count()):
1202 n = revlog.node(r)
1203 if revlog.linkrev(n) in revset:
1204 yield n
1205
1206 def changed_file_collector(changedfileset):
1207 def collect_changed_files(clnode):
1208 c = cl.read(clnode)
1209 for fname in c[3]:
1210 changedfileset[fname] = 1
1211 return collect_changed_files
1212
1213 def lookuprevlink_func(revlog):
1214 def lookuprevlink(n):
1215 return cl.node(revlog.linkrev(n))
1216 return lookuprevlink
950
1217
951 def gengroup():
1218 def gengroup():
952 nodes = self.newer(basenodes)
1219 # construct a list of all changed files
1220 changedfiles = {}
953
1221
954 # construct the link map
1222 for chnk in cl.group(nodes, identity,
955 linkmap = {}
1223 changed_file_collector(changedfiles)):
956 for n in nodes:
1224 yield chnk
957 linkmap[self.changelog.rev(n)] = n
1225 changedfiles = changedfiles.keys()
1226 changedfiles.sort()
958
1227
959 # construct a list of all changed files
1228 mnfst = self.manifest
960 changed = {}
1229 nodeiter = gennodelst(mnfst)
961 for n in nodes:
1230 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
962 c = self.changelog.read(n)
1231 yield chnk
963 for f in c[3]:
964 changed[f] = 1
965 changed = changed.keys()
966 changed.sort()
967
1232
968 # the changegroup is changesets + manifests + all file revs
1233 for fname in changedfiles:
969 revs = [ self.changelog.rev(n) for n in nodes ]
1234 filerevlog = self.file(fname)
970
1235 nodeiter = gennodelst(filerevlog)
971 for y in self.changelog.group(linkmap): yield y
1236 nodeiter = list(nodeiter)
972 for y in self.manifest.group(linkmap): yield y
1237 if nodeiter:
973 for f in changed:
1238 yield struct.pack(">l", len(fname) + 4) + fname
974 yield struct.pack(">l", len(f) + 4) + f
1239 lookup = lookuprevlink_func(filerevlog)
975 g = self.file(f).group(linkmap)
1240 for chnk in filerevlog.group(nodeiter, lookup):
976 for y in g:
1241 yield chnk
977 yield y
978
1242
979 yield struct.pack(">l", 0)
1243 yield struct.pack(">l", 0)
980
1244
981 return genread(gengroup())
1245 return util.chunkbuffer(gengroup())
982
1246
983 def addchangegroup(self, source):
1247 def addchangegroup(self, source):
984
1248
985 def getchunk():
1249 def getchunk():
986 d = source.read(4)
1250 d = source.read(4)
987 if not d: return ""
1251 if not d: return ""
988 l = struct.unpack(">l", d)[0]
1252 l = struct.unpack(">l", d)[0]
989 if l <= 4: return ""
1253 if l <= 4: return ""
990 d = source.read(l - 4)
1254 d = source.read(l - 4)
991 if len(d) < l - 4:
1255 if len(d) < l - 4:
992 raise repo.RepoError(_("premature EOF reading chunk"
1256 raise repo.RepoError(_("premature EOF reading chunk"
993 " (got %d bytes, expected %d)")
1257 " (got %d bytes, expected %d)")
994 % (len(d), l - 4))
1258 % (len(d), l - 4))
995 return d
1259 return d
996
1260
997 def getgroup():
1261 def getgroup():
998 while 1:
1262 while 1:
999 c = getchunk()
1263 c = getchunk()
1000 if not c: break
1264 if not c: break
1001 yield c
1265 yield c
1002
1266
1003 def csmap(x):
1267 def csmap(x):
1004 self.ui.debug(_("add changeset %s\n") % short(x))
1268 self.ui.debug(_("add changeset %s\n") % short(x))
1005 return self.changelog.count()
1269 return self.changelog.count()
1006
1270
1007 def revmap(x):
1271 def revmap(x):
1008 return self.changelog.rev(x)
1272 return self.changelog.rev(x)
1009
1273
1010 if not source: return
1274 if not source: return
1011 changesets = files = revisions = 0
1275 changesets = files = revisions = 0
1012
1276
1013 tr = self.transaction()
1277 tr = self.transaction()
1014
1278
1015 oldheads = len(self.changelog.heads())
1279 oldheads = len(self.changelog.heads())
1016
1280
1017 # pull off the changeset group
1281 # pull off the changeset group
1018 self.ui.status(_("adding changesets\n"))
1282 self.ui.status(_("adding changesets\n"))
1019 co = self.changelog.tip()
1283 co = self.changelog.tip()
1020 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1284 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1021 cnr, cor = map(self.changelog.rev, (cn, co))
1285 cnr, cor = map(self.changelog.rev, (cn, co))
1022 if cn == nullid:
1286 if cn == nullid:
1023 cnr = cor
1287 cnr = cor
1024 changesets = cnr - cor
1288 changesets = cnr - cor
1025
1289
1026 # pull off the manifest group
1290 # pull off the manifest group
1027 self.ui.status(_("adding manifests\n"))
1291 self.ui.status(_("adding manifests\n"))
1028 mm = self.manifest.tip()
1292 mm = self.manifest.tip()
1029 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1293 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1030
1294
1031 # process the files
1295 # process the files
1032 self.ui.status(_("adding file changes\n"))
1296 self.ui.status(_("adding file changes\n"))
1033 while 1:
1297 while 1:
1034 f = getchunk()
1298 f = getchunk()
1035 if not f: break
1299 if not f: break
1036 self.ui.debug(_("adding %s revisions\n") % f)
1300 self.ui.debug(_("adding %s revisions\n") % f)
1037 fl = self.file(f)
1301 fl = self.file(f)
1038 o = fl.count()
1302 o = fl.count()
1039 n = fl.addgroup(getgroup(), revmap, tr)
1303 n = fl.addgroup(getgroup(), revmap, tr)
1040 revisions += fl.count() - o
1304 revisions += fl.count() - o
1041 files += 1
1305 files += 1
1042
1306
1043 newheads = len(self.changelog.heads())
1307 newheads = len(self.changelog.heads())
1044 heads = ""
1308 heads = ""
1045 if oldheads and newheads > oldheads:
1309 if oldheads and newheads > oldheads:
1046 heads = _(" (+%d heads)") % (newheads - oldheads)
1310 heads = _(" (+%d heads)") % (newheads - oldheads)
1047
1311
1048 self.ui.status(_("added %d changesets"
1312 self.ui.status(_("added %d changesets"
1049 " with %d changes to %d files%s\n")
1313 " with %d changes to %d files%s\n")
1050 % (changesets, revisions, files, heads))
1314 % (changesets, revisions, files, heads))
1051
1315
1052 tr.close()
1316 tr.close()
1053
1317
1054 if changesets > 0:
1318 if changesets > 0:
1055 if not self.hook("changegroup",
1319 if not self.hook("changegroup",
1056 node=hex(self.changelog.node(cor+1))):
1320 node=hex(self.changelog.node(cor+1))):
1057 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1321 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1058 return 1
1322 return 1
1059
1323
1060 for i in range(cor + 1, cnr + 1):
1324 for i in range(cor + 1, cnr + 1):
1061 self.hook("commit", node=hex(self.changelog.node(i)))
1325 self.hook("commit", node=hex(self.changelog.node(i)))
1062
1326
1063 return
1327 return
1064
1328
1065 def update(self, node, allow=False, force=False, choose=None,
1329 def update(self, node, allow=False, force=False, choose=None,
1066 moddirstate=True):
1330 moddirstate=True):
1067 pl = self.dirstate.parents()
1331 pl = self.dirstate.parents()
1068 if not force and pl[1] != nullid:
1332 if not force and pl[1] != nullid:
1069 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1333 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1070 return 1
1334 return 1
1071
1335
1072 p1, p2 = pl[0], node
1336 p1, p2 = pl[0], node
1073 pa = self.changelog.ancestor(p1, p2)
1337 pa = self.changelog.ancestor(p1, p2)
1074 m1n = self.changelog.read(p1)[0]
1338 m1n = self.changelog.read(p1)[0]
1075 m2n = self.changelog.read(p2)[0]
1339 m2n = self.changelog.read(p2)[0]
1076 man = self.manifest.ancestor(m1n, m2n)
1340 man = self.manifest.ancestor(m1n, m2n)
1077 m1 = self.manifest.read(m1n)
1341 m1 = self.manifest.read(m1n)
1078 mf1 = self.manifest.readflags(m1n)
1342 mf1 = self.manifest.readflags(m1n)
1079 m2 = self.manifest.read(m2n)
1343 m2 = self.manifest.read(m2n)
1080 mf2 = self.manifest.readflags(m2n)
1344 mf2 = self.manifest.readflags(m2n)
1081 ma = self.manifest.read(man)
1345 ma = self.manifest.read(man)
1082 mfa = self.manifest.readflags(man)
1346 mfa = self.manifest.readflags(man)
1083
1347
1084 (c, a, d, u) = self.changes()
1348 (c, a, d, u) = self.changes()
1085
1349
1086 # is this a jump, or a merge? i.e. is there a linear path
1350 # is this a jump, or a merge? i.e. is there a linear path
1087 # from p1 to p2?
1351 # from p1 to p2?
1088 linear_path = (pa == p1 or pa == p2)
1352 linear_path = (pa == p1 or pa == p2)
1089
1353
1090 # resolve the manifest to determine which files
1354 # resolve the manifest to determine which files
1091 # we care about merging
1355 # we care about merging
1092 self.ui.note(_("resolving manifests\n"))
1356 self.ui.note(_("resolving manifests\n"))
1093 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1357 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1094 (force, allow, moddirstate, linear_path))
1358 (force, allow, moddirstate, linear_path))
1095 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1359 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1096 (short(man), short(m1n), short(m2n)))
1360 (short(man), short(m1n), short(m2n)))
1097
1361
1098 merge = {}
1362 merge = {}
1099 get = {}
1363 get = {}
1100 remove = []
1364 remove = []
1101
1365
1102 # construct a working dir manifest
1366 # construct a working dir manifest
1103 mw = m1.copy()
1367 mw = m1.copy()
1104 mfw = mf1.copy()
1368 mfw = mf1.copy()
1105 umap = dict.fromkeys(u)
1369 umap = dict.fromkeys(u)
1106
1370
1107 for f in a + c + u:
1371 for f in a + c + u:
1108 mw[f] = ""
1372 mw[f] = ""
1109 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1373 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1110
1374
1111 for f in d:
1375 for f in d:
1112 if f in mw: del mw[f]
1376 if f in mw: del mw[f]
1113
1377
1114 # If we're jumping between revisions (as opposed to merging),
1378 # If we're jumping between revisions (as opposed to merging),
1115 # and if neither the working directory nor the target rev has
1379 # and if neither the working directory nor the target rev has
1116 # the file, then we need to remove it from the dirstate, to
1380 # the file, then we need to remove it from the dirstate, to
1117 # prevent the dirstate from listing the file when it is no
1381 # prevent the dirstate from listing the file when it is no
1118 # longer in the manifest.
1382 # longer in the manifest.
1119 if moddirstate and linear_path and f not in m2:
1383 if moddirstate and linear_path and f not in m2:
1120 self.dirstate.forget((f,))
1384 self.dirstate.forget((f,))
1121
1385
1122 # Compare manifests
1386 # Compare manifests
1123 for f, n in mw.iteritems():
1387 for f, n in mw.iteritems():
1124 if choose and not choose(f): continue
1388 if choose and not choose(f): continue
1125 if f in m2:
1389 if f in m2:
1126 s = 0
1390 s = 0
1127
1391
1128 # is the wfile new since m1, and match m2?
1392 # is the wfile new since m1, and match m2?
1129 if f not in m1:
1393 if f not in m1:
1130 t1 = self.wread(f)
1394 t1 = self.wread(f)
1131 t2 = self.file(f).read(m2[f])
1395 t2 = self.file(f).read(m2[f])
1132 if cmp(t1, t2) == 0:
1396 if cmp(t1, t2) == 0:
1133 n = m2[f]
1397 n = m2[f]
1134 del t1, t2
1398 del t1, t2
1135
1399
1136 # are files different?
1400 # are files different?
1137 if n != m2[f]:
1401 if n != m2[f]:
1138 a = ma.get(f, nullid)
1402 a = ma.get(f, nullid)
1139 # are both different from the ancestor?
1403 # are both different from the ancestor?
1140 if n != a and m2[f] != a:
1404 if n != a and m2[f] != a:
1141 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1405 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1142 # merge executable bits
1406 # merge executable bits
1143 # "if we changed or they changed, change in merge"
1407 # "if we changed or they changed, change in merge"
1144 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1408 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1145 mode = ((a^b) | (a^c)) ^ a
1409 mode = ((a^b) | (a^c)) ^ a
1146 merge[f] = (m1.get(f, nullid), m2[f], mode)
1410 merge[f] = (m1.get(f, nullid), m2[f], mode)
1147 s = 1
1411 s = 1
1148 # are we clobbering?
1412 # are we clobbering?
1149 # is remote's version newer?
1413 # is remote's version newer?
1150 # or are we going back in time?
1414 # or are we going back in time?
1151 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1415 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1152 self.ui.debug(_(" remote %s is newer, get\n") % f)
1416 self.ui.debug(_(" remote %s is newer, get\n") % f)
1153 get[f] = m2[f]
1417 get[f] = m2[f]
1154 s = 1
1418 s = 1
1155 elif f in umap:
1419 elif f in umap:
1156 # this unknown file is the same as the checkout
1420 # this unknown file is the same as the checkout
1157 get[f] = m2[f]
1421 get[f] = m2[f]
1158
1422
1159 if not s and mfw[f] != mf2[f]:
1423 if not s and mfw[f] != mf2[f]:
1160 if force:
1424 if force:
1161 self.ui.debug(_(" updating permissions for %s\n") % f)
1425 self.ui.debug(_(" updating permissions for %s\n") % f)
1162 util.set_exec(self.wjoin(f), mf2[f])
1426 util.set_exec(self.wjoin(f), mf2[f])
1163 else:
1427 else:
1164 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1428 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1165 mode = ((a^b) | (a^c)) ^ a
1429 mode = ((a^b) | (a^c)) ^ a
1166 if mode != b:
1430 if mode != b:
1167 self.ui.debug(_(" updating permissions for %s\n") % f)
1431 self.ui.debug(_(" updating permissions for %s\n") % f)
1168 util.set_exec(self.wjoin(f), mode)
1432 util.set_exec(self.wjoin(f), mode)
1169 del m2[f]
1433 del m2[f]
1170 elif f in ma:
1434 elif f in ma:
1171 if n != ma[f]:
1435 if n != ma[f]:
1172 r = _("d")
1436 r = _("d")
1173 if not force and (linear_path or allow):
1437 if not force and (linear_path or allow):
1174 r = self.ui.prompt(
1438 r = self.ui.prompt(
1175 (_(" local changed %s which remote deleted\n") % f) +
1439 (_(" local changed %s which remote deleted\n") % f) +
1176 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1440 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1177 if r == _("d"):
1441 if r == _("d"):
1178 remove.append(f)
1442 remove.append(f)
1179 else:
1443 else:
1180 self.ui.debug(_("other deleted %s\n") % f)
1444 self.ui.debug(_("other deleted %s\n") % f)
1181 remove.append(f) # other deleted it
1445 remove.append(f) # other deleted it
1182 else:
1446 else:
1183 # file is created on branch or in working directory
1447 # file is created on branch or in working directory
1184 if force and f not in umap:
1448 if force and f not in umap:
1185 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1449 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1186 remove.append(f)
1450 remove.append(f)
1187 elif n == m1.get(f, nullid): # same as parent
1451 elif n == m1.get(f, nullid): # same as parent
1188 if p2 == pa: # going backwards?
1452 if p2 == pa: # going backwards?
1189 self.ui.debug(_("remote deleted %s\n") % f)
1453 self.ui.debug(_("remote deleted %s\n") % f)
1190 remove.append(f)
1454 remove.append(f)
1191 else:
1455 else:
1192 self.ui.debug(_("local modified %s, keeping\n") % f)
1456 self.ui.debug(_("local modified %s, keeping\n") % f)
1193 else:
1457 else:
1194 self.ui.debug(_("working dir created %s, keeping\n") % f)
1458 self.ui.debug(_("working dir created %s, keeping\n") % f)
1195
1459
1196 for f, n in m2.iteritems():
1460 for f, n in m2.iteritems():
1197 if choose and not choose(f): continue
1461 if choose and not choose(f): continue
1198 if f[0] == "/": continue
1462 if f[0] == "/": continue
1199 if f in ma and n != ma[f]:
1463 if f in ma and n != ma[f]:
1200 r = _("k")
1464 r = _("k")
1201 if not force and (linear_path or allow):
1465 if not force and (linear_path or allow):
1202 r = self.ui.prompt(
1466 r = self.ui.prompt(
1203 (_("remote changed %s which local deleted\n") % f) +
1467 (_("remote changed %s which local deleted\n") % f) +
1204 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1468 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1205 if r == _("k"): get[f] = n
1469 if r == _("k"): get[f] = n
1206 elif f not in ma:
1470 elif f not in ma:
1207 self.ui.debug(_("remote created %s\n") % f)
1471 self.ui.debug(_("remote created %s\n") % f)
1208 get[f] = n
1472 get[f] = n
1209 else:
1473 else:
1210 if force or p2 == pa: # going backwards?
1474 if force or p2 == pa: # going backwards?
1211 self.ui.debug(_("local deleted %s, recreating\n") % f)
1475 self.ui.debug(_("local deleted %s, recreating\n") % f)
1212 get[f] = n
1476 get[f] = n
1213 else:
1477 else:
1214 self.ui.debug(_("local deleted %s\n") % f)
1478 self.ui.debug(_("local deleted %s\n") % f)
1215
1479
1216 del mw, m1, m2, ma
1480 del mw, m1, m2, ma
1217
1481
1218 if force:
1482 if force:
1219 for f in merge:
1483 for f in merge:
1220 get[f] = merge[f][1]
1484 get[f] = merge[f][1]
1221 merge = {}
1485 merge = {}
1222
1486
1223 if linear_path or force:
1487 if linear_path or force:
1224 # we don't need to do any magic, just jump to the new rev
1488 # we don't need to do any magic, just jump to the new rev
1225 branch_merge = False
1489 branch_merge = False
1226 p1, p2 = p2, nullid
1490 p1, p2 = p2, nullid
1227 else:
1491 else:
1228 if not allow:
1492 if not allow:
1229 self.ui.status(_("this update spans a branch"
1493 self.ui.status(_("this update spans a branch"
1230 " affecting the following files:\n"))
1494 " affecting the following files:\n"))
1231 fl = merge.keys() + get.keys()
1495 fl = merge.keys() + get.keys()
1232 fl.sort()
1496 fl.sort()
1233 for f in fl:
1497 for f in fl:
1234 cf = ""
1498 cf = ""
1235 if f in merge: cf = _(" (resolve)")
1499 if f in merge: cf = _(" (resolve)")
1236 self.ui.status(" %s%s\n" % (f, cf))
1500 self.ui.status(" %s%s\n" % (f, cf))
1237 self.ui.warn(_("aborting update spanning branches!\n"))
1501 self.ui.warn(_("aborting update spanning branches!\n"))
1238 self.ui.status(_("(use update -m to merge across branches"
1502 self.ui.status(_("(use update -m to merge across branches"
1239 " or -C to lose changes)\n"))
1503 " or -C to lose changes)\n"))
1240 return 1
1504 return 1
1241 branch_merge = True
1505 branch_merge = True
1242
1506
1243 if moddirstate:
1507 if moddirstate:
1244 self.dirstate.setparents(p1, p2)
1508 self.dirstate.setparents(p1, p2)
1245
1509
1246 # get the files we don't need to change
1510 # get the files we don't need to change
1247 files = get.keys()
1511 files = get.keys()
1248 files.sort()
1512 files.sort()
1249 for f in files:
1513 for f in files:
1250 if f[0] == "/": continue
1514 if f[0] == "/": continue
1251 self.ui.note(_("getting %s\n") % f)
1515 self.ui.note(_("getting %s\n") % f)
1252 t = self.file(f).read(get[f])
1516 t = self.file(f).read(get[f])
1253 try:
1517 try:
1254 self.wwrite(f, t)
1518 self.wwrite(f, t)
1255 except IOError, e:
1519 except IOError, e:
1256 if e.errno != errno.ENOENT:
1520 if e.errno != errno.ENOENT:
1257 raise
1521 raise
1258 os.makedirs(os.path.dirname(self.wjoin(f)))
1522 os.makedirs(os.path.dirname(self.wjoin(f)))
1259 self.wwrite(f, t)
1523 self.wwrite(f, t)
1260 util.set_exec(self.wjoin(f), mf2[f])
1524 util.set_exec(self.wjoin(f), mf2[f])
1261 if moddirstate:
1525 if moddirstate:
1262 if branch_merge:
1526 if branch_merge:
1263 self.dirstate.update([f], 'n', st_mtime=-1)
1527 self.dirstate.update([f], 'n', st_mtime=-1)
1264 else:
1528 else:
1265 self.dirstate.update([f], 'n')
1529 self.dirstate.update([f], 'n')
1266
1530
1267 # merge the tricky bits
1531 # merge the tricky bits
1268 files = merge.keys()
1532 files = merge.keys()
1269 files.sort()
1533 files.sort()
1270 for f in files:
1534 for f in files:
1271 self.ui.status(_("merging %s\n") % f)
1535 self.ui.status(_("merging %s\n") % f)
1272 my, other, flag = merge[f]
1536 my, other, flag = merge[f]
1273 self.merge3(f, my, other)
1537 self.merge3(f, my, other)
1274 util.set_exec(self.wjoin(f), flag)
1538 util.set_exec(self.wjoin(f), flag)
1275 if moddirstate:
1539 if moddirstate:
1276 if branch_merge:
1540 if branch_merge:
1277 # We've done a branch merge, mark this file as merged
1541 # We've done a branch merge, mark this file as merged
1278 # so that we properly record the merger later
1542 # so that we properly record the merger later
1279 self.dirstate.update([f], 'm')
1543 self.dirstate.update([f], 'm')
1280 else:
1544 else:
1281 # We've update-merged a locally modified file, so
1545 # We've update-merged a locally modified file, so
1282 # we set the dirstate to emulate a normal checkout
1546 # we set the dirstate to emulate a normal checkout
1283 # of that file some time in the past. Thus our
1547 # of that file some time in the past. Thus our
1284 # merge will appear as a normal local file
1548 # merge will appear as a normal local file
1285 # modification.
1549 # modification.
1286 f_len = len(self.file(f).read(other))
1550 f_len = len(self.file(f).read(other))
1287 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1551 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1288
1552
1289 remove.sort()
1553 remove.sort()
1290 for f in remove:
1554 for f in remove:
1291 self.ui.note(_("removing %s\n") % f)
1555 self.ui.note(_("removing %s\n") % f)
1292 try:
1556 try:
1293 util.unlink(self.wjoin(f))
1557 util.unlink(self.wjoin(f))
1294 except OSError, inst:
1558 except OSError, inst:
1295 if inst.errno != errno.ENOENT:
1559 if inst.errno != errno.ENOENT:
1296 self.ui.warn(_("update failed to remove %s: %s!\n") %
1560 self.ui.warn(_("update failed to remove %s: %s!\n") %
1297 (f, inst.strerror))
1561 (f, inst.strerror))
1298 if moddirstate:
1562 if moddirstate:
1299 if branch_merge:
1563 if branch_merge:
1300 self.dirstate.update(remove, 'r')
1564 self.dirstate.update(remove, 'r')
1301 else:
1565 else:
1302 self.dirstate.forget(remove)
1566 self.dirstate.forget(remove)
1303
1567
1304 def merge3(self, fn, my, other):
1568 def merge3(self, fn, my, other):
1305 """perform a 3-way merge in the working directory"""
1569 """perform a 3-way merge in the working directory"""
1306
1570
1307 def temp(prefix, node):
1571 def temp(prefix, node):
1308 pre = "%s~%s." % (os.path.basename(fn), prefix)
1572 pre = "%s~%s." % (os.path.basename(fn), prefix)
1309 (fd, name) = tempfile.mkstemp("", pre)
1573 (fd, name) = tempfile.mkstemp("", pre)
1310 f = os.fdopen(fd, "wb")
1574 f = os.fdopen(fd, "wb")
1311 self.wwrite(fn, fl.read(node), f)
1575 self.wwrite(fn, fl.read(node), f)
1312 f.close()
1576 f.close()
1313 return name
1577 return name
1314
1578
1315 fl = self.file(fn)
1579 fl = self.file(fn)
1316 base = fl.ancestor(my, other)
1580 base = fl.ancestor(my, other)
1317 a = self.wjoin(fn)
1581 a = self.wjoin(fn)
1318 b = temp("base", base)
1582 b = temp("base", base)
1319 c = temp("other", other)
1583 c = temp("other", other)
1320
1584
1321 self.ui.note(_("resolving %s\n") % fn)
1585 self.ui.note(_("resolving %s\n") % fn)
1322 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1586 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1323 (fn, short(my), short(other), short(base)))
1587 (fn, short(my), short(other), short(base)))
1324
1588
1325 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1589 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1326 or "hgmerge")
1590 or "hgmerge")
1327 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1591 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1328 if r:
1592 if r:
1329 self.ui.warn(_("merging %s failed!\n") % fn)
1593 self.ui.warn(_("merging %s failed!\n") % fn)
1330
1594
1331 os.unlink(b)
1595 os.unlink(b)
1332 os.unlink(c)
1596 os.unlink(c)
1333
1597
1334 def verify(self):
1598 def verify(self):
1335 filelinkrevs = {}
1599 filelinkrevs = {}
1336 filenodes = {}
1600 filenodes = {}
1337 changesets = revisions = files = 0
1601 changesets = revisions = files = 0
1338 errors = [0]
1602 errors = [0]
1339 neededmanifests = {}
1603 neededmanifests = {}
1340
1604
1341 def err(msg):
1605 def err(msg):
1342 self.ui.warn(msg + "\n")
1606 self.ui.warn(msg + "\n")
1343 errors[0] += 1
1607 errors[0] += 1
1344
1608
1345 seen = {}
1609 seen = {}
1346 self.ui.status(_("checking changesets\n"))
1610 self.ui.status(_("checking changesets\n"))
1347 for i in range(self.changelog.count()):
1611 for i in range(self.changelog.count()):
1348 changesets += 1
1612 changesets += 1
1349 n = self.changelog.node(i)
1613 n = self.changelog.node(i)
1350 l = self.changelog.linkrev(n)
1614 l = self.changelog.linkrev(n)
1351 if l != i:
1615 if l != i:
1352 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1616 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1353 if n in seen:
1617 if n in seen:
1354 err(_("duplicate changeset at revision %d") % i)
1618 err(_("duplicate changeset at revision %d") % i)
1355 seen[n] = 1
1619 seen[n] = 1
1356
1620
1357 for p in self.changelog.parents(n):
1621 for p in self.changelog.parents(n):
1358 if p not in self.changelog.nodemap:
1622 if p not in self.changelog.nodemap:
1359 err(_("changeset %s has unknown parent %s") %
1623 err(_("changeset %s has unknown parent %s") %
1360 (short(n), short(p)))
1624 (short(n), short(p)))
1361 try:
1625 try:
1362 changes = self.changelog.read(n)
1626 changes = self.changelog.read(n)
1363 except Exception, inst:
1627 except Exception, inst:
1364 err(_("unpacking changeset %s: %s") % (short(n), inst))
1628 err(_("unpacking changeset %s: %s") % (short(n), inst))
1365
1629
1366 neededmanifests[changes[0]] = n
1630 neededmanifests[changes[0]] = n
1367
1631
1368 for f in changes[3]:
1632 for f in changes[3]:
1369 filelinkrevs.setdefault(f, []).append(i)
1633 filelinkrevs.setdefault(f, []).append(i)
1370
1634
1371 seen = {}
1635 seen = {}
1372 self.ui.status(_("checking manifests\n"))
1636 self.ui.status(_("checking manifests\n"))
1373 for i in range(self.manifest.count()):
1637 for i in range(self.manifest.count()):
1374 n = self.manifest.node(i)
1638 n = self.manifest.node(i)
1375 l = self.manifest.linkrev(n)
1639 l = self.manifest.linkrev(n)
1376
1640
1377 if l < 0 or l >= self.changelog.count():
1641 if l < 0 or l >= self.changelog.count():
1378 err(_("bad manifest link (%d) at revision %d") % (l, i))
1642 err(_("bad manifest link (%d) at revision %d") % (l, i))
1379
1643
1380 if n in neededmanifests:
1644 if n in neededmanifests:
1381 del neededmanifests[n]
1645 del neededmanifests[n]
1382
1646
1383 if n in seen:
1647 if n in seen:
1384 err(_("duplicate manifest at revision %d") % i)
1648 err(_("duplicate manifest at revision %d") % i)
1385
1649
1386 seen[n] = 1
1650 seen[n] = 1
1387
1651
1388 for p in self.manifest.parents(n):
1652 for p in self.manifest.parents(n):
1389 if p not in self.manifest.nodemap:
1653 if p not in self.manifest.nodemap:
1390 err(_("manifest %s has unknown parent %s") %
1654 err(_("manifest %s has unknown parent %s") %
1391 (short(n), short(p)))
1655 (short(n), short(p)))
1392
1656
1393 try:
1657 try:
1394 delta = mdiff.patchtext(self.manifest.delta(n))
1658 delta = mdiff.patchtext(self.manifest.delta(n))
1395 except KeyboardInterrupt:
1659 except KeyboardInterrupt:
1396 self.ui.warn(_("interrupted"))
1660 self.ui.warn(_("interrupted"))
1397 raise
1661 raise
1398 except Exception, inst:
1662 except Exception, inst:
1399 err(_("unpacking manifest %s: %s") % (short(n), inst))
1663 err(_("unpacking manifest %s: %s") % (short(n), inst))
1400
1664
1401 ff = [ l.split('\0') for l in delta.splitlines() ]
1665 ff = [ l.split('\0') for l in delta.splitlines() ]
1402 for f, fn in ff:
1666 for f, fn in ff:
1403 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1667 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1404
1668
1405 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1669 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1406
1670
1407 for m,c in neededmanifests.items():
1671 for m,c in neededmanifests.items():
1408 err(_("Changeset %s refers to unknown manifest %s") %
1672 err(_("Changeset %s refers to unknown manifest %s") %
1409 (short(m), short(c)))
1673 (short(m), short(c)))
1410 del neededmanifests
1674 del neededmanifests
1411
1675
1412 for f in filenodes:
1676 for f in filenodes:
1413 if f not in filelinkrevs:
1677 if f not in filelinkrevs:
1414 err(_("file %s in manifest but not in changesets") % f)
1678 err(_("file %s in manifest but not in changesets") % f)
1415
1679
1416 for f in filelinkrevs:
1680 for f in filelinkrevs:
1417 if f not in filenodes:
1681 if f not in filenodes:
1418 err(_("file %s in changeset but not in manifest") % f)
1682 err(_("file %s in changeset but not in manifest") % f)
1419
1683
1420 self.ui.status(_("checking files\n"))
1684 self.ui.status(_("checking files\n"))
1421 ff = filenodes.keys()
1685 ff = filenodes.keys()
1422 ff.sort()
1686 ff.sort()
1423 for f in ff:
1687 for f in ff:
1424 if f == "/dev/null": continue
1688 if f == "/dev/null": continue
1425 files += 1
1689 files += 1
1426 fl = self.file(f)
1690 fl = self.file(f)
1427 nodes = { nullid: 1 }
1691 nodes = { nullid: 1 }
1428 seen = {}
1692 seen = {}
1429 for i in range(fl.count()):
1693 for i in range(fl.count()):
1430 revisions += 1
1694 revisions += 1
1431 n = fl.node(i)
1695 n = fl.node(i)
1432
1696
1433 if n in seen:
1697 if n in seen:
1434 err(_("%s: duplicate revision %d") % (f, i))
1698 err(_("%s: duplicate revision %d") % (f, i))
1435 if n not in filenodes[f]:
1699 if n not in filenodes[f]:
1436 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1700 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1437 else:
1701 else:
1438 del filenodes[f][n]
1702 del filenodes[f][n]
1439
1703
1440 flr = fl.linkrev(n)
1704 flr = fl.linkrev(n)
1441 if flr not in filelinkrevs[f]:
1705 if flr not in filelinkrevs[f]:
1442 err(_("%s:%s points to unexpected changeset %d")
1706 err(_("%s:%s points to unexpected changeset %d")
1443 % (f, short(n), flr))
1707 % (f, short(n), flr))
1444 else:
1708 else:
1445 filelinkrevs[f].remove(flr)
1709 filelinkrevs[f].remove(flr)
1446
1710
1447 # verify contents
1711 # verify contents
1448 try:
1712 try:
1449 t = fl.read(n)
1713 t = fl.read(n)
1450 except Exception, inst:
1714 except Exception, inst:
1451 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1715 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1452
1716
1453 # verify parents
1717 # verify parents
1454 (p1, p2) = fl.parents(n)
1718 (p1, p2) = fl.parents(n)
1455 if p1 not in nodes:
1719 if p1 not in nodes:
1456 err(_("file %s:%s unknown parent 1 %s") %
1720 err(_("file %s:%s unknown parent 1 %s") %
1457 (f, short(n), short(p1)))
1721 (f, short(n), short(p1)))
1458 if p2 not in nodes:
1722 if p2 not in nodes:
1459 err(_("file %s:%s unknown parent 2 %s") %
1723 err(_("file %s:%s unknown parent 2 %s") %
1460 (f, short(n), short(p1)))
1724 (f, short(n), short(p1)))
1461 nodes[n] = 1
1725 nodes[n] = 1
1462
1726
1463 # cross-check
1727 # cross-check
1464 for node in filenodes[f]:
1728 for node in filenodes[f]:
1465 err(_("node %s in manifests not in %s") % (hex(node), f))
1729 err(_("node %s in manifests not in %s") % (hex(node), f))
1466
1730
1467 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1731 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1468 (files, changesets, revisions))
1732 (files, changesets, revisions))
1469
1733
1470 if errors[0]:
1734 if errors[0]:
1471 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1735 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1472 return 1
1736 return 1
@@ -1,677 +1,825 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
17
17
18 def hash(text, p1, p2):
18 def hash(text, p1, p2):
19 """generate a hash from the given text and its parent hashes
19 """generate a hash from the given text and its parent hashes
20
20
21 This hash combines both the current file contents and its history
21 This hash combines both the current file contents and its history
22 in a manner that makes it easy to distinguish nodes with the same
22 in a manner that makes it easy to distinguish nodes with the same
23 content in the revision graph.
23 content in the revision graph.
24 """
24 """
25 l = [p1, p2]
25 l = [p1, p2]
26 l.sort()
26 l.sort()
27 s = sha.new(l[0])
27 s = sha.new(l[0])
28 s.update(l[1])
28 s.update(l[1])
29 s.update(text)
29 s.update(text)
30 return s.digest()
30 return s.digest()
31
31
32 def compress(text):
32 def compress(text):
33 """ generate a possibly-compressed representation of text """
33 """ generate a possibly-compressed representation of text """
34 if not text: return text
34 if not text: return text
35 if len(text) < 44:
35 if len(text) < 44:
36 if text[0] == '\0': return text
36 if text[0] == '\0': return text
37 return 'u' + text
37 return 'u' + text
38 bin = zlib.compress(text)
38 bin = zlib.compress(text)
39 if len(bin) > len(text):
39 if len(bin) > len(text):
40 if text[0] == '\0': return text
40 if text[0] == '\0': return text
41 return 'u' + text
41 return 'u' + text
42 return bin
42 return bin
43
43
44 def decompress(bin):
44 def decompress(bin):
45 """ decompress the given input """
45 """ decompress the given input """
46 if not bin: return bin
46 if not bin: return bin
47 t = bin[0]
47 t = bin[0]
48 if t == '\0': return bin
48 if t == '\0': return bin
49 if t == 'x': return zlib.decompress(bin)
49 if t == 'x': return zlib.decompress(bin)
50 if t == 'u': return bin[1:]
50 if t == 'u': return bin[1:]
51 raise RevlogError(_("unknown compression type %s") % t)
51 raise RevlogError(_("unknown compression type %s") % t)
52
52
53 indexformat = ">4l20s20s20s"
53 indexformat = ">4l20s20s20s"
54
54
55 class lazyparser:
55 class lazyparser:
56 """
56 """
57 this class avoids the need to parse the entirety of large indices
57 this class avoids the need to parse the entirety of large indices
58
58
59 By default we parse and load 1000 entries at a time.
59 By default we parse and load 1000 entries at a time.
60
60
61 If no position is specified, we load the whole index, and replace
61 If no position is specified, we load the whole index, and replace
62 the lazy objects in revlog with the underlying objects for
62 the lazy objects in revlog with the underlying objects for
63 efficiency in cases where we look at most of the nodes.
63 efficiency in cases where we look at most of the nodes.
64 """
64 """
65 def __init__(self, data, revlog):
65 def __init__(self, data, revlog):
66 self.data = data
66 self.data = data
67 self.s = struct.calcsize(indexformat)
67 self.s = struct.calcsize(indexformat)
68 self.l = len(data)/self.s
68 self.l = len(data)/self.s
69 self.index = [None] * self.l
69 self.index = [None] * self.l
70 self.map = {nullid: -1}
70 self.map = {nullid: -1}
71 self.all = 0
71 self.all = 0
72 self.revlog = revlog
72 self.revlog = revlog
73
73
74 def load(self, pos=None):
74 def load(self, pos=None):
75 if self.all: return
75 if self.all: return
76 if pos is not None:
76 if pos is not None:
77 block = pos / 1000
77 block = pos / 1000
78 i = block * 1000
78 i = block * 1000
79 end = min(self.l, i + 1000)
79 end = min(self.l, i + 1000)
80 else:
80 else:
81 self.all = 1
81 self.all = 1
82 i = 0
82 i = 0
83 end = self.l
83 end = self.l
84 self.revlog.index = self.index
84 self.revlog.index = self.index
85 self.revlog.nodemap = self.map
85 self.revlog.nodemap = self.map
86
86
87 while i < end:
87 while i < end:
88 d = self.data[i * self.s: (i + 1) * self.s]
88 d = self.data[i * self.s: (i + 1) * self.s]
89 e = struct.unpack(indexformat, d)
89 e = struct.unpack(indexformat, d)
90 self.index[i] = e
90 self.index[i] = e
91 self.map[e[6]] = i
91 self.map[e[6]] = i
92 i += 1
92 i += 1
93
93
94 class lazyindex:
94 class lazyindex:
95 """a lazy version of the index array"""
95 """a lazy version of the index array"""
96 def __init__(self, parser):
96 def __init__(self, parser):
97 self.p = parser
97 self.p = parser
98 def __len__(self):
98 def __len__(self):
99 return len(self.p.index)
99 return len(self.p.index)
100 def load(self, pos):
100 def load(self, pos):
101 if pos < 0:
101 if pos < 0:
102 pos += len(self.p.index)
102 pos += len(self.p.index)
103 self.p.load(pos)
103 self.p.load(pos)
104 return self.p.index[pos]
104 return self.p.index[pos]
105 def __getitem__(self, pos):
105 def __getitem__(self, pos):
106 return self.p.index[pos] or self.load(pos)
106 return self.p.index[pos] or self.load(pos)
107 def append(self, e):
107 def append(self, e):
108 self.p.index.append(e)
108 self.p.index.append(e)
109
109
110 class lazymap:
110 class lazymap:
111 """a lazy version of the node map"""
111 """a lazy version of the node map"""
112 def __init__(self, parser):
112 def __init__(self, parser):
113 self.p = parser
113 self.p = parser
114 def load(self, key):
114 def load(self, key):
115 if self.p.all: return
115 if self.p.all: return
116 n = self.p.data.find(key)
116 n = self.p.data.find(key)
117 if n < 0:
117 if n < 0:
118 raise KeyError(key)
118 raise KeyError(key)
119 pos = n / self.p.s
119 pos = n / self.p.s
120 self.p.load(pos)
120 self.p.load(pos)
121 def __contains__(self, key):
121 def __contains__(self, key):
122 self.p.load()
122 self.p.load()
123 return key in self.p.map
123 return key in self.p.map
124 def __iter__(self):
124 def __iter__(self):
125 yield nullid
125 yield nullid
126 for i in xrange(self.p.l):
126 for i in xrange(self.p.l):
127 try:
127 try:
128 yield self.p.index[i][6]
128 yield self.p.index[i][6]
129 except:
129 except:
130 self.p.load(i)
130 self.p.load(i)
131 yield self.p.index[i][6]
131 yield self.p.index[i][6]
132 def __getitem__(self, key):
132 def __getitem__(self, key):
133 try:
133 try:
134 return self.p.map[key]
134 return self.p.map[key]
135 except KeyError:
135 except KeyError:
136 try:
136 try:
137 self.load(key)
137 self.load(key)
138 return self.p.map[key]
138 return self.p.map[key]
139 except KeyError:
139 except KeyError:
140 raise KeyError("node " + hex(key))
140 raise KeyError("node " + hex(key))
141 def __setitem__(self, key, val):
141 def __setitem__(self, key, val):
142 self.p.map[key] = val
142 self.p.map[key] = val
143
143
144 class RevlogError(Exception): pass
144 class RevlogError(Exception): pass
145
145
146 class revlog:
146 class revlog:
147 """
147 """
148 the underlying revision storage object
148 the underlying revision storage object
149
149
150 A revlog consists of two parts, an index and the revision data.
150 A revlog consists of two parts, an index and the revision data.
151
151
152 The index is a file with a fixed record size containing
152 The index is a file with a fixed record size containing
153 information on each revision, includings its nodeid (hash), the
153 information on each revision, includings its nodeid (hash), the
154 nodeids of its parents, the position and offset of its data within
154 nodeids of its parents, the position and offset of its data within
155 the data file, and the revision it's based on. Finally, each entry
155 the data file, and the revision it's based on. Finally, each entry
156 contains a linkrev entry that can serve as a pointer to external
156 contains a linkrev entry that can serve as a pointer to external
157 data.
157 data.
158
158
159 The revision data itself is a linear collection of data chunks.
159 The revision data itself is a linear collection of data chunks.
160 Each chunk represents a revision and is usually represented as a
160 Each chunk represents a revision and is usually represented as a
161 delta against the previous chunk. To bound lookup time, runs of
161 delta against the previous chunk. To bound lookup time, runs of
162 deltas are limited to about 2 times the length of the original
162 deltas are limited to about 2 times the length of the original
163 version data. This makes retrieval of a version proportional to
163 version data. This makes retrieval of a version proportional to
164 its size, or O(1) relative to the number of revisions.
164 its size, or O(1) relative to the number of revisions.
165
165
166 Both pieces of the revlog are written to in an append-only
166 Both pieces of the revlog are written to in an append-only
167 fashion, which means we never need to rewrite a file to insert or
167 fashion, which means we never need to rewrite a file to insert or
168 remove data, and can use some simple techniques to avoid the need
168 remove data, and can use some simple techniques to avoid the need
169 for locking while reading.
169 for locking while reading.
170 """
170 """
171 def __init__(self, opener, indexfile, datafile):
171 def __init__(self, opener, indexfile, datafile):
172 """
172 """
173 create a revlog object
173 create a revlog object
174
174
175 opener is a function that abstracts the file opening operation
175 opener is a function that abstracts the file opening operation
176 and can be used to implement COW semantics or the like.
176 and can be used to implement COW semantics or the like.
177 """
177 """
178 self.indexfile = indexfile
178 self.indexfile = indexfile
179 self.datafile = datafile
179 self.datafile = datafile
180 self.opener = opener
180 self.opener = opener
181 self.cache = None
181 self.cache = None
182
182
183 try:
183 try:
184 i = self.opener(self.indexfile).read()
184 i = self.opener(self.indexfile).read()
185 except IOError, inst:
185 except IOError, inst:
186 if inst.errno != errno.ENOENT:
186 if inst.errno != errno.ENOENT:
187 raise
187 raise
188 i = ""
188 i = ""
189
189
190 if len(i) > 10000:
190 if len(i) > 10000:
191 # big index, let's parse it on demand
191 # big index, let's parse it on demand
192 parser = lazyparser(i, self)
192 parser = lazyparser(i, self)
193 self.index = lazyindex(parser)
193 self.index = lazyindex(parser)
194 self.nodemap = lazymap(parser)
194 self.nodemap = lazymap(parser)
195 else:
195 else:
196 s = struct.calcsize(indexformat)
196 s = struct.calcsize(indexformat)
197 l = len(i) / s
197 l = len(i) / s
198 self.index = [None] * l
198 self.index = [None] * l
199 m = [None] * l
199 m = [None] * l
200
200
201 n = 0
201 n = 0
202 for f in xrange(0, len(i), s):
202 for f in xrange(0, len(i), s):
203 # offset, size, base, linkrev, p1, p2, nodeid
203 # offset, size, base, linkrev, p1, p2, nodeid
204 e = struct.unpack(indexformat, i[f:f + s])
204 e = struct.unpack(indexformat, i[f:f + s])
205 m[n] = (e[6], n)
205 m[n] = (e[6], n)
206 self.index[n] = e
206 self.index[n] = e
207 n += 1
207 n += 1
208
208
209 self.nodemap = dict(m)
209 self.nodemap = dict(m)
210 self.nodemap[nullid] = -1
210 self.nodemap[nullid] = -1
211
211
212 def tip(self): return self.node(len(self.index) - 1)
212 def tip(self): return self.node(len(self.index) - 1)
213 def count(self): return len(self.index)
213 def count(self): return len(self.index)
214 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
214 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
215 def rev(self, node):
215 def rev(self, node):
216 try:
216 try:
217 return self.nodemap[node]
217 return self.nodemap[node]
218 except KeyError:
218 except KeyError:
219 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
219 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
220 def linkrev(self, node): return self.index[self.rev(node)][3]
220 def linkrev(self, node): return self.index[self.rev(node)][3]
221 def parents(self, node):
221 def parents(self, node):
222 if node == nullid: return (nullid, nullid)
222 if node == nullid: return (nullid, nullid)
223 return self.index[self.rev(node)][4:6]
223 return self.index[self.rev(node)][4:6]
224
224
225 def start(self, rev): return self.index[rev][0]
225 def start(self, rev): return self.index[rev][0]
226 def length(self, rev): return self.index[rev][1]
226 def length(self, rev): return self.index[rev][1]
227 def end(self, rev): return self.start(rev) + self.length(rev)
227 def end(self, rev): return self.start(rev) + self.length(rev)
228 def base(self, rev): return self.index[rev][2]
228 def base(self, rev): return self.index[rev][2]
229
229
230 def reachable(self, rev, stop=None):
230 def reachable(self, rev, stop=None):
231 reachable = {}
231 reachable = {}
232 visit = [rev]
232 visit = [rev]
233 reachable[rev] = 1
233 reachable[rev] = 1
234 if stop:
234 if stop:
235 stopn = self.rev(stop)
235 stopn = self.rev(stop)
236 else:
236 else:
237 stopn = 0
237 stopn = 0
238 while visit:
238 while visit:
239 n = visit.pop(0)
239 n = visit.pop(0)
240 if n == stop:
240 if n == stop:
241 continue
241 continue
242 if n == nullid:
242 if n == nullid:
243 continue
243 continue
244 for p in self.parents(n):
244 for p in self.parents(n):
245 if self.rev(p) < stopn:
245 if self.rev(p) < stopn:
246 continue
246 continue
247 if p not in reachable:
247 if p not in reachable:
248 reachable[p] = 1
248 reachable[p] = 1
249 visit.append(p)
249 visit.append(p)
250 return reachable
250 return reachable
251
251
def nodesbetween(self, roots=None, heads=None):
    """Return a tuple containing three elements.

    Elements 1 and 2 contain a final list of bases and heads after all
    the unreachable ones have been pruned.  Element 0 contains a
    topologically sorted list of all nodes that satisfy these
    constraints:
    1. All nodes must be descended from a node in roots (the nodes on
       roots are considered descended from themselves).
    2. All nodes must also be ancestors of a node in heads (the nodes
       in heads are considered to be their own ancestors).

    If roots is unspecified, nullid is assumed as the only root.
    If heads is unspecified, it is taken to be the output of the
    heads method (i.e. a list of all nodes in the repository that
    have no children)."""
    nonodes = ([], [], [])
    if roots is not None:
        roots = list(roots)
        if not roots:
            return nonodes
        lowestrev = min([self.rev(n) for n in roots])
    else:
        roots = [nullid] # Everybody's a descendent of nullid
        lowestrev = -1
    if (lowestrev == -1) and (heads is None):
        # We want _all_ the nodes!
        return ([self.node(r) for r in xrange(0, self.count())],
                [nullid], list(self.heads()))
    if heads is None:
        # All nodes are ancestors, so the latest ancestor is the last
        # node.
        highestrev = self.count() - 1
        # Set ancestors to None to signal that every node is an ancestor.
        ancestors = None
        # Set heads to an empty dictionary for later discovery of heads
        heads = {}
    else:
        heads = list(heads)
        if not heads:
            return nonodes
        ancestors = {}
        # Start at the top and keep marking parents until we're done.
        nodestotag = heads[:]
        # Turn heads into a dictionary so we can remove 'fake' heads.
        # Also, later we will be using it to filter out the heads we can't
        # find from roots.
        heads = dict.fromkeys(heads, 0)
        # Remember where the top was so we can use it as a limit later.
        highestrev = max([self.rev(n) for n in nodestotag])
        while nodestotag:
            # grab a node to tag
            n = nodestotag.pop()
            # Never tag nullid
            if n == nullid:
                continue
            # A node's revision number represents its place in a
            # topologically sorted list of nodes.
            r = self.rev(n)
            if r >= lowestrev:
                if n not in ancestors:
                    # If we are possibly a descendent of one of the roots
                    # and we haven't already been marked as an ancestor
                    ancestors[n] = 1 # Mark as ancestor
                    # Add non-nullid parents to list of nodes to tag.
                    nodestotag.extend([p for p in self.parents(n) if
                                       p != nullid])
                elif n in heads: # We've seen it before, is it a fake head?
                    # So it is, real heads should not be the ancestors of
                    # any other heads.
                    heads.pop(n)
        if not ancestors:
            return nonodes
        # Now that we have our set of ancestors, we want to remove any
        # roots that are not ancestors.

        # If one of the roots was nullid, everything is included anyway.
        if lowestrev > -1:
            # But, since we weren't, let's recompute the lowest rev to not
            # include roots that aren't ancestors.

            # Filter out roots that aren't ancestors of heads
            roots = [n for n in roots if n in ancestors]
            # Recompute the lowest revision
            if roots:
                lowestrev = min([self.rev(n) for n in roots])
            else:
                # No more roots?  Return empty list
                return nonodes
        else:
            # We are descending from nullid, and don't need to care about
            # any other roots.
            lowestrev = -1
            roots = [nullid]
    # Transform our roots list into a 'set' (i.e. a dictionary where the
    # values don't matter).
    descendents = dict.fromkeys(roots, 1)
    # Also, keep the original roots so we can filter out roots that aren't
    # 'real' roots (i.e. are descended from other roots).
    roots = descendents.copy()
    # Our topologically sorted list of output nodes.
    orderedout = []
    # Don't start at nullid since we don't want nullid in our output list,
    # and if nullid shows up in descendents, empty parents will look like
    # they're descendents.
    for r in xrange(max(lowestrev, 0), highestrev + 1):
        n = self.node(r)
        isdescendent = False
        if lowestrev == -1: # Everybody is a descendent of nullid
            isdescendent = True
        elif n in descendents:
            # n is already a descendent
            isdescendent = True
            # This check only needs to be done here because all the roots
            # will start being marked as descendents before the loop.
            if n in roots:
                # If n was a root, check if it's a 'real' root.
                p = tuple(self.parents(n))
                # If any of its parents are descendents, it's not a root.
                if (p[0] in descendents) or (p[1] in descendents):
                    roots.pop(n)
        else:
            p = tuple(self.parents(n))
            # A node is a descendent if either of its parents are
            # descendents.  (We seeded the descendents list with the roots
            # up there, remember?)
            if (p[0] in descendents) or (p[1] in descendents):
                descendents[n] = 1
                isdescendent = True
        if isdescendent and ((ancestors is None) or (n in ancestors)):
            # Only include nodes that are both descendents and ancestors.
            orderedout.append(n)
            if (ancestors is not None) and (n in heads):
                # We're trying to figure out which heads are reachable
                # from roots.
                # Mark this head as having been reached
                heads[n] = 1
            elif ancestors is None:
                # Otherwise, we're trying to discover the heads.
                # Assume this is a head because if it isn't, the next step
                # will eventually remove it.
                heads[n] = 1
                # But, obviously its parents aren't.
                for p in self.parents(n):
                    heads.pop(p, None)
    heads = [n for n in heads.iterkeys() if heads[n] != 0]
    roots = roots.keys()
    assert orderedout
    assert roots
    assert heads
    return (orderedout, roots, heads)
402
def heads(self, stop=None):
    """return the list of all nodes that have no children

    Walks the revisions from tip down to root; a node is reported as a
    head unless it has already been recorded as somebody's parent.
    The walk terminates early once ``stop`` (when present in the
    nodemap) or any revision below it has been processed.
    """
    seen_parents = {}
    found = []
    stoprev = 0
    if stop and stop in self.nodemap:
        stoprev = self.rev(stop)

    for rev in range(self.count() - 1, -1, -1):
        node = self.node(rev)
        if node not in seen_parents:
            found.append(node)
        if node == stop or rev < stoprev:
            break
        for parent in self.parents(node):
            seen_parents[parent] = 1
    return found
271
422
def children(self, node):
    """find the children of a given node

    Scans every revision after node's own (children always have a
    higher revision number than their parents) and collects those
    that list node among their parents.
    """
    c = []
    p = self.rev(node)
    for r in range(p + 1, self.count()):
        n = self.node(r)
        # A single membership test replaces the old per-parent loop,
        # which contained a no-op "elif pn == nullid: continue" branch
        # and could list a child twice if both parents equalled node.
        if node in self.parents(n):
            c.append(n)
    return c
285
436
def lookup(self, id):
    """locate a node based on revision number or subset of hex nodeid

    id - either a decimal revision number (a negative value counts
    back from the tip, as with list indexing) or a prefix of a hex
    node id.

    Raises RevlogError if a hex prefix matches more than one node,
    or none at all.
    """
    try:
        # first, try to interpret id as a revision number
        rev = int(id)
        # reject non-canonical spellings such as "01" or "+1"
        if str(rev) != id: raise ValueError
        if rev < 0: rev = self.count() + rev
        if rev < 0 or rev >= self.count(): raise ValueError
        return self.node(rev)
    except (ValueError, OverflowError):
        # not a revision number: treat id as a hex nodeid prefix
        c = []
        for n in self.nodemap:
            if hex(n).startswith(id):
                c.append(n)
        if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
        if len(c) < 1: raise RevlogError(_("No match found"))
        return c[0]
    # NOTE: the unreachable trailing "return None" was removed --
    # every path above either returns or raises.
304
455
def diff(self, a, b):
    """return a delta between two revisions

    Thin convenience wrapper: delegates the actual binary delta
    computation (text a -> text b) to mdiff.textdiff.
    """
    delta = mdiff.textdiff(a, b)
    return delta
308
459
def patches(self, t, pl):
    """apply a list of patches to a string

    Applies every delta in pl, in order, to the text t via
    mdiff.patches and returns the resulting string.
    """
    patched = mdiff.patches(t, pl)
    return patched
312
463
def delta(self, node):
    """return or calculate a delta between a node and its predecessor"""
    rev = self.rev(node)
    chainbase = self.base(rev)
    if rev != chainbase:
        # the entry is stored as a delta already: read the raw chunk
        # out of the data file and decompress it
        f = self.opener(self.datafile)
        f.seek(self.start(rev))
        raw = f.read(self.length(rev))
        return decompress(raw)
    # the entry is a full snapshot: recompute a delta against the
    # text of the immediately preceding revision
    prev_text = self.revision(self.node(rev - 1))
    return self.diff(prev_text, self.revision(node))
325
476
def revision(self, node):
    """return the uncompressed text of the revision identified by node

    Rebuilds the text by reading the delta chain from the data file,
    starting at the chain base (nearest full snapshot) and patching
    forward, then verifies the result against the stored node hash.
    The result is kept in self.cache as (node, rev, text).
    """
    if node == nullid: return ""
    # fast path: last revision built is cached as (node, rev, text)
    if self.cache and self.cache[0] == node: return self.cache[2]

    # look up what we need to read
    text = None
    rev = self.rev(node)
    start, length, base, link, p1, p2, node = self.index[rev]
    end = start + length
    # if this entry is a delta, reading must begin at its chain base
    if base != rev: start = self.start(base)

    # do we have useful data cached?
    if self.cache and self.cache[1] >= base and self.cache[1] < rev:
        # the cached revision lies on our chain: resume patching from
        # it rather than rebuilding from the base snapshot
        base = self.cache[1]
        start = self.start(base + 1)
        text = self.cache[2]
        last = 0

    f = self.opener(self.datafile)
    f.seek(start)
    data = f.read(end - start)

    if text is None:
        # no usable cache: first chunk is the full text of the base
        last = self.length(base)
        text = decompress(data[:last])

    # decompress the remaining chunks: one delta per revision on the
    # chain between base and rev (inclusive)
    bins = []
    for r in xrange(base + 1, rev + 1):
        s = self.length(r)
        bins.append(decompress(data[last:last + s]))
        last = last + s

    text = mdiff.patches(text, bins)

    # integrity check: the node id is a hash over text and parents
    if node != hash(text, p1, p2):
        raise RevlogError(_("integrity check failed on %s:%d")
                          % (self.datafile, rev))

    self.cache = (node, rev, text)
    return text
367
518
def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
    """add a revision to the log

    text - the revision data to add
    transaction - the transaction object used for rollback
    link - the linkrev data to add
    p1, p2 - the parent nodeids of the revision
    d - an optional precomputed delta

    Returns the node id of the new revision; if an identical revision
    (same text and parents, hence same node) already exists, returns
    that node without storing anything.
    """
    if text is None: text = ""
    if p1 is None: p1 = self.tip()
    if p2 is None: p2 = nullid

    node = hash(text, p1, p2)

    # node ids are content-derived, so a duplicate add is a no-op
    if node in self.nodemap:
        return node

    n = self.count()
    t = n - 1  # revision number of the current tip (-1 when empty)

    if n:
        base = self.base(t)
        start = self.start(base)
        end = self.end(t)
        if not d:
            # no precomputed delta supplied: diff against current tip
            prev = self.revision(self.tip())
            d = self.diff(prev, text)
        data = compress(d)
        # total size of the delta chain if we append this delta
        dist = end - start + len(data)

    # full versions are inserted when the needed deltas
    # become comparable to the uncompressed text
    if not n or dist > len(text) * 2:
        data = compress(text)
        base = n
    else:
        base = self.base(t)

    offset = 0
    if t >= 0:
        offset = self.end(t)

    # index entry tuple, in indexformat order:
    # (offset, length, base, linkrev, p1, p2, node)
    e = (offset, len(data), base, link, p1, p2, node)

    self.index.append(e)
    self.nodemap[node] = n
    entry = struct.pack(indexformat, *e)

    # register current file offsets with the transaction before
    # appending (presumably so rollback can truncate to them --
    # TODO confirm against the transaction implementation)
    transaction.add(self.datafile, e[0])
    self.opener(self.datafile, "a").write(data)
    transaction.add(self.indexfile, n * len(entry))
    self.opener(self.indexfile, "a").write(entry)

    self.cache = (node, n, text)
    return node
424
575
def ancestor(self, a, b):
    """calculate the least common ancestor of nodes a and b

    Works by walking the ancestor sets of a and b in lock-step, in
    order of decreasing distance from the root, until a common node
    is found.
    """
    # calculate the distance of every node from root
    dist = {nullid: 0}
    for i in xrange(self.count()):
        n = self.node(i)
        p1, p2 = self.parents(n)
        dist[n] = max(dist[p1], dist[p2]) + 1

    # traverse ancestors in order of decreasing distance from root
    def ancestors(node):
        # we store negative distances because heap returns smallest member
        h = [(-dist[node], node)]
        seen = {}
        while h:
            d, n = heapq.heappop(h)
            if n not in seen:
                seen[n] = 1
                # NOTE: removed two unused locals from the original:
                # "earliest = self.count()" (never read) and
                # "r = self.rev(n)" (result discarded)
                yield (-d, n)
                for p in self.parents(n):
                    heapq.heappush(h, (-dist[p], p))

    # batch the ancestors stream into (distance, {nodes}) groups
    def generations(node):
        sg, s = None, {}
        for g, n in ancestors(node):
            if g != sg:
                if sg:
                    yield sg, s
                sg, s = g, {n: 1}
            else:
                s[n] = 1
        yield sg, s

    x = generations(a)
    y = generations(b)
    gx = x.next()
    gy = y.next()

    # increment each ancestor list until it is closer to root than
    # the other, or they match
    while 1:
        if gx[0] == gy[0]:
            # same distance: look for the intersection
            i = [n for n in gx[1] if n in gy[1]]
            if i:
                return i[0]
            else:
                gy = y.next()
                gx = x.next()
        elif gx[0] < gy[0]:
            gy = y.next()
        else:
            gx = x.next()
484
635
def group(self, nodelist, lookup, infocollect = None):
    """calculate a delta group

    Given a list of changeset revs, return a set of deltas and
    metadata corresponding to nodes. the first delta is
    parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
    have this parent as it has all history before these
    changesets. parent is parent[0]

    nodelist - the nodes to bundle
    lookup - maps a node to the link metadata emitted with it
    infocollect - optional callback invoked with each node visited
    """
    revs = [self.rev(n) for n in nodelist]
    needed = dict.fromkeys(revs, 1)

    # if we don't have any revisions touched by these changesets, bail
    if not revs:
        yield struct.pack(">l", 0)
        return

    # add the parent of the first rev
    p = self.parents(self.node(revs[0]))[0]
    revs.insert(0, self.rev(p))

    # for each delta that isn't contiguous in the log, we need to
    # reconstruct the base, reconstruct the result, and then
    # calculate the delta. We also need to do this where we've
    # stored a full version and not a delta
    for i in xrange(0, len(revs) - 1):
        a, b = revs[i], revs[i + 1]
        if a + 1 != b or self.base(b) == b:
            for j in xrange(self.base(a), a + 1):
                needed[j] = 1
            for j in xrange(self.base(b), b + 1):
                needed[j] = 1

    # calculate spans to retrieve from datafile
    needed = needed.keys()
    needed.sort()
    spans = []
    oo = -1  # offset of the span currently being accumulated
    ol = 0   # its accumulated length
    for n in needed:
        # negative revs (presumably the parent of rev 0 -- TODO
        # confirm) have nothing stored, skip them
        if n < 0: continue
        o = self.start(n)
        l = self.length(n)
        if oo + ol == o: # can we merge with the previous?
            nl = spans[-1][2]
            nl.append((n, l))
            ol += l
            spans[-1] = (oo, ol, nl)
        else:
            oo = o
            ol = l
            spans.append((oo, ol, [(n, l)]))

    # read spans in, divide up chunks
    chunks = {}
    for span in spans:
        # we reopen the file for each span to make http happy for now
        f = self.opener(self.datafile)
        f.seek(span[0])
        data = f.read(span[1])

        # divide up the span
        pos = 0
        for r, l in span[2]:
            chunks[r] = decompress(data[pos: pos + l])
            pos += l

    # helper to reconstruct intermediate versions
    def construct(text, base, rev):
        bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
        return mdiff.patches(text, bins)

    # build deltas
    deltas = []  # NOTE(review): never used below; apparent leftover
    for d in xrange(0, len(revs) - 1):
        a, b = revs[d], revs[d + 1]
        n = self.node(b)

        if infocollect is not None:
            infocollect(n)

        # do we need to construct a new delta?
        if a + 1 != b or self.base(b) == b:
            if a >= 0:
                # rebuild the full text of rev a from its chain base
                base = self.base(a)
                ta = chunks[self.base(a)]
                ta = construct(ta, base, a)
            else:
                ta = ""

            base = self.base(b)
            if a > base:
                # rev a lies on b's delta chain: patch forward from ta
                base = a
                tb = ta
            else:
                tb = chunks[self.base(b)]
            tb = construct(tb, base, b)
            d = self.diff(ta, tb)
        else:
            # contiguous stored delta: reuse the chunk directly
            d = chunks[b]

        p = self.parents(n)
        meta = n + p[0] + p[1] + lookup(n)
        # each record is length-prefixed: 4-byte length, then meta+delta
        l = struct.pack(">l", len(meta) + len(d) + 4)
        yield l
        yield meta
        yield d

    # a zero length terminates the group
    yield struct.pack(">l", 0)
597
745
def addgroup(self, revs, linkmapper, transaction, unique=0):
    """
    add a delta group

    given a set of deltas, add them to the revision log. the
    first delta is against its parent, which should be in our
    log, the rest are against the previous delta.

    revs - iterable of chunks, each 80 bytes of node/p1/p2/cs ids
    followed by the delta payload
    linkmapper - maps the changeset id cs to a linkrev
    unique - currently has no effect (the duplicate check below is
    commented out)

    Returns the node of the last chunk processed (nullid if revs is
    empty).
    """

    #track the base of the current delta log
    r = self.count()
    t = r - 1  # current tip revision (-1 when the log is empty)
    node = nullid

    base = prev = -1
    start = end = measure = 0
    if r:
        start = self.start(self.base(t))
        end = self.end(t)
        measure = self.length(self.base(t))
        base = self.base(t)
        prev = self.tip()

    # hand current file sizes to the transaction before appending
    # (presumably so rollback can truncate -- TODO confirm)
    transaction.add(self.datafile, end)
    transaction.add(self.indexfile, r * struct.calcsize(indexformat))
    dfh = self.opener(self.datafile, "a")
    ifh = self.opener(self.indexfile, "a")

    # loop through our set of deltas
    chain = None
    for chunk in revs:
        node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
        link = linkmapper(cs)
        if node in self.nodemap:
            # this can happen if two branches make the same change
            # if unique:
            #    raise RevlogError(_("already have %s") % hex(node[:4]))
            chain = node
            continue
        delta = chunk[80:]

        if not chain:
            # retrieve the parent revision of the delta chain
            chain = p1
            if not chain in self.nodemap:
                raise RevlogError(_("unknown base %s") % short(chain[:4]))

        # full versions are inserted when the needed deltas become
        # comparable to the uncompressed text or when the previous
        # version is not the one we have a delta against. We use
        # the size of the previous full rev as a proxy for the
        # current size.

        if chain == prev:
            cdelta = compress(delta)

        # the second condition is only evaluated when chain == prev
        # (short-circuit), so cdelta is always bound where it is used
        if chain != prev or (end - start + len(cdelta)) > measure * 2:
            # flush our writes here so we can read it in revision
            dfh.flush()
            ifh.flush()
            # rebuild the full text and store via addrevision, which
            # decides delta-vs-full storage itself
            text = self.revision(chain)
            text = self.patches(text, [delta])
            chk = self.addrevision(text, transaction, link, p1, p2)
            if chk != node:
                raise RevlogError(_("consistency error adding group"))
            measure = len(text)
        else:
            # append the compressed delta to the data file directly
            e = (end, len(cdelta), self.base(t), link, p1, p2, node)
            self.index.append(e)
            self.nodemap[node] = r
            dfh.write(cdelta)
            ifh.write(struct.pack(indexformat, *e))

        # advance tip bookkeeping for the next chunk
        t, r, chain, prev = r, r + 1, node, node
        start = self.start(self.base(t))
        end = self.end(t)

    dfh.close()
    ifh.close()
    return node
General Comments 0
You need to be logged in to leave comments. Login now