##// END OF EJS Templates
merge with mainline
Vadim Gelfer -
r1605:428e0a9c merge default
parent child Browse files
Show More
@@ -1,71 +1,55 b''
1 #!/bin/sh
1 #!/bin/sh
2 #
2 #
3 # This is an example of using HGEDITOR to automate the signing of
3 # This is an example of using HGEDITOR to create of diff to review the
4 # commits and so on.
4 # changes while commiting.
5
6 # change this to one to turn on GPG support
7 SIGN=0
8
5
9 # If you want to pass your favourite editor some other parameters
6 # If you want to pass your favourite editor some other parameters
10 # only for Mercurial, modify this:
7 # only for Mercurial, modify this:
11 case "${EDITOR}" in
8 case "${EDITOR}" in
12 "")
9 "")
13 EDITOR="vi"
10 EDITOR="vi"
14 ;;
11 ;;
15 emacs)
12 emacs)
16 EDITOR="$EDITOR -nw"
13 EDITOR="$EDITOR -nw"
17 ;;
14 ;;
18 gvim|vim)
15 gvim|vim)
19 EDITOR="$EDITOR -f -o"
16 EDITOR="$EDITOR -f -o"
20 ;;
17 ;;
21 esac
18 esac
22
19
23
20
24 HGTMP=""
21 HGTMP=""
25 cleanup_exit() {
22 cleanup_exit() {
26 rm -rf "$HGTMP"
23 rm -rf "$HGTMP"
27 }
24 }
28
25
29 # Remove temporary files even if we get interrupted
26 # Remove temporary files even if we get interrupted
30 trap "cleanup_exit" 0 # normal exit
27 trap "cleanup_exit" 0 # normal exit
31 trap "exit 255" 1 2 3 6 15 # HUP INT QUIT ABRT TERM
28 trap "exit 255" 1 2 3 6 15 # HUP INT QUIT ABRT TERM
32
29
33 HGTMP="${TMPDIR-/tmp}/hgeditor.$RANDOM.$RANDOM.$RANDOM.$$"
30 HGTMP="${TMPDIR-/tmp}/hgeditor.$RANDOM.$RANDOM.$RANDOM.$$"
34 (umask 077 && mkdir "$HGTMP") || {
31 (umask 077 && mkdir "$HGTMP") || {
35 echo "Could not create temporary directory! Exiting." 1>&2
32 echo "Could not create temporary directory! Exiting." 1>&2
36 exit 1
33 exit 1
37 }
34 }
38
35
39 (
36 (
40 cd "`hg root`"
37 cd "`hg root`"
41 grep '^HG: changed' "$1" | cut -b 13- | while read changed; do
38 grep '^HG: changed' "$1" | cut -b 13- | while read changed; do
42 hg diff "$changed" >> "$HGTMP/diff"
39 hg diff "$changed" >> "$HGTMP/diff"
43 done
40 done
44 )
41 )
45
42
46 echo > "$HGTMP/msg"
43 cat "$1" > "$HGTMP/msg"
47 if [ "$SIGN" == "1" ]; then
48 MANIFEST=`grep '^HG: manifest hash' "$1" | cut -b 19-`
49 echo -e "\nmanifest hash: $MANIFEST" >> "$HGTMP/msg"
50 fi
51 grep -vE '^(HG: manifest hash .*)?$' "$1" >> "$HGTMP/msg"
52
44
53 CHECKSUM=`md5sum "$HGTMP/msg"`
45 CHECKSUM=`md5sum "$HGTMP/msg"`
54 if [ -s "$HGTMP/diff" ]; then
46 if [ -s "$HGTMP/diff" ]; then
55 $EDITOR "$HGTMP/msg" "$HGTMP/diff" || exit $?
47 $EDITOR "$HGTMP/msg" "$HGTMP/diff" || exit $?
56 else
48 else
57 $EDITOR "$HGTMP/msg" || exit $?
49 $EDITOR "$HGTMP/msg" || exit $?
58 fi
50 fi
59 echo "$CHECKSUM" | md5sum -c >/dev/null 2>&1 && exit 13
51 echo "$CHECKSUM" | md5sum -c >/dev/null 2>&1 && exit 13
60
52
61 if [ "$SIGN" == "1" ]; then
53 mv "$HGTMP/msg" "$1"
62 {
63 head -n 1 "$HGTMP/msg"
64 echo
65 grep -v "^HG:" "$HGTMP/msg" | gpg -t -a -u "${HGUSER}" --clearsign
66 } > "$HGTMP/msg.gpg" && mv "$HGTMP/msg.gpg" "$1"
67 else
68 mv "$HGTMP/msg" "$1"
69 fi
70
54
71 exit $?
55 exit $?
@@ -1,2704 +1,2738 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
12 demandload(globals(), "fancyopts ui hg util lock revlog")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
13 demandload(globals(), "fnmatch hgweb mdiff random signal time traceback")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
14 demandload(globals(), "errno socket version struct atexit sets bz2")
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18 class AmbiguousCommand(Exception):
18 class AmbiguousCommand(Exception):
19 """Exception raised if command shortcut matches more than one command."""
19 """Exception raised if command shortcut matches more than one command."""
20
20
21 def filterfiles(filters, files):
21 def filterfiles(filters, files):
22 l = [x for x in files if x in filters]
22 l = [x for x in files if x in filters]
23
23
24 for t in filters:
24 for t in filters:
25 if t and t[-1] != "/":
25 if t and t[-1] != "/":
26 t += "/"
26 t += "/"
27 l += [x for x in files if x.startswith(t)]
27 l += [x for x in files if x.startswith(t)]
28 return l
28 return l
29
29
30 def relpath(repo, args):
30 def relpath(repo, args):
31 cwd = repo.getcwd()
31 cwd = repo.getcwd()
32 if cwd:
32 if cwd:
33 return [util.normpath(os.path.join(cwd, x)) for x in args]
33 return [util.normpath(os.path.join(cwd, x)) for x in args]
34 return args
34 return args
35
35
36 def matchpats(repo, pats=[], opts={}, head=''):
36 def matchpats(repo, pats=[], opts={}, head=''):
37 cwd = repo.getcwd()
37 cwd = repo.getcwd()
38 if not pats and cwd:
38 if not pats and cwd:
39 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
39 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
40 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
40 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
41 cwd = ''
41 cwd = ''
42 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
42 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
43 opts.get('exclude'), head) + (cwd,)
43 opts.get('exclude'), head) + (cwd,)
44
44
45 def makewalk(repo, pats, opts, node=None, head=''):
45 def makewalk(repo, pats, opts, node=None, head=''):
46 files, matchfn, anypats, cwd = matchpats(repo, pats, opts, head)
46 files, matchfn, anypats, cwd = matchpats(repo, pats, opts, head)
47 exact = dict(zip(files, files))
47 exact = dict(zip(files, files))
48 def walk():
48 def walk():
49 for src, fn in repo.walk(node=node, files=files, match=matchfn):
49 for src, fn in repo.walk(node=node, files=files, match=matchfn):
50 yield src, fn, util.pathto(cwd, fn), fn in exact
50 yield src, fn, util.pathto(cwd, fn), fn in exact
51 return files, matchfn, walk()
51 return files, matchfn, walk()
52
52
53 def walk(repo, pats, opts, node=None, head=''):
53 def walk(repo, pats, opts, node=None, head=''):
54 files, matchfn, results = makewalk(repo, pats, opts, node, head)
54 files, matchfn, results = makewalk(repo, pats, opts, node, head)
55 for r in results:
55 for r in results:
56 yield r
56 yield r
57
57
58 def walkchangerevs(ui, repo, pats, opts):
58 def walkchangerevs(ui, repo, pats, opts):
59 '''Iterate over files and the revs they changed in.
59 '''Iterate over files and the revs they changed in.
60
60
61 Callers most commonly need to iterate backwards over the history
61 Callers most commonly need to iterate backwards over the history
62 it is interested in. Doing so has awful (quadratic-looking)
62 it is interested in. Doing so has awful (quadratic-looking)
63 performance, so we use iterators in a "windowed" way.
63 performance, so we use iterators in a "windowed" way.
64
64
65 We walk a window of revisions in the desired order. Within the
65 We walk a window of revisions in the desired order. Within the
66 window, we first walk forwards to gather data, then in the desired
66 window, we first walk forwards to gather data, then in the desired
67 order (usually backwards) to display it.
67 order (usually backwards) to display it.
68
68
69 This function returns an (iterator, getchange, matchfn) tuple. The
69 This function returns an (iterator, getchange, matchfn) tuple. The
70 getchange function returns the changelog entry for a numeric
70 getchange function returns the changelog entry for a numeric
71 revision. The iterator yields 3-tuples. They will be of one of
71 revision. The iterator yields 3-tuples. They will be of one of
72 the following forms:
72 the following forms:
73
73
74 "window", incrementing, lastrev: stepping through a window,
74 "window", incrementing, lastrev: stepping through a window,
75 positive if walking forwards through revs, last rev in the
75 positive if walking forwards through revs, last rev in the
76 sequence iterated over - use to reset state for the current window
76 sequence iterated over - use to reset state for the current window
77
77
78 "add", rev, fns: out-of-order traversal of the given file names
78 "add", rev, fns: out-of-order traversal of the given file names
79 fns, which changed during revision rev - use to gather data for
79 fns, which changed during revision rev - use to gather data for
80 possible display
80 possible display
81
81
82 "iter", rev, None: in-order traversal of the revs earlier iterated
82 "iter", rev, None: in-order traversal of the revs earlier iterated
83 over with "add" - use to display data'''
83 over with "add" - use to display data'''
84
84
85 files, matchfn, anypats, cwd = matchpats(repo, pats, opts)
85 files, matchfn, anypats, cwd = matchpats(repo, pats, opts)
86
86
87 if repo.changelog.count() == 0:
87 if repo.changelog.count() == 0:
88 return [], False, matchfn
88 return [], False, matchfn
89
89
90 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
90 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
91 wanted = {}
91 wanted = {}
92 slowpath = anypats
92 slowpath = anypats
93 window = 300
93 window = 300
94 fncache = {}
94 fncache = {}
95
95
96 chcache = {}
96 chcache = {}
97 def getchange(rev):
97 def getchange(rev):
98 ch = chcache.get(rev)
98 ch = chcache.get(rev)
99 if ch is None:
99 if ch is None:
100 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
100 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
101 return ch
101 return ch
102
102
103 if not slowpath and not files:
103 if not slowpath and not files:
104 # No files, no patterns. Display all revs.
104 # No files, no patterns. Display all revs.
105 wanted = dict(zip(revs, revs))
105 wanted = dict(zip(revs, revs))
106 if not slowpath:
106 if not slowpath:
107 # Only files, no patterns. Check the history of each file.
107 # Only files, no patterns. Check the history of each file.
108 def filerevgen(filelog):
108 def filerevgen(filelog):
109 for i in xrange(filelog.count() - 1, -1, -window):
109 for i in xrange(filelog.count() - 1, -1, -window):
110 revs = []
110 revs = []
111 for j in xrange(max(0, i - window), i + 1):
111 for j in xrange(max(0, i - window), i + 1):
112 revs.append(filelog.linkrev(filelog.node(j)))
112 revs.append(filelog.linkrev(filelog.node(j)))
113 revs.reverse()
113 revs.reverse()
114 for rev in revs:
114 for rev in revs:
115 yield rev
115 yield rev
116
116
117 minrev, maxrev = min(revs), max(revs)
117 minrev, maxrev = min(revs), max(revs)
118 for file in files:
118 for file in files:
119 filelog = repo.file(file)
119 filelog = repo.file(file)
120 # A zero count may be a directory or deleted file, so
120 # A zero count may be a directory or deleted file, so
121 # try to find matching entries on the slow path.
121 # try to find matching entries on the slow path.
122 if filelog.count() == 0:
122 if filelog.count() == 0:
123 slowpath = True
123 slowpath = True
124 break
124 break
125 for rev in filerevgen(filelog):
125 for rev in filerevgen(filelog):
126 if rev <= maxrev:
126 if rev <= maxrev:
127 if rev < minrev:
127 if rev < minrev:
128 break
128 break
129 fncache.setdefault(rev, [])
129 fncache.setdefault(rev, [])
130 fncache[rev].append(file)
130 fncache[rev].append(file)
131 wanted[rev] = 1
131 wanted[rev] = 1
132 if slowpath:
132 if slowpath:
133 # The slow path checks files modified in every changeset.
133 # The slow path checks files modified in every changeset.
134 def changerevgen():
134 def changerevgen():
135 for i in xrange(repo.changelog.count() - 1, -1, -window):
135 for i in xrange(repo.changelog.count() - 1, -1, -window):
136 for j in xrange(max(0, i - window), i + 1):
136 for j in xrange(max(0, i - window), i + 1):
137 yield j, getchange(j)[3]
137 yield j, getchange(j)[3]
138
138
139 for rev, changefiles in changerevgen():
139 for rev, changefiles in changerevgen():
140 matches = filter(matchfn, changefiles)
140 matches = filter(matchfn, changefiles)
141 if matches:
141 if matches:
142 fncache[rev] = matches
142 fncache[rev] = matches
143 wanted[rev] = 1
143 wanted[rev] = 1
144
144
145 def iterate():
145 def iterate():
146 for i in xrange(0, len(revs), window):
146 for i in xrange(0, len(revs), window):
147 yield 'window', revs[0] < revs[-1], revs[-1]
147 yield 'window', revs[0] < revs[-1], revs[-1]
148 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
148 nrevs = [rev for rev in revs[i:min(i+window, len(revs))]
149 if rev in wanted]
149 if rev in wanted]
150 srevs = list(nrevs)
150 srevs = list(nrevs)
151 srevs.sort()
151 srevs.sort()
152 for rev in srevs:
152 for rev in srevs:
153 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
153 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
154 yield 'add', rev, fns
154 yield 'add', rev, fns
155 for rev in nrevs:
155 for rev in nrevs:
156 yield 'iter', rev, None
156 yield 'iter', rev, None
157 return iterate(), getchange, matchfn
157 return iterate(), getchange, matchfn
158
158
159 revrangesep = ':'
159 revrangesep = ':'
160
160
161 def revrange(ui, repo, revs, revlog=None):
161 def revrange(ui, repo, revs, revlog=None):
162 """Yield revision as strings from a list of revision specifications."""
162 """Yield revision as strings from a list of revision specifications."""
163 if revlog is None:
163 if revlog is None:
164 revlog = repo.changelog
164 revlog = repo.changelog
165 revcount = revlog.count()
165 revcount = revlog.count()
166 def fix(val, defval):
166 def fix(val, defval):
167 if not val:
167 if not val:
168 return defval
168 return defval
169 try:
169 try:
170 num = int(val)
170 num = int(val)
171 if str(num) != val:
171 if str(num) != val:
172 raise ValueError
172 raise ValueError
173 if num < 0: num += revcount
173 if num < 0: num += revcount
174 if num < 0: num = 0
174 if num < 0: num = 0
175 elif num >= revcount:
175 elif num >= revcount:
176 raise ValueError
176 raise ValueError
177 except ValueError:
177 except ValueError:
178 try:
178 try:
179 num = repo.changelog.rev(repo.lookup(val))
179 num = repo.changelog.rev(repo.lookup(val))
180 except KeyError:
180 except KeyError:
181 try:
181 try:
182 num = revlog.rev(revlog.lookup(val))
182 num = revlog.rev(revlog.lookup(val))
183 except KeyError:
183 except KeyError:
184 raise util.Abort(_('invalid revision identifier %s'), val)
184 raise util.Abort(_('invalid revision identifier %s'), val)
185 return num
185 return num
186 seen = {}
186 seen = {}
187 for spec in revs:
187 for spec in revs:
188 if spec.find(revrangesep) >= 0:
188 if spec.find(revrangesep) >= 0:
189 start, end = spec.split(revrangesep, 1)
189 start, end = spec.split(revrangesep, 1)
190 start = fix(start, 0)
190 start = fix(start, 0)
191 end = fix(end, revcount - 1)
191 end = fix(end, revcount - 1)
192 step = start > end and -1 or 1
192 step = start > end and -1 or 1
193 for rev in xrange(start, end+step, step):
193 for rev in xrange(start, end+step, step):
194 if rev in seen: continue
194 if rev in seen: continue
195 seen[rev] = 1
195 seen[rev] = 1
196 yield str(rev)
196 yield str(rev)
197 else:
197 else:
198 rev = fix(spec, None)
198 rev = fix(spec, None)
199 if rev in seen: continue
199 if rev in seen: continue
200 seen[rev] = 1
200 seen[rev] = 1
201 yield str(rev)
201 yield str(rev)
202
202
203 def make_filename(repo, r, pat, node=None,
203 def make_filename(repo, r, pat, node=None,
204 total=None, seqno=None, revwidth=None, pathname=None):
204 total=None, seqno=None, revwidth=None, pathname=None):
205 node_expander = {
205 node_expander = {
206 'H': lambda: hex(node),
206 'H': lambda: hex(node),
207 'R': lambda: str(r.rev(node)),
207 'R': lambda: str(r.rev(node)),
208 'h': lambda: short(node),
208 'h': lambda: short(node),
209 }
209 }
210 expander = {
210 expander = {
211 '%': lambda: '%',
211 '%': lambda: '%',
212 'b': lambda: os.path.basename(repo.root),
212 'b': lambda: os.path.basename(repo.root),
213 }
213 }
214
214
215 try:
215 try:
216 if node:
216 if node:
217 expander.update(node_expander)
217 expander.update(node_expander)
218 if node and revwidth is not None:
218 if node and revwidth is not None:
219 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
219 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
220 if total is not None:
220 if total is not None:
221 expander['N'] = lambda: str(total)
221 expander['N'] = lambda: str(total)
222 if seqno is not None:
222 if seqno is not None:
223 expander['n'] = lambda: str(seqno)
223 expander['n'] = lambda: str(seqno)
224 if total is not None and seqno is not None:
224 if total is not None and seqno is not None:
225 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
225 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
226 if pathname is not None:
226 if pathname is not None:
227 expander['s'] = lambda: os.path.basename(pathname)
227 expander['s'] = lambda: os.path.basename(pathname)
228 expander['d'] = lambda: os.path.dirname(pathname) or '.'
228 expander['d'] = lambda: os.path.dirname(pathname) or '.'
229 expander['p'] = lambda: pathname
229 expander['p'] = lambda: pathname
230
230
231 newname = []
231 newname = []
232 patlen = len(pat)
232 patlen = len(pat)
233 i = 0
233 i = 0
234 while i < patlen:
234 while i < patlen:
235 c = pat[i]
235 c = pat[i]
236 if c == '%':
236 if c == '%':
237 i += 1
237 i += 1
238 c = pat[i]
238 c = pat[i]
239 c = expander[c]()
239 c = expander[c]()
240 newname.append(c)
240 newname.append(c)
241 i += 1
241 i += 1
242 return ''.join(newname)
242 return ''.join(newname)
243 except KeyError, inst:
243 except KeyError, inst:
244 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
244 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
245 inst.args[0])
245 inst.args[0])
246
246
247 def make_file(repo, r, pat, node=None,
247 def make_file(repo, r, pat, node=None,
248 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
248 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
249 if not pat or pat == '-':
249 if not pat or pat == '-':
250 return 'w' in mode and sys.stdout or sys.stdin
250 return 'w' in mode and sys.stdout or sys.stdin
251 if hasattr(pat, 'write') and 'w' in mode:
251 if hasattr(pat, 'write') and 'w' in mode:
252 return pat
252 return pat
253 if hasattr(pat, 'read') and 'r' in mode:
253 if hasattr(pat, 'read') and 'r' in mode:
254 return pat
254 return pat
255 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
255 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
256 pathname),
256 pathname),
257 mode)
257 mode)
258
258
259 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
259 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
260 changes=None, text=False):
260 changes=None, text=False):
261 if not changes:
261 if not changes:
262 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
262 (c, a, d, u) = repo.changes(node1, node2, files, match=match)
263 else:
263 else:
264 (c, a, d, u) = changes
264 (c, a, d, u) = changes
265 if files:
265 if files:
266 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
266 c, a, d = map(lambda x: filterfiles(files, x), (c, a, d))
267
267
268 if not c and not a and not d:
268 if not c and not a and not d:
269 return
269 return
270
270
271 if node2:
271 if node2:
272 change = repo.changelog.read(node2)
272 change = repo.changelog.read(node2)
273 mmap2 = repo.manifest.read(change[0])
273 mmap2 = repo.manifest.read(change[0])
274 date2 = util.datestr(change[2])
274 date2 = util.datestr(change[2])
275 def read(f):
275 def read(f):
276 return repo.file(f).read(mmap2[f])
276 return repo.file(f).read(mmap2[f])
277 else:
277 else:
278 date2 = util.datestr()
278 date2 = util.datestr()
279 if not node1:
279 if not node1:
280 node1 = repo.dirstate.parents()[0]
280 node1 = repo.dirstate.parents()[0]
281 def read(f):
281 def read(f):
282 return repo.wfile(f).read()
282 return repo.wfile(f).read()
283
283
284 if ui.quiet:
284 if ui.quiet:
285 r = None
285 r = None
286 else:
286 else:
287 hexfunc = ui.verbose and hex or short
287 hexfunc = ui.verbose and hex or short
288 r = [hexfunc(node) for node in [node1, node2] if node]
288 r = [hexfunc(node) for node in [node1, node2] if node]
289
289
290 change = repo.changelog.read(node1)
290 change = repo.changelog.read(node1)
291 mmap = repo.manifest.read(change[0])
291 mmap = repo.manifest.read(change[0])
292 date1 = util.datestr(change[2])
292 date1 = util.datestr(change[2])
293
293
294 for f in c:
294 for f in c:
295 to = None
295 to = None
296 if f in mmap:
296 if f in mmap:
297 to = repo.file(f).read(mmap[f])
297 to = repo.file(f).read(mmap[f])
298 tn = read(f)
298 tn = read(f)
299 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
299 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
300 for f in a:
300 for f in a:
301 to = None
301 to = None
302 tn = read(f)
302 tn = read(f)
303 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
303 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
304 for f in d:
304 for f in d:
305 to = repo.file(f).read(mmap[f])
305 to = repo.file(f).read(mmap[f])
306 tn = None
306 tn = None
307 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
307 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text))
308
308
309 def trimuser(ui, name, rev, revcache):
309 def trimuser(ui, name, rev, revcache):
310 """trim the name of the user who committed a change"""
310 """trim the name of the user who committed a change"""
311 user = revcache.get(rev)
311 user = revcache.get(rev)
312 if user is None:
312 if user is None:
313 user = revcache[rev] = ui.shortuser(name)
313 user = revcache[rev] = ui.shortuser(name)
314 return user
314 return user
315
315
316 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
316 def show_changeset(ui, repo, rev=0, changenode=None, brinfo=None):
317 """show a single changeset or file revision"""
317 """show a single changeset or file revision"""
318 log = repo.changelog
318 log = repo.changelog
319 if changenode is None:
319 if changenode is None:
320 changenode = log.node(rev)
320 changenode = log.node(rev)
321 elif not rev:
321 elif not rev:
322 rev = log.rev(changenode)
322 rev = log.rev(changenode)
323
323
324 if ui.quiet:
324 if ui.quiet:
325 ui.write("%d:%s\n" % (rev, short(changenode)))
325 ui.write("%d:%s\n" % (rev, short(changenode)))
326 return
326 return
327
327
328 changes = log.read(changenode)
328 changes = log.read(changenode)
329 date = util.datestr(changes[2])
329 date = util.datestr(changes[2])
330
330
331 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
331 parents = [(log.rev(p), ui.verbose and hex(p) or short(p))
332 for p in log.parents(changenode)
332 for p in log.parents(changenode)
333 if ui.debugflag or p != nullid]
333 if ui.debugflag or p != nullid]
334 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
334 if not ui.debugflag and len(parents) == 1 and parents[0][0] == rev-1:
335 parents = []
335 parents = []
336
336
337 if ui.verbose:
337 if ui.verbose:
338 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
338 ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
339 else:
339 else:
340 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
340 ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
341
341
342 for tag in repo.nodetags(changenode):
342 for tag in repo.nodetags(changenode):
343 ui.status(_("tag: %s\n") % tag)
343 ui.status(_("tag: %s\n") % tag)
344 for parent in parents:
344 for parent in parents:
345 ui.write(_("parent: %d:%s\n") % parent)
345 ui.write(_("parent: %d:%s\n") % parent)
346
346
347 if brinfo and changenode in brinfo:
347 if brinfo and changenode in brinfo:
348 br = brinfo[changenode]
348 br = brinfo[changenode]
349 ui.write(_("branch: %s\n") % " ".join(br))
349 ui.write(_("branch: %s\n") % " ".join(br))
350
350
351 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
351 ui.debug(_("manifest: %d:%s\n") % (repo.manifest.rev(changes[0]),
352 hex(changes[0])))
352 hex(changes[0])))
353 ui.status(_("user: %s\n") % changes[1])
353 ui.status(_("user: %s\n") % changes[1])
354 ui.status(_("date: %s\n") % date)
354 ui.status(_("date: %s\n") % date)
355
355
356 if ui.debugflag:
356 if ui.debugflag:
357 files = repo.changes(log.parents(changenode)[0], changenode)
357 files = repo.changes(log.parents(changenode)[0], changenode)
358 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
358 for key, value in zip([_("files:"), _("files+:"), _("files-:")], files):
359 if value:
359 if value:
360 ui.note("%-12s %s\n" % (key, " ".join(value)))
360 ui.note("%-12s %s\n" % (key, " ".join(value)))
361 else:
361 else:
362 ui.note(_("files: %s\n") % " ".join(changes[3]))
362 ui.note(_("files: %s\n") % " ".join(changes[3]))
363
363
364 description = changes[4].strip()
364 description = changes[4].strip()
365 if description:
365 if description:
366 if ui.verbose:
366 if ui.verbose:
367 ui.status(_("description:\n"))
367 ui.status(_("description:\n"))
368 ui.status(description)
368 ui.status(description)
369 ui.status("\n\n")
369 ui.status("\n\n")
370 else:
370 else:
371 ui.status(_("summary: %s\n") % description.splitlines()[0])
371 ui.status(_("summary: %s\n") % description.splitlines()[0])
372 ui.status("\n")
372 ui.status("\n")
373
373
374 def show_version(ui):
374 def show_version(ui):
375 """output version and copyright information"""
375 """output version and copyright information"""
376 ui.write(_("Mercurial Distributed SCM (version %s)\n")
376 ui.write(_("Mercurial Distributed SCM (version %s)\n")
377 % version.get_version())
377 % version.get_version())
378 ui.status(_(
378 ui.status(_(
379 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
379 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
380 "This is free software; see the source for copying conditions. "
380 "This is free software; see the source for copying conditions. "
381 "There is NO\nwarranty; "
381 "There is NO\nwarranty; "
382 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
382 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
383 ))
383 ))
384
384
385 def help_(ui, cmd=None, with_version=False):
385 def help_(ui, cmd=None, with_version=False):
386 """show help for a given command or all commands"""
386 """show help for a given command or all commands"""
387 option_lists = []
387 option_lists = []
388 if cmd and cmd != 'shortlist':
388 if cmd and cmd != 'shortlist':
389 if with_version:
389 if with_version:
390 show_version(ui)
390 show_version(ui)
391 ui.write('\n')
391 ui.write('\n')
392 aliases, i = find(cmd)
392 aliases, i = find(cmd)
393 # synopsis
393 # synopsis
394 ui.write("%s\n\n" % i[2])
394 ui.write("%s\n\n" % i[2])
395
395
396 # description
396 # description
397 doc = i[0].__doc__
397 doc = i[0].__doc__
398 if ui.quiet:
398 if ui.quiet:
399 doc = doc.splitlines(0)[0]
399 doc = doc.splitlines(0)[0]
400 ui.write("%s\n" % doc.rstrip())
400 ui.write("%s\n" % doc.rstrip())
401
401
402 if not ui.quiet:
402 if not ui.quiet:
403 # aliases
403 # aliases
404 if len(aliases) > 1:
404 if len(aliases) > 1:
405 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
405 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
406
406
407 # options
407 # options
408 if i[1]:
408 if i[1]:
409 option_lists.append(("options", i[1]))
409 option_lists.append(("options", i[1]))
410
410
411 else:
411 else:
412 # program name
412 # program name
413 if ui.verbose or with_version:
413 if ui.verbose or with_version:
414 show_version(ui)
414 show_version(ui)
415 else:
415 else:
416 ui.status(_("Mercurial Distributed SCM\n"))
416 ui.status(_("Mercurial Distributed SCM\n"))
417 ui.status('\n')
417 ui.status('\n')
418
418
419 # list of commands
419 # list of commands
420 if cmd == "shortlist":
420 if cmd == "shortlist":
421 ui.status(_('basic commands (use "hg help" '
421 ui.status(_('basic commands (use "hg help" '
422 'for the full list or option "-v" for details):\n\n'))
422 'for the full list or option "-v" for details):\n\n'))
423 elif ui.verbose:
423 elif ui.verbose:
424 ui.status(_('list of commands:\n\n'))
424 ui.status(_('list of commands:\n\n'))
425 else:
425 else:
426 ui.status(_('list of commands (use "hg help -v" '
426 ui.status(_('list of commands (use "hg help -v" '
427 'to show aliases and global options):\n\n'))
427 'to show aliases and global options):\n\n'))
428
428
429 h = {}
429 h = {}
430 cmds = {}
430 cmds = {}
431 for c, e in table.items():
431 for c, e in table.items():
432 f = c.split("|")[0]
432 f = c.split("|")[0]
433 if cmd == "shortlist" and not f.startswith("^"):
433 if cmd == "shortlist" and not f.startswith("^"):
434 continue
434 continue
435 f = f.lstrip("^")
435 f = f.lstrip("^")
436 if not ui.debugflag and f.startswith("debug"):
436 if not ui.debugflag and f.startswith("debug"):
437 continue
437 continue
438 d = ""
438 d = ""
439 if e[0].__doc__:
439 if e[0].__doc__:
440 d = e[0].__doc__.splitlines(0)[0].rstrip()
440 d = e[0].__doc__.splitlines(0)[0].rstrip()
441 h[f] = d
441 h[f] = d
442 cmds[f]=c.lstrip("^")
442 cmds[f]=c.lstrip("^")
443
443
444 fns = h.keys()
444 fns = h.keys()
445 fns.sort()
445 fns.sort()
446 m = max(map(len, fns))
446 m = max(map(len, fns))
447 for f in fns:
447 for f in fns:
448 if ui.verbose:
448 if ui.verbose:
449 commands = cmds[f].replace("|",", ")
449 commands = cmds[f].replace("|",", ")
450 ui.write(" %s:\n %s\n"%(commands,h[f]))
450 ui.write(" %s:\n %s\n"%(commands,h[f]))
451 else:
451 else:
452 ui.write(' %-*s %s\n' % (m, f, h[f]))
452 ui.write(' %-*s %s\n' % (m, f, h[f]))
453
453
454 # global options
454 # global options
455 if ui.verbose:
455 if ui.verbose:
456 option_lists.append(("global options", globalopts))
456 option_lists.append(("global options", globalopts))
457
457
458 # list all option lists
458 # list all option lists
459 opt_output = []
459 opt_output = []
460 for title, options in option_lists:
460 for title, options in option_lists:
461 opt_output.append(("\n%s:\n" % title, None))
461 opt_output.append(("\n%s:\n" % title, None))
462 for shortopt, longopt, default, desc in options:
462 for shortopt, longopt, default, desc in options:
463 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
463 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
464 longopt and " --%s" % longopt),
464 longopt and " --%s" % longopt),
465 "%s%s" % (desc,
465 "%s%s" % (desc,
466 default and _(" (default: %s)") % default
466 default
467 and _(" (default: %s)") % default
467 or "")))
468 or "")))
468
469
469 if opt_output:
470 if opt_output:
470 opts_len = max([len(line[0]) for line in opt_output if line[1]])
471 opts_len = max([len(line[0]) for line in opt_output if line[1]])
471 for first, second in opt_output:
472 for first, second in opt_output:
472 if second:
473 if second:
473 ui.write(" %-*s %s\n" % (opts_len, first, second))
474 ui.write(" %-*s %s\n" % (opts_len, first, second))
474 else:
475 else:
475 ui.write("%s\n" % first)
476 ui.write("%s\n" % first)
476
477
477 # Commands start here, listed alphabetically
478 # Commands start here, listed alphabetically
478
479
479 def add(ui, repo, *pats, **opts):
480 def add(ui, repo, *pats, **opts):
480 """add the specified files on the next commit
481 """add the specified files on the next commit
481
482
482 Schedule files to be version controlled and added to the repository.
483 Schedule files to be version controlled and added to the repository.
483
484
484 The files will be added to the repository at the next commit.
485 The files will be added to the repository at the next commit.
485
486
486 If no names are given, add all files in the repository.
487 If no names are given, add all files in the repository.
487 """
488 """
488
489
489 names = []
490 names = []
490 for src, abs, rel, exact in walk(repo, pats, opts):
491 for src, abs, rel, exact in walk(repo, pats, opts):
491 if exact:
492 if exact:
492 if ui.verbose: ui.status(_('adding %s\n') % rel)
493 if ui.verbose: ui.status(_('adding %s\n') % rel)
493 names.append(abs)
494 names.append(abs)
494 elif repo.dirstate.state(abs) == '?':
495 elif repo.dirstate.state(abs) == '?':
495 ui.status(_('adding %s\n') % rel)
496 ui.status(_('adding %s\n') % rel)
496 names.append(abs)
497 names.append(abs)
497 repo.add(names)
498 repo.add(names)
498
499
499 def addremove(ui, repo, *pats, **opts):
500 def addremove(ui, repo, *pats, **opts):
500 """add all new files, delete all missing files
501 """add all new files, delete all missing files
501
502
502 Add all new files and remove all missing files from the repository.
503 Add all new files and remove all missing files from the repository.
503
504
504 New files are ignored if they match any of the patterns in .hgignore. As
505 New files are ignored if they match any of the patterns in .hgignore. As
505 with add, these changes take effect at the next commit.
506 with add, these changes take effect at the next commit.
506 """
507 """
507 add, remove = [], []
508 add, remove = [], []
508 for src, abs, rel, exact in walk(repo, pats, opts):
509 for src, abs, rel, exact in walk(repo, pats, opts):
509 if src == 'f' and repo.dirstate.state(abs) == '?':
510 if src == 'f' and repo.dirstate.state(abs) == '?':
510 add.append(abs)
511 add.append(abs)
511 if ui.verbose or not exact:
512 if ui.verbose or not exact:
512 ui.status(_('adding %s\n') % rel)
513 ui.status(_('adding %s\n') % rel)
513 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
514 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
514 remove.append(abs)
515 remove.append(abs)
515 if ui.verbose or not exact:
516 if ui.verbose or not exact:
516 ui.status(_('removing %s\n') % rel)
517 ui.status(_('removing %s\n') % rel)
517 repo.add(add)
518 repo.add(add)
518 repo.remove(remove)
519 repo.remove(remove)
519
520
520 def annotate(ui, repo, *pats, **opts):
521 def annotate(ui, repo, *pats, **opts):
521 """show changeset information per file line
522 """show changeset information per file line
522
523
523 List changes in files, showing the revision id responsible for each line
524 List changes in files, showing the revision id responsible for each line
524
525
525 This command is useful to discover who did a change or when a change took
526 This command is useful to discover who did a change or when a change took
526 place.
527 place.
527
528
528 Without the -a option, annotate will avoid processing files it
529 Without the -a option, annotate will avoid processing files it
529 detects as binary. With -a, annotate will generate an annotation
530 detects as binary. With -a, annotate will generate an annotation
530 anyway, probably with undesirable results.
531 anyway, probably with undesirable results.
531 """
532 """
532 def getnode(rev):
533 def getnode(rev):
533 return short(repo.changelog.node(rev))
534 return short(repo.changelog.node(rev))
534
535
535 ucache = {}
536 ucache = {}
536 def getname(rev):
537 def getname(rev):
537 cl = repo.changelog.read(repo.changelog.node(rev))
538 cl = repo.changelog.read(repo.changelog.node(rev))
538 return trimuser(ui, cl[1], rev, ucache)
539 return trimuser(ui, cl[1], rev, ucache)
539
540
540 dcache = {}
541 dcache = {}
541 def getdate(rev):
542 def getdate(rev):
542 datestr = dcache.get(rev)
543 datestr = dcache.get(rev)
543 if datestr is None:
544 if datestr is None:
544 cl = repo.changelog.read(repo.changelog.node(rev))
545 cl = repo.changelog.read(repo.changelog.node(rev))
545 datestr = dcache[rev] = util.datestr(cl[2])
546 datestr = dcache[rev] = util.datestr(cl[2])
546 return datestr
547 return datestr
547
548
548 if not pats:
549 if not pats:
549 raise util.Abort(_('at least one file name or pattern required'))
550 raise util.Abort(_('at least one file name or pattern required'))
550
551
551 opmap = [['user', getname], ['number', str], ['changeset', getnode],
552 opmap = [['user', getname], ['number', str], ['changeset', getnode],
552 ['date', getdate]]
553 ['date', getdate]]
553 if not opts['user'] and not opts['changeset'] and not opts['date']:
554 if not opts['user'] and not opts['changeset'] and not opts['date']:
554 opts['number'] = 1
555 opts['number'] = 1
555
556
556 if opts['rev']:
557 if opts['rev']:
557 node = repo.changelog.lookup(opts['rev'])
558 node = repo.changelog.lookup(opts['rev'])
558 else:
559 else:
559 node = repo.dirstate.parents()[0]
560 node = repo.dirstate.parents()[0]
560 change = repo.changelog.read(node)
561 change = repo.changelog.read(node)
561 mmap = repo.manifest.read(change[0])
562 mmap = repo.manifest.read(change[0])
562
563
563 for src, abs, rel, exact in walk(repo, pats, opts):
564 for src, abs, rel, exact in walk(repo, pats, opts):
564 if abs not in mmap:
565 if abs not in mmap:
565 ui.warn(_("warning: %s is not in the repository!\n") % rel)
566 ui.warn(_("warning: %s is not in the repository!\n") % rel)
566 continue
567 continue
567
568
568 f = repo.file(abs)
569 f = repo.file(abs)
569 if not opts['text'] and util.binary(f.read(mmap[abs])):
570 if not opts['text'] and util.binary(f.read(mmap[abs])):
570 ui.write(_("%s: binary file\n") % rel)
571 ui.write(_("%s: binary file\n") % rel)
571 continue
572 continue
572
573
573 lines = f.annotate(mmap[abs])
574 lines = f.annotate(mmap[abs])
574 pieces = []
575 pieces = []
575
576
576 for o, f in opmap:
577 for o, f in opmap:
577 if opts[o]:
578 if opts[o]:
578 l = [f(n) for n, dummy in lines]
579 l = [f(n) for n, dummy in lines]
579 if l:
580 if l:
580 m = max(map(len, l))
581 m = max(map(len, l))
581 pieces.append(["%*s" % (m, x) for x in l])
582 pieces.append(["%*s" % (m, x) for x in l])
582
583
583 if pieces:
584 if pieces:
584 for p, l in zip(zip(*pieces), lines):
585 for p, l in zip(zip(*pieces), lines):
585 ui.write("%s: %s" % (" ".join(p), l[1]))
586 ui.write("%s: %s" % (" ".join(p), l[1]))
586
587
587 def bundle(ui, repo, fname, dest="default-push", **opts):
588 def bundle(ui, repo, fname, dest="default-push", **opts):
588 """create a changegroup file
589 """create a changegroup file
589
590
590 Generate a compressed changegroup file collecting all changesets
591 Generate a compressed changegroup file collecting all changesets
591 not found in the other repository.
592 not found in the other repository.
592
593
593 This file can then be transferred using conventional means and
594 This file can then be transferred using conventional means and
594 applied to another repository with the unbundle command. This is
595 applied to another repository with the unbundle command. This is
595 useful when native push and pull are not available or when
596 useful when native push and pull are not available or when
596 exporting an entire repository is undesirable. The standard file
597 exporting an entire repository is undesirable. The standard file
597 extension is ".hg".
598 extension is ".hg".
598
599
599 Unlike import/export, this exactly preserves all changeset
600 Unlike import/export, this exactly preserves all changeset
600 contents including permissions, rename data, and revision history.
601 contents including permissions, rename data, and revision history.
601 """
602 """
602 f = open(fname, "wb")
603 f = open(fname, "wb")
603 dest = ui.expandpath(dest, repo.root)
604 dest = ui.expandpath(dest, repo.root)
604 other = hg.repository(ui, dest)
605 other = hg.repository(ui, dest)
605 o = repo.findoutgoing(other)
606 o = repo.findoutgoing(other)
606 cg = repo.changegroup(o)
607 cg = repo.changegroup(o)
607
608
608 try:
609 try:
609 f.write("HG10")
610 f.write("HG10")
610 z = bz2.BZ2Compressor(9)
611 z = bz2.BZ2Compressor(9)
611 while 1:
612 while 1:
612 chunk = cg.read(4096)
613 chunk = cg.read(4096)
613 if not chunk:
614 if not chunk:
614 break
615 break
615 f.write(z.compress(chunk))
616 f.write(z.compress(chunk))
616 f.write(z.flush())
617 f.write(z.flush())
617 except:
618 except:
618 os.unlink(fname)
619 os.unlink(fname)
619 raise
620 raise
620
621
621 def cat(ui, repo, file1, *pats, **opts):
622 def cat(ui, repo, file1, *pats, **opts):
622 """output the latest or given revisions of files
623 """output the latest or given revisions of files
623
624
624 Print the specified files as they were at the given revision.
625 Print the specified files as they were at the given revision.
625 If no revision is given then the tip is used.
626 If no revision is given then the tip is used.
626
627
627 Output may be to a file, in which case the name of the file is
628 Output may be to a file, in which case the name of the file is
628 given using a format string. The formatting rules are the same as
629 given using a format string. The formatting rules are the same as
629 for the export command, with the following additions:
630 for the export command, with the following additions:
630
631
631 %s basename of file being printed
632 %s basename of file being printed
632 %d dirname of file being printed, or '.' if in repo root
633 %d dirname of file being printed, or '.' if in repo root
633 %p root-relative path name of file being printed
634 %p root-relative path name of file being printed
634 """
635 """
635 mf = {}
636 mf = {}
636 rev = opts['rev']
637 rev = opts['rev']
637 if rev:
638 if rev:
638 node = repo.lookup(rev)
639 node = repo.lookup(rev)
639 else:
640 else:
640 node = repo.changelog.tip()
641 node = repo.changelog.tip()
641 change = repo.changelog.read(node)
642 change = repo.changelog.read(node)
642 mf = repo.manifest.read(change[0])
643 mf = repo.manifest.read(change[0])
643 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
644 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
644 r = repo.file(abs)
645 r = repo.file(abs)
645 n = mf[abs]
646 n = mf[abs]
646 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
647 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
647 fp.write(r.read(n))
648 fp.write(r.read(n))
648
649
649 def clone(ui, source, dest=None, **opts):
650 def clone(ui, source, dest=None, **opts):
650 """make a copy of an existing repository
651 """make a copy of an existing repository
651
652
652 Create a copy of an existing repository in a new directory.
653 Create a copy of an existing repository in a new directory.
653
654
654 If no destination directory name is specified, it defaults to the
655 If no destination directory name is specified, it defaults to the
655 basename of the source.
656 basename of the source.
656
657
657 The location of the source is added to the new repository's
658 The location of the source is added to the new repository's
658 .hg/hgrc file, as the default to be used for future pulls.
659 .hg/hgrc file, as the default to be used for future pulls.
659
660
660 For efficiency, hardlinks are used for cloning whenever the source
661 For efficiency, hardlinks are used for cloning whenever the source
661 and destination are on the same filesystem. Some filesystems,
662 and destination are on the same filesystem. Some filesystems,
662 such as AFS, implement hardlinking incorrectly, but do not report
663 such as AFS, implement hardlinking incorrectly, but do not report
663 errors. In these cases, use the --pull option to avoid
664 errors. In these cases, use the --pull option to avoid
664 hardlinking.
665 hardlinking.
665 """
666 """
666 if dest is None:
667 if dest is None:
667 dest = os.path.basename(os.path.normpath(source))
668 dest = os.path.basename(os.path.normpath(source))
668
669
669 if os.path.exists(dest):
670 if os.path.exists(dest):
670 raise util.Abort(_("destination '%s' already exists"), dest)
671 raise util.Abort(_("destination '%s' already exists"), dest)
671
672
672 dest = os.path.realpath(dest)
673 dest = os.path.realpath(dest)
673
674
674 class Dircleanup(object):
675 class Dircleanup(object):
675 def __init__(self, dir_):
676 def __init__(self, dir_):
676 self.rmtree = shutil.rmtree
677 self.rmtree = shutil.rmtree
677 self.dir_ = dir_
678 self.dir_ = dir_
678 os.mkdir(dir_)
679 os.mkdir(dir_)
679 def close(self):
680 def close(self):
680 self.dir_ = None
681 self.dir_ = None
681 def __del__(self):
682 def __del__(self):
682 if self.dir_:
683 if self.dir_:
683 self.rmtree(self.dir_, True)
684 self.rmtree(self.dir_, True)
684
685
685 if opts['ssh']:
686 if opts['ssh']:
686 ui.setconfig("ui", "ssh", opts['ssh'])
687 ui.setconfig("ui", "ssh", opts['ssh'])
687 if opts['remotecmd']:
688 if opts['remotecmd']:
688 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
689 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
689
690
690 if not os.path.exists(source):
691 if not os.path.exists(source):
691 source = ui.expandpath(source)
692 source = ui.expandpath(source)
692
693
693 d = Dircleanup(dest)
694 d = Dircleanup(dest)
694 abspath = source
695 abspath = source
695 other = hg.repository(ui, source)
696 other = hg.repository(ui, source)
696
697
697 copy = False
698 copy = False
698 if other.dev() != -1:
699 if other.dev() != -1:
699 abspath = os.path.abspath(source)
700 abspath = os.path.abspath(source)
700 if not opts['pull'] and not opts['rev']:
701 if not opts['pull'] and not opts['rev']:
701 copy = True
702 copy = True
702
703
703 if copy:
704 if copy:
704 try:
705 try:
705 # we use a lock here because if we race with commit, we
706 # we use a lock here because if we race with commit, we
706 # can end up with extra data in the cloned revlogs that's
707 # can end up with extra data in the cloned revlogs that's
707 # not pointed to by changesets, thus causing verify to
708 # not pointed to by changesets, thus causing verify to
708 # fail
709 # fail
709 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
710 l1 = lock.lock(os.path.join(source, ".hg", "lock"))
710 except OSError:
711 except OSError:
711 copy = False
712 copy = False
712
713
713 if copy:
714 if copy:
714 # we lock here to avoid premature writing to the target
715 # we lock here to avoid premature writing to the target
715 os.mkdir(os.path.join(dest, ".hg"))
716 os.mkdir(os.path.join(dest, ".hg"))
716 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
717 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
717
718
718 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
719 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
719 for f in files.split():
720 for f in files.split():
720 src = os.path.join(source, ".hg", f)
721 src = os.path.join(source, ".hg", f)
721 dst = os.path.join(dest, ".hg", f)
722 dst = os.path.join(dest, ".hg", f)
722 try:
723 try:
723 util.copyfiles(src, dst)
724 util.copyfiles(src, dst)
724 except OSError, inst:
725 except OSError, inst:
725 if inst.errno != errno.ENOENT: raise
726 if inst.errno != errno.ENOENT: raise
726
727
727 repo = hg.repository(ui, dest)
728 repo = hg.repository(ui, dest)
728
729
729 else:
730 else:
730 revs = None
731 revs = None
731 if opts['rev']:
732 if opts['rev']:
732 if not other.local():
733 if not other.local():
733 raise util.Abort("clone -r not supported yet for remote repositories.")
734 error = "clone -r not supported yet for remote repositories."
735 raise util.Abort(error)
734 else:
736 else:
735 revs = [other.lookup(rev) for rev in opts['rev']]
737 revs = [other.lookup(rev) for rev in opts['rev']]
736 repo = hg.repository(ui, dest, create=1)
738 repo = hg.repository(ui, dest, create=1)
737 repo.pull(other, heads = revs)
739 repo.pull(other, heads = revs)
738
740
739 f = repo.opener("hgrc", "w", text=True)
741 f = repo.opener("hgrc", "w", text=True)
740 f.write("[paths]\n")
742 f.write("[paths]\n")
741 f.write("default = %s\n" % abspath)
743 f.write("default = %s\n" % abspath)
742 f.close()
744 f.close()
743
745
744 if not opts['noupdate']:
746 if not opts['noupdate']:
745 update(ui, repo)
747 update(ui, repo)
746
748
747 d.close()
749 d.close()
748
750
749 def commit(ui, repo, *pats, **opts):
751 def commit(ui, repo, *pats, **opts):
750 """commit the specified files or all outstanding changes
752 """commit the specified files or all outstanding changes
751
753
752 Commit changes to the given files into the repository.
754 Commit changes to the given files into the repository.
753
755
754 If a list of files is omitted, all changes reported by "hg status"
756 If a list of files is omitted, all changes reported by "hg status"
755 will be commited.
757 will be commited.
756
758
757 The HGEDITOR or EDITOR environment variables are used to start an
759 The HGEDITOR or EDITOR environment variables are used to start an
758 editor to add a commit comment.
760 editor to add a commit comment.
759 """
761 """
760 message = opts['message']
762 message = opts['message']
761 logfile = opts['logfile']
763 logfile = opts['logfile']
762
764
763 if message and logfile:
765 if message and logfile:
764 raise util.Abort(_('options --message and --logfile are mutually '
766 raise util.Abort(_('options --message and --logfile are mutually '
765 'exclusive'))
767 'exclusive'))
766 if not message and logfile:
768 if not message and logfile:
767 try:
769 try:
768 if logfile == '-':
770 if logfile == '-':
769 message = sys.stdin.read()
771 message = sys.stdin.read()
770 else:
772 else:
771 message = open(logfile).read()
773 message = open(logfile).read()
772 except IOError, inst:
774 except IOError, inst:
773 raise util.Abort(_("can't read commit message '%s': %s") %
775 raise util.Abort(_("can't read commit message '%s': %s") %
774 (logfile, inst.strerror))
776 (logfile, inst.strerror))
775
777
776 if opts['addremove']:
778 if opts['addremove']:
777 addremove(ui, repo, *pats, **opts)
779 addremove(ui, repo, *pats, **opts)
778 fns, match, anypats, cwd = matchpats(repo, pats, opts)
780 fns, match, anypats, cwd = matchpats(repo, pats, opts)
779 if pats:
781 if pats:
780 c, a, d, u = repo.changes(files=fns, match=match)
782 c, a, d, u = repo.changes(files=fns, match=match)
781 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
783 files = c + a + [fn for fn in d if repo.dirstate.state(fn) == 'r']
782 else:
784 else:
783 files = []
785 files = []
784 try:
786 try:
785 repo.commit(files, message, opts['user'], opts['date'], match)
787 repo.commit(files, message, opts['user'], opts['date'], match)
786 except ValueError, inst:
788 except ValueError, inst:
787 raise util.Abort(str(inst))
789 raise util.Abort(str(inst))
788
790
789 def docopy(ui, repo, pats, opts):
791 def docopy(ui, repo, pats, opts):
790 cwd = repo.getcwd()
792 cwd = repo.getcwd()
791 errors = 0
793 errors = 0
792 copied = []
794 copied = []
793 targets = {}
795 targets = {}
794
796
795 def okaytocopy(abs, rel, exact):
797 def okaytocopy(abs, rel, exact):
796 reasons = {'?': _('is not managed'),
798 reasons = {'?': _('is not managed'),
797 'a': _('has been marked for add')}
799 'a': _('has been marked for add')}
798 reason = reasons.get(repo.dirstate.state(abs))
800 reason = reasons.get(repo.dirstate.state(abs))
799 if reason:
801 if reason:
800 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
802 if exact: ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
801 else:
803 else:
802 return True
804 return True
803
805
804 def copy(abssrc, relsrc, target, exact):
806 def copy(abssrc, relsrc, target, exact):
805 abstarget = util.canonpath(repo.root, cwd, target)
807 abstarget = util.canonpath(repo.root, cwd, target)
806 reltarget = util.pathto(cwd, abstarget)
808 reltarget = util.pathto(cwd, abstarget)
807 prevsrc = targets.get(abstarget)
809 prevsrc = targets.get(abstarget)
808 if prevsrc is not None:
810 if prevsrc is not None:
809 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
811 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
810 (reltarget, abssrc, prevsrc))
812 (reltarget, abssrc, prevsrc))
811 return
813 return
812 if (not opts['after'] and os.path.exists(reltarget) or
814 if (not opts['after'] and os.path.exists(reltarget) or
813 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
815 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
814 if not opts['force']:
816 if not opts['force']:
815 ui.warn(_('%s: not overwriting - file exists\n') %
817 ui.warn(_('%s: not overwriting - file exists\n') %
816 reltarget)
818 reltarget)
817 return
819 return
818 if not opts['after']:
820 if not opts['after']:
819 os.unlink(reltarget)
821 os.unlink(reltarget)
820 if opts['after']:
822 if opts['after']:
821 if not os.path.exists(reltarget):
823 if not os.path.exists(reltarget):
822 return
824 return
823 else:
825 else:
824 targetdir = os.path.dirname(reltarget) or '.'
826 targetdir = os.path.dirname(reltarget) or '.'
825 if not os.path.isdir(targetdir):
827 if not os.path.isdir(targetdir):
826 os.makedirs(targetdir)
828 os.makedirs(targetdir)
827 try:
829 try:
828 shutil.copyfile(relsrc, reltarget)
830 shutil.copyfile(relsrc, reltarget)
829 shutil.copymode(relsrc, reltarget)
831 shutil.copymode(relsrc, reltarget)
830 except shutil.Error, inst:
832 except shutil.Error, inst:
831 raise util.Abort(str(inst))
833 raise util.Abort(str(inst))
832 except IOError, inst:
834 except IOError, inst:
833 if inst.errno == errno.ENOENT:
835 if inst.errno == errno.ENOENT:
834 ui.warn(_('%s: deleted in working copy\n') % relsrc)
836 ui.warn(_('%s: deleted in working copy\n') % relsrc)
835 else:
837 else:
836 ui.warn(_('%s: cannot copy - %s\n') %
838 ui.warn(_('%s: cannot copy - %s\n') %
837 (relsrc, inst.strerror))
839 (relsrc, inst.strerror))
838 errors += 1
840 errors += 1
839 return
841 return
840 if ui.verbose or not exact:
842 if ui.verbose or not exact:
841 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
843 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
842 targets[abstarget] = abssrc
844 targets[abstarget] = abssrc
843 repo.copy(abssrc, abstarget)
845 repo.copy(abssrc, abstarget)
844 copied.append((abssrc, relsrc, exact))
846 copied.append((abssrc, relsrc, exact))
845
847
846 def targetpathfn(pat, dest, srcs):
848 def targetpathfn(pat, dest, srcs):
847 if os.path.isdir(pat):
849 if os.path.isdir(pat):
848 if pat.endswith(os.sep):
850 if pat.endswith(os.sep):
849 pat = pat[:-len(os.sep)]
851 pat = pat[:-len(os.sep)]
850 if destdirexists:
852 if destdirexists:
851 striplen = len(os.path.split(pat)[0])
853 striplen = len(os.path.split(pat)[0])
852 else:
854 else:
853 striplen = len(pat)
855 striplen = len(pat)
854 if striplen:
856 if striplen:
855 striplen += len(os.sep)
857 striplen += len(os.sep)
856 res = lambda p: os.path.join(dest, p[striplen:])
858 res = lambda p: os.path.join(dest, p[striplen:])
857 elif destdirexists:
859 elif destdirexists:
858 res = lambda p: os.path.join(dest, os.path.basename(p))
860 res = lambda p: os.path.join(dest, os.path.basename(p))
859 else:
861 else:
860 res = lambda p: dest
862 res = lambda p: dest
861 return res
863 return res
862
864
863 def targetpathafterfn(pat, dest, srcs):
865 def targetpathafterfn(pat, dest, srcs):
864 if util.patkind(pat, None)[0]:
866 if util.patkind(pat, None)[0]:
865 # a mercurial pattern
867 # a mercurial pattern
866 res = lambda p: os.path.join(dest, os.path.basename(p))
868 res = lambda p: os.path.join(dest, os.path.basename(p))
867 elif len(util.canonpath(repo.root, cwd, pat)) < len(srcs[0][0]):
869 elif len(util.canonpath(repo.root, cwd, pat)) < len(srcs[0][0]):
868 # A directory. Either the target path contains the last
870 # A directory. Either the target path contains the last
869 # component of the source path or it does not.
871 # component of the source path or it does not.
870 def evalpath(striplen):
872 def evalpath(striplen):
871 score = 0
873 score = 0
872 for s in srcs:
874 for s in srcs:
873 t = os.path.join(dest, s[1][striplen:])
875 t = os.path.join(dest, s[1][striplen:])
874 if os.path.exists(t):
876 if os.path.exists(t):
875 score += 1
877 score += 1
876 return score
878 return score
877
879
878 if pat.endswith(os.sep):
880 if pat.endswith(os.sep):
879 pat = pat[:-len(os.sep)]
881 pat = pat[:-len(os.sep)]
880 striplen = len(pat) + len(os.sep)
882 striplen = len(pat) + len(os.sep)
881 if os.path.isdir(os.path.join(dest, os.path.split(pat)[1])):
883 if os.path.isdir(os.path.join(dest, os.path.split(pat)[1])):
882 score = evalpath(striplen)
884 score = evalpath(striplen)
883 striplen1 = len(os.path.split(pat)[0])
885 striplen1 = len(os.path.split(pat)[0])
884 if striplen1:
886 if striplen1:
885 striplen1 += len(os.sep)
887 striplen1 += len(os.sep)
886 if evalpath(striplen1) > score:
888 if evalpath(striplen1) > score:
887 striplen = striplen1
889 striplen = striplen1
888 res = lambda p: os.path.join(dest, p[striplen:])
890 res = lambda p: os.path.join(dest, p[striplen:])
889 else:
891 else:
890 # a file
892 # a file
891 if destdirexists:
893 if destdirexists:
892 res = lambda p: os.path.join(dest, os.path.basename(p))
894 res = lambda p: os.path.join(dest, os.path.basename(p))
893 else:
895 else:
894 res = lambda p: dest
896 res = lambda p: dest
895 return res
897 return res
896
898
897
899
898 pats = list(pats)
900 pats = list(pats)
899 if not pats:
901 if not pats:
900 raise util.Abort(_('no source or destination specified'))
902 raise util.Abort(_('no source or destination specified'))
901 if len(pats) == 1:
903 if len(pats) == 1:
902 raise util.Abort(_('no destination specified'))
904 raise util.Abort(_('no destination specified'))
903 dest = pats.pop()
905 dest = pats.pop()
904 destdirexists = os.path.isdir(dest)
906 destdirexists = os.path.isdir(dest)
905 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
907 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
906 raise util.Abort(_('with multiple sources, destination must be an '
908 raise util.Abort(_('with multiple sources, destination must be an '
907 'existing directory'))
909 'existing directory'))
908 if opts['after']:
910 if opts['after']:
909 tfn = targetpathafterfn
911 tfn = targetpathafterfn
910 else:
912 else:
911 tfn = targetpathfn
913 tfn = targetpathfn
912 copylist = []
914 copylist = []
913 for pat in pats:
915 for pat in pats:
914 srcs = []
916 srcs = []
915 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
917 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
916 if okaytocopy(abssrc, relsrc, exact):
918 if okaytocopy(abssrc, relsrc, exact):
917 srcs.append((abssrc, relsrc, exact))
919 srcs.append((abssrc, relsrc, exact))
918 if not srcs:
920 if not srcs:
919 continue
921 continue
920 copylist.append((tfn(pat, dest, srcs), srcs))
922 copylist.append((tfn(pat, dest, srcs), srcs))
921 if not copylist:
923 if not copylist:
922 raise util.Abort(_('no files to copy'))
924 raise util.Abort(_('no files to copy'))
923
925
924 for targetpath, srcs in copylist:
926 for targetpath, srcs in copylist:
925 for abssrc, relsrc, exact in srcs:
927 for abssrc, relsrc, exact in srcs:
926 copy(abssrc, relsrc, targetpath(relsrc), exact)
928 copy(abssrc, relsrc, targetpath(relsrc), exact)
927
929
928 if errors:
930 if errors:
929 ui.warn(_('(consider using --after)\n'))
931 ui.warn(_('(consider using --after)\n'))
930 return errors, copied
932 return errors, copied
931
933
932 def copy(ui, repo, *pats, **opts):
934 def copy(ui, repo, *pats, **opts):
933 """mark files as copied for the next commit
935 """mark files as copied for the next commit
934
936
935 Mark dest as having copies of source files. If dest is a
937 Mark dest as having copies of source files. If dest is a
936 directory, copies are put in that directory. If dest is a file,
938 directory, copies are put in that directory. If dest is a file,
937 there can only be one source.
939 there can only be one source.
938
940
939 By default, this command copies the contents of files as they
941 By default, this command copies the contents of files as they
940 stand in the working directory. If invoked with --after, the
942 stand in the working directory. If invoked with --after, the
941 operation is recorded, but no copying is performed.
943 operation is recorded, but no copying is performed.
942
944
943 This command takes effect in the next commit.
945 This command takes effect in the next commit.
944
946
945 NOTE: This command should be treated as experimental. While it
947 NOTE: This command should be treated as experimental. While it
946 should properly record copied files, this information is not yet
948 should properly record copied files, this information is not yet
947 fully used by merge, nor fully reported by log.
949 fully used by merge, nor fully reported by log.
948 """
950 """
949 errs, copied = docopy(ui, repo, pats, opts)
951 errs, copied = docopy(ui, repo, pats, opts)
950 return errs
952 return errs
951
953
952 def debugancestor(ui, index, rev1, rev2):
954 def debugancestor(ui, index, rev1, rev2):
953 """find the ancestor revision of two revisions in a given index"""
955 """find the ancestor revision of two revisions in a given index"""
954 r = revlog.revlog(util.opener(os.getcwd()), index, "")
956 r = revlog.revlog(util.opener(os.getcwd()), index, "")
955 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
957 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
956 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
958 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
957
959
958 def debugcheckstate(ui, repo):
960 def debugcheckstate(ui, repo):
959 """validate the correctness of the current dirstate"""
961 """validate the correctness of the current dirstate"""
960 parent1, parent2 = repo.dirstate.parents()
962 parent1, parent2 = repo.dirstate.parents()
961 repo.dirstate.read()
963 repo.dirstate.read()
962 dc = repo.dirstate.map
964 dc = repo.dirstate.map
963 keys = dc.keys()
965 keys = dc.keys()
964 keys.sort()
966 keys.sort()
965 m1n = repo.changelog.read(parent1)[0]
967 m1n = repo.changelog.read(parent1)[0]
966 m2n = repo.changelog.read(parent2)[0]
968 m2n = repo.changelog.read(parent2)[0]
967 m1 = repo.manifest.read(m1n)
969 m1 = repo.manifest.read(m1n)
968 m2 = repo.manifest.read(m2n)
970 m2 = repo.manifest.read(m2n)
969 errors = 0
971 errors = 0
970 for f in dc:
972 for f in dc:
971 state = repo.dirstate.state(f)
973 state = repo.dirstate.state(f)
972 if state in "nr" and f not in m1:
974 if state in "nr" and f not in m1:
973 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
975 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
974 errors += 1
976 errors += 1
975 if state in "a" and f in m1:
977 if state in "a" and f in m1:
976 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
978 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
977 errors += 1
979 errors += 1
978 if state in "m" and f not in m1 and f not in m2:
980 if state in "m" and f not in m1 and f not in m2:
979 ui.warn(_("%s in state %s, but not in either manifest\n") %
981 ui.warn(_("%s in state %s, but not in either manifest\n") %
980 (f, state))
982 (f, state))
981 errors += 1
983 errors += 1
982 for f in m1:
984 for f in m1:
983 state = repo.dirstate.state(f)
985 state = repo.dirstate.state(f)
984 if state not in "nrm":
986 if state not in "nrm":
985 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
987 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
986 errors += 1
988 errors += 1
987 if errors:
989 if errors:
988 raise util.Abort(_(".hg/dirstate inconsistent with current parent's manifest"))
990 error = _(".hg/dirstate inconsistent with current parent's manifest")
991 raise util.Abort(error)
989
992
990 def debugconfig(ui):
993 def debugconfig(ui):
991 """show combined config settings from all hgrc files"""
994 """show combined config settings from all hgrc files"""
992 try:
995 try:
993 repo = hg.repository(ui)
996 repo = hg.repository(ui)
994 except hg.RepoError:
997 except hg.RepoError:
995 pass
998 pass
996 for section, name, value in ui.walkconfig():
999 for section, name, value in ui.walkconfig():
997 ui.write('%s.%s=%s\n' % (section, name, value))
1000 ui.write('%s.%s=%s\n' % (section, name, value))
998
1001
999 def debugsetparents(ui, repo, rev1, rev2=None):
1002 def debugsetparents(ui, repo, rev1, rev2=None):
1000 """manually set the parents of the current working directory
1003 """manually set the parents of the current working directory
1001
1004
1002 This is useful for writing repository conversion tools, but should
1005 This is useful for writing repository conversion tools, but should
1003 be used with care.
1006 be used with care.
1004 """
1007 """
1005
1008
1006 if not rev2:
1009 if not rev2:
1007 rev2 = hex(nullid)
1010 rev2 = hex(nullid)
1008
1011
1009 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1012 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1010
1013
1011 def debugstate(ui, repo):
1014 def debugstate(ui, repo):
1012 """show the contents of the current dirstate"""
1015 """show the contents of the current dirstate"""
1013 repo.dirstate.read()
1016 repo.dirstate.read()
1014 dc = repo.dirstate.map
1017 dc = repo.dirstate.map
1015 keys = dc.keys()
1018 keys = dc.keys()
1016 keys.sort()
1019 keys.sort()
1017 for file_ in keys:
1020 for file_ in keys:
1018 ui.write("%c %3o %10d %s %s\n"
1021 ui.write("%c %3o %10d %s %s\n"
1019 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1022 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1020 time.strftime("%x %X",
1023 time.strftime("%x %X",
1021 time.localtime(dc[file_][3])), file_))
1024 time.localtime(dc[file_][3])), file_))
1022 for f in repo.dirstate.copies:
1025 for f in repo.dirstate.copies:
1023 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1026 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1024
1027
1025 def debugdata(ui, file_, rev):
1028 def debugdata(ui, file_, rev):
1026 """dump the contents of an data file revision"""
1029 """dump the contents of an data file revision"""
1027 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
1030 r = revlog.revlog(util.opener(os.getcwd()), file_[:-2] + ".i", file_)
1028 try:
1031 try:
1029 ui.write(r.revision(r.lookup(rev)))
1032 ui.write(r.revision(r.lookup(rev)))
1030 except KeyError:
1033 except KeyError:
1031 raise util.Abort(_('invalid revision identifier %s'), rev)
1034 raise util.Abort(_('invalid revision identifier %s'), rev)
1032
1035
1033 def debugindex(ui, file_):
1036 def debugindex(ui, file_):
1034 """dump the contents of an index file"""
1037 """dump the contents of an index file"""
1035 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1038 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1036 ui.write(" rev offset length base linkrev" +
1039 ui.write(" rev offset length base linkrev" +
1037 " nodeid p1 p2\n")
1040 " nodeid p1 p2\n")
1038 for i in range(r.count()):
1041 for i in range(r.count()):
1039 e = r.index[i]
1042 e = r.index[i]
1040 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1043 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1041 i, e[0], e[1], e[2], e[3],
1044 i, e[0], e[1], e[2], e[3],
1042 short(e[6]), short(e[4]), short(e[5])))
1045 short(e[6]), short(e[4]), short(e[5])))
1043
1046
1044 def debugindexdot(ui, file_):
1047 def debugindexdot(ui, file_):
1045 """dump an index DAG as a .dot file"""
1048 """dump an index DAG as a .dot file"""
1046 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1049 r = revlog.revlog(util.opener(os.getcwd()), file_, "")
1047 ui.write("digraph G {\n")
1050 ui.write("digraph G {\n")
1048 for i in range(r.count()):
1051 for i in range(r.count()):
1049 e = r.index[i]
1052 e = r.index[i]
1050 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1053 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1051 if e[5] != nullid:
1054 if e[5] != nullid:
1052 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1055 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1053 ui.write("}\n")
1056 ui.write("}\n")
1054
1057
1055 def debugrename(ui, repo, file, rev=None):
1058 def debugrename(ui, repo, file, rev=None):
1056 """dump rename information"""
1059 """dump rename information"""
1057 r = repo.file(relpath(repo, [file])[0])
1060 r = repo.file(relpath(repo, [file])[0])
1058 if rev:
1061 if rev:
1059 try:
1062 try:
1060 # assume all revision numbers are for changesets
1063 # assume all revision numbers are for changesets
1061 n = repo.lookup(rev)
1064 n = repo.lookup(rev)
1062 change = repo.changelog.read(n)
1065 change = repo.changelog.read(n)
1063 m = repo.manifest.read(change[0])
1066 m = repo.manifest.read(change[0])
1064 n = m[relpath(repo, [file])[0]]
1067 n = m[relpath(repo, [file])[0]]
1065 except (hg.RepoError, KeyError):
1068 except (hg.RepoError, KeyError):
1066 n = r.lookup(rev)
1069 n = r.lookup(rev)
1067 else:
1070 else:
1068 n = r.tip()
1071 n = r.tip()
1069 m = r.renamed(n)
1072 m = r.renamed(n)
1070 if m:
1073 if m:
1071 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1074 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1072 else:
1075 else:
1073 ui.write(_("not renamed\n"))
1076 ui.write(_("not renamed\n"))
1074
1077
1075 def debugwalk(ui, repo, *pats, **opts):
1078 def debugwalk(ui, repo, *pats, **opts):
1076 """show how files match on given patterns"""
1079 """show how files match on given patterns"""
1077 items = list(walk(repo, pats, opts))
1080 items = list(walk(repo, pats, opts))
1078 if not items:
1081 if not items:
1079 return
1082 return
1080 fmt = '%%s %%-%ds %%-%ds %%s' % (
1083 fmt = '%%s %%-%ds %%-%ds %%s' % (
1081 max([len(abs) for (src, abs, rel, exact) in items]),
1084 max([len(abs) for (src, abs, rel, exact) in items]),
1082 max([len(rel) for (src, abs, rel, exact) in items]))
1085 max([len(rel) for (src, abs, rel, exact) in items]))
1083 for src, abs, rel, exact in items:
1086 for src, abs, rel, exact in items:
1084 line = fmt % (src, abs, rel, exact and 'exact' or '')
1087 line = fmt % (src, abs, rel, exact and 'exact' or '')
1085 ui.write("%s\n" % line.rstrip())
1088 ui.write("%s\n" % line.rstrip())
1086
1089
1087 def diff(ui, repo, *pats, **opts):
1090 def diff(ui, repo, *pats, **opts):
1088 """diff repository (or selected files)
1091 """diff repository (or selected files)
1089
1092
1090 Show differences between revisions for the specified files.
1093 Show differences between revisions for the specified files.
1091
1094
1092 Differences between files are shown using the unified diff format.
1095 Differences between files are shown using the unified diff format.
1093
1096
1094 When two revision arguments are given, then changes are shown
1097 When two revision arguments are given, then changes are shown
1095 between those revisions. If only one revision is specified then
1098 between those revisions. If only one revision is specified then
1096 that revision is compared to the working directory, and, when no
1099 that revision is compared to the working directory, and, when no
1097 revisions are specified, the working directory files are compared
1100 revisions are specified, the working directory files are compared
1098 to its parent.
1101 to its parent.
1099
1102
1100 Without the -a option, diff will avoid generating diffs of files
1103 Without the -a option, diff will avoid generating diffs of files
1101 it detects as binary. With -a, diff will generate a diff anyway,
1104 it detects as binary. With -a, diff will generate a diff anyway,
1102 probably with undesirable results.
1105 probably with undesirable results.
1103 """
1106 """
1104 node1, node2 = None, None
1107 node1, node2 = None, None
1105 revs = [repo.lookup(x) for x in opts['rev']]
1108 revs = [repo.lookup(x) for x in opts['rev']]
1106
1109
1107 if len(revs) > 0:
1110 if len(revs) > 0:
1108 node1 = revs[0]
1111 node1 = revs[0]
1109 if len(revs) > 1:
1112 if len(revs) > 1:
1110 node2 = revs[1]
1113 node2 = revs[1]
1111 if len(revs) > 2:
1114 if len(revs) > 2:
1112 raise util.Abort(_("too many revisions to diff"))
1115 raise util.Abort(_("too many revisions to diff"))
1113
1116
1114 fns, matchfn, anypats, cwd = matchpats(repo, pats, opts)
1117 fns, matchfn, anypats, cwd = matchpats(repo, pats, opts)
1115
1118
1116 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1119 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1117 text=opts['text'])
1120 text=opts['text'])
1118
1121
1119 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1122 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1120 node = repo.lookup(changeset)
1123 node = repo.lookup(changeset)
1121 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1124 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1122 if opts['switch_parent']:
1125 if opts['switch_parent']:
1123 parents.reverse()
1126 parents.reverse()
1124 prev = (parents and parents[0]) or nullid
1127 prev = (parents and parents[0]) or nullid
1125 change = repo.changelog.read(node)
1128 change = repo.changelog.read(node)
1126
1129
1127 fp = make_file(repo, repo.changelog, opts['output'],
1130 fp = make_file(repo, repo.changelog, opts['output'],
1128 node=node, total=total, seqno=seqno,
1131 node=node, total=total, seqno=seqno,
1129 revwidth=revwidth)
1132 revwidth=revwidth)
1130 if fp != sys.stdout:
1133 if fp != sys.stdout:
1131 ui.note("%s\n" % fp.name)
1134 ui.note("%s\n" % fp.name)
1132
1135
1133 fp.write("# HG changeset patch\n")
1136 fp.write("# HG changeset patch\n")
1134 fp.write("# User %s\n" % change[1])
1137 fp.write("# User %s\n" % change[1])
1135 fp.write("# Node ID %s\n" % hex(node))
1138 fp.write("# Node ID %s\n" % hex(node))
1136 fp.write("# Parent %s\n" % hex(prev))
1139 fp.write("# Parent %s\n" % hex(prev))
1137 if len(parents) > 1:
1140 if len(parents) > 1:
1138 fp.write("# Parent %s\n" % hex(parents[1]))
1141 fp.write("# Parent %s\n" % hex(parents[1]))
1139 fp.write(change[4].rstrip())
1142 fp.write(change[4].rstrip())
1140 fp.write("\n\n")
1143 fp.write("\n\n")
1141
1144
1142 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1145 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1143 if fp != sys.stdout:
1146 if fp != sys.stdout:
1144 fp.close()
1147 fp.close()
1145
1148
1146 def export(ui, repo, *changesets, **opts):
1149 def export(ui, repo, *changesets, **opts):
1147 """dump the header and diffs for one or more changesets
1150 """dump the header and diffs for one or more changesets
1148
1151
1149 Print the changeset header and diffs for one or more revisions.
1152 Print the changeset header and diffs for one or more revisions.
1150
1153
1151 The information shown in the changeset header is: author,
1154 The information shown in the changeset header is: author,
1152 changeset hash, parent and commit comment.
1155 changeset hash, parent and commit comment.
1153
1156
1154 Output may be to a file, in which case the name of the file is
1157 Output may be to a file, in which case the name of the file is
1155 given using a format string. The formatting rules are as follows:
1158 given using a format string. The formatting rules are as follows:
1156
1159
1157 %% literal "%" character
1160 %% literal "%" character
1158 %H changeset hash (40 bytes of hexadecimal)
1161 %H changeset hash (40 bytes of hexadecimal)
1159 %N number of patches being generated
1162 %N number of patches being generated
1160 %R changeset revision number
1163 %R changeset revision number
1161 %b basename of the exporting repository
1164 %b basename of the exporting repository
1162 %h short-form changeset hash (12 bytes of hexadecimal)
1165 %h short-form changeset hash (12 bytes of hexadecimal)
1163 %n zero-padded sequence number, starting at 1
1166 %n zero-padded sequence number, starting at 1
1164 %r zero-padded changeset revision number
1167 %r zero-padded changeset revision number
1165
1168
1166 Without the -a option, export will avoid generating diffs of files
1169 Without the -a option, export will avoid generating diffs of files
1167 it detects as binary. With -a, export will generate a diff anyway,
1170 it detects as binary. With -a, export will generate a diff anyway,
1168 probably with undesirable results.
1171 probably with undesirable results.
1169
1172
1170 With the --switch-parent option, the diff will be against the second
1173 With the --switch-parent option, the diff will be against the second
1171 parent. It can be useful to review a merge.
1174 parent. It can be useful to review a merge.
1172 """
1175 """
1173 if not changesets:
1176 if not changesets:
1174 raise util.Abort(_("export requires at least one changeset"))
1177 raise util.Abort(_("export requires at least one changeset"))
1175 seqno = 0
1178 seqno = 0
1176 revs = list(revrange(ui, repo, changesets))
1179 revs = list(revrange(ui, repo, changesets))
1177 total = len(revs)
1180 total = len(revs)
1178 revwidth = max(map(len, revs))
1181 revwidth = max(map(len, revs))
1179 ui.note(len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n"))
1182 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1183 ui.note(msg)
1180 for cset in revs:
1184 for cset in revs:
1181 seqno += 1
1185 seqno += 1
1182 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1186 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1183
1187
1184 def forget(ui, repo, *pats, **opts):
1188 def forget(ui, repo, *pats, **opts):
1185 """don't add the specified files on the next commit
1189 """don't add the specified files on the next commit
1186
1190
1187 Undo an 'hg add' scheduled for the next commit.
1191 Undo an 'hg add' scheduled for the next commit.
1188 """
1192 """
1189 forget = []
1193 forget = []
1190 for src, abs, rel, exact in walk(repo, pats, opts):
1194 for src, abs, rel, exact in walk(repo, pats, opts):
1191 if repo.dirstate.state(abs) == 'a':
1195 if repo.dirstate.state(abs) == 'a':
1192 forget.append(abs)
1196 forget.append(abs)
1193 if ui.verbose or not exact:
1197 if ui.verbose or not exact:
1194 ui.status(_('forgetting %s\n') % rel)
1198 ui.status(_('forgetting %s\n') % rel)
1195 repo.forget(forget)
1199 repo.forget(forget)
1196
1200
1197 def grep(ui, repo, pattern, *pats, **opts):
1201 def grep(ui, repo, pattern, *pats, **opts):
1198 """search for a pattern in specified files and revisions
1202 """search for a pattern in specified files and revisions
1199
1203
1200 Search revisions of files for a regular expression.
1204 Search revisions of files for a regular expression.
1201
1205
1202 This command behaves differently than Unix grep. It only accepts
1206 This command behaves differently than Unix grep. It only accepts
1203 Python/Perl regexps. It searches repository history, not the
1207 Python/Perl regexps. It searches repository history, not the
1204 working directory. It always prints the revision number in which
1208 working directory. It always prints the revision number in which
1205 a match appears.
1209 a match appears.
1206
1210
1207 By default, grep only prints output for the first revision of a
1211 By default, grep only prints output for the first revision of a
1208 file in which it finds a match. To get it to print every revision
1212 file in which it finds a match. To get it to print every revision
1209 that contains a change in match status ("-" for a match that
1213 that contains a change in match status ("-" for a match that
1210 becomes a non-match, or "+" for a non-match that becomes a match),
1214 becomes a non-match, or "+" for a non-match that becomes a match),
1211 use the --all flag.
1215 use the --all flag.
1212 """
1216 """
1213 reflags = 0
1217 reflags = 0
1214 if opts['ignore_case']:
1218 if opts['ignore_case']:
1215 reflags |= re.I
1219 reflags |= re.I
1216 regexp = re.compile(pattern, reflags)
1220 regexp = re.compile(pattern, reflags)
1217 sep, eol = ':', '\n'
1221 sep, eol = ':', '\n'
1218 if opts['print0']:
1222 if opts['print0']:
1219 sep = eol = '\0'
1223 sep = eol = '\0'
1220
1224
1221 fcache = {}
1225 fcache = {}
1222 def getfile(fn):
1226 def getfile(fn):
1223 if fn not in fcache:
1227 if fn not in fcache:
1224 fcache[fn] = repo.file(fn)
1228 fcache[fn] = repo.file(fn)
1225 return fcache[fn]
1229 return fcache[fn]
1226
1230
1227 def matchlines(body):
1231 def matchlines(body):
1228 begin = 0
1232 begin = 0
1229 linenum = 0
1233 linenum = 0
1230 while True:
1234 while True:
1231 match = regexp.search(body, begin)
1235 match = regexp.search(body, begin)
1232 if not match:
1236 if not match:
1233 break
1237 break
1234 mstart, mend = match.span()
1238 mstart, mend = match.span()
1235 linenum += body.count('\n', begin, mstart) + 1
1239 linenum += body.count('\n', begin, mstart) + 1
1236 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1240 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1237 lend = body.find('\n', mend)
1241 lend = body.find('\n', mend)
1238 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1242 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1239 begin = lend + 1
1243 begin = lend + 1
1240
1244
1241 class linestate(object):
1245 class linestate(object):
1242 def __init__(self, line, linenum, colstart, colend):
1246 def __init__(self, line, linenum, colstart, colend):
1243 self.line = line
1247 self.line = line
1244 self.linenum = linenum
1248 self.linenum = linenum
1245 self.colstart = colstart
1249 self.colstart = colstart
1246 self.colend = colend
1250 self.colend = colend
1247 def __eq__(self, other):
1251 def __eq__(self, other):
1248 return self.line == other.line
1252 return self.line == other.line
1249 def __hash__(self):
1253 def __hash__(self):
1250 return hash(self.line)
1254 return hash(self.line)
1251
1255
1252 matches = {}
1256 matches = {}
1253 def grepbody(fn, rev, body):
1257 def grepbody(fn, rev, body):
1254 matches[rev].setdefault(fn, {})
1258 matches[rev].setdefault(fn, {})
1255 m = matches[rev][fn]
1259 m = matches[rev][fn]
1256 for lnum, cstart, cend, line in matchlines(body):
1260 for lnum, cstart, cend, line in matchlines(body):
1257 s = linestate(line, lnum, cstart, cend)
1261 s = linestate(line, lnum, cstart, cend)
1258 m[s] = s
1262 m[s] = s
1259
1263
1260 prev = {}
1264 prev = {}
1261 ucache = {}
1265 ucache = {}
1262 def display(fn, rev, states, prevstates):
1266 def display(fn, rev, states, prevstates):
1263 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1267 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1264 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1268 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1265 counts = {'-': 0, '+': 0}
1269 counts = {'-': 0, '+': 0}
1266 filerevmatches = {}
1270 filerevmatches = {}
1267 for l in diff:
1271 for l in diff:
1268 if incrementing or not opts['all']:
1272 if incrementing or not opts['all']:
1269 change = ((l in prevstates) and '-') or '+'
1273 change = ((l in prevstates) and '-') or '+'
1270 r = rev
1274 r = rev
1271 else:
1275 else:
1272 change = ((l in states) and '-') or '+'
1276 change = ((l in states) and '-') or '+'
1273 r = prev[fn]
1277 r = prev[fn]
1274 cols = [fn, str(rev)]
1278 cols = [fn, str(rev)]
1275 if opts['line_number']: cols.append(str(l.linenum))
1279 if opts['line_number']: cols.append(str(l.linenum))
1276 if opts['all']: cols.append(change)
1280 if opts['all']: cols.append(change)
1277 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1281 if opts['user']: cols.append(trimuser(ui, getchange(rev)[1], rev,
1278 ucache))
1282 ucache))
1279 if opts['files_with_matches']:
1283 if opts['files_with_matches']:
1280 c = (fn, rev)
1284 c = (fn, rev)
1281 if c in filerevmatches: continue
1285 if c in filerevmatches: continue
1282 filerevmatches[c] = 1
1286 filerevmatches[c] = 1
1283 else:
1287 else:
1284 cols.append(l.line)
1288 cols.append(l.line)
1285 ui.write(sep.join(cols), eol)
1289 ui.write(sep.join(cols), eol)
1286 counts[change] += 1
1290 counts[change] += 1
1287 return counts['+'], counts['-']
1291 return counts['+'], counts['-']
1288
1292
1289 fstate = {}
1293 fstate = {}
1290 skip = {}
1294 skip = {}
1291 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1295 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1292 count = 0
1296 count = 0
1293 incrementing = False
1297 incrementing = False
1294 for st, rev, fns in changeiter:
1298 for st, rev, fns in changeiter:
1295 if st == 'window':
1299 if st == 'window':
1296 incrementing = rev
1300 incrementing = rev
1297 matches.clear()
1301 matches.clear()
1298 elif st == 'add':
1302 elif st == 'add':
1299 change = repo.changelog.read(repo.lookup(str(rev)))
1303 change = repo.changelog.read(repo.lookup(str(rev)))
1300 mf = repo.manifest.read(change[0])
1304 mf = repo.manifest.read(change[0])
1301 matches[rev] = {}
1305 matches[rev] = {}
1302 for fn in fns:
1306 for fn in fns:
1303 if fn in skip: continue
1307 if fn in skip: continue
1304 fstate.setdefault(fn, {})
1308 fstate.setdefault(fn, {})
1305 try:
1309 try:
1306 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1310 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1307 except KeyError:
1311 except KeyError:
1308 pass
1312 pass
1309 elif st == 'iter':
1313 elif st == 'iter':
1310 states = matches[rev].items()
1314 states = matches[rev].items()
1311 states.sort()
1315 states.sort()
1312 for fn, m in states:
1316 for fn, m in states:
1313 if fn in skip: continue
1317 if fn in skip: continue
1314 if incrementing or not opts['all'] or fstate[fn]:
1318 if incrementing or not opts['all'] or fstate[fn]:
1315 pos, neg = display(fn, rev, m, fstate[fn])
1319 pos, neg = display(fn, rev, m, fstate[fn])
1316 count += pos + neg
1320 count += pos + neg
1317 if pos and not opts['all']:
1321 if pos and not opts['all']:
1318 skip[fn] = True
1322 skip[fn] = True
1319 fstate[fn] = m
1323 fstate[fn] = m
1320 prev[fn] = rev
1324 prev[fn] = rev
1321
1325
1322 if not incrementing:
1326 if not incrementing:
1323 fstate = fstate.items()
1327 fstate = fstate.items()
1324 fstate.sort()
1328 fstate.sort()
1325 for fn, state in fstate:
1329 for fn, state in fstate:
1326 if fn in skip: continue
1330 if fn in skip: continue
1327 display(fn, rev, {}, state)
1331 display(fn, rev, {}, state)
1328 return (count == 0 and 1) or 0
1332 return (count == 0 and 1) or 0
1329
1333
1330 def heads(ui, repo, **opts):
1334 def heads(ui, repo, **opts):
1331 """show current repository heads
1335 """show current repository heads
1332
1336
1333 Show all repository head changesets.
1337 Show all repository head changesets.
1334
1338
1335 Repository "heads" are changesets that don't have children
1339 Repository "heads" are changesets that don't have children
1336 changesets. They are where development generally takes place and
1340 changesets. They are where development generally takes place and
1337 are the usual targets for update and merge operations.
1341 are the usual targets for update and merge operations.
1338 """
1342 """
1339 if opts['rev']:
1343 if opts['rev']:
1340 heads = repo.heads(repo.lookup(opts['rev']))
1344 heads = repo.heads(repo.lookup(opts['rev']))
1341 else:
1345 else:
1342 heads = repo.heads()
1346 heads = repo.heads()
1343 br = None
1347 br = None
1344 if opts['branches']:
1348 if opts['branches']:
1345 br = repo.branchlookup(heads)
1349 br = repo.branchlookup(heads)
1346 for n in heads:
1350 for n in heads:
1347 show_changeset(ui, repo, changenode=n, brinfo=br)
1351 show_changeset(ui, repo, changenode=n, brinfo=br)
1348
1352
1349 def identify(ui, repo):
1353 def identify(ui, repo):
1350 """print information about the working copy
1354 """print information about the working copy
1351
1355
1352 Print a short summary of the current state of the repo.
1356 Print a short summary of the current state of the repo.
1353
1357
1354 This summary identifies the repository state using one or two parent
1358 This summary identifies the repository state using one or two parent
1355 hash identifiers, followed by a "+" if there are uncommitted changes
1359 hash identifiers, followed by a "+" if there are uncommitted changes
1356 in the working directory, followed by a list of tags for this revision.
1360 in the working directory, followed by a list of tags for this revision.
1357 """
1361 """
1358 parents = [p for p in repo.dirstate.parents() if p != nullid]
1362 parents = [p for p in repo.dirstate.parents() if p != nullid]
1359 if not parents:
1363 if not parents:
1360 ui.write(_("unknown\n"))
1364 ui.write(_("unknown\n"))
1361 return
1365 return
1362
1366
1363 hexfunc = ui.verbose and hex or short
1367 hexfunc = ui.verbose and hex or short
1364 (c, a, d, u) = repo.changes()
1368 (c, a, d, u) = repo.changes()
1365 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1369 output = ["%s%s" % ('+'.join([hexfunc(parent) for parent in parents]),
1366 (c or a or d) and "+" or "")]
1370 (c or a or d) and "+" or "")]
1367
1371
1368 if not ui.quiet:
1372 if not ui.quiet:
1369 # multiple tags for a single parent separated by '/'
1373 # multiple tags for a single parent separated by '/'
1370 parenttags = ['/'.join(tags)
1374 parenttags = ['/'.join(tags)
1371 for tags in map(repo.nodetags, parents) if tags]
1375 for tags in map(repo.nodetags, parents) if tags]
1372 # tags for multiple parents separated by ' + '
1376 # tags for multiple parents separated by ' + '
1373 if parenttags:
1377 if parenttags:
1374 output.append(' + '.join(parenttags))
1378 output.append(' + '.join(parenttags))
1375
1379
1376 ui.write("%s\n" % ' '.join(output))
1380 ui.write("%s\n" % ' '.join(output))
1377
1381
1378 def import_(ui, repo, patch1, *patches, **opts):
1382 def import_(ui, repo, patch1, *patches, **opts):
1379 """import an ordered set of patches
1383 """import an ordered set of patches
1380
1384
1381 Import a list of patches and commit them individually.
1385 Import a list of patches and commit them individually.
1382
1386
1383 If there are outstanding changes in the working directory, import
1387 If there are outstanding changes in the working directory, import
1384 will abort unless given the -f flag.
1388 will abort unless given the -f flag.
1385
1389
1386 If a patch looks like a mail message (its first line starts with
1390 If a patch looks like a mail message (its first line starts with
1387 "From " or looks like an RFC822 header), it will not be applied
1391 "From " or looks like an RFC822 header), it will not be applied
1388 unless the -f option is used. The importer neither parses nor
1392 unless the -f option is used. The importer neither parses nor
1389 discards mail headers, so use -f only to override the "mailness"
1393 discards mail headers, so use -f only to override the "mailness"
1390 safety check, not to import a real mail message.
1394 safety check, not to import a real mail message.
1391 """
1395 """
1392 patches = (patch1,) + patches
1396 patches = (patch1,) + patches
1393
1397
1394 if not opts['force']:
1398 if not opts['force']:
1395 (c, a, d, u) = repo.changes()
1399 (c, a, d, u) = repo.changes()
1396 if c or a or d:
1400 if c or a or d:
1397 raise util.Abort(_("outstanding uncommitted changes"))
1401 raise util.Abort(_("outstanding uncommitted changes"))
1398
1402
1399 d = opts["base"]
1403 d = opts["base"]
1400 strip = opts["strip"]
1404 strip = opts["strip"]
1401
1405
1402 mailre = re.compile(r'(?:From |[\w-]+:)')
1406 mailre = re.compile(r'(?:From |[\w-]+:)')
1403
1407
1404 # attempt to detect the start of a patch
1408 # attempt to detect the start of a patch
1405 # (this heuristic is borrowed from quilt)
1409 # (this heuristic is borrowed from quilt)
1406 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1410 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1407 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1411 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1408 '(---|\*\*\*)[ \t])')
1412 '(---|\*\*\*)[ \t])')
1409
1413
1410 for patch in patches:
1414 for patch in patches:
1411 ui.status(_("applying %s\n") % patch)
1415 ui.status(_("applying %s\n") % patch)
1412 pf = os.path.join(d, patch)
1416 pf = os.path.join(d, patch)
1413
1417
1414 message = []
1418 message = []
1415 user = None
1419 user = None
1416 hgpatch = False
1420 hgpatch = False
1417 for line in file(pf):
1421 for line in file(pf):
1418 line = line.rstrip()
1422 line = line.rstrip()
1419 if (not message and not hgpatch and
1423 if (not message and not hgpatch and
1420 mailre.match(line) and not opts['force']):
1424 mailre.match(line) and not opts['force']):
1421 if len(line) > 35: line = line[:32] + '...'
1425 if len(line) > 35: line = line[:32] + '...'
1422 raise util.Abort(_('first line looks like a '
1426 raise util.Abort(_('first line looks like a '
1423 'mail header: ') + line)
1427 'mail header: ') + line)
1424 if diffre.match(line):
1428 if diffre.match(line):
1425 break
1429 break
1426 elif hgpatch:
1430 elif hgpatch:
1427 # parse values when importing the result of an hg export
1431 # parse values when importing the result of an hg export
1428 if line.startswith("# User "):
1432 if line.startswith("# User "):
1429 user = line[7:]
1433 user = line[7:]
1430 ui.debug(_('User: %s\n') % user)
1434 ui.debug(_('User: %s\n') % user)
1431 elif not line.startswith("# ") and line:
1435 elif not line.startswith("# ") and line:
1432 message.append(line)
1436 message.append(line)
1433 hgpatch = False
1437 hgpatch = False
1434 elif line == '# HG changeset patch':
1438 elif line == '# HG changeset patch':
1435 hgpatch = True
1439 hgpatch = True
1436 message = [] # We may have collected garbage
1440 message = [] # We may have collected garbage
1437 else:
1441 else:
1438 message.append(line)
1442 message.append(line)
1439
1443
1440 # make sure message isn't empty
1444 # make sure message isn't empty
1441 if not message:
1445 if not message:
1442 message = _("imported patch %s\n") % patch
1446 message = _("imported patch %s\n") % patch
1443 else:
1447 else:
1444 message = "%s\n" % '\n'.join(message)
1448 message = "%s\n" % '\n'.join(message)
1445 ui.debug(_('message:\n%s\n') % message)
1449 ui.debug(_('message:\n%s\n') % message)
1446
1450
1447 files = util.patch(strip, pf, ui)
1451 files = util.patch(strip, pf, ui)
1448
1452
1449 if len(files) > 0:
1453 if len(files) > 0:
1450 addremove(ui, repo, *files)
1454 addremove(ui, repo, *files)
1451 repo.commit(files, message, user)
1455 repo.commit(files, message, user)
1452
1456
1453 def incoming(ui, repo, source="default", **opts):
1457 def incoming(ui, repo, source="default", **opts):
1454 """show new changesets found in source
1458 """show new changesets found in source
1455
1459
1456 Show new changesets found in the specified repo or the default
1460 Show new changesets found in the specified repo or the default
1457 pull repo. These are the changesets that would be pulled if a pull
1461 pull repo. These are the changesets that would be pulled if a pull
1458 was requested.
1462 was requested.
1459
1463
1460 Currently only local repositories are supported.
1464 Currently only local repositories are supported.
1461 """
1465 """
1462 source = ui.expandpath(source, repo.root)
1466 source = ui.expandpath(source, repo.root)
1463 other = hg.repository(ui, source)
1467 other = hg.repository(ui, source)
1464 if not other.local():
1468 if not other.local():
1465 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1469 raise util.Abort(_("incoming doesn't work for remote repositories yet"))
1466 o = repo.findincoming(other)
1470 o = repo.findincoming(other)
1467 if not o:
1471 if not o:
1468 return
1472 return
1469 o = other.changelog.nodesbetween(o)[0]
1473 o = other.changelog.nodesbetween(o)[0]
1470 if opts['newest_first']:
1474 if opts['newest_first']:
1471 o.reverse()
1475 o.reverse()
1472 for n in o:
1476 for n in o:
1473 parents = [p for p in other.changelog.parents(n) if p != nullid]
1477 parents = [p for p in other.changelog.parents(n) if p != nullid]
1474 if opts['no_merges'] and len(parents) == 2:
1478 if opts['no_merges'] and len(parents) == 2:
1475 continue
1479 continue
1476 show_changeset(ui, other, changenode=n)
1480 show_changeset(ui, other, changenode=n)
1477 if opts['patch']:
1481 if opts['patch']:
1478 prev = (parents and parents[0]) or nullid
1482 prev = (parents and parents[0]) or nullid
1479 dodiff(ui, ui, other, prev, n)
1483 dodiff(ui, ui, other, prev, n)
1480 ui.write("\n")
1484 ui.write("\n")
1481
1485
1482 def init(ui, dest="."):
1486 def init(ui, dest="."):
1483 """create a new repository in the given directory
1487 """create a new repository in the given directory
1484
1488
1485 Initialize a new repository in the given directory. If the given
1489 Initialize a new repository in the given directory. If the given
1486 directory does not exist, it is created.
1490 directory does not exist, it is created.
1487
1491
1488 If no directory is given, the current directory is used.
1492 If no directory is given, the current directory is used.
1489 """
1493 """
1490 if not os.path.exists(dest):
1494 if not os.path.exists(dest):
1491 os.mkdir(dest)
1495 os.mkdir(dest)
1492 hg.repository(ui, dest, create=1)
1496 hg.repository(ui, dest, create=1)
1493
1497
1494 def locate(ui, repo, *pats, **opts):
1498 def locate(ui, repo, *pats, **opts):
1495 """locate files matching specific patterns
1499 """locate files matching specific patterns
1496
1500
1497 Print all files under Mercurial control whose names match the
1501 Print all files under Mercurial control whose names match the
1498 given patterns.
1502 given patterns.
1499
1503
1500 This command searches the current directory and its
1504 This command searches the current directory and its
1501 subdirectories. To search an entire repository, move to the root
1505 subdirectories. To search an entire repository, move to the root
1502 of the repository.
1506 of the repository.
1503
1507
1504 If no patterns are given to match, this command prints all file
1508 If no patterns are given to match, this command prints all file
1505 names.
1509 names.
1506
1510
1507 If you want to feed the output of this command into the "xargs"
1511 If you want to feed the output of this command into the "xargs"
1508 command, use the "-0" option to both this command and "xargs".
1512 command, use the "-0" option to both this command and "xargs".
1509 This will avoid the problem of "xargs" treating single filenames
1513 This will avoid the problem of "xargs" treating single filenames
1510 that contain white space as multiple filenames.
1514 that contain white space as multiple filenames.
1511 """
1515 """
1512 end = opts['print0'] and '\0' or '\n'
1516 end = opts['print0'] and '\0' or '\n'
1513
1517
1514 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1518 for src, abs, rel, exact in walk(repo, pats, opts, '(?:.*/|)'):
1515 if repo.dirstate.state(abs) == '?':
1519 if repo.dirstate.state(abs) == '?':
1516 continue
1520 continue
1517 if opts['fullpath']:
1521 if opts['fullpath']:
1518 ui.write(os.path.join(repo.root, abs), end)
1522 ui.write(os.path.join(repo.root, abs), end)
1519 else:
1523 else:
1520 ui.write(rel, end)
1524 ui.write(rel, end)
1521
1525
1522 def log(ui, repo, *pats, **opts):
1526 def log(ui, repo, *pats, **opts):
1523 """show revision history of entire repository or files
1527 """show revision history of entire repository or files
1524
1528
1525 Print the revision history of the specified files or the entire project.
1529 Print the revision history of the specified files or the entire project.
1526
1530
1527 By default this command outputs: changeset id and hash, tags,
1531 By default this command outputs: changeset id and hash, tags,
1528 non-trivial parents, user, date and time, and a summary for each
1532 non-trivial parents, user, date and time, and a summary for each
1529 commit. When the -v/--verbose switch is used, the list of changed
1533 commit. When the -v/--verbose switch is used, the list of changed
1530 files and full commit message is shown.
1534 files and full commit message is shown.
1531 """
1535 """
1532 class dui(object):
1536 class dui(object):
1533 # Implement and delegate some ui protocol. Save hunks of
1537 # Implement and delegate some ui protocol. Save hunks of
1534 # output for later display in the desired order.
1538 # output for later display in the desired order.
1535 def __init__(self, ui):
1539 def __init__(self, ui):
1536 self.ui = ui
1540 self.ui = ui
1537 self.hunk = {}
1541 self.hunk = {}
1538 def bump(self, rev):
1542 def bump(self, rev):
1539 self.rev = rev
1543 self.rev = rev
1540 self.hunk[rev] = []
1544 self.hunk[rev] = []
1541 def note(self, *args):
1545 def note(self, *args):
1542 if self.verbose:
1546 if self.verbose:
1543 self.write(*args)
1547 self.write(*args)
1544 def status(self, *args):
1548 def status(self, *args):
1545 if not self.quiet:
1549 if not self.quiet:
1546 self.write(*args)
1550 self.write(*args)
1547 def write(self, *args):
1551 def write(self, *args):
1548 self.hunk[self.rev].append(args)
1552 self.hunk[self.rev].append(args)
1549 def debug(self, *args):
1553 def debug(self, *args):
1550 if self.debugflag:
1554 if self.debugflag:
1551 self.write(*args)
1555 self.write(*args)
1552 def __getattr__(self, key):
1556 def __getattr__(self, key):
1553 return getattr(self.ui, key)
1557 return getattr(self.ui, key)
1554 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1558 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1555 for st, rev, fns in changeiter:
1559 for st, rev, fns in changeiter:
1556 if st == 'window':
1560 if st == 'window':
1557 du = dui(ui)
1561 du = dui(ui)
1558 elif st == 'add':
1562 elif st == 'add':
1559 du.bump(rev)
1563 du.bump(rev)
1560 changenode = repo.changelog.node(rev)
1564 changenode = repo.changelog.node(rev)
1561 parents = [p for p in repo.changelog.parents(changenode)
1565 parents = [p for p in repo.changelog.parents(changenode)
1562 if p != nullid]
1566 if p != nullid]
1563 if opts['no_merges'] and len(parents) == 2:
1567 if opts['no_merges'] and len(parents) == 2:
1564 continue
1568 continue
1565 if opts['only_merges'] and len(parents) != 2:
1569 if opts['only_merges'] and len(parents) != 2:
1566 continue
1570 continue
1567
1571
1568 br = None
1572 br = None
1569 if opts['keyword']:
1573 if opts['keyword']:
1570 changes = getchange(rev)
1574 changes = getchange(rev)
1571 miss = 0
1575 miss = 0
1572 for k in [kw.lower() for kw in opts['keyword']]:
1576 for k in [kw.lower() for kw in opts['keyword']]:
1573 if not (k in changes[1].lower() or
1577 if not (k in changes[1].lower() or
1574 k in changes[4].lower() or
1578 k in changes[4].lower() or
1575 k in " ".join(changes[3][:20]).lower()):
1579 k in " ".join(changes[3][:20]).lower()):
1576 miss = 1
1580 miss = 1
1577 break
1581 break
1578 if miss:
1582 if miss:
1579 continue
1583 continue
1580
1584
1581 if opts['branch']:
1585 if opts['branch']:
1582 br = repo.branchlookup([repo.changelog.node(rev)])
1586 br = repo.branchlookup([repo.changelog.node(rev)])
1583
1587
1584 show_changeset(du, repo, rev, brinfo=br)
1588 show_changeset(du, repo, rev, brinfo=br)
1585 if opts['patch']:
1589 if opts['patch']:
1586 prev = (parents and parents[0]) or nullid
1590 prev = (parents and parents[0]) or nullid
1587 dodiff(du, du, repo, prev, changenode, match=matchfn)
1591 dodiff(du, du, repo, prev, changenode, match=matchfn)
1588 du.write("\n\n")
1592 du.write("\n\n")
1589 elif st == 'iter':
1593 elif st == 'iter':
1590 for args in du.hunk[rev]:
1594 for args in du.hunk[rev]:
1591 ui.write(*args)
1595 ui.write(*args)
1592
1596
1593 def manifest(ui, repo, rev=None):
1597 def manifest(ui, repo, rev=None):
1594 """output the latest or given revision of the project manifest
1598 """output the latest or given revision of the project manifest
1595
1599
1596 Print a list of version controlled files for the given revision.
1600 Print a list of version controlled files for the given revision.
1597
1601
1598 The manifest is the list of files being version controlled. If no revision
1602 The manifest is the list of files being version controlled. If no revision
1599 is given then the tip is used.
1603 is given then the tip is used.
1600 """
1604 """
1601 if rev:
1605 if rev:
1602 try:
1606 try:
1603 # assume all revision numbers are for changesets
1607 # assume all revision numbers are for changesets
1604 n = repo.lookup(rev)
1608 n = repo.lookup(rev)
1605 change = repo.changelog.read(n)
1609 change = repo.changelog.read(n)
1606 n = change[0]
1610 n = change[0]
1607 except hg.RepoError:
1611 except hg.RepoError:
1608 n = repo.manifest.lookup(rev)
1612 n = repo.manifest.lookup(rev)
1609 else:
1613 else:
1610 n = repo.manifest.tip()
1614 n = repo.manifest.tip()
1611 m = repo.manifest.read(n)
1615 m = repo.manifest.read(n)
1612 mf = repo.manifest.readflags(n)
1616 mf = repo.manifest.readflags(n)
1613 files = m.keys()
1617 files = m.keys()
1614 files.sort()
1618 files.sort()
1615
1619
1616 for f in files:
1620 for f in files:
1617 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1621 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1618
1622
1619 def outgoing(ui, repo, dest="default-push", **opts):
1623 def outgoing(ui, repo, dest="default-push", **opts):
1620 """show changesets not found in destination
1624 """show changesets not found in destination
1621
1625
1622 Show changesets not found in the specified destination repo or the
1626 Show changesets not found in the specified destination repo or the
1623 default push repo. These are the changesets that would be pushed
1627 default push repo. These are the changesets that would be pushed
1624 if a push was requested.
1628 if a push was requested.
1625 """
1629 """
1626 dest = ui.expandpath(dest, repo.root)
1630 dest = ui.expandpath(dest, repo.root)
1627 other = hg.repository(ui, dest)
1631 other = hg.repository(ui, dest)
1628 o = repo.findoutgoing(other)
1632 o = repo.findoutgoing(other)
1629 o = repo.changelog.nodesbetween(o)[0]
1633 o = repo.changelog.nodesbetween(o)[0]
1630 if opts['newest_first']:
1634 if opts['newest_first']:
1631 o.reverse()
1635 o.reverse()
1632 for n in o:
1636 for n in o:
1633 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1637 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1634 if opts['no_merges'] and len(parents) == 2:
1638 if opts['no_merges'] and len(parents) == 2:
1635 continue
1639 continue
1636 show_changeset(ui, repo, changenode=n)
1640 show_changeset(ui, repo, changenode=n)
1637 if opts['patch']:
1641 if opts['patch']:
1638 prev = (parents and parents[0]) or nullid
1642 prev = (parents and parents[0]) or nullid
1639 dodiff(ui, ui, repo, prev, n)
1643 dodiff(ui, ui, repo, prev, n)
1640 ui.write("\n")
1644 ui.write("\n")
1641
1645
1642 def parents(ui, repo, rev=None):
1646 def parents(ui, repo, rev=None):
1643 """show the parents of the working dir or revision
1647 """show the parents of the working dir or revision
1644
1648
1645 Print the working directory's parent revisions.
1649 Print the working directory's parent revisions.
1646 """
1650 """
1647 if rev:
1651 if rev:
1648 p = repo.changelog.parents(repo.lookup(rev))
1652 p = repo.changelog.parents(repo.lookup(rev))
1649 else:
1653 else:
1650 p = repo.dirstate.parents()
1654 p = repo.dirstate.parents()
1651
1655
1652 for n in p:
1656 for n in p:
1653 if n != nullid:
1657 if n != nullid:
1654 show_changeset(ui, repo, changenode=n)
1658 show_changeset(ui, repo, changenode=n)
1655
1659
1656 def paths(ui, search=None):
1660 def paths(ui, search=None):
1657 """show definition of symbolic path names
1661 """show definition of symbolic path names
1658
1662
1659 Show definition of symbolic path name NAME. If no name is given, show
1663 Show definition of symbolic path name NAME. If no name is given, show
1660 definition of available names.
1664 definition of available names.
1661
1665
1662 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1666 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1663 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1667 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1664 """
1668 """
1665 try:
1669 try:
1666 repo = hg.repository(ui=ui)
1670 repo = hg.repository(ui=ui)
1667 except hg.RepoError:
1671 except hg.RepoError:
1668 pass
1672 pass
1669
1673
1670 if search:
1674 if search:
1671 for name, path in ui.configitems("paths"):
1675 for name, path in ui.configitems("paths"):
1672 if name == search:
1676 if name == search:
1673 ui.write("%s\n" % path)
1677 ui.write("%s\n" % path)
1674 return
1678 return
1675 ui.warn(_("not found!\n"))
1679 ui.warn(_("not found!\n"))
1676 return 1
1680 return 1
1677 else:
1681 else:
1678 for name, path in ui.configitems("paths"):
1682 for name, path in ui.configitems("paths"):
1679 ui.write("%s = %s\n" % (name, path))
1683 ui.write("%s = %s\n" % (name, path))
1680
1684
1681 def pull(ui, repo, source="default", **opts):
1685 def pull(ui, repo, source="default", **opts):
1682 """pull changes from the specified source
1686 """pull changes from the specified source
1683
1687
1684 Pull changes from a remote repository to a local one.
1688 Pull changes from a remote repository to a local one.
1685
1689
1686 This finds all changes from the repository at the specified path
1690 This finds all changes from the repository at the specified path
1687 or URL and adds them to the local repository. By default, this
1691 or URL and adds them to the local repository. By default, this
1688 does not update the copy of the project in the working directory.
1692 does not update the copy of the project in the working directory.
1689
1693
1690 Valid URLs are of the form:
1694 Valid URLs are of the form:
1691
1695
1692 local/filesystem/path
1696 local/filesystem/path
1693 http://[user@]host[:port][/path]
1697 http://[user@]host[:port][/path]
1694 https://[user@]host[:port][/path]
1698 https://[user@]host[:port][/path]
1695 ssh://[user@]host[:port][/path]
1699 ssh://[user@]host[:port][/path]
1696
1700
1697 SSH requires an accessible shell account on the destination machine
1701 SSH requires an accessible shell account on the destination machine
1698 and a copy of hg in the remote path. With SSH, paths are relative
1702 and a copy of hg in the remote path. With SSH, paths are relative
1699 to the remote user's home directory by default; use two slashes at
1703 to the remote user's home directory by default; use two slashes at
1700 the start of a path to specify it as relative to the filesystem root.
1704 the start of a path to specify it as relative to the filesystem root.
1701 """
1705 """
1702 source = ui.expandpath(source, repo.root)
1706 source = ui.expandpath(source, repo.root)
1703 ui.status(_('pulling from %s\n') % (source))
1707 ui.status(_('pulling from %s\n') % (source))
1704
1708
1705 if opts['ssh']:
1709 if opts['ssh']:
1706 ui.setconfig("ui", "ssh", opts['ssh'])
1710 ui.setconfig("ui", "ssh", opts['ssh'])
1707 if opts['remotecmd']:
1711 if opts['remotecmd']:
1708 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1712 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1709
1713
1710 other = hg.repository(ui, source)
1714 other = hg.repository(ui, source)
1711 revs = None
1715 revs = None
1712 if opts['rev'] and not other.local():
1716 if opts['rev'] and not other.local():
1713 raise util.Abort("pull -r doesn't work for remote repositories yet")
1717 raise util.Abort("pull -r doesn't work for remote repositories yet")
1714 elif opts['rev']:
1718 elif opts['rev']:
1715 revs = [other.lookup(rev) for rev in opts['rev']]
1719 revs = [other.lookup(rev) for rev in opts['rev']]
1716 r = repo.pull(other, heads=revs)
1720 r = repo.pull(other, heads=revs)
1717 if not r:
1721 if not r:
1718 if opts['update']:
1722 if opts['update']:
1719 return update(ui, repo)
1723 return update(ui, repo)
1720 else:
1724 else:
1721 ui.status(_("(run 'hg update' to get a working copy)\n"))
1725 ui.status(_("(run 'hg update' to get a working copy)\n"))
1722
1726
1723 return r
1727 return r
1724
1728
1725 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1729 def push(ui, repo, dest="default-push", force=False, ssh=None, remotecmd=None):
1726 """push changes to the specified destination
1730 """push changes to the specified destination
1727
1731
1728 Push changes from the local repository to the given destination.
1732 Push changes from the local repository to the given destination.
1729
1733
1730 This is the symmetrical operation for pull. It helps to move
1734 This is the symmetrical operation for pull. It helps to move
1731 changes from the current repository to a different one. If the
1735 changes from the current repository to a different one. If the
1732 destination is local this is identical to a pull in that directory
1736 destination is local this is identical to a pull in that directory
1733 from the current one.
1737 from the current one.
1734
1738
1735 By default, push will refuse to run if it detects the result would
1739 By default, push will refuse to run if it detects the result would
1736 increase the number of remote heads. This generally indicates the
1740 increase the number of remote heads. This generally indicates the
1737 the client has forgotten to sync and merge before pushing.
1741 the client has forgotten to sync and merge before pushing.
1738
1742
1739 Valid URLs are of the form:
1743 Valid URLs are of the form:
1740
1744
1741 local/filesystem/path
1745 local/filesystem/path
1742 ssh://[user@]host[:port][/path]
1746 ssh://[user@]host[:port][/path]
1743
1747
1744 SSH requires an accessible shell account on the destination
1748 SSH requires an accessible shell account on the destination
1745 machine and a copy of hg in the remote path.
1749 machine and a copy of hg in the remote path.
1746 """
1750 """
1747 dest = ui.expandpath(dest, repo.root)
1751 dest = ui.expandpath(dest, repo.root)
1748 ui.status('pushing to %s\n' % (dest))
1752 ui.status('pushing to %s\n' % (dest))
1749
1753
1750 if ssh:
1754 if ssh:
1751 ui.setconfig("ui", "ssh", ssh)
1755 ui.setconfig("ui", "ssh", ssh)
1752 if remotecmd:
1756 if remotecmd:
1753 ui.setconfig("ui", "remotecmd", remotecmd)
1757 ui.setconfig("ui", "remotecmd", remotecmd)
1754
1758
1755 other = hg.repository(ui, dest)
1759 other = hg.repository(ui, dest)
1756 r = repo.push(other, force)
1760 r = repo.push(other, force)
1757 return r
1761 return r
1758
1762
1759 def rawcommit(ui, repo, *flist, **rc):
1763 def rawcommit(ui, repo, *flist, **rc):
1760 """raw commit interface
1764 """raw commit interface
1761
1765
1762 Lowlevel commit, for use in helper scripts.
1766 Lowlevel commit, for use in helper scripts.
1763
1767
1764 This command is not intended to be used by normal users, as it is
1768 This command is not intended to be used by normal users, as it is
1765 primarily useful for importing from other SCMs.
1769 primarily useful for importing from other SCMs.
1766 """
1770 """
1767 message = rc['message']
1771 message = rc['message']
1768 if not message and rc['logfile']:
1772 if not message and rc['logfile']:
1769 try:
1773 try:
1770 message = open(rc['logfile']).read()
1774 message = open(rc['logfile']).read()
1771 except IOError:
1775 except IOError:
1772 pass
1776 pass
1773 if not message and not rc['logfile']:
1777 if not message and not rc['logfile']:
1774 raise util.Abort(_("missing commit message"))
1778 raise util.Abort(_("missing commit message"))
1775
1779
1776 files = relpath(repo, list(flist))
1780 files = relpath(repo, list(flist))
1777 if rc['files']:
1781 if rc['files']:
1778 files += open(rc['files']).read().splitlines()
1782 files += open(rc['files']).read().splitlines()
1779
1783
1780 rc['parent'] = map(repo.lookup, rc['parent'])
1784 rc['parent'] = map(repo.lookup, rc['parent'])
1781
1785
1782 try:
1786 try:
1783 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1787 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
1784 except ValueError, inst:
1788 except ValueError, inst:
1785 raise util.Abort(str(inst))
1789 raise util.Abort(str(inst))
1786
1790
1787 def recover(ui, repo):
1791 def recover(ui, repo):
1788 """roll back an interrupted transaction
1792 """roll back an interrupted transaction
1789
1793
1790 Recover from an interrupted commit or pull.
1794 Recover from an interrupted commit or pull.
1791
1795
1792 This command tries to fix the repository status after an interrupted
1796 This command tries to fix the repository status after an interrupted
1793 operation. It should only be necessary when Mercurial suggests it.
1797 operation. It should only be necessary when Mercurial suggests it.
1794 """
1798 """
1795 if repo.recover():
1799 if repo.recover():
1796 return repo.verify()
1800 return repo.verify()
1797 return False
1801 return False
1798
1802
1799 def remove(ui, repo, pat, *pats, **opts):
1803 def remove(ui, repo, pat, *pats, **opts):
1800 """remove the specified files on the next commit
1804 """remove the specified files on the next commit
1801
1805
1802 Schedule the indicated files for removal from the repository.
1806 Schedule the indicated files for removal from the repository.
1803
1807
1804 This command schedules the files to be removed at the next commit.
1808 This command schedules the files to be removed at the next commit.
1805 This only removes files from the current branch, not from the
1809 This only removes files from the current branch, not from the
1806 entire project history. If the files still exist in the working
1810 entire project history. If the files still exist in the working
1807 directory, they will be deleted from it.
1811 directory, they will be deleted from it.
1808 """
1812 """
1809 names = []
1813 names = []
1810 def okaytoremove(abs, rel, exact):
1814 def okaytoremove(abs, rel, exact):
1811 c, a, d, u = repo.changes(files = [abs])
1815 c, a, d, u = repo.changes(files = [abs])
1812 reason = None
1816 reason = None
1813 if c: reason = _('is modified')
1817 if c: reason = _('is modified')
1814 elif a: reason = _('has been marked for add')
1818 elif a: reason = _('has been marked for add')
1815 elif u: reason = _('is not managed')
1819 elif u: reason = _('is not managed')
1816 if reason:
1820 if reason:
1817 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1821 if exact: ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1818 else:
1822 else:
1819 return True
1823 return True
1820 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1824 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
1821 if okaytoremove(abs, rel, exact):
1825 if okaytoremove(abs, rel, exact):
1822 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1826 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1823 names.append(abs)
1827 names.append(abs)
1824 repo.remove(names, unlink=True)
1828 repo.remove(names, unlink=True)
1825
1829
1826 def rename(ui, repo, *pats, **opts):
1830 def rename(ui, repo, *pats, **opts):
1827 """rename files; equivalent of copy + remove
1831 """rename files; equivalent of copy + remove
1828
1832
1829 Mark dest as copies of sources; mark sources for deletion. If
1833 Mark dest as copies of sources; mark sources for deletion. If
1830 dest is a directory, copies are put in that directory. If dest is
1834 dest is a directory, copies are put in that directory. If dest is
1831 a file, there can only be one source.
1835 a file, there can only be one source.
1832
1836
1833 By default, this command copies the contents of files as they
1837 By default, this command copies the contents of files as they
1834 stand in the working directory. If invoked with --after, the
1838 stand in the working directory. If invoked with --after, the
1835 operation is recorded, but no copying is performed.
1839 operation is recorded, but no copying is performed.
1836
1840
1837 This command takes effect in the next commit.
1841 This command takes effect in the next commit.
1838
1842
1839 NOTE: This command should be treated as experimental. While it
1843 NOTE: This command should be treated as experimental. While it
1840 should properly record rename files, this information is not yet
1844 should properly record rename files, this information is not yet
1841 fully used by merge, nor fully reported by log.
1845 fully used by merge, nor fully reported by log.
1842 """
1846 """
1843 errs, copied = docopy(ui, repo, pats, opts)
1847 errs, copied = docopy(ui, repo, pats, opts)
1844 names = []
1848 names = []
1845 for abs, rel, exact in copied:
1849 for abs, rel, exact in copied:
1846 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1850 if ui.verbose or not exact: ui.status(_('removing %s\n') % rel)
1847 names.append(abs)
1851 names.append(abs)
1848 repo.remove(names, unlink=True)
1852 repo.remove(names, unlink=True)
1849 return errs
1853 return errs
1850
1854
1851 def revert(ui, repo, *pats, **opts):
1855 def revert(ui, repo, *pats, **opts):
1852 """revert modified files or dirs back to their unmodified states
1856 """revert modified files or dirs back to their unmodified states
1853
1857
1854 Revert any uncommitted modifications made to the named files or
1858 Revert any uncommitted modifications made to the named files or
1855 directories. This restores the contents of the affected files to
1859 directories. This restores the contents of the affected files to
1856 an unmodified state.
1860 an unmodified state.
1857
1861
1858 If a file has been deleted, it is recreated. If the executable
1862 If a file has been deleted, it is recreated. If the executable
1859 mode of a file was changed, it is reset.
1863 mode of a file was changed, it is reset.
1860
1864
1861 If names are given, all files matching the names are reverted.
1865 If names are given, all files matching the names are reverted.
1862
1866
1863 If no arguments are given, all files in the repository are reverted.
1867 If no arguments are given, all files in the repository are reverted.
1864 """
1868 """
1865 node = opts['rev'] and repo.lookup(opts['rev']) or \
1869 node = opts['rev'] and repo.lookup(opts['rev']) or \
1866 repo.dirstate.parents()[0]
1870 repo.dirstate.parents()[0]
1867
1871
1868 files, choose, anypats, cwd = matchpats(repo, pats, opts)
1872 files, choose, anypats, cwd = matchpats(repo, pats, opts)
1869 (c, a, d, u) = repo.changes(match=choose)
1873 (c, a, d, u) = repo.changes(match=choose)
1870 repo.forget(a)
1874 repo.forget(a)
1871 repo.undelete(d)
1875 repo.undelete(d)
1872
1876
1873 return repo.update(node, False, True, choose, False)
1877 return repo.update(node, False, True, choose, False)
1874
1878
1875 def root(ui, repo):
1879 def root(ui, repo):
1876 """print the root (top) of the current working dir
1880 """print the root (top) of the current working dir
1877
1881
1878 Print the root directory of the current repository.
1882 Print the root directory of the current repository.
1879 """
1883 """
1880 ui.write(repo.root + "\n")
1884 ui.write(repo.root + "\n")
1881
1885
1882 def serve(ui, repo, **opts):
1886 def serve(ui, repo, **opts):
1883 """export the repository via HTTP
1887 """export the repository via HTTP
1884
1888
1885 Start a local HTTP repository browser and pull server.
1889 Start a local HTTP repository browser and pull server.
1886
1890
1887 By default, the server logs accesses to stdout and errors to
1891 By default, the server logs accesses to stdout and errors to
1888 stderr. Use the "-A" and "-E" options to log to files.
1892 stderr. Use the "-A" and "-E" options to log to files.
1889 """
1893 """
1890
1894
1891 if opts["stdio"]:
1895 if opts["stdio"]:
1892 fin, fout = sys.stdin, sys.stdout
1896 fin, fout = sys.stdin, sys.stdout
1893 sys.stdout = sys.stderr
1897 sys.stdout = sys.stderr
1894
1898
1895 # Prevent insertion/deletion of CRs
1899 # Prevent insertion/deletion of CRs
1896 util.set_binary(fin)
1900 util.set_binary(fin)
1897 util.set_binary(fout)
1901 util.set_binary(fout)
1898
1902
1899 def getarg():
1903 def getarg():
1900 argline = fin.readline()[:-1]
1904 argline = fin.readline()[:-1]
1901 arg, l = argline.split()
1905 arg, l = argline.split()
1902 val = fin.read(int(l))
1906 val = fin.read(int(l))
1903 return arg, val
1907 return arg, val
1904 def respond(v):
1908 def respond(v):
1905 fout.write("%d\n" % len(v))
1909 fout.write("%d\n" % len(v))
1906 fout.write(v)
1910 fout.write(v)
1907 fout.flush()
1911 fout.flush()
1908
1912
1909 lock = None
1913 lock = None
1910
1914
1911 while 1:
1915 while 1:
1912 cmd = fin.readline()[:-1]
1916 cmd = fin.readline()[:-1]
1913 if cmd == '':
1917 if cmd == '':
1914 return
1918 return
1915 if cmd == "heads":
1919 if cmd == "heads":
1916 h = repo.heads()
1920 h = repo.heads()
1917 respond(" ".join(map(hex, h)) + "\n")
1921 respond(" ".join(map(hex, h)) + "\n")
1918 if cmd == "lock":
1922 if cmd == "lock":
1919 lock = repo.lock()
1923 lock = repo.lock()
1920 respond("")
1924 respond("")
1921 if cmd == "unlock":
1925 if cmd == "unlock":
1922 if lock:
1926 if lock:
1923 lock.release()
1927 lock.release()
1924 lock = None
1928 lock = None
1925 respond("")
1929 respond("")
1926 elif cmd == "branches":
1930 elif cmd == "branches":
1927 arg, nodes = getarg()
1931 arg, nodes = getarg()
1928 nodes = map(bin, nodes.split(" "))
1932 nodes = map(bin, nodes.split(" "))
1929 r = []
1933 r = []
1930 for b in repo.branches(nodes):
1934 for b in repo.branches(nodes):
1931 r.append(" ".join(map(hex, b)) + "\n")
1935 r.append(" ".join(map(hex, b)) + "\n")
1932 respond("".join(r))
1936 respond("".join(r))
1933 elif cmd == "between":
1937 elif cmd == "between":
1934 arg, pairs = getarg()
1938 arg, pairs = getarg()
1935 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1939 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
1936 r = []
1940 r = []
1937 for b in repo.between(pairs):
1941 for b in repo.between(pairs):
1938 r.append(" ".join(map(hex, b)) + "\n")
1942 r.append(" ".join(map(hex, b)) + "\n")
1939 respond("".join(r))
1943 respond("".join(r))
1940 elif cmd == "changegroup":
1944 elif cmd == "changegroup":
1941 nodes = []
1945 nodes = []
1942 arg, roots = getarg()
1946 arg, roots = getarg()
1943 nodes = map(bin, roots.split(" "))
1947 nodes = map(bin, roots.split(" "))
1944
1948
1945 cg = repo.changegroup(nodes)
1949 cg = repo.changegroup(nodes)
1946 while 1:
1950 while 1:
1947 d = cg.read(4096)
1951 d = cg.read(4096)
1948 if not d:
1952 if not d:
1949 break
1953 break
1950 fout.write(d)
1954 fout.write(d)
1951
1955
1952 fout.flush()
1956 fout.flush()
1953
1957
1954 elif cmd == "addchangegroup":
1958 elif cmd == "addchangegroup":
1955 if not lock:
1959 if not lock:
1956 respond("not locked")
1960 respond("not locked")
1957 continue
1961 continue
1958 respond("")
1962 respond("")
1959
1963
1960 r = repo.addchangegroup(fin)
1964 r = repo.addchangegroup(fin)
1961 respond("")
1965 respond("")
1962
1966
1963 optlist = "name templates style address port ipv6 accesslog errorlog"
1967 optlist = "name templates style address port ipv6 accesslog errorlog"
1964 for o in optlist.split():
1968 for o in optlist.split():
1965 if opts[o]:
1969 if opts[o]:
1966 ui.setconfig("web", o, opts[o])
1970 ui.setconfig("web", o, opts[o])
1967
1971
1968 try:
1972 try:
1969 httpd = hgweb.create_server(repo)
1973 httpd = hgweb.create_server(repo)
1970 except socket.error, inst:
1974 except socket.error, inst:
1971 raise util.Abort('cannot start server: ' + inst.args[1])
1975 raise util.Abort('cannot start server: ' + inst.args[1])
1972
1976
1973 if ui.verbose:
1977 if ui.verbose:
1974 addr, port = httpd.socket.getsockname()
1978 addr, port = httpd.socket.getsockname()
1975 if addr == '0.0.0.0':
1979 if addr == '0.0.0.0':
1976 addr = socket.gethostname()
1980 addr = socket.gethostname()
1977 else:
1981 else:
1978 try:
1982 try:
1979 addr = socket.gethostbyaddr(addr)[0]
1983 addr = socket.gethostbyaddr(addr)[0]
1980 except socket.error:
1984 except socket.error:
1981 pass
1985 pass
1982 if port != 80:
1986 if port != 80:
1983 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1987 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
1984 else:
1988 else:
1985 ui.status(_('listening at http://%s/\n') % addr)
1989 ui.status(_('listening at http://%s/\n') % addr)
1986 httpd.serve_forever()
1990 httpd.serve_forever()
1987
1991
1988 def status(ui, repo, *pats, **opts):
1992 def status(ui, repo, *pats, **opts):
1989 """show changed files in the working directory
1993 """show changed files in the working directory
1990
1994
1991 Show changed files in the repository. If names are
1995 Show changed files in the repository. If names are
1992 given, only files that match are shown.
1996 given, only files that match are shown.
1993
1997
1994 The codes used to show the status of files are:
1998 The codes used to show the status of files are:
1995 M = modified
1999 M = modified
1996 A = added
2000 A = added
1997 R = removed
2001 R = removed
1998 ? = not tracked
2002 ? = not tracked
1999 """
2003 """
2000
2004
2001 files, matchfn, anypats, cwd = matchpats(repo, pats, opts)
2005 files, matchfn, anypats, cwd = matchpats(repo, pats, opts)
2002 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
2006 (c, a, d, u) = [[util.pathto(cwd, x) for x in n]
2003 for n in repo.changes(files=files, match=matchfn)]
2007 for n in repo.changes(files=files, match=matchfn)]
2004
2008
2005 changetypes = [(_('modified'), 'M', c),
2009 changetypes = [(_('modified'), 'M', c),
2006 (_('added'), 'A', a),
2010 (_('added'), 'A', a),
2007 (_('removed'), 'R', d),
2011 (_('removed'), 'R', d),
2008 (_('unknown'), '?', u)]
2012 (_('unknown'), '?', u)]
2009
2013
2010 end = opts['print0'] and '\0' or '\n'
2014 end = opts['print0'] and '\0' or '\n'
2011
2015
2012 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2016 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2013 or changetypes):
2017 or changetypes):
2014 if opts['no_status']:
2018 if opts['no_status']:
2015 format = "%%s%s" % end
2019 format = "%%s%s" % end
2016 else:
2020 else:
2017 format = "%s %%s%s" % (char, end);
2021 format = "%s %%s%s" % (char, end);
2018
2022
2019 for f in changes:
2023 for f in changes:
2020 ui.write(format % f)
2024 ui.write(format % f)
2021
2025
2022 def tag(ui, repo, name, rev=None, **opts):
2026 def tag(ui, repo, name, rev_=None, **opts):
2023 """add a tag for the current tip or a given revision
2027 """add a tag for the current tip or a given revision
2024
2028
2025 Name a particular revision using <name>.
2029 Name a particular revision using <name>.
2026
2030
2027 Tags are used to name particular revisions of the repository and are
2031 Tags are used to name particular revisions of the repository and are
2028 very useful to compare different revision, to go back to significant
2032 very useful to compare different revision, to go back to significant
2029 earlier versions or to mark branch points as releases, etc.
2033 earlier versions or to mark branch points as releases, etc.
2030
2034
2031 If no revision is given, the tip is used.
2035 If no revision is given, the tip is used.
2032
2036
2033 To facilitate version control, distribution, and merging of tags,
2037 To facilitate version control, distribution, and merging of tags,
2034 they are stored as a file named ".hgtags" which is managed
2038 they are stored as a file named ".hgtags" which is managed
2035 similarly to other project files and can be hand-edited if
2039 similarly to other project files and can be hand-edited if
2036 necessary.
2040 necessary.
2037 """
2041 """
2038 if name == "tip":
2042 if name == "tip":
2039 raise util.Abort(_("the name 'tip' is reserved"))
2043 raise util.Abort(_("the name 'tip' is reserved"))
2040 if 'rev' in opts:
2044 if opts['rev']:
2041 rev = opts['rev']
2045 rev_ = opts['rev']
2042 if rev:
2046 if rev_:
2043 r = hex(repo.lookup(rev))
2047 r = hex(repo.lookup(rev_))
2044 else:
2048 else:
2045 r = hex(repo.changelog.tip())
2049 r = hex(repo.changelog.tip())
2046
2050
2047 disallowed = (revrangesep, '\r', '\n')
2051 disallowed = (revrangesep, '\r', '\n')
2048 for c in disallowed:
2052 for c in disallowed:
2049 if name.find(c) >= 0:
2053 if name.find(c) >= 0:
2050 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2054 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2051
2055
2052 if opts['local']:
2056 if opts['local']:
2053 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2057 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2054 return
2058 return
2055
2059
2056 (c, a, d, u) = repo.changes()
2060 (c, a, d, u) = repo.changes()
2057 for x in (c, a, d, u):
2061 for x in (c, a, d, u):
2058 if ".hgtags" in x:
2062 if ".hgtags" in x:
2059 raise util.Abort(_("working copy of .hgtags is changed "
2063 raise util.Abort(_("working copy of .hgtags is changed "
2060 "(please commit .hgtags manually)"))
2064 "(please commit .hgtags manually)"))
2061
2065
2062 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2066 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2063 if repo.dirstate.state(".hgtags") == '?':
2067 if repo.dirstate.state(".hgtags") == '?':
2064 repo.add([".hgtags"])
2068 repo.add([".hgtags"])
2065
2069
2066 message = (opts['message'] or
2070 message = (opts['message'] or
2067 _("Added tag %s for changeset %s") % (name, r))
2071 _("Added tag %s for changeset %s") % (name, r))
2068 try:
2072 try:
2069 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2073 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2070 except ValueError, inst:
2074 except ValueError, inst:
2071 raise util.Abort(str(inst))
2075 raise util.Abort(str(inst))
2072
2076
2073 def tags(ui, repo):
2077 def tags(ui, repo):
2074 """list repository tags
2078 """list repository tags
2075
2079
2076 List the repository tags.
2080 List the repository tags.
2077
2081
2078 This lists both regular and local tags.
2082 This lists both regular and local tags.
2079 """
2083 """
2080
2084
2081 l = repo.tagslist()
2085 l = repo.tagslist()
2082 l.reverse()
2086 l.reverse()
2083 for t, n in l:
2087 for t, n in l:
2084 try:
2088 try:
2085 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2089 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2086 except KeyError:
2090 except KeyError:
2087 r = " ?:?"
2091 r = " ?:?"
2088 ui.write("%-30s %s\n" % (t, r))
2092 ui.write("%-30s %s\n" % (t, r))
2089
2093
2090 def tip(ui, repo):
2094 def tip(ui, repo):
2091 """show the tip revision
2095 """show the tip revision
2092
2096
2093 Show the tip revision.
2097 Show the tip revision.
2094 """
2098 """
2095 n = repo.changelog.tip()
2099 n = repo.changelog.tip()
2096 show_changeset(ui, repo, changenode=n)
2100 show_changeset(ui, repo, changenode=n)
2097
2101
2098 def unbundle(ui, repo, fname):
2102 def unbundle(ui, repo, fname, **opts):
2099 """apply a changegroup file
2103 """apply a changegroup file
2100
2104
2101 Apply a compressed changegroup file generated by the bundle
2105 Apply a compressed changegroup file generated by the bundle
2102 command.
2106 command.
2103 """
2107 """
2104 f = urllib.urlopen(fname)
2108 f = urllib.urlopen(fname)
2105
2109
2106 if f.read(4) != "HG10":
2110 if f.read(4) != "HG10":
2107 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2111 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2108
2112
2109 def bzgenerator(f):
2113 def bzgenerator(f):
2110 zd = bz2.BZ2Decompressor()
2114 zd = bz2.BZ2Decompressor()
2111 for chunk in f:
2115 for chunk in f:
2112 yield zd.decompress(chunk)
2116 yield zd.decompress(chunk)
2113
2117
2114 bzgen = bzgenerator(util.filechunkiter(f, 4096))
2118 bzgen = bzgenerator(util.filechunkiter(f, 4096))
2115 repo.addchangegroup(util.chunkbuffer(bzgen))
2119 if repo.addchangegroup(util.chunkbuffer(bzgen)):
2120 return 1
2121
2122 if opts['update']:
2123 return update(ui, repo)
2124 else:
2125 ui.status(_("(run 'hg update' to get a working copy)\n"))
2116
2126
2117 def undo(ui, repo):
2127 def undo(ui, repo):
2118 """undo the last commit or pull
2128 """undo the last commit or pull
2119
2129
2120 Roll back the last pull or commit transaction on the
2130 Roll back the last pull or commit transaction on the
2121 repository, restoring the project to its earlier state.
2131 repository, restoring the project to its earlier state.
2122
2132
2123 This command should be used with care. There is only one level of
2133 This command should be used with care. There is only one level of
2124 undo and there is no redo.
2134 undo and there is no redo.
2125
2135
2126 This command is not intended for use on public repositories. Once
2136 This command is not intended for use on public repositories. Once
2127 a change is visible for pull by other users, undoing it locally is
2137 a change is visible for pull by other users, undoing it locally is
2128 ineffective.
2138 ineffective.
2129 """
2139 """
2130 repo.undo()
2140 repo.undo()
2131
2141
2132 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2142 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2133 branch=None):
2143 branch=None):
2134 """update or merge working directory
2144 """update or merge working directory
2135
2145
2136 Update the working directory to the specified revision.
2146 Update the working directory to the specified revision.
2137
2147
2138 If there are no outstanding changes in the working directory and
2148 If there are no outstanding changes in the working directory and
2139 there is a linear relationship between the current version and the
2149 there is a linear relationship between the current version and the
2140 requested version, the result is the requested version.
2150 requested version, the result is the requested version.
2141
2151
2142 Otherwise the result is a merge between the contents of the
2152 Otherwise the result is a merge between the contents of the
2143 current working directory and the requested version. Files that
2153 current working directory and the requested version. Files that
2144 changed between either parent are marked as changed for the next
2154 changed between either parent are marked as changed for the next
2145 commit and a commit must be performed before any further updates
2155 commit and a commit must be performed before any further updates
2146 are allowed.
2156 are allowed.
2147
2157
2148 By default, update will refuse to run if doing so would require
2158 By default, update will refuse to run if doing so would require
2149 merging or discarding local changes.
2159 merging or discarding local changes.
2150 """
2160 """
2151 if branch:
2161 if branch:
2152 br = repo.branchlookup(branch=branch)
2162 br = repo.branchlookup(branch=branch)
2153 found = []
2163 found = []
2154 for x in br:
2164 for x in br:
2155 if branch in br[x]:
2165 if branch in br[x]:
2156 found.append(x)
2166 found.append(x)
2157 if len(found) > 1:
2167 if len(found) > 1:
2158 ui.warn(_("Found multiple heads for %s\n") % branch)
2168 ui.warn(_("Found multiple heads for %s\n") % branch)
2159 for x in found:
2169 for x in found:
2160 show_changeset(ui, repo, changenode=x, brinfo=br)
2170 show_changeset(ui, repo, changenode=x, brinfo=br)
2161 return 1
2171 return 1
2162 if len(found) == 1:
2172 if len(found) == 1:
2163 node = found[0]
2173 node = found[0]
2164 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2174 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2165 else:
2175 else:
2166 ui.warn(_("branch %s not found\n") % (branch))
2176 ui.warn(_("branch %s not found\n") % (branch))
2167 return 1
2177 return 1
2168 else:
2178 else:
2169 node = node and repo.lookup(node) or repo.changelog.tip()
2179 node = node and repo.lookup(node) or repo.changelog.tip()
2170 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2180 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2171
2181
2172 def verify(ui, repo):
2182 def verify(ui, repo):
2173 """verify the integrity of the repository
2183 """verify the integrity of the repository
2174
2184
2175 Verify the integrity of the current repository.
2185 Verify the integrity of the current repository.
2176
2186
2177 This will perform an extensive check of the repository's
2187 This will perform an extensive check of the repository's
2178 integrity, validating the hashes and checksums of each entry in
2188 integrity, validating the hashes and checksums of each entry in
2179 the changelog, manifest, and tracked files, as well as the
2189 the changelog, manifest, and tracked files, as well as the
2180 integrity of their crosslinks and indices.
2190 integrity of their crosslinks and indices.
2181 """
2191 """
2182 return repo.verify()
2192 return repo.verify()
2183
2193
2184 # Command options and aliases are listed here, alphabetically
2194 # Command options and aliases are listed here, alphabetically
2185
2195
2186 table = {
2196 table = {
2187 "^add":
2197 "^add":
2188 (add,
2198 (add,
2189 [('I', 'include', [], _('include names matching the given patterns')),
2199 [('I', 'include', [], _('include names matching the given patterns')),
2190 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2200 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2191 "hg add [OPTION]... [FILE]..."),
2201 "hg add [OPTION]... [FILE]..."),
2192 "addremove":
2202 "addremove":
2193 (addremove,
2203 (addremove,
2194 [('I', 'include', [], _('include names matching the given patterns')),
2204 [('I', 'include', [], _('include names matching the given patterns')),
2195 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2205 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2196 "hg addremove [OPTION]... [FILE]..."),
2206 "hg addremove [OPTION]... [FILE]..."),
2197 "^annotate":
2207 "^annotate":
2198 (annotate,
2208 (annotate,
2199 [('r', 'rev', '', _('annotate the specified revision')),
2209 [('r', 'rev', '', _('annotate the specified revision')),
2200 ('a', 'text', None, _('treat all files as text')),
2210 ('a', 'text', None, _('treat all files as text')),
2201 ('u', 'user', None, _('list the author')),
2211 ('u', 'user', None, _('list the author')),
2202 ('d', 'date', None, _('list the date')),
2212 ('d', 'date', None, _('list the date')),
2203 ('n', 'number', None, _('list the revision number (default)')),
2213 ('n', 'number', None, _('list the revision number (default)')),
2204 ('c', 'changeset', None, _('list the changeset')),
2214 ('c', 'changeset', None, _('list the changeset')),
2205 ('I', 'include', [], _('include names matching the given patterns')),
2215 ('I', 'include', [], _('include names matching the given patterns')),
2206 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2216 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2207 _('hg annotate [OPTION]... FILE...')),
2217 _('hg annotate [OPTION]... FILE...')),
2208 "bundle":
2218 "bundle":
2209 (bundle,
2219 (bundle,
2210 [],
2220 [],
2211 _('hg bundle FILE DEST')),
2221 _('hg bundle FILE DEST')),
2212 "cat":
2222 "cat":
2213 (cat,
2223 (cat,
2214 [('I', 'include', [], _('include names matching the given patterns')),
2224 [('I', 'include', [], _('include names matching the given patterns')),
2215 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2225 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2216 ('o', 'output', "", _('print output to file with formatted name')),
2226 ('o', 'output', "", _('print output to file with formatted name')),
2217 ('r', 'rev', '', _('print the given revision'))],
2227 ('r', 'rev', '', _('print the given revision'))],
2218 _('hg cat [OPTION]... FILE...')),
2228 _('hg cat [OPTION]... FILE...')),
2219 "^clone":
2229 "^clone":
2220 (clone,
2230 (clone,
2221 [('U', 'noupdate', None, _('do not update the new working directory')),
2231 [('U', 'noupdate', None, _('do not update the new working directory')),
2222 ('e', 'ssh', "", _('specify ssh command to use')),
2232 ('e', 'ssh', "", _('specify ssh command to use')),
2223 ('', 'pull', None, _('use pull protocol to copy metadata')),
2233 ('', 'pull', None, _('use pull protocol to copy metadata')),
2224 ('r', 'rev', [], _('a changeset you would like to have after cloning')),
2234 ('r', 'rev', [],
2225 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2235 _('a changeset you would like to have after cloning')),
2236 ('', 'remotecmd', "",
2237 _('specify hg command to run on the remote side'))],
2226 _('hg clone [OPTION]... SOURCE [DEST]')),
2238 _('hg clone [OPTION]... SOURCE [DEST]')),
2227 "^commit|ci":
2239 "^commit|ci":
2228 (commit,
2240 (commit,
2229 [('A', 'addremove', None, _('run addremove during commit')),
2241 [('A', 'addremove', None, _('run addremove during commit')),
2230 ('I', 'include', [], _('include names matching the given patterns')),
2242 ('I', 'include', [], _('include names matching the given patterns')),
2231 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2243 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2232 ('m', 'message', "", _('use <text> as commit message')),
2244 ('m', 'message', "", _('use <text> as commit message')),
2233 ('l', 'logfile', "", _('read the commit message from <file>')),
2245 ('l', 'logfile', "", _('read the commit message from <file>')),
2234 ('d', 'date', "", _('record datecode as commit date')),
2246 ('d', 'date', "", _('record datecode as commit date')),
2235 ('u', 'user', "", _('record user as commiter'))],
2247 ('u', 'user', "", _('record user as commiter'))],
2236 _('hg commit [OPTION]... [FILE]...')),
2248 _('hg commit [OPTION]... [FILE]...')),
2237 "copy|cp": (copy,
2249 "copy|cp": (copy,
2238 [('I', 'include', [], _('include names matching the given patterns')),
2250 [('I', 'include', [],
2239 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2251 _('include names matching the given patterns')),
2240 ('A', 'after', None, _('record a copy that has already occurred')),
2252 ('X', 'exclude', [],
2241 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2253 _('exclude names matching the given patterns')),
2254 ('A', 'after', None,
2255 _('record a copy that has already occurred')),
2256 ('f', 'force', None,
2257 _('forcibly copy over an existing managed file'))],
2242 _('hg copy [OPTION]... [SOURCE]... DEST')),
2258 _('hg copy [OPTION]... [SOURCE]... DEST')),
2243 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2259 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2244 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2260 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2245 "debugconfig": (debugconfig, [], _('debugconfig')),
2261 "debugconfig": (debugconfig, [], _('debugconfig')),
2246 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2262 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2247 "debugstate": (debugstate, [], _('debugstate')),
2263 "debugstate": (debugstate, [], _('debugstate')),
2248 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2264 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2249 "debugindex": (debugindex, [], _('debugindex FILE')),
2265 "debugindex": (debugindex, [], _('debugindex FILE')),
2250 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2266 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2251 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2267 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2252 "debugwalk":
2268 "debugwalk":
2253 (debugwalk,
2269 (debugwalk,
2254 [('I', 'include', [], _('include names matching the given patterns')),
2270 [('I', 'include', [], _('include names matching the given patterns')),
2255 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2271 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2256 _('debugwalk [OPTION]... [FILE]...')),
2272 _('debugwalk [OPTION]... [FILE]...')),
2257 "^diff":
2273 "^diff":
2258 (diff,
2274 (diff,
2259 [('r', 'rev', [], _('revision')),
2275 [('r', 'rev', [], _('revision')),
2260 ('a', 'text', None, _('treat all files as text')),
2276 ('a', 'text', None, _('treat all files as text')),
2261 ('I', 'include', [], _('include names matching the given patterns')),
2277 ('I', 'include', [], _('include names matching the given patterns')),
2262 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2278 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2263 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2279 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2264 "^export":
2280 "^export":
2265 (export,
2281 (export,
2266 [('o', 'output', "", _('print output to file with formatted name')),
2282 [('o', 'output', "", _('print output to file with formatted name')),
2267 ('a', 'text', None, _('treat all files as text')),
2283 ('a', 'text', None, _('treat all files as text')),
2268 ('', 'switch-parent', None, _('diff against the second parent'))],
2284 ('', 'switch-parent', None, _('diff against the second parent'))],
2269 "hg export [-a] [-o OUTFILE] REV..."),
2285 "hg export [-a] [-o OUTFILE] REV..."),
2270 "forget":
2286 "forget":
2271 (forget,
2287 (forget,
2272 [('I', 'include', [], _('include names matching the given patterns')),
2288 [('I', 'include', [], _('include names matching the given patterns')),
2273 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2289 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2274 "hg forget [OPTION]... FILE..."),
2290 "hg forget [OPTION]... FILE..."),
2275 "grep":
2291 "grep":
2276 (grep,
2292 (grep,
2277 [('0', 'print0', None, _('end fields with NUL')),
2293 [('0', 'print0', None, _('end fields with NUL')),
2278 ('I', 'include', [], _('include names matching the given patterns')),
2294 ('I', 'include', [], _('include names matching the given patterns')),
2279 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2295 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2280 ('', 'all', None, _('print all revisions that match')),
2296 ('', 'all', None, _('print all revisions that match')),
2281 ('i', 'ignore-case', None, _('ignore case when matching')),
2297 ('i', 'ignore-case', None, _('ignore case when matching')),
2282 ('l', 'files-with-matches', None, _('print only filenames and revs that match')),
2298 ('l', 'files-with-matches', None,
2299 _('print only filenames and revs that match')),
2283 ('n', 'line-number', None, _('print matching line numbers')),
2300 ('n', 'line-number', None, _('print matching line numbers')),
2284 ('r', 'rev', [], _('search in given revision range')),
2301 ('r', 'rev', [], _('search in given revision range')),
2285 ('u', 'user', None, _('print user who committed change'))],
2302 ('u', 'user', None, _('print user who committed change'))],
2286 "hg grep [OPTION]... PATTERN [FILE]..."),
2303 "hg grep [OPTION]... PATTERN [FILE]..."),
2287 "heads":
2304 "heads":
2288 (heads,
2305 (heads,
2289 [('b', 'branches', None, _('find branch info')),
2306 [('b', 'branches', None, _('find branch info')),
2290 ('r', 'rev', "", _('show only heads which are descendants of rev'))],
2307 ('r', 'rev', "", _('show only heads which are descendants of rev'))],
2291 _('hg heads [-b] [-r <rev>]')),
2308 _('hg heads [-b] [-r <rev>]')),
2292 "help": (help_, [], _('hg help [COMMAND]')),
2309 "help": (help_, [], _('hg help [COMMAND]')),
2293 "identify|id": (identify, [], _('hg identify')),
2310 "identify|id": (identify, [], _('hg identify')),
2294 "import|patch":
2311 "import|patch":
2295 (import_,
2312 (import_,
2296 [('p', 'strip', 1, _('directory strip option for patch. This has the same\n') +
2313 [('p', 'strip', 1,
2297 _('meaning as the corresponding patch option')),
2314 _('directory strip option for patch. This has the same\n') +
2298 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
2315 _('meaning as the corresponding patch option')),
2316 ('f', 'force', None,
2317 _('skip check for outstanding uncommitted changes')),
2299 ('b', 'base', "", _('base path'))],
2318 ('b', 'base', "", _('base path'))],
2300 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
2319 "hg import [-f] [-p NUM] [-b BASE] PATCH..."),
2301 "incoming|in": (incoming,
2320 "incoming|in": (incoming,
2302 [('M', 'no-merges', None, _("do not show merges")),
2321 [('M', 'no-merges', None, _("do not show merges")),
2303 ('p', 'patch', None, _('show patch')),
2322 ('p', 'patch', None, _('show patch')),
2304 ('n', 'newest-first', None, _('show newest record first'))],
2323 ('n', 'newest-first', None, _('show newest record first'))],
2305 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2324 _('hg incoming [-p] [-n] [-M] [SOURCE]')),
2306 "^init": (init, [], _('hg init [DEST]')),
2325 "^init": (init, [], _('hg init [DEST]')),
2307 "locate":
2326 "locate":
2308 (locate,
2327 (locate,
2309 [('r', 'rev', '', _('search the repository as it stood at rev')),
2328 [('r', 'rev', '', _('search the repository as it stood at rev')),
2310 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2329 ('0', 'print0', None,
2311 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
2330 _('end filenames with NUL, for use with xargs')),
2331 ('f', 'fullpath', None,
2332 _('print complete paths from the filesystem root')),
2312 ('I', 'include', [], _('include names matching the given patterns')),
2333 ('I', 'include', [], _('include names matching the given patterns')),
2313 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2334 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2314 _('hg locate [OPTION]... [PATTERN]...')),
2335 _('hg locate [OPTION]... [PATTERN]...')),
2315 "^log|history":
2336 "^log|history":
2316 (log,
2337 (log,
2317 [('I', 'include', [], _('include names matching the given patterns')),
2338 [('I', 'include', [], _('include names matching the given patterns')),
2318 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2339 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2319 ('b', 'branch', None, _('show branches')),
2340 ('b', 'branch', None, _('show branches')),
2320 ('k', 'keyword', [], _('search for a keyword')),
2341 ('k', 'keyword', [], _('search for a keyword')),
2321 ('r', 'rev', [], _('show the specified revision or range')),
2342 ('r', 'rev', [], _('show the specified revision or range')),
2322 ('M', 'no-merges', None, _("do not show merges")),
2343 ('M', 'no-merges', None, _("do not show merges")),
2323 ('m', 'only-merges', None, _("show only merges")),
2344 ('m', 'only-merges', None, _("show only merges")),
2324 ('p', 'patch', None, _('show patch'))],
2345 ('p', 'patch', None, _('show patch'))],
2325 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2346 _('hg log [-I] [-X] [-r REV]... [-p] [FILE]')),
2326 "manifest": (manifest, [], _('hg manifest [REV]')),
2347 "manifest": (manifest, [], _('hg manifest [REV]')),
2327 "outgoing|out": (outgoing,
2348 "outgoing|out": (outgoing,
2328 [('M', 'no-merges', None, _("do not show merges")),
2349 [('M', 'no-merges', None, _("do not show merges")),
2329 ('p', 'patch', None, _('show patch')),
2350 ('p', 'patch', None, _('show patch')),
2330 ('n', 'newest-first', None, _('show newest record first'))],
2351 ('n', 'newest-first', None, _('show newest record first'))],
2331 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2352 _('hg outgoing [-p] [-n] [-M] [DEST]')),
2332 "^parents": (parents, [], _('hg parents [REV]')),
2353 "^parents": (parents, [], _('hg parents [REV]')),
2333 "paths": (paths, [], _('hg paths [NAME]')),
2354 "paths": (paths, [], _('hg paths [NAME]')),
2334 "^pull":
2355 "^pull":
2335 (pull,
2356 (pull,
2336 [('u', 'update', None, _('update the working directory to tip after pull')),
2357 [('u', 'update', None,
2358 _('update the working directory to tip after pull')),
2337 ('e', 'ssh', "", _('specify ssh command to use')),
2359 ('e', 'ssh', "", _('specify ssh command to use')),
2338 ('r', 'rev', [], _('a specific revision you would like to pull')),
2360 ('r', 'rev', [], _('a specific revision you would like to pull')),
2339 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2361 ('', 'remotecmd', "",
2362 _('specify hg command to run on the remote side'))],
2340 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2363 _('hg pull [-u] [-e FILE] [-r rev] [--remotecmd FILE] [SOURCE]')),
2341 "^push":
2364 "^push":
2342 (push,
2365 (push,
2343 [('f', 'force', None, _('force push')),
2366 [('f', 'force', None, _('force push')),
2344 ('e', 'ssh', "", _('specify ssh command to use')),
2367 ('e', 'ssh', "", _('specify ssh command to use')),
2345 ('', 'remotecmd', "", _('specify hg command to run on the remote side'))],
2368 ('', 'remotecmd', "",
2369 _('specify hg command to run on the remote side'))],
2346 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2370 _('hg push [-f] [-e FILE] [--remotecmd FILE] [DEST]')),
2347 "rawcommit":
2371 "rawcommit":
2348 (rawcommit,
2372 (rawcommit,
2349 [('p', 'parent', [], _('parent')),
2373 [('p', 'parent', [], _('parent')),
2350 ('d', 'date', "", _('date code')),
2374 ('d', 'date', "", _('date code')),
2351 ('u', 'user', "", _('user')),
2375 ('u', 'user', "", _('user')),
2352 ('F', 'files', "", _('file list')),
2376 ('F', 'files', "", _('file list')),
2353 ('m', 'message', "", _('commit message')),
2377 ('m', 'message', "", _('commit message')),
2354 ('l', 'logfile', "", _('commit message file'))],
2378 ('l', 'logfile', "", _('commit message file'))],
2355 _('hg rawcommit [OPTION]... [FILE]...')),
2379 _('hg rawcommit [OPTION]... [FILE]...')),
2356 "recover": (recover, [], _("hg recover")),
2380 "recover": (recover, [], _("hg recover")),
2357 "^remove|rm": (remove,
2381 "^remove|rm": (remove,
2358 [('I', 'include', [], _('include names matching the given patterns')),
2382 [('I', 'include', [],
2359 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2383 _('include names matching the given patterns')),
2384 ('X', 'exclude', [],
2385 _('exclude names matching the given patterns'))],
2360 _("hg remove [OPTION]... FILE...")),
2386 _("hg remove [OPTION]... FILE...")),
2361 "rename|mv": (rename,
2387 "rename|mv": (rename,
2362 [('I', 'include', [], _('include names matching the given patterns')),
2388 [('I', 'include', [],
2363 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2389 _('include names matching the given patterns')),
2364 ('A', 'after', None, _('record a rename that has already occurred')),
2390 ('X', 'exclude', [],
2365 ('f', 'force', None, _('forcibly copy over an existing managed file'))],
2391 _('exclude names matching the given patterns')),
2392 ('A', 'after', None,
2393 _('record a rename that has already occurred')),
2394 ('f', 'force', None,
2395 _('forcibly copy over an existing managed file'))],
2366 _('hg rename [OPTION]... [SOURCE]... DEST')),
2396 _('hg rename [OPTION]... [SOURCE]... DEST')),
2367 "^revert":
2397 "^revert":
2368 (revert,
2398 (revert,
2369 [('I', 'include', [], _('include names matching the given patterns')),
2399 [('I', 'include', [], _('include names matching the given patterns')),
2370 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2400 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2371 ("r", "rev", "", _("revision to revert to"))],
2401 ("r", "rev", "", _("revision to revert to"))],
2372 _("hg revert [-n] [-r REV] [NAME]...")),
2402 _("hg revert [-n] [-r REV] [NAME]...")),
2373 "root": (root, [], _("hg root")),
2403 "root": (root, [], _("hg root")),
2374 "^serve":
2404 "^serve":
2375 (serve,
2405 (serve,
2376 [('A', 'accesslog', '', _('name of access log file to write to')),
2406 [('A', 'accesslog', '', _('name of access log file to write to')),
2377 ('E', 'errorlog', '', _('name of error log file to write to')),
2407 ('E', 'errorlog', '', _('name of error log file to write to')),
2378 ('p', 'port', 0, _('port to use (default: 8000)')),
2408 ('p', 'port', 0, _('port to use (default: 8000)')),
2379 ('a', 'address', '', _('address to use')),
2409 ('a', 'address', '', _('address to use')),
2380 ('n', 'name', "", _('name to show in web pages (default: working dir)')),
2410 ('n', 'name', "",
2411 _('name to show in web pages (default: working dir)')),
2381 ('', 'stdio', None, _('for remote clients')),
2412 ('', 'stdio', None, _('for remote clients')),
2382 ('t', 'templates', "", _('web templates to use')),
2413 ('t', 'templates', "", _('web templates to use')),
2383 ('', 'style', "", _('template style to use')),
2414 ('', 'style', "", _('template style to use')),
2384 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2415 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2385 _("hg serve [OPTION]...")),
2416 _("hg serve [OPTION]...")),
2386 "^status|st":
2417 "^status|st":
2387 (status,
2418 (status,
2388 [('m', 'modified', None, _('show only modified files')),
2419 [('m', 'modified', None, _('show only modified files')),
2389 ('a', 'added', None, _('show only added files')),
2420 ('a', 'added', None, _('show only added files')),
2390 ('r', 'removed', None, _('show only removed files')),
2421 ('r', 'removed', None, _('show only removed files')),
2391 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2422 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2392 ('n', 'no-status', None, _('hide status prefix')),
2423 ('n', 'no-status', None, _('hide status prefix')),
2393 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
2424 ('0', 'print0', None,
2425 _('end filenames with NUL, for use with xargs')),
2394 ('I', 'include', [], _('include names matching the given patterns')),
2426 ('I', 'include', [], _('include names matching the given patterns')),
2395 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2427 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2396 _("hg status [OPTION]... [FILE]...")),
2428 _("hg status [OPTION]... [FILE]...")),
2397 "tag":
2429 "tag":
2398 (tag,
2430 (tag,
2399 [('l', 'local', None, _('make the tag local')),
2431 [('l', 'local', None, _('make the tag local')),
2400 ('m', 'message', "", _('message for tag commit log entry')),
2432 ('m', 'message', "", _('message for tag commit log entry')),
2401 ('d', 'date', "", _('record datecode as commit date')),
2433 ('d', 'date', "", _('record datecode as commit date')),
2402 ('u', 'user', "", _('record user as commiter')),
2434 ('u', 'user', "", _('record user as commiter')),
2403 ('r', 'rev', "", _('revision to tag'))],
2435 ('r', 'rev', "", _('revision to tag'))],
2404 _('hg tag [OPTION]... NAME [REV]')),
2436 _('hg tag [OPTION]... NAME [REV]')),
2405 "tags": (tags, [], _('hg tags')),
2437 "tags": (tags, [], _('hg tags')),
2406 "tip": (tip, [], _('hg tip')),
2438 "tip": (tip, [], _('hg tip')),
2407 "unbundle":
2439 "unbundle":
2408 (unbundle,
2440 (unbundle,
2409 [],
2441 [('u', 'update', None,
2410 _('hg unbundle FILE')),
2442 _('update the working directory to tip after unbundle'))],
2443 _('hg unbundle [-u] FILE')),
2411 "undo": (undo, [], _('hg undo')),
2444 "undo": (undo, [], _('hg undo')),
2412 "^update|up|checkout|co":
2445 "^update|up|checkout|co":
2413 (update,
2446 (update,
2414 [('b', 'branch', "", _('checkout the head of a specific branch')),
2447 [('b', 'branch', "", _('checkout the head of a specific branch')),
2415 ('m', 'merge', None, _('allow merging of branches')),
2448 ('m', 'merge', None, _('allow merging of branches')),
2416 ('C', 'clean', None, _('overwrite locally modified files')),
2449 ('C', 'clean', None, _('overwrite locally modified files')),
2417 ('f', 'force', None, _('force a merge with outstanding changes'))],
2450 ('f', 'force', None, _('force a merge with outstanding changes'))],
2418 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2451 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2419 "verify": (verify, [], _('hg verify')),
2452 "verify": (verify, [], _('hg verify')),
2420 "version": (show_version, [], _('hg version')),
2453 "version": (show_version, [], _('hg version')),
2421 }
2454 }
2422
2455
2423 globalopts = [
2456 globalopts = [
2424 ('R', 'repository', "", _("repository root directory")),
2457 ('R', 'repository', "", _("repository root directory")),
2425 ('', 'cwd', '', _("change working directory")),
2458 ('', 'cwd', '', _("change working directory")),
2426 ('y', 'noninteractive', None, _("do not prompt, assume 'yes' for any required answers")),
2459 ('y', 'noninteractive', None,
2460 _("do not prompt, assume 'yes' for any required answers")),
2427 ('q', 'quiet', None, _("suppress output")),
2461 ('q', 'quiet', None, _("suppress output")),
2428 ('v', 'verbose', None, _("enable additional output")),
2462 ('v', 'verbose', None, _("enable additional output")),
2429 ('', 'debug', None, _("enable debugging output")),
2463 ('', 'debug', None, _("enable debugging output")),
2430 ('', 'debugger', None, _("start debugger")),
2464 ('', 'debugger', None, _("start debugger")),
2431 ('', 'traceback', None, _("print traceback on exception")),
2465 ('', 'traceback', None, _("print traceback on exception")),
2432 ('', 'time', None, _("time how long the command takes")),
2466 ('', 'time', None, _("time how long the command takes")),
2433 ('', 'profile', None, _("print command execution profile")),
2467 ('', 'profile', None, _("print command execution profile")),
2434 ('', 'version', None, _("output version information and exit")),
2468 ('', 'version', None, _("output version information and exit")),
2435 ('h', 'help', None, _("display help and exit")),
2469 ('h', 'help', None, _("display help and exit")),
2436 ]
2470 ]
2437
2471
2438 norepo = ("clone init version help debugancestor debugconfig debugdata"
2472 norepo = ("clone init version help debugancestor debugconfig debugdata"
2439 " debugindex debugindexdot paths")
2473 " debugindex debugindexdot paths")
2440
2474
2441 def find(cmd):
2475 def find(cmd):
2442 """Return (aliases, command table entry) for command string."""
2476 """Return (aliases, command table entry) for command string."""
2443 choice = None
2477 choice = None
2444 for e in table.keys():
2478 for e in table.keys():
2445 aliases = e.lstrip("^").split("|")
2479 aliases = e.lstrip("^").split("|")
2446 if cmd in aliases:
2480 if cmd in aliases:
2447 return aliases, table[e]
2481 return aliases, table[e]
2448 for a in aliases:
2482 for a in aliases:
2449 if a.startswith(cmd):
2483 if a.startswith(cmd):
2450 if choice:
2484 if choice:
2451 raise AmbiguousCommand(cmd)
2485 raise AmbiguousCommand(cmd)
2452 else:
2486 else:
2453 choice = aliases, table[e]
2487 choice = aliases, table[e]
2454 break
2488 break
2455 if choice:
2489 if choice:
2456 return choice
2490 return choice
2457
2491
2458 raise UnknownCommand(cmd)
2492 raise UnknownCommand(cmd)
2459
2493
2460 class SignalInterrupt(Exception):
2494 class SignalInterrupt(Exception):
2461 """Exception raised on SIGTERM and SIGHUP."""
2495 """Exception raised on SIGTERM and SIGHUP."""
2462
2496
2463 def catchterm(*args):
2497 def catchterm(*args):
2464 raise SignalInterrupt
2498 raise SignalInterrupt
2465
2499
2466 def run():
2500 def run():
2467 sys.exit(dispatch(sys.argv[1:]))
2501 sys.exit(dispatch(sys.argv[1:]))
2468
2502
2469 class ParseError(Exception):
2503 class ParseError(Exception):
2470 """Exception raised on errors in parsing the command line."""
2504 """Exception raised on errors in parsing the command line."""
2471
2505
2472 def parse(ui, args):
2506 def parse(ui, args):
2473 options = {}
2507 options = {}
2474 cmdoptions = {}
2508 cmdoptions = {}
2475
2509
2476 try:
2510 try:
2477 args = fancyopts.fancyopts(args, globalopts, options)
2511 args = fancyopts.fancyopts(args, globalopts, options)
2478 except fancyopts.getopt.GetoptError, inst:
2512 except fancyopts.getopt.GetoptError, inst:
2479 raise ParseError(None, inst)
2513 raise ParseError(None, inst)
2480
2514
2481 if args:
2515 if args:
2482 cmd, args = args[0], args[1:]
2516 cmd, args = args[0], args[1:]
2483 aliases, i = find(cmd)
2517 aliases, i = find(cmd)
2484 cmd = aliases[0]
2518 cmd = aliases[0]
2485 defaults = ui.config("defaults", cmd)
2519 defaults = ui.config("defaults", cmd)
2486 if defaults:
2520 if defaults:
2487 args = defaults.split() + args
2521 args = defaults.split() + args
2488 c = list(i[1])
2522 c = list(i[1])
2489 else:
2523 else:
2490 cmd = None
2524 cmd = None
2491 c = []
2525 c = []
2492
2526
2493 # combine global options into local
2527 # combine global options into local
2494 for o in globalopts:
2528 for o in globalopts:
2495 c.append((o[0], o[1], options[o[1]], o[3]))
2529 c.append((o[0], o[1], options[o[1]], o[3]))
2496
2530
2497 try:
2531 try:
2498 args = fancyopts.fancyopts(args, c, cmdoptions)
2532 args = fancyopts.fancyopts(args, c, cmdoptions)
2499 except fancyopts.getopt.GetoptError, inst:
2533 except fancyopts.getopt.GetoptError, inst:
2500 raise ParseError(cmd, inst)
2534 raise ParseError(cmd, inst)
2501
2535
2502 # separate global options back out
2536 # separate global options back out
2503 for o in globalopts:
2537 for o in globalopts:
2504 n = o[1]
2538 n = o[1]
2505 options[n] = cmdoptions[n]
2539 options[n] = cmdoptions[n]
2506 del cmdoptions[n]
2540 del cmdoptions[n]
2507
2541
2508 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2542 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2509
2543
2510 def dispatch(args):
2544 def dispatch(args):
2511 signal.signal(signal.SIGTERM, catchterm)
2545 signal.signal(signal.SIGTERM, catchterm)
2512 try:
2546 try:
2513 signal.signal(signal.SIGHUP, catchterm)
2547 signal.signal(signal.SIGHUP, catchterm)
2514 except AttributeError:
2548 except AttributeError:
2515 pass
2549 pass
2516
2550
2517 try:
2551 try:
2518 u = ui.ui()
2552 u = ui.ui()
2519 except util.Abort, inst:
2553 except util.Abort, inst:
2520 sys.stderr.write(_("abort: %s\n") % inst)
2554 sys.stderr.write(_("abort: %s\n") % inst)
2521 sys.exit(1)
2555 sys.exit(1)
2522
2556
2523 external = []
2557 external = []
2524 for x in u.extensions():
2558 for x in u.extensions():
2525 def on_exception(exc, inst):
2559 def on_exception(exc, inst):
2526 u.warn(_("*** failed to import extension %s\n") % x[1])
2560 u.warn(_("*** failed to import extension %s\n") % x[1])
2527 u.warn("%s\n" % inst)
2561 u.warn("%s\n" % inst)
2528 if "--traceback" in sys.argv[1:]:
2562 if "--traceback" in sys.argv[1:]:
2529 traceback.print_exc()
2563 traceback.print_exc()
2530 if x[1]:
2564 if x[1]:
2531 try:
2565 try:
2532 mod = imp.load_source(x[0], x[1])
2566 mod = imp.load_source(x[0], x[1])
2533 except Exception, inst:
2567 except Exception, inst:
2534 on_exception(Exception, inst)
2568 on_exception(Exception, inst)
2535 continue
2569 continue
2536 else:
2570 else:
2537 def importh(name):
2571 def importh(name):
2538 mod = __import__(name)
2572 mod = __import__(name)
2539 components = name.split('.')
2573 components = name.split('.')
2540 for comp in components[1:]:
2574 for comp in components[1:]:
2541 mod = getattr(mod, comp)
2575 mod = getattr(mod, comp)
2542 return mod
2576 return mod
2543 try:
2577 try:
2544 mod = importh(x[0])
2578 mod = importh(x[0])
2545 except Exception, inst:
2579 except Exception, inst:
2546 on_exception(Exception, inst)
2580 on_exception(Exception, inst)
2547 continue
2581 continue
2548
2582
2549 external.append(mod)
2583 external.append(mod)
2550 for x in external:
2584 for x in external:
2551 cmdtable = getattr(x, 'cmdtable', {})
2585 cmdtable = getattr(x, 'cmdtable', {})
2552 for t in cmdtable:
2586 for t in cmdtable:
2553 if t in table:
2587 if t in table:
2554 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2588 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
2555 table.update(cmdtable)
2589 table.update(cmdtable)
2556
2590
2557 try:
2591 try:
2558 cmd, func, args, options, cmdoptions = parse(u, args)
2592 cmd, func, args, options, cmdoptions = parse(u, args)
2559 except ParseError, inst:
2593 except ParseError, inst:
2560 if inst.args[0]:
2594 if inst.args[0]:
2561 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2595 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2562 help_(u, inst.args[0])
2596 help_(u, inst.args[0])
2563 else:
2597 else:
2564 u.warn(_("hg: %s\n") % inst.args[1])
2598 u.warn(_("hg: %s\n") % inst.args[1])
2565 help_(u, 'shortlist')
2599 help_(u, 'shortlist')
2566 sys.exit(-1)
2600 sys.exit(-1)
2567 except AmbiguousCommand, inst:
2601 except AmbiguousCommand, inst:
2568 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2602 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2569 sys.exit(1)
2603 sys.exit(1)
2570 except UnknownCommand, inst:
2604 except UnknownCommand, inst:
2571 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2605 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2572 help_(u, 'shortlist')
2606 help_(u, 'shortlist')
2573 sys.exit(1)
2607 sys.exit(1)
2574
2608
2575 if options["time"]:
2609 if options["time"]:
2576 def get_times():
2610 def get_times():
2577 t = os.times()
2611 t = os.times()
2578 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2612 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2579 t = (t[0], t[1], t[2], t[3], time.clock())
2613 t = (t[0], t[1], t[2], t[3], time.clock())
2580 return t
2614 return t
2581 s = get_times()
2615 s = get_times()
2582 def print_time():
2616 def print_time():
2583 t = get_times()
2617 t = get_times()
2584 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2618 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2585 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2619 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2586 atexit.register(print_time)
2620 atexit.register(print_time)
2587
2621
2588 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2622 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2589 not options["noninteractive"])
2623 not options["noninteractive"])
2590
2624
2591 # enter the debugger before command execution
2625 # enter the debugger before command execution
2592 if options['debugger']:
2626 if options['debugger']:
2593 pdb.set_trace()
2627 pdb.set_trace()
2594
2628
2595 try:
2629 try:
2596 try:
2630 try:
2597 if options['help']:
2631 if options['help']:
2598 help_(u, cmd, options['version'])
2632 help_(u, cmd, options['version'])
2599 sys.exit(0)
2633 sys.exit(0)
2600 elif options['version']:
2634 elif options['version']:
2601 show_version(u)
2635 show_version(u)
2602 sys.exit(0)
2636 sys.exit(0)
2603 elif not cmd:
2637 elif not cmd:
2604 help_(u, 'shortlist')
2638 help_(u, 'shortlist')
2605 sys.exit(0)
2639 sys.exit(0)
2606
2640
2607 if options['cwd']:
2641 if options['cwd']:
2608 try:
2642 try:
2609 os.chdir(options['cwd'])
2643 os.chdir(options['cwd'])
2610 except OSError, inst:
2644 except OSError, inst:
2611 raise util.Abort('%s: %s' %
2645 raise util.Abort('%s: %s' %
2612 (options['cwd'], inst.strerror))
2646 (options['cwd'], inst.strerror))
2613
2647
2614 if cmd not in norepo.split():
2648 if cmd not in norepo.split():
2615 path = options["repository"] or ""
2649 path = options["repository"] or ""
2616 repo = hg.repository(ui=u, path=path)
2650 repo = hg.repository(ui=u, path=path)
2617 for x in external:
2651 for x in external:
2618 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2652 if hasattr(x, 'reposetup'): x.reposetup(u, repo)
2619 d = lambda: func(u, repo, *args, **cmdoptions)
2653 d = lambda: func(u, repo, *args, **cmdoptions)
2620 else:
2654 else:
2621 d = lambda: func(u, *args, **cmdoptions)
2655 d = lambda: func(u, *args, **cmdoptions)
2622
2656
2623 if options['profile']:
2657 if options['profile']:
2624 import hotshot, hotshot.stats
2658 import hotshot, hotshot.stats
2625 prof = hotshot.Profile("hg.prof")
2659 prof = hotshot.Profile("hg.prof")
2626 r = prof.runcall(d)
2660 r = prof.runcall(d)
2627 prof.close()
2661 prof.close()
2628 stats = hotshot.stats.load("hg.prof")
2662 stats = hotshot.stats.load("hg.prof")
2629 stats.strip_dirs()
2663 stats.strip_dirs()
2630 stats.sort_stats('time', 'calls')
2664 stats.sort_stats('time', 'calls')
2631 stats.print_stats(40)
2665 stats.print_stats(40)
2632 return r
2666 return r
2633 else:
2667 else:
2634 return d()
2668 return d()
2635 except:
2669 except:
2636 # enter the debugger when we hit an exception
2670 # enter the debugger when we hit an exception
2637 if options['debugger']:
2671 if options['debugger']:
2638 pdb.post_mortem(sys.exc_info()[2])
2672 pdb.post_mortem(sys.exc_info()[2])
2639 if options['traceback']:
2673 if options['traceback']:
2640 traceback.print_exc()
2674 traceback.print_exc()
2641 raise
2675 raise
2642 except hg.RepoError, inst:
2676 except hg.RepoError, inst:
2643 u.warn(_("abort: "), inst, "!\n")
2677 u.warn(_("abort: "), inst, "!\n")
2644 except revlog.RevlogError, inst:
2678 except revlog.RevlogError, inst:
2645 u.warn(_("abort: "), inst, "!\n")
2679 u.warn(_("abort: "), inst, "!\n")
2646 except SignalInterrupt:
2680 except SignalInterrupt:
2647 u.warn(_("killed!\n"))
2681 u.warn(_("killed!\n"))
2648 except KeyboardInterrupt:
2682 except KeyboardInterrupt:
2649 try:
2683 try:
2650 u.warn(_("interrupted!\n"))
2684 u.warn(_("interrupted!\n"))
2651 except IOError, inst:
2685 except IOError, inst:
2652 if inst.errno == errno.EPIPE:
2686 if inst.errno == errno.EPIPE:
2653 if u.debugflag:
2687 if u.debugflag:
2654 u.warn(_("\nbroken pipe\n"))
2688 u.warn(_("\nbroken pipe\n"))
2655 else:
2689 else:
2656 raise
2690 raise
2657 except IOError, inst:
2691 except IOError, inst:
2658 if hasattr(inst, "code"):
2692 if hasattr(inst, "code"):
2659 u.warn(_("abort: %s\n") % inst)
2693 u.warn(_("abort: %s\n") % inst)
2660 elif hasattr(inst, "reason"):
2694 elif hasattr(inst, "reason"):
2661 u.warn(_("abort: error: %s\n") % inst.reason[1])
2695 u.warn(_("abort: error: %s\n") % inst.reason[1])
2662 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2696 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
2663 if u.debugflag:
2697 if u.debugflag:
2664 u.warn(_("broken pipe\n"))
2698 u.warn(_("broken pipe\n"))
2665 elif getattr(inst, "strerror", None):
2699 elif getattr(inst, "strerror", None):
2666 if getattr(inst, "filename", None):
2700 if getattr(inst, "filename", None):
2667 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2701 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
2668 else:
2702 else:
2669 u.warn(_("abort: %s\n") % inst.strerror)
2703 u.warn(_("abort: %s\n") % inst.strerror)
2670 else:
2704 else:
2671 raise
2705 raise
2672 except OSError, inst:
2706 except OSError, inst:
2673 if hasattr(inst, "filename"):
2707 if hasattr(inst, "filename"):
2674 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2708 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
2675 else:
2709 else:
2676 u.warn(_("abort: %s\n") % inst.strerror)
2710 u.warn(_("abort: %s\n") % inst.strerror)
2677 except util.Abort, inst:
2711 except util.Abort, inst:
2678 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2712 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
2679 sys.exit(1)
2713 sys.exit(1)
2680 except TypeError, inst:
2714 except TypeError, inst:
2681 # was this an argument error?
2715 # was this an argument error?
2682 tb = traceback.extract_tb(sys.exc_info()[2])
2716 tb = traceback.extract_tb(sys.exc_info()[2])
2683 if len(tb) > 2: # no
2717 if len(tb) > 2: # no
2684 raise
2718 raise
2685 u.debug(inst, "\n")
2719 u.debug(inst, "\n")
2686 u.warn(_("%s: invalid arguments\n") % cmd)
2720 u.warn(_("%s: invalid arguments\n") % cmd)
2687 help_(u, cmd)
2721 help_(u, cmd)
2688 except AmbiguousCommand, inst:
2722 except AmbiguousCommand, inst:
2689 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2723 u.warn(_("hg: command '%s' is ambiguous.\n") % inst.args[0])
2690 help_(u, 'shortlist')
2724 help_(u, 'shortlist')
2691 except UnknownCommand, inst:
2725 except UnknownCommand, inst:
2692 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2726 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2693 help_(u, 'shortlist')
2727 help_(u, 'shortlist')
2694 except SystemExit:
2728 except SystemExit:
2695 # don't catch this in the catch-all below
2729 # don't catch this in the catch-all below
2696 raise
2730 raise
2697 except:
2731 except:
2698 u.warn(_("** unknown exception encountered, details follow\n"))
2732 u.warn(_("** unknown exception encountered, details follow\n"))
2699 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2733 u.warn(_("** report bug details to mercurial@selenic.com\n"))
2700 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2734 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
2701 % version.get_version())
2735 % version.get_version())
2702 raise
2736 raise
2703
2737
2704 sys.exit(-1)
2738 sys.exit(-1)
@@ -1,1800 +1,1796 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import struct, os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno")
14
14
15 class localrepository(object):
15 class localrepository(object):
16 def __init__(self, ui, path=None, create=0):
16 def __init__(self, ui, path=None, create=0):
17 if not path:
17 if not path:
18 p = os.getcwd()
18 p = os.getcwd()
19 while not os.path.isdir(os.path.join(p, ".hg")):
19 while not os.path.isdir(os.path.join(p, ".hg")):
20 oldp = p
20 oldp = p
21 p = os.path.dirname(p)
21 p = os.path.dirname(p)
22 if p == oldp: raise repo.RepoError(_("no repo found"))
22 if p == oldp: raise repo.RepoError(_("no repo found"))
23 path = p
23 path = p
24 self.path = os.path.join(path, ".hg")
24 self.path = os.path.join(path, ".hg")
25
25
26 if not create and not os.path.isdir(self.path):
26 if not create and not os.path.isdir(self.path):
27 raise repo.RepoError(_("repository %s not found") % path)
27 raise repo.RepoError(_("repository %s not found") % path)
28
28
29 self.root = os.path.abspath(path)
29 self.root = os.path.abspath(path)
30 self.ui = ui
30 self.ui = ui
31 self.opener = util.opener(self.path)
31 self.opener = util.opener(self.path)
32 self.wopener = util.opener(self.root)
32 self.wopener = util.opener(self.root)
33 self.manifest = manifest.manifest(self.opener)
33 self.manifest = manifest.manifest(self.opener)
34 self.changelog = changelog.changelog(self.opener)
34 self.changelog = changelog.changelog(self.opener)
35 self.tagscache = None
35 self.tagscache = None
36 self.nodetagscache = None
36 self.nodetagscache = None
37 self.encodepats = None
37 self.encodepats = None
38 self.decodepats = None
38 self.decodepats = None
39
39
40 if create:
40 if create:
41 os.mkdir(self.path)
41 os.mkdir(self.path)
42 os.mkdir(self.join("data"))
42 os.mkdir(self.join("data"))
43
43
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
44 self.dirstate = dirstate.dirstate(self.opener, ui, self.root)
45 try:
45 try:
46 self.ui.readconfig(self.join("hgrc"))
46 self.ui.readconfig(self.join("hgrc"))
47 except IOError: pass
47 except IOError: pass
48
48
49 def hook(self, name, **args):
49 def hook(self, name, **args):
50 def runhook(name, cmd):
50 def runhook(name, cmd):
51 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
51 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
52 old = {}
52 old = {}
53 for k, v in args.items():
53 for k, v in args.items():
54 k = k.upper()
54 k = k.upper()
55 old[k] = os.environ.get(k, None)
55 old[k] = os.environ.get(k, None)
56 os.environ[k] = v
56 os.environ[k] = v
57
57
58 # Hooks run in the repository root
58 # Hooks run in the repository root
59 olddir = os.getcwd()
59 olddir = os.getcwd()
60 os.chdir(self.root)
60 os.chdir(self.root)
61 r = os.system(cmd)
61 r = os.system(cmd)
62 os.chdir(olddir)
62 os.chdir(olddir)
63
63
64 for k, v in old.items():
64 for k, v in old.items():
65 if v != None:
65 if v != None:
66 os.environ[k] = v
66 os.environ[k] = v
67 else:
67 else:
68 del os.environ[k]
68 del os.environ[k]
69
69
70 if r:
70 if r:
71 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
71 self.ui.warn(_("abort: %s hook failed with status %d!\n") %
72 (name, r))
72 (name, r))
73 return False
73 return False
74 return True
74 return True
75
75
76 r = True
76 r = True
77 for hname, cmd in self.ui.configitems("hooks"):
77 for hname, cmd in self.ui.configitems("hooks"):
78 s = hname.split(".")
78 s = hname.split(".")
79 if s[0] == name and cmd:
79 if s[0] == name and cmd:
80 r = runhook(hname, cmd) and r
80 r = runhook(hname, cmd) and r
81 return r
81 return r
82
82
83 def tags(self):
83 def tags(self):
84 '''return a mapping of tag to node'''
84 '''return a mapping of tag to node'''
85 if not self.tagscache:
85 if not self.tagscache:
86 self.tagscache = {}
86 self.tagscache = {}
87 def addtag(self, k, n):
87 def addtag(self, k, n):
88 try:
88 try:
89 bin_n = bin(n)
89 bin_n = bin(n)
90 except TypeError:
90 except TypeError:
91 bin_n = ''
91 bin_n = ''
92 self.tagscache[k.strip()] = bin_n
92 self.tagscache[k.strip()] = bin_n
93
93
94 try:
94 try:
95 # read each head of the tags file, ending with the tip
95 # read each head of the tags file, ending with the tip
96 # and add each tag found to the map, with "newer" ones
96 # and add each tag found to the map, with "newer" ones
97 # taking precedence
97 # taking precedence
98 fl = self.file(".hgtags")
98 fl = self.file(".hgtags")
99 h = fl.heads()
99 h = fl.heads()
100 h.reverse()
100 h.reverse()
101 for r in h:
101 for r in h:
102 for l in fl.read(r).splitlines():
102 for l in fl.read(r).splitlines():
103 if l:
103 if l:
104 n, k = l.split(" ", 1)
104 n, k = l.split(" ", 1)
105 addtag(self, k, n)
105 addtag(self, k, n)
106 except KeyError:
106 except KeyError:
107 pass
107 pass
108
108
109 try:
109 try:
110 f = self.opener("localtags")
110 f = self.opener("localtags")
111 for l in f:
111 for l in f:
112 n, k = l.split(" ", 1)
112 n, k = l.split(" ", 1)
113 addtag(self, k, n)
113 addtag(self, k, n)
114 except IOError:
114 except IOError:
115 pass
115 pass
116
116
117 self.tagscache['tip'] = self.changelog.tip()
117 self.tagscache['tip'] = self.changelog.tip()
118
118
119 return self.tagscache
119 return self.tagscache
120
120
121 def tagslist(self):
121 def tagslist(self):
122 '''return a list of tags ordered by revision'''
122 '''return a list of tags ordered by revision'''
123 l = []
123 l = []
124 for t, n in self.tags().items():
124 for t, n in self.tags().items():
125 try:
125 try:
126 r = self.changelog.rev(n)
126 r = self.changelog.rev(n)
127 except:
127 except:
128 r = -2 # sort to the beginning of the list if unknown
128 r = -2 # sort to the beginning of the list if unknown
129 l.append((r,t,n))
129 l.append((r,t,n))
130 l.sort()
130 l.sort()
131 return [(t,n) for r,t,n in l]
131 return [(t,n) for r,t,n in l]
132
132
133 def nodetags(self, node):
133 def nodetags(self, node):
134 '''return the tags associated with a node'''
134 '''return the tags associated with a node'''
135 if not self.nodetagscache:
135 if not self.nodetagscache:
136 self.nodetagscache = {}
136 self.nodetagscache = {}
137 for t,n in self.tags().items():
137 for t,n in self.tags().items():
138 self.nodetagscache.setdefault(n,[]).append(t)
138 self.nodetagscache.setdefault(n,[]).append(t)
139 return self.nodetagscache.get(node, [])
139 return self.nodetagscache.get(node, [])
140
140
141 def lookup(self, key):
141 def lookup(self, key):
142 try:
142 try:
143 return self.tags()[key]
143 return self.tags()[key]
144 except KeyError:
144 except KeyError:
145 try:
145 try:
146 return self.changelog.lookup(key)
146 return self.changelog.lookup(key)
147 except:
147 except:
148 raise repo.RepoError(_("unknown revision '%s'") % key)
148 raise repo.RepoError(_("unknown revision '%s'") % key)
149
149
150 def dev(self):
150 def dev(self):
151 return os.stat(self.path).st_dev
151 return os.stat(self.path).st_dev
152
152
153 def local(self):
153 def local(self):
154 return True
154 return True
155
155
156 def join(self, f):
156 def join(self, f):
157 return os.path.join(self.path, f)
157 return os.path.join(self.path, f)
158
158
159 def wjoin(self, f):
159 def wjoin(self, f):
160 return os.path.join(self.root, f)
160 return os.path.join(self.root, f)
161
161
162 def file(self, f):
162 def file(self, f):
163 if f[0] == '/': f = f[1:]
163 if f[0] == '/': f = f[1:]
164 return filelog.filelog(self.opener, f)
164 return filelog.filelog(self.opener, f)
165
165
166 def getcwd(self):
166 def getcwd(self):
167 return self.dirstate.getcwd()
167 return self.dirstate.getcwd()
168
168
169 def wfile(self, f, mode='r'):
169 def wfile(self, f, mode='r'):
170 return self.wopener(f, mode)
170 return self.wopener(f, mode)
171
171
172 def wread(self, filename):
172 def wread(self, filename):
173 if self.encodepats == None:
173 if self.encodepats == None:
174 l = []
174 l = []
175 for pat, cmd in self.ui.configitems("encode"):
175 for pat, cmd in self.ui.configitems("encode"):
176 mf = util.matcher("", "/", [pat], [], [])[1]
176 mf = util.matcher("", "/", [pat], [], [])[1]
177 l.append((mf, cmd))
177 l.append((mf, cmd))
178 self.encodepats = l
178 self.encodepats = l
179
179
180 data = self.wopener(filename, 'r').read()
180 data = self.wopener(filename, 'r').read()
181
181
182 for mf, cmd in self.encodepats:
182 for mf, cmd in self.encodepats:
183 if mf(filename):
183 if mf(filename):
184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
184 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
185 data = util.filter(data, cmd)
185 data = util.filter(data, cmd)
186 break
186 break
187
187
188 return data
188 return data
189
189
190 def wwrite(self, filename, data, fd=None):
190 def wwrite(self, filename, data, fd=None):
191 if self.decodepats == None:
191 if self.decodepats == None:
192 l = []
192 l = []
193 for pat, cmd in self.ui.configitems("decode"):
193 for pat, cmd in self.ui.configitems("decode"):
194 mf = util.matcher("", "/", [pat], [], [])[1]
194 mf = util.matcher("", "/", [pat], [], [])[1]
195 l.append((mf, cmd))
195 l.append((mf, cmd))
196 self.decodepats = l
196 self.decodepats = l
197
197
198 for mf, cmd in self.decodepats:
198 for mf, cmd in self.decodepats:
199 if mf(filename):
199 if mf(filename):
200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
200 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
201 data = util.filter(data, cmd)
201 data = util.filter(data, cmd)
202 break
202 break
203
203
204 if fd:
204 if fd:
205 return fd.write(data)
205 return fd.write(data)
206 return self.wopener(filename, 'w').write(data)
206 return self.wopener(filename, 'w').write(data)
207
207
208 def transaction(self):
208 def transaction(self):
209 # save dirstate for undo
209 # save dirstate for undo
210 try:
210 try:
211 ds = self.opener("dirstate").read()
211 ds = self.opener("dirstate").read()
212 except IOError:
212 except IOError:
213 ds = ""
213 ds = ""
214 self.opener("journal.dirstate", "w").write(ds)
214 self.opener("journal.dirstate", "w").write(ds)
215
215
216 def after():
216 def after():
217 util.rename(self.join("journal"), self.join("undo"))
217 util.rename(self.join("journal"), self.join("undo"))
218 util.rename(self.join("journal.dirstate"),
218 util.rename(self.join("journal.dirstate"),
219 self.join("undo.dirstate"))
219 self.join("undo.dirstate"))
220
220
221 return transaction.transaction(self.ui.warn, self.opener,
221 return transaction.transaction(self.ui.warn, self.opener,
222 self.join("journal"), after)
222 self.join("journal"), after)
223
223
224 def recover(self):
224 def recover(self):
225 lock = self.lock()
225 lock = self.lock()
226 if os.path.exists(self.join("journal")):
226 if os.path.exists(self.join("journal")):
227 self.ui.status(_("rolling back interrupted transaction\n"))
227 self.ui.status(_("rolling back interrupted transaction\n"))
228 transaction.rollback(self.opener, self.join("journal"))
228 transaction.rollback(self.opener, self.join("journal"))
229 self.manifest = manifest.manifest(self.opener)
229 self.manifest = manifest.manifest(self.opener)
230 self.changelog = changelog.changelog(self.opener)
230 self.changelog = changelog.changelog(self.opener)
231 return True
231 return True
232 else:
232 else:
233 self.ui.warn(_("no interrupted transaction available\n"))
233 self.ui.warn(_("no interrupted transaction available\n"))
234 return False
234 return False
235
235
236 def undo(self):
236 def undo(self):
237 wlock = self.wlock()
237 wlock = self.wlock()
238 lock = self.lock()
238 lock = self.lock()
239 if os.path.exists(self.join("undo")):
239 if os.path.exists(self.join("undo")):
240 self.ui.status(_("rolling back last transaction\n"))
240 self.ui.status(_("rolling back last transaction\n"))
241 transaction.rollback(self.opener, self.join("undo"))
241 transaction.rollback(self.opener, self.join("undo"))
242 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
242 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
243 self.dirstate.read()
243 self.dirstate.read()
244 else:
244 else:
245 self.ui.warn(_("no undo information available\n"))
245 self.ui.warn(_("no undo information available\n"))
246
246
247 def lock(self, wait=1):
247 def lock(self, wait=1):
248 try:
248 try:
249 return lock.lock(self.join("lock"), 0)
249 return lock.lock(self.join("lock"), 0)
250 except lock.LockHeld, inst:
250 except lock.LockHeld, inst:
251 if wait:
251 if wait:
252 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
252 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
253 return lock.lock(self.join("lock"), wait)
253 return lock.lock(self.join("lock"), wait)
254 raise inst
254 raise inst
255
255
256 def wlock(self, wait=1):
256 def wlock(self, wait=1):
257 try:
257 try:
258 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
258 wlock = lock.lock(self.join("wlock"), 0, self.dirstate.write)
259 except lock.LockHeld, inst:
259 except lock.LockHeld, inst:
260 if not wait:
260 if not wait:
261 raise inst
261 raise inst
262 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
262 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
263 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
263 wlock = lock.lock(self.join("wlock"), wait, self.dirstate.write)
264 self.dirstate.read()
264 self.dirstate.read()
265 return wlock
265 return wlock
266
266
267 def rawcommit(self, files, text, user, date, p1=None, p2=None):
267 def rawcommit(self, files, text, user, date, p1=None, p2=None):
268 orig_parent = self.dirstate.parents()[0] or nullid
268 orig_parent = self.dirstate.parents()[0] or nullid
269 p1 = p1 or self.dirstate.parents()[0] or nullid
269 p1 = p1 or self.dirstate.parents()[0] or nullid
270 p2 = p2 or self.dirstate.parents()[1] or nullid
270 p2 = p2 or self.dirstate.parents()[1] or nullid
271 c1 = self.changelog.read(p1)
271 c1 = self.changelog.read(p1)
272 c2 = self.changelog.read(p2)
272 c2 = self.changelog.read(p2)
273 m1 = self.manifest.read(c1[0])
273 m1 = self.manifest.read(c1[0])
274 mf1 = self.manifest.readflags(c1[0])
274 mf1 = self.manifest.readflags(c1[0])
275 m2 = self.manifest.read(c2[0])
275 m2 = self.manifest.read(c2[0])
276 changed = []
276 changed = []
277
277
278 if orig_parent == p1:
278 if orig_parent == p1:
279 update_dirstate = 1
279 update_dirstate = 1
280 else:
280 else:
281 update_dirstate = 0
281 update_dirstate = 0
282
282
283 wlock = self.wlock()
283 wlock = self.wlock()
284 lock = self.lock()
284 lock = self.lock()
285 tr = self.transaction()
285 tr = self.transaction()
286 mm = m1.copy()
286 mm = m1.copy()
287 mfm = mf1.copy()
287 mfm = mf1.copy()
288 linkrev = self.changelog.count()
288 linkrev = self.changelog.count()
289 for f in files:
289 for f in files:
290 try:
290 try:
291 t = self.wread(f)
291 t = self.wread(f)
292 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
292 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
293 r = self.file(f)
293 r = self.file(f)
294 mfm[f] = tm
294 mfm[f] = tm
295
295
296 fp1 = m1.get(f, nullid)
296 fp1 = m1.get(f, nullid)
297 fp2 = m2.get(f, nullid)
297 fp2 = m2.get(f, nullid)
298
298
299 # is the same revision on two branches of a merge?
299 # is the same revision on two branches of a merge?
300 if fp2 == fp1:
300 if fp2 == fp1:
301 fp2 = nullid
301 fp2 = nullid
302
302
303 if fp2 != nullid:
303 if fp2 != nullid:
304 # is one parent an ancestor of the other?
304 # is one parent an ancestor of the other?
305 fpa = r.ancestor(fp1, fp2)
305 fpa = r.ancestor(fp1, fp2)
306 if fpa == fp1:
306 if fpa == fp1:
307 fp1, fp2 = fp2, nullid
307 fp1, fp2 = fp2, nullid
308 elif fpa == fp2:
308 elif fpa == fp2:
309 fp2 = nullid
309 fp2 = nullid
310
310
311 # is the file unmodified from the parent?
311 # is the file unmodified from the parent?
312 if t == r.read(fp1):
312 if t == r.read(fp1):
313 # record the proper existing parent in manifest
313 # record the proper existing parent in manifest
314 # no need to add a revision
314 # no need to add a revision
315 mm[f] = fp1
315 mm[f] = fp1
316 continue
316 continue
317
317
318 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
318 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
319 changed.append(f)
319 changed.append(f)
320 if update_dirstate:
320 if update_dirstate:
321 self.dirstate.update([f], "n")
321 self.dirstate.update([f], "n")
322 except IOError:
322 except IOError:
323 try:
323 try:
324 del mm[f]
324 del mm[f]
325 del mfm[f]
325 del mfm[f]
326 if update_dirstate:
326 if update_dirstate:
327 self.dirstate.forget([f])
327 self.dirstate.forget([f])
328 except:
328 except:
329 # deleted from p2?
329 # deleted from p2?
330 pass
330 pass
331
331
332 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
332 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
333 user = user or self.ui.username()
333 user = user or self.ui.username()
334 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
334 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
335 tr.close()
335 tr.close()
336 if update_dirstate:
336 if update_dirstate:
337 self.dirstate.setparents(n, nullid)
337 self.dirstate.setparents(n, nullid)
338
338
339 def commit(self, files = None, text = "", user = None, date = None,
339 def commit(self, files = None, text = "", user = None, date = None,
340 match = util.always, force=False):
340 match = util.always, force=False):
341 commit = []
341 commit = []
342 remove = []
342 remove = []
343 changed = []
343 changed = []
344
344
345 if files:
345 if files:
346 for f in files:
346 for f in files:
347 s = self.dirstate.state(f)
347 s = self.dirstate.state(f)
348 if s in 'nmai':
348 if s in 'nmai':
349 commit.append(f)
349 commit.append(f)
350 elif s == 'r':
350 elif s == 'r':
351 remove.append(f)
351 remove.append(f)
352 else:
352 else:
353 self.ui.warn(_("%s not tracked!\n") % f)
353 self.ui.warn(_("%s not tracked!\n") % f)
354 else:
354 else:
355 (c, a, d, u) = self.changes(match=match)
355 (c, a, d, u) = self.changes(match=match)
356 commit = c + a
356 commit = c + a
357 remove = d
357 remove = d
358
358
359 p1, p2 = self.dirstate.parents()
359 p1, p2 = self.dirstate.parents()
360 c1 = self.changelog.read(p1)
360 c1 = self.changelog.read(p1)
361 c2 = self.changelog.read(p2)
361 c2 = self.changelog.read(p2)
362 m1 = self.manifest.read(c1[0])
362 m1 = self.manifest.read(c1[0])
363 mf1 = self.manifest.readflags(c1[0])
363 mf1 = self.manifest.readflags(c1[0])
364 m2 = self.manifest.read(c2[0])
364 m2 = self.manifest.read(c2[0])
365
365
366 if not commit and not remove and not force and p2 == nullid:
366 if not commit and not remove and not force and p2 == nullid:
367 self.ui.status(_("nothing changed\n"))
367 self.ui.status(_("nothing changed\n"))
368 return None
368 return None
369
369
370 if not self.hook("precommit"):
370 if not self.hook("precommit"):
371 return None
371 return None
372
372
373 wlock = self.wlock()
373 wlock = self.wlock()
374 lock = self.lock()
374 lock = self.lock()
375 tr = self.transaction()
375 tr = self.transaction()
376
376
377 # check in files
377 # check in files
378 new = {}
378 new = {}
379 linkrev = self.changelog.count()
379 linkrev = self.changelog.count()
380 commit.sort()
380 commit.sort()
381 for f in commit:
381 for f in commit:
382 self.ui.note(f + "\n")
382 self.ui.note(f + "\n")
383 try:
383 try:
384 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
384 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
385 t = self.wread(f)
385 t = self.wread(f)
386 except IOError:
386 except IOError:
387 self.ui.warn(_("trouble committing %s!\n") % f)
387 self.ui.warn(_("trouble committing %s!\n") % f)
388 raise
388 raise
389
389
390 r = self.file(f)
390 r = self.file(f)
391
391
392 meta = {}
392 meta = {}
393 cp = self.dirstate.copied(f)
393 cp = self.dirstate.copied(f)
394 if cp:
394 if cp:
395 meta["copy"] = cp
395 meta["copy"] = cp
396 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
396 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
397 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
397 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
398 fp1, fp2 = nullid, nullid
398 fp1, fp2 = nullid, nullid
399 else:
399 else:
400 fp1 = m1.get(f, nullid)
400 fp1 = m1.get(f, nullid)
401 fp2 = m2.get(f, nullid)
401 fp2 = m2.get(f, nullid)
402
402
403 # is the same revision on two branches of a merge?
404 if fp2 == fp1:
405 fp2 = nullid
406
407 if fp2 != nullid:
403 if fp2 != nullid:
408 # is one parent an ancestor of the other?
404 # is one parent an ancestor of the other?
409 fpa = r.ancestor(fp1, fp2)
405 fpa = r.ancestor(fp1, fp2)
410 if fpa == fp1:
406 if fpa == fp1:
411 fp1, fp2 = fp2, nullid
407 fp1, fp2 = fp2, nullid
412 elif fpa == fp2:
408 elif fpa == fp2:
413 fp2 = nullid
409 fp2 = nullid
414
410
415 # is the file unmodified from the parent?
411 # is the file unmodified from the parent?
416 if not meta and t == r.read(fp1):
412 if not meta and t == r.read(fp1) and fp2 == nullid:
417 # record the proper existing parent in manifest
413 # record the proper existing parent in manifest
418 # no need to add a revision
414 # no need to add a revision
419 new[f] = fp1
415 new[f] = fp1
420 continue
416 continue
421
417
422 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
418 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
423 # remember what we've added so that we can later calculate
419 # remember what we've added so that we can later calculate
424 # the files to pull from a set of changesets
420 # the files to pull from a set of changesets
425 changed.append(f)
421 changed.append(f)
426
422
427 # update manifest
423 # update manifest
428 m1.update(new)
424 m1.update(new)
429 for f in remove:
425 for f in remove:
430 if f in m1:
426 if f in m1:
431 del m1[f]
427 del m1[f]
432 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
428 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
433 (new, remove))
429 (new, remove))
434
430
435 # add changeset
431 # add changeset
436 new = new.keys()
432 new = new.keys()
437 new.sort()
433 new.sort()
438
434
439 if not text:
435 if not text:
440 edittext = ""
436 edittext = ""
441 if p2 != nullid:
437 if p2 != nullid:
442 edittext += "HG: branch merge\n"
438 edittext += "HG: branch merge\n"
443 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
439 edittext += "\n" + "HG: manifest hash %s\n" % hex(mn)
444 edittext += "".join(["HG: changed %s\n" % f for f in changed])
440 edittext += "".join(["HG: changed %s\n" % f for f in changed])
445 edittext += "".join(["HG: removed %s\n" % f for f in remove])
441 edittext += "".join(["HG: removed %s\n" % f for f in remove])
446 if not changed and not remove:
442 if not changed and not remove:
447 edittext += "HG: no files changed\n"
443 edittext += "HG: no files changed\n"
448 edittext = self.ui.edit(edittext)
444 edittext = self.ui.edit(edittext)
449 if not edittext.rstrip():
445 if not edittext.rstrip():
450 return None
446 return None
451 text = edittext
447 text = edittext
452
448
453 user = user or self.ui.username()
449 user = user or self.ui.username()
454 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
450 n = self.changelog.add(mn, changed, text, tr, p1, p2, user, date)
455 tr.close()
451 tr.close()
456
452
457 self.dirstate.setparents(n)
453 self.dirstate.setparents(n)
458 self.dirstate.update(new, "n")
454 self.dirstate.update(new, "n")
459 self.dirstate.forget(remove)
455 self.dirstate.forget(remove)
460
456
461 if not self.hook("commit", node=hex(n)):
457 if not self.hook("commit", node=hex(n)):
462 return None
458 return None
463 return n
459 return n
464
460
465 def walk(self, node=None, files=[], match=util.always):
461 def walk(self, node=None, files=[], match=util.always):
466 if node:
462 if node:
467 fdict = dict.fromkeys(files)
463 fdict = dict.fromkeys(files)
468 for fn in self.manifest.read(self.changelog.read(node)[0]):
464 for fn in self.manifest.read(self.changelog.read(node)[0]):
469 fdict.pop(fn, None)
465 fdict.pop(fn, None)
470 if match(fn):
466 if match(fn):
471 yield 'm', fn
467 yield 'm', fn
472 for fn in fdict:
468 for fn in fdict:
473 self.ui.warn(_('%s: No such file in rev %s\n') % (
469 self.ui.warn(_('%s: No such file in rev %s\n') % (
474 util.pathto(self.getcwd(), fn), short(node)))
470 util.pathto(self.getcwd(), fn), short(node)))
475 else:
471 else:
476 for src, fn in self.dirstate.walk(files, match):
472 for src, fn in self.dirstate.walk(files, match):
477 yield src, fn
473 yield src, fn
478
474
479 def changes(self, node1 = None, node2 = None, files = [],
475 def changes(self, node1 = None, node2 = None, files = [],
480 match = util.always):
476 match = util.always):
481 mf2, u = None, []
477 mf2, u = None, []
482
478
483 def fcmp(fn, mf):
479 def fcmp(fn, mf):
484 t1 = self.wread(fn)
480 t1 = self.wread(fn)
485 t2 = self.file(fn).read(mf.get(fn, nullid))
481 t2 = self.file(fn).read(mf.get(fn, nullid))
486 return cmp(t1, t2)
482 return cmp(t1, t2)
487
483
488 def mfmatches(node):
484 def mfmatches(node):
489 mf = dict(self.manifest.read(node))
485 mf = dict(self.manifest.read(node))
490 for fn in mf.keys():
486 for fn in mf.keys():
491 if not match(fn):
487 if not match(fn):
492 del mf[fn]
488 del mf[fn]
493 return mf
489 return mf
494
490
495 # are we comparing the working directory?
491 # are we comparing the working directory?
496 if not node2:
492 if not node2:
497 try:
493 try:
498 wlock = self.wlock(wait=0)
494 wlock = self.wlock(wait=0)
499 except lock.LockHeld:
495 except lock.LockHeld:
500 wlock = None
496 wlock = None
501 l, c, a, d, u = self.dirstate.changes(files, match)
497 l, c, a, d, u = self.dirstate.changes(files, match)
502
498
503 # are we comparing working dir against its parent?
499 # are we comparing working dir against its parent?
504 if not node1:
500 if not node1:
505 if l:
501 if l:
506 # do a full compare of any files that might have changed
502 # do a full compare of any files that might have changed
507 change = self.changelog.read(self.dirstate.parents()[0])
503 change = self.changelog.read(self.dirstate.parents()[0])
508 mf2 = mfmatches(change[0])
504 mf2 = mfmatches(change[0])
509 for f in l:
505 for f in l:
510 if fcmp(f, mf2):
506 if fcmp(f, mf2):
511 c.append(f)
507 c.append(f)
512 elif wlock is not None:
508 elif wlock is not None:
513 self.dirstate.update([f], "n")
509 self.dirstate.update([f], "n")
514
510
515 for l in c, a, d, u:
511 for l in c, a, d, u:
516 l.sort()
512 l.sort()
517
513
518 return (c, a, d, u)
514 return (c, a, d, u)
519
515
520 # are we comparing working dir against non-tip?
516 # are we comparing working dir against non-tip?
521 # generate a pseudo-manifest for the working dir
517 # generate a pseudo-manifest for the working dir
522 if not node2:
518 if not node2:
523 if not mf2:
519 if not mf2:
524 change = self.changelog.read(self.dirstate.parents()[0])
520 change = self.changelog.read(self.dirstate.parents()[0])
525 mf2 = mfmatches(change[0])
521 mf2 = mfmatches(change[0])
526 for f in a + c + l:
522 for f in a + c + l:
527 mf2[f] = ""
523 mf2[f] = ""
528 for f in d:
524 for f in d:
529 if f in mf2: del mf2[f]
525 if f in mf2: del mf2[f]
530 else:
526 else:
531 change = self.changelog.read(node2)
527 change = self.changelog.read(node2)
532 mf2 = mfmatches(change[0])
528 mf2 = mfmatches(change[0])
533
529
534 # flush lists from dirstate before comparing manifests
530 # flush lists from dirstate before comparing manifests
535 c, a = [], []
531 c, a = [], []
536
532
537 change = self.changelog.read(node1)
533 change = self.changelog.read(node1)
538 mf1 = mfmatches(change[0])
534 mf1 = mfmatches(change[0])
539
535
540 for fn in mf2:
536 for fn in mf2:
541 if mf1.has_key(fn):
537 if mf1.has_key(fn):
542 if mf1[fn] != mf2[fn]:
538 if mf1[fn] != mf2[fn]:
543 if mf2[fn] != "" or fcmp(fn, mf1):
539 if mf2[fn] != "" or fcmp(fn, mf1):
544 c.append(fn)
540 c.append(fn)
545 del mf1[fn]
541 del mf1[fn]
546 else:
542 else:
547 a.append(fn)
543 a.append(fn)
548
544
549 d = mf1.keys()
545 d = mf1.keys()
550
546
551 for l in c, a, d, u:
547 for l in c, a, d, u:
552 l.sort()
548 l.sort()
553
549
554 return (c, a, d, u)
550 return (c, a, d, u)
555
551
556 def add(self, list):
552 def add(self, list):
557 wlock = self.wlock()
553 wlock = self.wlock()
558 for f in list:
554 for f in list:
559 p = self.wjoin(f)
555 p = self.wjoin(f)
560 if not os.path.exists(p):
556 if not os.path.exists(p):
561 self.ui.warn(_("%s does not exist!\n") % f)
557 self.ui.warn(_("%s does not exist!\n") % f)
562 elif not os.path.isfile(p):
558 elif not os.path.isfile(p):
563 self.ui.warn(_("%s not added: only files supported currently\n") % f)
559 self.ui.warn(_("%s not added: only files supported currently\n") % f)
564 elif self.dirstate.state(f) in 'an':
560 elif self.dirstate.state(f) in 'an':
565 self.ui.warn(_("%s already tracked!\n") % f)
561 self.ui.warn(_("%s already tracked!\n") % f)
566 else:
562 else:
567 self.dirstate.update([f], "a")
563 self.dirstate.update([f], "a")
568
564
569 def forget(self, list):
565 def forget(self, list):
570 wlock = self.wlock()
566 wlock = self.wlock()
571 for f in list:
567 for f in list:
572 if self.dirstate.state(f) not in 'ai':
568 if self.dirstate.state(f) not in 'ai':
573 self.ui.warn(_("%s not added!\n") % f)
569 self.ui.warn(_("%s not added!\n") % f)
574 else:
570 else:
575 self.dirstate.forget([f])
571 self.dirstate.forget([f])
576
572
577 def remove(self, list, unlink=False):
573 def remove(self, list, unlink=False):
578 if unlink:
574 if unlink:
579 for f in list:
575 for f in list:
580 try:
576 try:
581 util.unlink(self.wjoin(f))
577 util.unlink(self.wjoin(f))
582 except OSError, inst:
578 except OSError, inst:
583 if inst.errno != errno.ENOENT: raise
579 if inst.errno != errno.ENOENT: raise
584 wlock = self.wlock()
580 wlock = self.wlock()
585 for f in list:
581 for f in list:
586 p = self.wjoin(f)
582 p = self.wjoin(f)
587 if os.path.exists(p):
583 if os.path.exists(p):
588 self.ui.warn(_("%s still exists!\n") % f)
584 self.ui.warn(_("%s still exists!\n") % f)
589 elif self.dirstate.state(f) == 'a':
585 elif self.dirstate.state(f) == 'a':
590 self.ui.warn(_("%s never committed!\n") % f)
586 self.ui.warn(_("%s never committed!\n") % f)
591 self.dirstate.forget([f])
587 self.dirstate.forget([f])
592 elif f not in self.dirstate:
588 elif f not in self.dirstate:
593 self.ui.warn(_("%s not tracked!\n") % f)
589 self.ui.warn(_("%s not tracked!\n") % f)
594 else:
590 else:
595 self.dirstate.update([f], "r")
591 self.dirstate.update([f], "r")
596
592
597 def undelete(self, list):
593 def undelete(self, list):
598 p = self.dirstate.parents()[0]
594 p = self.dirstate.parents()[0]
599 mn = self.changelog.read(p)[0]
595 mn = self.changelog.read(p)[0]
600 mf = self.manifest.readflags(mn)
596 mf = self.manifest.readflags(mn)
601 m = self.manifest.read(mn)
597 m = self.manifest.read(mn)
602 wlock = self.wlock()
598 wlock = self.wlock()
603 for f in list:
599 for f in list:
604 if self.dirstate.state(f) not in "r":
600 if self.dirstate.state(f) not in "r":
605 self.ui.warn("%s not removed!\n" % f)
601 self.ui.warn("%s not removed!\n" % f)
606 else:
602 else:
607 t = self.file(f).read(m[f])
603 t = self.file(f).read(m[f])
608 self.wwrite(f, t)
604 self.wwrite(f, t)
609 util.set_exec(self.wjoin(f), mf[f])
605 util.set_exec(self.wjoin(f), mf[f])
610 self.dirstate.update([f], "n")
606 self.dirstate.update([f], "n")
611
607
612 def copy(self, source, dest):
608 def copy(self, source, dest):
613 p = self.wjoin(dest)
609 p = self.wjoin(dest)
614 if not os.path.exists(p):
610 if not os.path.exists(p):
615 self.ui.warn(_("%s does not exist!\n") % dest)
611 self.ui.warn(_("%s does not exist!\n") % dest)
616 elif not os.path.isfile(p):
612 elif not os.path.isfile(p):
617 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
613 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
618 else:
614 else:
619 wlock = self.wlock()
615 wlock = self.wlock()
620 if self.dirstate.state(dest) == '?':
616 if self.dirstate.state(dest) == '?':
621 self.dirstate.update([dest], "a")
617 self.dirstate.update([dest], "a")
622 self.dirstate.copy(source, dest)
618 self.dirstate.copy(source, dest)
623
619
624 def heads(self, start=None):
620 def heads(self, start=None):
625 heads = self.changelog.heads(start)
621 heads = self.changelog.heads(start)
626 # sort the output in rev descending order
622 # sort the output in rev descending order
627 heads = [(-self.changelog.rev(h), h) for h in heads]
623 heads = [(-self.changelog.rev(h), h) for h in heads]
628 heads.sort()
624 heads.sort()
629 return [n for (r, n) in heads]
625 return [n for (r, n) in heads]
630
626
631 # branchlookup returns a dict giving a list of branches for
627 # branchlookup returns a dict giving a list of branches for
632 # each head. A branch is defined as the tag of a node or
628 # each head. A branch is defined as the tag of a node or
633 # the branch of the node's parents. If a node has multiple
629 # the branch of the node's parents. If a node has multiple
634 # branch tags, tags are eliminated if they are visible from other
630 # branch tags, tags are eliminated if they are visible from other
635 # branch tags.
631 # branch tags.
636 #
632 #
637 # So, for this graph: a->b->c->d->e
633 # So, for this graph: a->b->c->d->e
638 # \ /
634 # \ /
639 # aa -----/
635 # aa -----/
640 # a has tag 2.6.12
636 # a has tag 2.6.12
641 # d has tag 2.6.13
637 # d has tag 2.6.13
642 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
638 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
643 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
639 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
644 # from the list.
640 # from the list.
645 #
641 #
646 # It is possible that more than one head will have the same branch tag.
642 # It is possible that more than one head will have the same branch tag.
647 # callers need to check the result for multiple heads under the same
643 # callers need to check the result for multiple heads under the same
648 # branch tag if that is a problem for them (ie checkout of a specific
644 # branch tag if that is a problem for them (ie checkout of a specific
649 # branch).
645 # branch).
650 #
646 #
651 # passing in a specific branch will limit the depth of the search
647 # passing in a specific branch will limit the depth of the search
652 # through the parents. It won't limit the branches returned in the
648 # through the parents. It won't limit the branches returned in the
653 # result though.
649 # result though.
654 def branchlookup(self, heads=None, branch=None):
650 def branchlookup(self, heads=None, branch=None):
655 if not heads:
651 if not heads:
656 heads = self.heads()
652 heads = self.heads()
657 headt = [ h for h in heads ]
653 headt = [ h for h in heads ]
658 chlog = self.changelog
654 chlog = self.changelog
659 branches = {}
655 branches = {}
660 merges = []
656 merges = []
661 seenmerge = {}
657 seenmerge = {}
662
658
663 # traverse the tree once for each head, recording in the branches
659 # traverse the tree once for each head, recording in the branches
664 # dict which tags are visible from this head. The branches
660 # dict which tags are visible from this head. The branches
665 # dict also records which tags are visible from each tag
661 # dict also records which tags are visible from each tag
666 # while we traverse.
662 # while we traverse.
667 while headt or merges:
663 while headt or merges:
668 if merges:
664 if merges:
669 n, found = merges.pop()
665 n, found = merges.pop()
670 visit = [n]
666 visit = [n]
671 else:
667 else:
672 h = headt.pop()
668 h = headt.pop()
673 visit = [h]
669 visit = [h]
674 found = [h]
670 found = [h]
675 seen = {}
671 seen = {}
676 while visit:
672 while visit:
677 n = visit.pop()
673 n = visit.pop()
678 if n in seen:
674 if n in seen:
679 continue
675 continue
680 pp = chlog.parents(n)
676 pp = chlog.parents(n)
681 tags = self.nodetags(n)
677 tags = self.nodetags(n)
682 if tags:
678 if tags:
683 for x in tags:
679 for x in tags:
684 if x == 'tip':
680 if x == 'tip':
685 continue
681 continue
686 for f in found:
682 for f in found:
687 branches.setdefault(f, {})[n] = 1
683 branches.setdefault(f, {})[n] = 1
688 branches.setdefault(n, {})[n] = 1
684 branches.setdefault(n, {})[n] = 1
689 break
685 break
690 if n not in found:
686 if n not in found:
691 found.append(n)
687 found.append(n)
692 if branch in tags:
688 if branch in tags:
693 continue
689 continue
694 seen[n] = 1
690 seen[n] = 1
695 if pp[1] != nullid and n not in seenmerge:
691 if pp[1] != nullid and n not in seenmerge:
696 merges.append((pp[1], [x for x in found]))
692 merges.append((pp[1], [x for x in found]))
697 seenmerge[n] = 1
693 seenmerge[n] = 1
698 if pp[0] != nullid:
694 if pp[0] != nullid:
699 visit.append(pp[0])
695 visit.append(pp[0])
700 # traverse the branches dict, eliminating branch tags from each
696 # traverse the branches dict, eliminating branch tags from each
701 # head that are visible from another branch tag for that head.
697 # head that are visible from another branch tag for that head.
702 out = {}
698 out = {}
703 viscache = {}
699 viscache = {}
704 for h in heads:
700 for h in heads:
705 def visible(node):
701 def visible(node):
706 if node in viscache:
702 if node in viscache:
707 return viscache[node]
703 return viscache[node]
708 ret = {}
704 ret = {}
709 visit = [node]
705 visit = [node]
710 while visit:
706 while visit:
711 x = visit.pop()
707 x = visit.pop()
712 if x in viscache:
708 if x in viscache:
713 ret.update(viscache[x])
709 ret.update(viscache[x])
714 elif x not in ret:
710 elif x not in ret:
715 ret[x] = 1
711 ret[x] = 1
716 if x in branches:
712 if x in branches:
717 visit[len(visit):] = branches[x].keys()
713 visit[len(visit):] = branches[x].keys()
718 viscache[node] = ret
714 viscache[node] = ret
719 return ret
715 return ret
720 if h not in branches:
716 if h not in branches:
721 continue
717 continue
722 # O(n^2), but somewhat limited. This only searches the
718 # O(n^2), but somewhat limited. This only searches the
723 # tags visible from a specific head, not all the tags in the
719 # tags visible from a specific head, not all the tags in the
724 # whole repo.
720 # whole repo.
725 for b in branches[h]:
721 for b in branches[h]:
726 vis = False
722 vis = False
727 for bb in branches[h].keys():
723 for bb in branches[h].keys():
728 if b != bb:
724 if b != bb:
729 if b in visible(bb):
725 if b in visible(bb):
730 vis = True
726 vis = True
731 break
727 break
732 if not vis:
728 if not vis:
733 l = out.setdefault(h, [])
729 l = out.setdefault(h, [])
734 l[len(l):] = self.nodetags(b)
730 l[len(l):] = self.nodetags(b)
735 return out
731 return out
736
732
737 def branches(self, nodes):
733 def branches(self, nodes):
738 if not nodes: nodes = [self.changelog.tip()]
734 if not nodes: nodes = [self.changelog.tip()]
739 b = []
735 b = []
740 for n in nodes:
736 for n in nodes:
741 t = n
737 t = n
742 while n:
738 while n:
743 p = self.changelog.parents(n)
739 p = self.changelog.parents(n)
744 if p[1] != nullid or p[0] == nullid:
740 if p[1] != nullid or p[0] == nullid:
745 b.append((t, n, p[0], p[1]))
741 b.append((t, n, p[0], p[1]))
746 break
742 break
747 n = p[0]
743 n = p[0]
748 return b
744 return b
749
745
750 def between(self, pairs):
746 def between(self, pairs):
751 r = []
747 r = []
752
748
753 for top, bottom in pairs:
749 for top, bottom in pairs:
754 n, l, i = top, [], 0
750 n, l, i = top, [], 0
755 f = 1
751 f = 1
756
752
757 while n != bottom:
753 while n != bottom:
758 p = self.changelog.parents(n)[0]
754 p = self.changelog.parents(n)[0]
759 if i == f:
755 if i == f:
760 l.append(n)
756 l.append(n)
761 f = f * 2
757 f = f * 2
762 n = p
758 n = p
763 i += 1
759 i += 1
764
760
765 r.append(l)
761 r.append(l)
766
762
767 return r
763 return r
768
764
def findincoming(self, remote, base=None, heads=None):
    """Discover changesets the remote has that we lack.

    Returns the list of roots of the missing changesets, or None when
    every remote head is already known locally.

    base, when passed as a dict, is filled in with nodes known to be
    common to both repositories (callers such as findoutgoing rely on
    this side effect).  heads restricts the search to the given remote
    heads; when empty/None the remote is asked for its heads.
    """
    m = self.changelog.nodemap
    search = []        # incomplete branches scheduled for binary search
    fetch = {}         # earliest unknown nodes (roots of what we need)
    seen = {}
    seenbranch = {}
    # NOTE: was 'base == None'; 'is None' is the correct identity test.
    if base is None:
        base = {}

    # assume we're closer to the tip than the root
    # and start by examining the heads
    self.ui.status(_("searching for changes\n"))

    if not heads:
        heads = remote.heads()

    # partition remote heads into unknown ones and already-known ones
    unknown = []
    for h in heads:
        if h not in m:
            unknown.append(h)
        else:
            base[h] = 1

    if not unknown:
        # every remote head is local: nothing incoming
        return None

    rep = {}           # nodes already requested from the remote
    reqcnt = 0         # round-trip counter, reported at debug level

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = remote.branches(unknown)
    while unknown:
        r = []
        while unknown:
            n = unknown.pop(0)
            if n[0] in seen:
                continue

            self.ui.debug(_("examining %s:%s\n") % (short(n[0]), short(n[1])))
            if n[0] == nullid:
                break
            if n in seenbranch:
                self.ui.debug(_("branch already found\n"))
                continue
            if n[1] and n[1] in m: # do we know the base?
                self.ui.debug(_("found incomplete branch %s:%s\n")
                              % (short(n[0]), short(n[1])))
                search.append(n) # schedule branch range for scanning
                seenbranch[n] = 1
            else:
                if n[1] not in seen and n[1] not in fetch:
                    if n[2] in m and n[3] in m:
                        self.ui.debug(_("found new changeset %s\n") %
                                      short(n[1]))
                        fetch[n[1]] = 1 # earliest unknown
                        base[n[2]] = 1 # latest known
                        continue

                # queue the branch's parents for the next request batch
                for a in n[2:4]:
                    if a not in rep:
                        r.append(a)
                        rep[a] = 1

            seen[n[0]] = 1

        if r:
            reqcnt += 1
            self.ui.debug(_("request %d: %s\n") %
                          (reqcnt, " ".join(map(short, r))))
            # batch requests ten nodes at a time
            for p in range(0, len(r), 10):
                for b in remote.branches(r[p:p+10]):
                    self.ui.debug(_("received %s:%s\n") %
                                  (short(b[0]), short(b[1])))
                    if b[0] in m:
                        self.ui.debug(_("found base node %s\n") % short(b[0]))
                        base[b[0]] = 1
                    elif b[0] not in seen:
                        unknown.append(b)

    # do binary search on the branches we found
    while search:
        n = search.pop(0)
        reqcnt += 1
        l = remote.between([(n[0], n[1])])[0]
        l.append(n[1])
        p = n[0]
        f = 1
        for i in l:
            self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
            if i in m:
                if f <= 2:
                    # interval is down to one or two nodes: found the boundary
                    self.ui.debug(_("found new branch changeset %s\n") %
                                  short(p))
                    fetch[p] = 1
                    base[i] = 1
                else:
                    self.ui.debug(_("narrowed branch search to %s:%s\n")
                                  % (short(p), short(i)))
                    search.append((p, i))
                break
            p, f = i, f * 2

    # sanity check our fetch list
    for f in fetch.keys():
        if f in m:
            # NOTE(review): f[:4] truncates the node before short()ing it,
            # which looks suspicious — confirm it is intentional.
            raise repo.RepoError(_("already have changeset ") + short(f[:4]))

    if base.keys() == [nullid]:
        self.ui.warn(_("warning: pulling from an unrelated repository!\n"))

    self.ui.note(_("found new changesets starting at ") +
                 " ".join([short(f) for f in fetch]) + "\n")

    self.ui.debug(_("%d total queries\n") % reqcnt)

    return fetch.keys()
def findoutgoing(self, remote, base=None, heads=None):
    """Return the list of root nodes of changesets we have and the
    remote does not.

    base may be passed in pre-populated (as filled by findincoming)
    to avoid a discovery round-trip; when None it is computed here.
    heads is forwarded to findincoming unchanged.
    """
    # NOTE: was 'base == None'; 'is None' is the correct identity test.
    if base is None:
        base = {}
        # side effect: findincoming fills 'base' with common nodes
        self.findincoming(remote, base, heads)

    self.ui.debug(_("common changesets up to ")
                  + " ".join(map(short, base.keys())) + "\n")

    # start from every node we have, then prune what the remote has
    remain = dict.fromkeys(self.changelog.nodemap)

    # prune everything remote has from the tree
    del remain[nullid]
    remove = base.keys()
    while remove:
        n = remove.pop(0)
        if n in remain:
            del remain[n]
            for p in self.changelog.parents(n):
                remove.append(p)

    # find every node whose parents have been pruned
    subset = []
    for n in remain:
        p1, p2 = self.changelog.parents(n)
        if p1 not in remain and p2 not in remain:
            subset.append(n)

    # this is the set of all roots we have to push
    return subset
def pull(self, remote, heads=None):
    """Fetch missing changesets from remote and add them locally.

    Returns 1 when nothing was found, otherwise the result of
    addchangegroup.  heads, when given, limits the pull to the
    ancestors of those heads via changegroupsubset.
    """
    lock = self.lock()

    if self.changelog.tip() == nullid:
        # empty local repository: everything the remote has is new
        self.ui.status(_("requesting all changes\n"))
        wanted = [nullid]
    else:
        wanted = self.findincoming(remote)

    if not wanted:
        self.ui.status(_("no changes found\n"))
        return 1

    if heads is None:
        bundle = remote.changegroup(wanted)
    else:
        bundle = remote.changegroupsubset(wanted, heads)
    return self.addchangegroup(bundle)
def push(self, remote, force=False):
    """Push our outgoing changesets to remote.

    Aborts (returning 1) on unsynced remote changes or when the push
    would create new remote heads, unless force is set.  Otherwise
    returns the result of the remote addchangegroup call.
    """
    lock = remote.lock()

    common = {}
    remoteheads = remote.heads()
    incoming = self.findincoming(remote, common, remoteheads)
    if incoming and not force:
        # remote has changesets we lack: require a pull/merge first
        self.ui.warn(_("abort: unsynced remote changes!\n"))
        self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
        return 1

    outgoing = self.findoutgoing(remote, common)
    if not outgoing:
        self.ui.status(_("no changes found\n"))
        return 1
    if not force and len(remoteheads) < len(self.changelog.heads()):
        # pushing would add heads the remote doesn't have yet
        self.ui.warn(_("abort: push creates new remote branches!\n"))
        self.ui.status(_("(did you forget to merge?"
                         " use push -f to force)\n"))
        return 1

    cg = self.changegroup(outgoing)
    return remote.addchangegroup(cg)
def changegroupsubset(self, bases, heads):
    """This function generates a changegroup consisting of all the nodes
    that are descendents of any of the bases, and ancestors of any of
    the heads.

    It is fairly complex as determining which filenodes and which
    manifest nodes need to be included for the changeset to be complete
    is non-trivial.

    Another wrinkle is doing the reverse, figuring out which changeset in
    the changegroup a particular filenode or manifestnode belongs to.

    Returns a util.chunkbuffer wrapping a generator that yields the
    raw changegroup chunks: changelog group, manifest group, then one
    group per changed file, terminated by a zero-length chunk.

    NOTE(review): Python 2 idioms throughout (cmp-based list.sort,
    dict.keys() returning lists, xrange) — not Python 3 compatible.
    """

    # Set up some initial variables
    # Make it easy to refer to self.changelog
    cl = self.changelog
    # msng is short for missing - compute the list of changesets in this
    # changegroup.
    msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
    # Some bases may turn out to be superfluous, and some heads may be
    # too.  nodesbetween will return the minimal set of bases and heads
    # necessary to re-create the changegroup.

    # Known heads are the list of heads that it is assumed the recipient
    # of this changegroup will know about.
    knownheads = {}
    # We assume that all parents of bases are known heads.
    for n in bases:
        for p in cl.parents(n):
            if p != nullid:
                knownheads[p] = 1
    knownheads = knownheads.keys()
    if knownheads:
        # Now that we know what heads are known, we can compute which
        # changesets are known.  The recipient must know about all
        # changesets required to reach the known heads from the null
        # changeset.
        has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
        junk = None
        # Transform the list into an ersatz set.
        has_cl_set = dict.fromkeys(has_cl_set)
    else:
        # If there were no known heads, the recipient cannot be assumed to
        # know about any changesets.
        has_cl_set = {}

    # Make it easy to refer to self.manifest
    mnfst = self.manifest
    # We don't know which manifests are missing yet
    msng_mnfst_set = {}
    # Nor do we know which filenodes are missing.
    msng_filenode_set = {}

    # Force full index load up front.
    junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
    junk = None

    # A changeset always belongs to itself, so the changenode lookup
    # function for a changenode is identity.
    def identity(x):
        return x

    # A function generating function.  Sets up an environment for the
    # inner function.
    def cmp_by_rev_func(revlog):
        # Compare two nodes by their revision number in the environment's
        # revision history.  Since the revision number both represents the
        # most efficient order to read the nodes in, and represents a
        # topological sorting of the nodes, this function is often useful.
        def cmp_by_rev(a, b):
            return cmp(revlog.rev(a), revlog.rev(b))
        return cmp_by_rev

    # If we determine that a particular file or manifest node must be a
    # node that the recipient of the changegroup will already have, we can
    # also assume the recipient will have all the parents.  This function
    # prunes them from the set of missing nodes.
    def prune_parents(revlog, hasset, msngset):
        haslst = hasset.keys()
        haslst.sort(cmp_by_rev_func(revlog))
        for node in haslst:
            parentlst = [p for p in revlog.parents(node) if p != nullid]
            while parentlst:
                n = parentlst.pop()
                if n not in hasset:
                    hasset[n] = 1
                    p = [p for p in revlog.parents(n) if p != nullid]
                    parentlst.extend(p)
        for n in hasset:
            msngset.pop(n, None)

    # This is a function generating function used to set up an environment
    # for the inner function to execute in.
    def manifest_and_file_collector(changedfileset):
        # This is an information gathering function that gathers
        # information from each changeset node that goes out as part of
        # the changegroup.  The information gathered is a list of which
        # manifest nodes are potentially required (the recipient may
        # already have them) and total list of all files which were
        # changed in any changeset in the changegroup.
        #
        # We also remember the first changenode we saw any manifest
        # referenced by so we can later determine which changenode 'owns'
        # the manifest.
        def collect_manifests_and_files(clnode):
            c = cl.read(clnode)
            for f in c[3]:
                # This is to make sure we only have one instance of each
                # filename string for each filename.
                changedfileset.setdefault(f, f)
            msng_mnfst_set.setdefault(c[0], clnode)
        return collect_manifests_and_files

    # Figure out which manifest nodes (of the ones we think might be part
    # of the changegroup) the recipient must know about and remove them
    # from the changegroup.
    def prune_manifests():
        has_mnfst_set = {}
        for n in msng_mnfst_set:
            # If a 'missing' manifest thinks it belongs to a changenode
            # the recipient is assumed to have, obviously the recipient
            # must have that manifest.
            linknode = cl.node(mnfst.linkrev(n))
            if linknode in has_cl_set:
                has_mnfst_set[n] = 1
        prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

    # Use the information collected in collect_manifests_and_files to say
    # which changenode any manifestnode belongs to.
    def lookup_manifest_link(mnfstnode):
        return msng_mnfst_set[mnfstnode]

    # A function generating function that sets up the initial environment
    # the inner function.
    def filenode_collector(changedfiles):
        # next_rev is a one-element list so the closure can mutate it
        # (Python 2 has no 'nonlocal').
        next_rev = [0]
        # This gathers information from each manifestnode included in the
        # changegroup about which filenodes the manifest node references
        # so we can include those in the changegroup too.
        #
        # It also remembers which changenode each filenode belongs to.  It
        # does this by assuming the a filenode belongs to the changenode
        # the first manifest that references it belongs to.
        def collect_msng_filenodes(mnfstnode):
            r = mnfst.rev(mnfstnode)
            if r == next_rev[0]:
                # If the last rev we looked at was the one just previous,
                # we only need to see a diff.
                delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                # For each line in the delta
                for dline in delta.splitlines():
                    # get the filename and filenode for that line
                    f, fnode = dline.split('\0')
                    fnode = bin(fnode[:40])
                    f = changedfiles.get(f, None)
                    # And if the file is in the list of files we care
                    # about.
                    if f is not None:
                        # Get the changenode this manifest belongs to
                        clnode = msng_mnfst_set[mnfstnode]
                        # Create the set of filenodes for the file if
                        # there isn't one already.
                        ndset = msng_filenode_set.setdefault(f, {})
                        # And set the filenode's changelog node to the
                        # manifest's if it hasn't been set already.
                        ndset.setdefault(fnode, clnode)
            else:
                # Otherwise we need a full manifest.
                m = mnfst.read(mnfstnode)
                # For every file in we care about.
                for f in changedfiles:
                    fnode = m.get(f, None)
                    # If it's in the manifest
                    if fnode is not None:
                        # See comments above.
                        clnode = msng_mnfst_set[mnfstnode]
                        ndset = msng_filenode_set.setdefault(f, {})
                        ndset.setdefault(fnode, clnode)
            # Remember the revision we hope to see next.
            next_rev[0] = r + 1
        return collect_msng_filenodes

    # We have a list of filenodes we think we need for a file, lets remove
    # all those we now the recipient must have.
    def prune_filenodes(f, filerevlog):
        msngset = msng_filenode_set[f]
        hasset = {}
        # If a 'missing' filenode thinks it belongs to a changenode we
        # assume the recipient must have, then the recipient must have
        # that filenode.
        for n in msngset:
            clnode = cl.node(filerevlog.linkrev(n))
            if clnode in has_cl_set:
                hasset[n] = 1
        prune_parents(filerevlog, hasset, msngset)

    # A function generator function that sets up the a context for the
    # inner function.
    def lookup_filenode_link_func(fname):
        msngset = msng_filenode_set[fname]
        # Lookup the changenode the filenode belongs to.
        def lookup_filenode_link(fnode):
            return msngset[fnode]
        return lookup_filenode_link

    # Now that we have all theses utility functions to help out and
    # logically divide up the task, generate the group.
    def gengroup():
        # The set of changed files starts empty.
        changedfiles = {}
        # Create a changenode group generator that will call our functions
        # back to lookup the owning changenode and collect information.
        group = cl.group(msng_cl_lst, identity,
                         manifest_and_file_collector(changedfiles))
        for chnk in group:
            yield chnk

        # The list of manifests has been collected by the generator
        # calling our functions back.
        prune_manifests()
        msng_mnfst_lst = msng_mnfst_set.keys()
        # Sort the manifestnodes by revision number.
        msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
        # Create a generator for the manifestnodes that calls our lookup
        # and data collection functions back.
        group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                            filenode_collector(changedfiles))
        for chnk in group:
            yield chnk

        # These are no longer needed, dereference and toss the memory for
        # them.
        msng_mnfst_lst = None
        msng_mnfst_set.clear()

        changedfiles = changedfiles.keys()
        changedfiles.sort()
        # Go through all our files in order sorted by name.
        for fname in changedfiles:
            filerevlog = self.file(fname)
            # Toss out the filenodes that the recipient isn't really
            # missing.
            prune_filenodes(fname, filerevlog)
            msng_filenode_lst = msng_filenode_set[fname].keys()
            # If any filenodes are left, generate the group for them,
            # otherwise don't bother.
            if len(msng_filenode_lst) > 0:
                # File group header: length-prefixed filename chunk.
                yield struct.pack(">l", len(fname) + 4) + fname
                # Sort the filenodes by their revision #
                msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                # Create a group generator and only pass in a changenode
                # lookup function as we need to collect no information
                # from filenodes.
                group = filerevlog.group(msng_filenode_lst,
                                         lookup_filenode_link_func(fname))
                for chnk in group:
                    yield chnk
            # Don't need this anymore, toss it to free memory.
            del msng_filenode_set[fname]
        # Signal that no more groups are left.
        yield struct.pack(">l", 0)

    return util.chunkbuffer(gengroup())
def changegroup(self, basenodes):
    """Generate a changegroup of all nodes that we have that a recipient
    doesn't.

    This is much easier than the previous function as we can assume that
    the recipient has any changenode we aren't sending them.

    Returns a util.chunkbuffer over the raw chunk stream: changelog
    group, manifest group, then one group per changed file, terminated
    by a zero-length chunk.
    """
    cl = self.changelog
    # every descendant of basenodes goes into the group
    nodes = cl.nodesbetween(basenodes, None)[0]
    # ersatz set of the changelog revision numbers being sent
    revset = dict.fromkeys([cl.rev(n) for n in nodes])

    # changenodes look themselves up
    def identity(x):
        return x

    # Yield the nodes of revlog whose linked changeset is in revset,
    # in revision (storage) order.
    def gennodelst(revlog):
        for r in xrange(0, revlog.count()):
            n = revlog.node(r)
            if revlog.linkrev(n) in revset:
                yield n

    # Closure factory: records, per outgoing changeset, every filename
    # it touched into changedfileset.
    def changed_file_collector(changedfileset):
        def collect_changed_files(clnode):
            c = cl.read(clnode)
            for fname in c[3]:
                changedfileset[fname] = 1
        return collect_changed_files

    # Closure factory: map a node in revlog back to the changelog node
    # it was introduced by.
    def lookuprevlink_func(revlog):
        def lookuprevlink(n):
            return cl.node(revlog.linkrev(n))
        return lookuprevlink

    def gengroup():
        # construct a list of all changed files
        changedfiles = {}

        for chnk in cl.group(nodes, identity,
                             changed_file_collector(changedfiles)):
            yield chnk
        changedfiles = changedfiles.keys()
        changedfiles.sort()

        mnfst = self.manifest
        nodeiter = gennodelst(mnfst)
        for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
            yield chnk

        for fname in changedfiles:
            filerevlog = self.file(fname)
            nodeiter = gennodelst(filerevlog)
            # materialize so we can test emptiness before emitting the
            # file header
            nodeiter = list(nodeiter)
            if nodeiter:
                # length-prefixed filename chunk introduces each file group
                yield struct.pack(">l", len(fname) + 4) + fname
                lookup = lookuprevlink_func(filerevlog)
                for chnk in filerevlog.group(nodeiter, lookup):
                    yield chnk

        # zero-length chunk terminates the stream
        yield struct.pack(">l", 0)

    return util.chunkbuffer(gengroup())
1286 def addchangegroup(self, source):
1282 def addchangegroup(self, source):
1287
1283
1288 def getchunk():
1284 def getchunk():
1289 d = source.read(4)
1285 d = source.read(4)
1290 if not d: return ""
1286 if not d: return ""
1291 l = struct.unpack(">l", d)[0]
1287 l = struct.unpack(">l", d)[0]
1292 if l <= 4: return ""
1288 if l <= 4: return ""
1293 d = source.read(l - 4)
1289 d = source.read(l - 4)
1294 if len(d) < l - 4:
1290 if len(d) < l - 4:
1295 raise repo.RepoError(_("premature EOF reading chunk"
1291 raise repo.RepoError(_("premature EOF reading chunk"
1296 " (got %d bytes, expected %d)")
1292 " (got %d bytes, expected %d)")
1297 % (len(d), l - 4))
1293 % (len(d), l - 4))
1298 return d
1294 return d
1299
1295
1300 def getgroup():
1296 def getgroup():
1301 while 1:
1297 while 1:
1302 c = getchunk()
1298 c = getchunk()
1303 if not c: break
1299 if not c: break
1304 yield c
1300 yield c
1305
1301
1306 def csmap(x):
1302 def csmap(x):
1307 self.ui.debug(_("add changeset %s\n") % short(x))
1303 self.ui.debug(_("add changeset %s\n") % short(x))
1308 return self.changelog.count()
1304 return self.changelog.count()
1309
1305
1310 def revmap(x):
1306 def revmap(x):
1311 return self.changelog.rev(x)
1307 return self.changelog.rev(x)
1312
1308
1313 if not source: return
1309 if not source: return
1314 changesets = files = revisions = 0
1310 changesets = files = revisions = 0
1315
1311
1316 tr = self.transaction()
1312 tr = self.transaction()
1317
1313
1318 oldheads = len(self.changelog.heads())
1314 oldheads = len(self.changelog.heads())
1319
1315
1320 # pull off the changeset group
1316 # pull off the changeset group
1321 self.ui.status(_("adding changesets\n"))
1317 self.ui.status(_("adding changesets\n"))
1322 co = self.changelog.tip()
1318 co = self.changelog.tip()
1323 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1319 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1324 cnr, cor = map(self.changelog.rev, (cn, co))
1320 cnr, cor = map(self.changelog.rev, (cn, co))
1325 if cn == nullid:
1321 if cn == nullid:
1326 cnr = cor
1322 cnr = cor
1327 changesets = cnr - cor
1323 changesets = cnr - cor
1328
1324
1329 # pull off the manifest group
1325 # pull off the manifest group
1330 self.ui.status(_("adding manifests\n"))
1326 self.ui.status(_("adding manifests\n"))
1331 mm = self.manifest.tip()
1327 mm = self.manifest.tip()
1332 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1328 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1333
1329
1334 # process the files
1330 # process the files
1335 self.ui.status(_("adding file changes\n"))
1331 self.ui.status(_("adding file changes\n"))
1336 while 1:
1332 while 1:
1337 f = getchunk()
1333 f = getchunk()
1338 if not f: break
1334 if not f: break
1339 self.ui.debug(_("adding %s revisions\n") % f)
1335 self.ui.debug(_("adding %s revisions\n") % f)
1340 fl = self.file(f)
1336 fl = self.file(f)
1341 o = fl.count()
1337 o = fl.count()
1342 n = fl.addgroup(getgroup(), revmap, tr)
1338 n = fl.addgroup(getgroup(), revmap, tr)
1343 revisions += fl.count() - o
1339 revisions += fl.count() - o
1344 files += 1
1340 files += 1
1345
1341
1346 newheads = len(self.changelog.heads())
1342 newheads = len(self.changelog.heads())
1347 heads = ""
1343 heads = ""
1348 if oldheads and newheads > oldheads:
1344 if oldheads and newheads > oldheads:
1349 heads = _(" (+%d heads)") % (newheads - oldheads)
1345 heads = _(" (+%d heads)") % (newheads - oldheads)
1350
1346
1351 self.ui.status(_("added %d changesets"
1347 self.ui.status(_("added %d changesets"
1352 " with %d changes to %d files%s\n")
1348 " with %d changes to %d files%s\n")
1353 % (changesets, revisions, files, heads))
1349 % (changesets, revisions, files, heads))
1354
1350
1355 tr.close()
1351 tr.close()
1356
1352
1357 if changesets > 0:
1353 if changesets > 0:
1358 if not self.hook("changegroup",
1354 if not self.hook("changegroup",
1359 node=hex(self.changelog.node(cor+1))):
1355 node=hex(self.changelog.node(cor+1))):
1360 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1356 self.ui.warn(_("abort: changegroup hook returned failure!\n"))
1361 return 1
1357 return 1
1362
1358
1363 for i in range(cor + 1, cnr + 1):
1359 for i in range(cor + 1, cnr + 1):
1364 self.hook("commit", node=hex(self.changelog.node(i)))
1360 self.hook("commit", node=hex(self.changelog.node(i)))
1365
1361
1366 return
1362 return
1367
1363
1368 def update(self, node, allow=False, force=False, choose=None,
1364 def update(self, node, allow=False, force=False, choose=None,
1369 moddirstate=True, forcemerge=False):
1365 moddirstate=True, forcemerge=False):
1370 pl = self.dirstate.parents()
1366 pl = self.dirstate.parents()
1371 if not force and pl[1] != nullid:
1367 if not force and pl[1] != nullid:
1372 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1368 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1373 return 1
1369 return 1
1374
1370
1375 p1, p2 = pl[0], node
1371 p1, p2 = pl[0], node
1376 pa = self.changelog.ancestor(p1, p2)
1372 pa = self.changelog.ancestor(p1, p2)
1377 m1n = self.changelog.read(p1)[0]
1373 m1n = self.changelog.read(p1)[0]
1378 m2n = self.changelog.read(p2)[0]
1374 m2n = self.changelog.read(p2)[0]
1379 man = self.manifest.ancestor(m1n, m2n)
1375 man = self.manifest.ancestor(m1n, m2n)
1380 m1 = self.manifest.read(m1n)
1376 m1 = self.manifest.read(m1n)
1381 mf1 = self.manifest.readflags(m1n)
1377 mf1 = self.manifest.readflags(m1n)
1382 m2 = self.manifest.read(m2n)
1378 m2 = self.manifest.read(m2n)
1383 mf2 = self.manifest.readflags(m2n)
1379 mf2 = self.manifest.readflags(m2n)
1384 ma = self.manifest.read(man)
1380 ma = self.manifest.read(man)
1385 mfa = self.manifest.readflags(man)
1381 mfa = self.manifest.readflags(man)
1386
1382
1387 (c, a, d, u) = self.changes()
1383 (c, a, d, u) = self.changes()
1388
1384
1389 if allow and not forcemerge:
1385 if allow and not forcemerge:
1390 if c or a or d:
1386 if c or a or d:
1391 raise util.Abort(_("outstanding uncommited changes"))
1387 raise util.Abort(_("outstanding uncommited changes"))
1392 if not forcemerge and not force:
1388 if not forcemerge and not force:
1393 for f in u:
1389 for f in u:
1394 if f in m2:
1390 if f in m2:
1395 t1 = self.wread(f)
1391 t1 = self.wread(f)
1396 t2 = self.file(f).read(m2[f])
1392 t2 = self.file(f).read(m2[f])
1397 if cmp(t1, t2) != 0:
1393 if cmp(t1, t2) != 0:
1398 raise util.Abort(_("'%s' already exists in the working"
1394 raise util.Abort(_("'%s' already exists in the working"
1399 " dir and differs from remote") % f)
1395 " dir and differs from remote") % f)
1400
1396
1401 # is this a jump, or a merge? i.e. is there a linear path
1397 # is this a jump, or a merge? i.e. is there a linear path
1402 # from p1 to p2?
1398 # from p1 to p2?
1403 linear_path = (pa == p1 or pa == p2)
1399 linear_path = (pa == p1 or pa == p2)
1404
1400
1405 # resolve the manifest to determine which files
1401 # resolve the manifest to determine which files
1406 # we care about merging
1402 # we care about merging
1407 self.ui.note(_("resolving manifests\n"))
1403 self.ui.note(_("resolving manifests\n"))
1408 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1404 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1409 (force, allow, moddirstate, linear_path))
1405 (force, allow, moddirstate, linear_path))
1410 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1406 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1411 (short(man), short(m1n), short(m2n)))
1407 (short(man), short(m1n), short(m2n)))
1412
1408
1413 merge = {}
1409 merge = {}
1414 get = {}
1410 get = {}
1415 remove = []
1411 remove = []
1416
1412
1417 # construct a working dir manifest
1413 # construct a working dir manifest
1418 mw = m1.copy()
1414 mw = m1.copy()
1419 mfw = mf1.copy()
1415 mfw = mf1.copy()
1420 umap = dict.fromkeys(u)
1416 umap = dict.fromkeys(u)
1421
1417
1422 for f in a + c + u:
1418 for f in a + c + u:
1423 mw[f] = ""
1419 mw[f] = ""
1424 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1420 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1425
1421
1426 if moddirstate:
1422 if moddirstate:
1427 wlock = self.wlock()
1423 wlock = self.wlock()
1428
1424
1429 for f in d:
1425 for f in d:
1430 if f in mw: del mw[f]
1426 if f in mw: del mw[f]
1431
1427
1432 # If we're jumping between revisions (as opposed to merging),
1428 # If we're jumping between revisions (as opposed to merging),
1433 # and if neither the working directory nor the target rev has
1429 # and if neither the working directory nor the target rev has
1434 # the file, then we need to remove it from the dirstate, to
1430 # the file, then we need to remove it from the dirstate, to
1435 # prevent the dirstate from listing the file when it is no
1431 # prevent the dirstate from listing the file when it is no
1436 # longer in the manifest.
1432 # longer in the manifest.
1437 if moddirstate and linear_path and f not in m2:
1433 if moddirstate and linear_path and f not in m2:
1438 self.dirstate.forget((f,))
1434 self.dirstate.forget((f,))
1439
1435
1440 # Compare manifests
1436 # Compare manifests
1441 for f, n in mw.iteritems():
1437 for f, n in mw.iteritems():
1442 if choose and not choose(f): continue
1438 if choose and not choose(f): continue
1443 if f in m2:
1439 if f in m2:
1444 s = 0
1440 s = 0
1445
1441
1446 # is the wfile new since m1, and match m2?
1442 # is the wfile new since m1, and match m2?
1447 if f not in m1:
1443 if f not in m1:
1448 t1 = self.wread(f)
1444 t1 = self.wread(f)
1449 t2 = self.file(f).read(m2[f])
1445 t2 = self.file(f).read(m2[f])
1450 if cmp(t1, t2) == 0:
1446 if cmp(t1, t2) == 0:
1451 n = m2[f]
1447 n = m2[f]
1452 del t1, t2
1448 del t1, t2
1453
1449
1454 # are files different?
1450 # are files different?
1455 if n != m2[f]:
1451 if n != m2[f]:
1456 a = ma.get(f, nullid)
1452 a = ma.get(f, nullid)
1457 # are both different from the ancestor?
1453 # are both different from the ancestor?
1458 if n != a and m2[f] != a:
1454 if n != a and m2[f] != a:
1459 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1455 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1460 # merge executable bits
1456 # merge executable bits
1461 # "if we changed or they changed, change in merge"
1457 # "if we changed or they changed, change in merge"
1462 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1458 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1463 mode = ((a^b) | (a^c)) ^ a
1459 mode = ((a^b) | (a^c)) ^ a
1464 merge[f] = (m1.get(f, nullid), m2[f], mode)
1460 merge[f] = (m1.get(f, nullid), m2[f], mode)
1465 s = 1
1461 s = 1
1466 # are we clobbering?
1462 # are we clobbering?
1467 # is remote's version newer?
1463 # is remote's version newer?
1468 # or are we going back in time?
1464 # or are we going back in time?
1469 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1465 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1470 self.ui.debug(_(" remote %s is newer, get\n") % f)
1466 self.ui.debug(_(" remote %s is newer, get\n") % f)
1471 get[f] = m2[f]
1467 get[f] = m2[f]
1472 s = 1
1468 s = 1
1473 elif f in umap:
1469 elif f in umap:
1474 # this unknown file is the same as the checkout
1470 # this unknown file is the same as the checkout
1475 get[f] = m2[f]
1471 get[f] = m2[f]
1476
1472
1477 if not s and mfw[f] != mf2[f]:
1473 if not s and mfw[f] != mf2[f]:
1478 if force:
1474 if force:
1479 self.ui.debug(_(" updating permissions for %s\n") % f)
1475 self.ui.debug(_(" updating permissions for %s\n") % f)
1480 util.set_exec(self.wjoin(f), mf2[f])
1476 util.set_exec(self.wjoin(f), mf2[f])
1481 else:
1477 else:
1482 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1478 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1483 mode = ((a^b) | (a^c)) ^ a
1479 mode = ((a^b) | (a^c)) ^ a
1484 if mode != b:
1480 if mode != b:
1485 self.ui.debug(_(" updating permissions for %s\n") % f)
1481 self.ui.debug(_(" updating permissions for %s\n") % f)
1486 util.set_exec(self.wjoin(f), mode)
1482 util.set_exec(self.wjoin(f), mode)
1487 del m2[f]
1483 del m2[f]
1488 elif f in ma:
1484 elif f in ma:
1489 if n != ma[f]:
1485 if n != ma[f]:
1490 r = _("d")
1486 r = _("d")
1491 if not force and (linear_path or allow):
1487 if not force and (linear_path or allow):
1492 r = self.ui.prompt(
1488 r = self.ui.prompt(
1493 (_(" local changed %s which remote deleted\n") % f) +
1489 (_(" local changed %s which remote deleted\n") % f) +
1494 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1490 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1495 if r == _("d"):
1491 if r == _("d"):
1496 remove.append(f)
1492 remove.append(f)
1497 else:
1493 else:
1498 self.ui.debug(_("other deleted %s\n") % f)
1494 self.ui.debug(_("other deleted %s\n") % f)
1499 remove.append(f) # other deleted it
1495 remove.append(f) # other deleted it
1500 else:
1496 else:
1501 # file is created on branch or in working directory
1497 # file is created on branch or in working directory
1502 if force and f not in umap:
1498 if force and f not in umap:
1503 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1499 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1504 remove.append(f)
1500 remove.append(f)
1505 elif n == m1.get(f, nullid): # same as parent
1501 elif n == m1.get(f, nullid): # same as parent
1506 if p2 == pa: # going backwards?
1502 if p2 == pa: # going backwards?
1507 self.ui.debug(_("remote deleted %s\n") % f)
1503 self.ui.debug(_("remote deleted %s\n") % f)
1508 remove.append(f)
1504 remove.append(f)
1509 else:
1505 else:
1510 self.ui.debug(_("local modified %s, keeping\n") % f)
1506 self.ui.debug(_("local modified %s, keeping\n") % f)
1511 else:
1507 else:
1512 self.ui.debug(_("working dir created %s, keeping\n") % f)
1508 self.ui.debug(_("working dir created %s, keeping\n") % f)
1513
1509
1514 for f, n in m2.iteritems():
1510 for f, n in m2.iteritems():
1515 if choose and not choose(f): continue
1511 if choose and not choose(f): continue
1516 if f[0] == "/": continue
1512 if f[0] == "/": continue
1517 if f in ma and n != ma[f]:
1513 if f in ma and n != ma[f]:
1518 r = _("k")
1514 r = _("k")
1519 if not force and (linear_path or allow):
1515 if not force and (linear_path or allow):
1520 r = self.ui.prompt(
1516 r = self.ui.prompt(
1521 (_("remote changed %s which local deleted\n") % f) +
1517 (_("remote changed %s which local deleted\n") % f) +
1522 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1518 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1523 if r == _("k"): get[f] = n
1519 if r == _("k"): get[f] = n
1524 elif f not in ma:
1520 elif f not in ma:
1525 self.ui.debug(_("remote created %s\n") % f)
1521 self.ui.debug(_("remote created %s\n") % f)
1526 get[f] = n
1522 get[f] = n
1527 else:
1523 else:
1528 if force or p2 == pa: # going backwards?
1524 if force or p2 == pa: # going backwards?
1529 self.ui.debug(_("local deleted %s, recreating\n") % f)
1525 self.ui.debug(_("local deleted %s, recreating\n") % f)
1530 get[f] = n
1526 get[f] = n
1531 else:
1527 else:
1532 self.ui.debug(_("local deleted %s\n") % f)
1528 self.ui.debug(_("local deleted %s\n") % f)
1533
1529
1534 del mw, m1, m2, ma
1530 del mw, m1, m2, ma
1535
1531
1536 if force:
1532 if force:
1537 for f in merge:
1533 for f in merge:
1538 get[f] = merge[f][1]
1534 get[f] = merge[f][1]
1539 merge = {}
1535 merge = {}
1540
1536
1541 if linear_path or force:
1537 if linear_path or force:
1542 # we don't need to do any magic, just jump to the new rev
1538 # we don't need to do any magic, just jump to the new rev
1543 branch_merge = False
1539 branch_merge = False
1544 p1, p2 = p2, nullid
1540 p1, p2 = p2, nullid
1545 else:
1541 else:
1546 if not allow:
1542 if not allow:
1547 self.ui.status(_("this update spans a branch"
1543 self.ui.status(_("this update spans a branch"
1548 " affecting the following files:\n"))
1544 " affecting the following files:\n"))
1549 fl = merge.keys() + get.keys()
1545 fl = merge.keys() + get.keys()
1550 fl.sort()
1546 fl.sort()
1551 for f in fl:
1547 for f in fl:
1552 cf = ""
1548 cf = ""
1553 if f in merge: cf = _(" (resolve)")
1549 if f in merge: cf = _(" (resolve)")
1554 self.ui.status(" %s%s\n" % (f, cf))
1550 self.ui.status(" %s%s\n" % (f, cf))
1555 self.ui.warn(_("aborting update spanning branches!\n"))
1551 self.ui.warn(_("aborting update spanning branches!\n"))
1556 self.ui.status(_("(use update -m to merge across branches"
1552 self.ui.status(_("(use update -m to merge across branches"
1557 " or -C to lose changes)\n"))
1553 " or -C to lose changes)\n"))
1558 return 1
1554 return 1
1559 branch_merge = True
1555 branch_merge = True
1560
1556
1561 # get the files we don't need to change
1557 # get the files we don't need to change
1562 files = get.keys()
1558 files = get.keys()
1563 files.sort()
1559 files.sort()
1564 for f in files:
1560 for f in files:
1565 if f[0] == "/": continue
1561 if f[0] == "/": continue
1566 self.ui.note(_("getting %s\n") % f)
1562 self.ui.note(_("getting %s\n") % f)
1567 t = self.file(f).read(get[f])
1563 t = self.file(f).read(get[f])
1568 self.wwrite(f, t)
1564 self.wwrite(f, t)
1569 util.set_exec(self.wjoin(f), mf2[f])
1565 util.set_exec(self.wjoin(f), mf2[f])
1570 if moddirstate:
1566 if moddirstate:
1571 if branch_merge:
1567 if branch_merge:
1572 self.dirstate.update([f], 'n', st_mtime=-1)
1568 self.dirstate.update([f], 'n', st_mtime=-1)
1573 else:
1569 else:
1574 self.dirstate.update([f], 'n')
1570 self.dirstate.update([f], 'n')
1575
1571
1576 # merge the tricky bits
1572 # merge the tricky bits
1577 files = merge.keys()
1573 files = merge.keys()
1578 files.sort()
1574 files.sort()
1579 for f in files:
1575 for f in files:
1580 self.ui.status(_("merging %s\n") % f)
1576 self.ui.status(_("merging %s\n") % f)
1581 my, other, flag = merge[f]
1577 my, other, flag = merge[f]
1582 self.merge3(f, my, other)
1578 self.merge3(f, my, other)
1583 util.set_exec(self.wjoin(f), flag)
1579 util.set_exec(self.wjoin(f), flag)
1584 if moddirstate:
1580 if moddirstate:
1585 if branch_merge:
1581 if branch_merge:
1586 # We've done a branch merge, mark this file as merged
1582 # We've done a branch merge, mark this file as merged
1587 # so that we properly record the merger later
1583 # so that we properly record the merger later
1588 self.dirstate.update([f], 'm')
1584 self.dirstate.update([f], 'm')
1589 else:
1585 else:
1590 # We've update-merged a locally modified file, so
1586 # We've update-merged a locally modified file, so
1591 # we set the dirstate to emulate a normal checkout
1587 # we set the dirstate to emulate a normal checkout
1592 # of that file some time in the past. Thus our
1588 # of that file some time in the past. Thus our
1593 # merge will appear as a normal local file
1589 # merge will appear as a normal local file
1594 # modification.
1590 # modification.
1595 f_len = len(self.file(f).read(other))
1591 f_len = len(self.file(f).read(other))
1596 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1592 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1597
1593
1598 remove.sort()
1594 remove.sort()
1599 for f in remove:
1595 for f in remove:
1600 self.ui.note(_("removing %s\n") % f)
1596 self.ui.note(_("removing %s\n") % f)
1601 try:
1597 try:
1602 util.unlink(self.wjoin(f))
1598 util.unlink(self.wjoin(f))
1603 except OSError, inst:
1599 except OSError, inst:
1604 if inst.errno != errno.ENOENT:
1600 if inst.errno != errno.ENOENT:
1605 self.ui.warn(_("update failed to remove %s: %s!\n") %
1601 self.ui.warn(_("update failed to remove %s: %s!\n") %
1606 (f, inst.strerror))
1602 (f, inst.strerror))
1607 if moddirstate:
1603 if moddirstate:
1608 if branch_merge:
1604 if branch_merge:
1609 self.dirstate.update(remove, 'r')
1605 self.dirstate.update(remove, 'r')
1610 else:
1606 else:
1611 self.dirstate.forget(remove)
1607 self.dirstate.forget(remove)
1612
1608
1613 if moddirstate:
1609 if moddirstate:
1614 self.dirstate.setparents(p1, p2)
1610 self.dirstate.setparents(p1, p2)
1615
1611
1616 def merge3(self, fn, my, other):
1612 def merge3(self, fn, my, other):
1617 """perform a 3-way merge in the working directory"""
1613 """perform a 3-way merge in the working directory"""
1618
1614
1619 def temp(prefix, node):
1615 def temp(prefix, node):
1620 pre = "%s~%s." % (os.path.basename(fn), prefix)
1616 pre = "%s~%s." % (os.path.basename(fn), prefix)
1621 (fd, name) = tempfile.mkstemp("", pre)
1617 (fd, name) = tempfile.mkstemp("", pre)
1622 f = os.fdopen(fd, "wb")
1618 f = os.fdopen(fd, "wb")
1623 self.wwrite(fn, fl.read(node), f)
1619 self.wwrite(fn, fl.read(node), f)
1624 f.close()
1620 f.close()
1625 return name
1621 return name
1626
1622
1627 fl = self.file(fn)
1623 fl = self.file(fn)
1628 base = fl.ancestor(my, other)
1624 base = fl.ancestor(my, other)
1629 a = self.wjoin(fn)
1625 a = self.wjoin(fn)
1630 b = temp("base", base)
1626 b = temp("base", base)
1631 c = temp("other", other)
1627 c = temp("other", other)
1632
1628
1633 self.ui.note(_("resolving %s\n") % fn)
1629 self.ui.note(_("resolving %s\n") % fn)
1634 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1630 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1635 (fn, short(my), short(other), short(base)))
1631 (fn, short(my), short(other), short(base)))
1636
1632
1637 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1633 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1638 or "hgmerge")
1634 or "hgmerge")
1639 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1635 r = os.system('%s "%s" "%s" "%s"' % (cmd, a, b, c))
1640 if r:
1636 if r:
1641 self.ui.warn(_("merging %s failed!\n") % fn)
1637 self.ui.warn(_("merging %s failed!\n") % fn)
1642
1638
1643 os.unlink(b)
1639 os.unlink(b)
1644 os.unlink(c)
1640 os.unlink(c)
1645
1641
1646 def verify(self):
1642 def verify(self):
1647 filelinkrevs = {}
1643 filelinkrevs = {}
1648 filenodes = {}
1644 filenodes = {}
1649 changesets = revisions = files = 0
1645 changesets = revisions = files = 0
1650 errors = [0]
1646 errors = [0]
1651 neededmanifests = {}
1647 neededmanifests = {}
1652
1648
1653 def err(msg):
1649 def err(msg):
1654 self.ui.warn(msg + "\n")
1650 self.ui.warn(msg + "\n")
1655 errors[0] += 1
1651 errors[0] += 1
1656
1652
1657 seen = {}
1653 seen = {}
1658 self.ui.status(_("checking changesets\n"))
1654 self.ui.status(_("checking changesets\n"))
1659 d = self.changelog.checksize()
1655 d = self.changelog.checksize()
1660 if d:
1656 if d:
1661 err(_("changeset data short %d bytes") % d)
1657 err(_("changeset data short %d bytes") % d)
1662 for i in range(self.changelog.count()):
1658 for i in range(self.changelog.count()):
1663 changesets += 1
1659 changesets += 1
1664 n = self.changelog.node(i)
1660 n = self.changelog.node(i)
1665 l = self.changelog.linkrev(n)
1661 l = self.changelog.linkrev(n)
1666 if l != i:
1662 if l != i:
1667 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1663 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1668 if n in seen:
1664 if n in seen:
1669 err(_("duplicate changeset at revision %d") % i)
1665 err(_("duplicate changeset at revision %d") % i)
1670 seen[n] = 1
1666 seen[n] = 1
1671
1667
1672 for p in self.changelog.parents(n):
1668 for p in self.changelog.parents(n):
1673 if p not in self.changelog.nodemap:
1669 if p not in self.changelog.nodemap:
1674 err(_("changeset %s has unknown parent %s") %
1670 err(_("changeset %s has unknown parent %s") %
1675 (short(n), short(p)))
1671 (short(n), short(p)))
1676 try:
1672 try:
1677 changes = self.changelog.read(n)
1673 changes = self.changelog.read(n)
1678 except KeyboardInterrupt:
1674 except KeyboardInterrupt:
1679 self.ui.warn(_("interrupted"))
1675 self.ui.warn(_("interrupted"))
1680 raise
1676 raise
1681 except Exception, inst:
1677 except Exception, inst:
1682 err(_("unpacking changeset %s: %s") % (short(n), inst))
1678 err(_("unpacking changeset %s: %s") % (short(n), inst))
1683
1679
1684 neededmanifests[changes[0]] = n
1680 neededmanifests[changes[0]] = n
1685
1681
1686 for f in changes[3]:
1682 for f in changes[3]:
1687 filelinkrevs.setdefault(f, []).append(i)
1683 filelinkrevs.setdefault(f, []).append(i)
1688
1684
1689 seen = {}
1685 seen = {}
1690 self.ui.status(_("checking manifests\n"))
1686 self.ui.status(_("checking manifests\n"))
1691 d = self.manifest.checksize()
1687 d = self.manifest.checksize()
1692 if d:
1688 if d:
1693 err(_("manifest data short %d bytes") % d)
1689 err(_("manifest data short %d bytes") % d)
1694 for i in range(self.manifest.count()):
1690 for i in range(self.manifest.count()):
1695 n = self.manifest.node(i)
1691 n = self.manifest.node(i)
1696 l = self.manifest.linkrev(n)
1692 l = self.manifest.linkrev(n)
1697
1693
1698 if l < 0 or l >= self.changelog.count():
1694 if l < 0 or l >= self.changelog.count():
1699 err(_("bad manifest link (%d) at revision %d") % (l, i))
1695 err(_("bad manifest link (%d) at revision %d") % (l, i))
1700
1696
1701 if n in neededmanifests:
1697 if n in neededmanifests:
1702 del neededmanifests[n]
1698 del neededmanifests[n]
1703
1699
1704 if n in seen:
1700 if n in seen:
1705 err(_("duplicate manifest at revision %d") % i)
1701 err(_("duplicate manifest at revision %d") % i)
1706
1702
1707 seen[n] = 1
1703 seen[n] = 1
1708
1704
1709 for p in self.manifest.parents(n):
1705 for p in self.manifest.parents(n):
1710 if p not in self.manifest.nodemap:
1706 if p not in self.manifest.nodemap:
1711 err(_("manifest %s has unknown parent %s") %
1707 err(_("manifest %s has unknown parent %s") %
1712 (short(n), short(p)))
1708 (short(n), short(p)))
1713
1709
1714 try:
1710 try:
1715 delta = mdiff.patchtext(self.manifest.delta(n))
1711 delta = mdiff.patchtext(self.manifest.delta(n))
1716 except KeyboardInterrupt:
1712 except KeyboardInterrupt:
1717 self.ui.warn(_("interrupted"))
1713 self.ui.warn(_("interrupted"))
1718 raise
1714 raise
1719 except Exception, inst:
1715 except Exception, inst:
1720 err(_("unpacking manifest %s: %s") % (short(n), inst))
1716 err(_("unpacking manifest %s: %s") % (short(n), inst))
1721
1717
1722 ff = [ l.split('\0') for l in delta.splitlines() ]
1718 ff = [ l.split('\0') for l in delta.splitlines() ]
1723 for f, fn in ff:
1719 for f, fn in ff:
1724 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1720 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1725
1721
1726 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1722 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1727
1723
1728 for m,c in neededmanifests.items():
1724 for m,c in neededmanifests.items():
1729 err(_("Changeset %s refers to unknown manifest %s") %
1725 err(_("Changeset %s refers to unknown manifest %s") %
1730 (short(m), short(c)))
1726 (short(m), short(c)))
1731 del neededmanifests
1727 del neededmanifests
1732
1728
1733 for f in filenodes:
1729 for f in filenodes:
1734 if f not in filelinkrevs:
1730 if f not in filelinkrevs:
1735 err(_("file %s in manifest but not in changesets") % f)
1731 err(_("file %s in manifest but not in changesets") % f)
1736
1732
1737 for f in filelinkrevs:
1733 for f in filelinkrevs:
1738 if f not in filenodes:
1734 if f not in filenodes:
1739 err(_("file %s in changeset but not in manifest") % f)
1735 err(_("file %s in changeset but not in manifest") % f)
1740
1736
1741 self.ui.status(_("checking files\n"))
1737 self.ui.status(_("checking files\n"))
1742 ff = filenodes.keys()
1738 ff = filenodes.keys()
1743 ff.sort()
1739 ff.sort()
1744 for f in ff:
1740 for f in ff:
1745 if f == "/dev/null": continue
1741 if f == "/dev/null": continue
1746 files += 1
1742 files += 1
1747 fl = self.file(f)
1743 fl = self.file(f)
1748 d = fl.checksize()
1744 d = fl.checksize()
1749 if d:
1745 if d:
1750 err(_("%s file data short %d bytes") % (f, d))
1746 err(_("%s file data short %d bytes") % (f, d))
1751
1747
1752 nodes = { nullid: 1 }
1748 nodes = { nullid: 1 }
1753 seen = {}
1749 seen = {}
1754 for i in range(fl.count()):
1750 for i in range(fl.count()):
1755 revisions += 1
1751 revisions += 1
1756 n = fl.node(i)
1752 n = fl.node(i)
1757
1753
1758 if n in seen:
1754 if n in seen:
1759 err(_("%s: duplicate revision %d") % (f, i))
1755 err(_("%s: duplicate revision %d") % (f, i))
1760 if n not in filenodes[f]:
1756 if n not in filenodes[f]:
1761 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1757 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1762 else:
1758 else:
1763 del filenodes[f][n]
1759 del filenodes[f][n]
1764
1760
1765 flr = fl.linkrev(n)
1761 flr = fl.linkrev(n)
1766 if flr not in filelinkrevs[f]:
1762 if flr not in filelinkrevs[f]:
1767 err(_("%s:%s points to unexpected changeset %d")
1763 err(_("%s:%s points to unexpected changeset %d")
1768 % (f, short(n), flr))
1764 % (f, short(n), flr))
1769 else:
1765 else:
1770 filelinkrevs[f].remove(flr)
1766 filelinkrevs[f].remove(flr)
1771
1767
1772 # verify contents
1768 # verify contents
1773 try:
1769 try:
1774 t = fl.read(n)
1770 t = fl.read(n)
1775 except KeyboardInterrupt:
1771 except KeyboardInterrupt:
1776 self.ui.warn(_("interrupted"))
1772 self.ui.warn(_("interrupted"))
1777 raise
1773 raise
1778 except Exception, inst:
1774 except Exception, inst:
1779 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1775 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1780
1776
1781 # verify parents
1777 # verify parents
1782 (p1, p2) = fl.parents(n)
1778 (p1, p2) = fl.parents(n)
1783 if p1 not in nodes:
1779 if p1 not in nodes:
1784 err(_("file %s:%s unknown parent 1 %s") %
1780 err(_("file %s:%s unknown parent 1 %s") %
1785 (f, short(n), short(p1)))
1781 (f, short(n), short(p1)))
1786 if p2 not in nodes:
1782 if p2 not in nodes:
1787 err(_("file %s:%s unknown parent 2 %s") %
1783 err(_("file %s:%s unknown parent 2 %s") %
1788 (f, short(n), short(p1)))
1784 (f, short(n), short(p1)))
1789 nodes[n] = 1
1785 nodes[n] = 1
1790
1786
1791 # cross-check
1787 # cross-check
1792 for node in filenodes[f]:
1788 for node in filenodes[f]:
1793 err(_("node %s in manifests not in %s") % (hex(node), f))
1789 err(_("node %s in manifests not in %s") % (hex(node), f))
1794
1790
1795 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1791 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1796 (files, changesets, revisions))
1792 (files, changesets, revisions))
1797
1793
1798 if errors[0]:
1794 if errors[0]:
1799 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1795 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1800 return 1
1796 return 1
@@ -1,893 +1,850 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
16 demandload(globals(), "binascii errno heapq mdiff sha struct zlib")
17
17
18 def hash(text, p1, p2):
18 def hash(text, p1, p2):
19 """generate a hash from the given text and its parent hashes
19 """generate a hash from the given text and its parent hashes
20
20
21 This hash combines both the current file contents and its history
21 This hash combines both the current file contents and its history
22 in a manner that makes it easy to distinguish nodes with the same
22 in a manner that makes it easy to distinguish nodes with the same
23 content in the revision graph.
23 content in the revision graph.
24 """
24 """
25 l = [p1, p2]
25 l = [p1, p2]
26 l.sort()
26 l.sort()
27 s = sha.new(l[0])
27 s = sha.new(l[0])
28 s.update(l[1])
28 s.update(l[1])
29 s.update(text)
29 s.update(text)
30 return s.digest()
30 return s.digest()
31
31
32 def compress(text):
32 def compress(text):
33 """ generate a possibly-compressed representation of text """
33 """ generate a possibly-compressed representation of text """
34 if not text: return ("", text)
34 if not text: return ("", text)
35 if len(text) < 44:
35 if len(text) < 44:
36 if text[0] == '\0': return ("", text)
36 if text[0] == '\0': return ("", text)
37 return ('u', text)
37 return ('u', text)
38 bin = zlib.compress(text)
38 bin = zlib.compress(text)
39 if len(bin) > len(text):
39 if len(bin) > len(text):
40 if text[0] == '\0': return ("", text)
40 if text[0] == '\0': return ("", text)
41 return ('u', text)
41 return ('u', text)
42 return ("", bin)
42 return ("", bin)
43
43
44 def decompress(bin):
44 def decompress(bin):
45 """ decompress the given input """
45 """ decompress the given input """
46 if not bin: return bin
46 if not bin: return bin
47 t = bin[0]
47 t = bin[0]
48 if t == '\0': return bin
48 if t == '\0': return bin
49 if t == 'x': return zlib.decompress(bin)
49 if t == 'x': return zlib.decompress(bin)
50 if t == 'u': return bin[1:]
50 if t == 'u': return bin[1:]
51 raise RevlogError(_("unknown compression type %s") % t)
51 raise RevlogError(_("unknown compression type %s") % t)
52
52
53 indexformat = ">4l20s20s20s"
53 indexformat = ">4l20s20s20s"
54
54
55 class lazyparser(object):
55 class lazyparser(object):
56 """
56 """
57 this class avoids the need to parse the entirety of large indices
57 this class avoids the need to parse the entirety of large indices
58
58
59 By default we parse and load 1000 entries at a time.
59 By default we parse and load 1000 entries at a time.
60
60
61 If no position is specified, we load the whole index, and replace
61 If no position is specified, we load the whole index, and replace
62 the lazy objects in revlog with the underlying objects for
62 the lazy objects in revlog with the underlying objects for
63 efficiency in cases where we look at most of the nodes.
63 efficiency in cases where we look at most of the nodes.
64 """
64 """
65 def __init__(self, data, revlog):
65 def __init__(self, data, revlog):
66 self.data = data
66 self.data = data
67 self.s = struct.calcsize(indexformat)
67 self.s = struct.calcsize(indexformat)
68 self.l = len(data)/self.s
68 self.l = len(data)/self.s
69 self.index = [None] * self.l
69 self.index = [None] * self.l
70 self.map = {nullid: -1}
70 self.map = {nullid: -1}
71 self.all = 0
71 self.all = 0
72 self.revlog = revlog
72 self.revlog = revlog
73
73
74 def trunc(self, pos):
74 def trunc(self, pos):
75 self.l = pos/self.s
75 self.l = pos/self.s
76
76
77 def load(self, pos=None):
77 def load(self, pos=None):
78 if self.all: return
78 if self.all: return
79 if pos is not None:
79 if pos is not None:
80 block = pos / 1000
80 block = pos / 1000
81 i = block * 1000
81 i = block * 1000
82 end = min(self.l, i + 1000)
82 end = min(self.l, i + 1000)
83 else:
83 else:
84 self.all = 1
84 self.all = 1
85 i = 0
85 i = 0
86 end = self.l
86 end = self.l
87 self.revlog.index = self.index
87 self.revlog.index = self.index
88 self.revlog.nodemap = self.map
88 self.revlog.nodemap = self.map
89
89
90 while i < end:
90 while i < end:
91 d = self.data[i * self.s: (i + 1) * self.s]
91 d = self.data[i * self.s: (i + 1) * self.s]
92 e = struct.unpack(indexformat, d)
92 e = struct.unpack(indexformat, d)
93 self.index[i] = e
93 self.index[i] = e
94 self.map[e[6]] = i
94 self.map[e[6]] = i
95 i += 1
95 i += 1
96
96
97 class lazyindex(object):
97 class lazyindex(object):
98 """a lazy version of the index array"""
98 """a lazy version of the index array"""
99 def __init__(self, parser):
99 def __init__(self, parser):
100 self.p = parser
100 self.p = parser
101 def __len__(self):
101 def __len__(self):
102 return len(self.p.index)
102 return len(self.p.index)
103 def load(self, pos):
103 def load(self, pos):
104 if pos < 0:
104 if pos < 0:
105 pos += len(self.p.index)
105 pos += len(self.p.index)
106 self.p.load(pos)
106 self.p.load(pos)
107 return self.p.index[pos]
107 return self.p.index[pos]
108 def __getitem__(self, pos):
108 def __getitem__(self, pos):
109 return self.p.index[pos] or self.load(pos)
109 return self.p.index[pos] or self.load(pos)
110 def __delitem__(self, pos):
110 def __delitem__(self, pos):
111 del self.p.index[pos]
111 del self.p.index[pos]
112 def append(self, e):
112 def append(self, e):
113 self.p.index.append(e)
113 self.p.index.append(e)
114 def trunc(self, pos):
114 def trunc(self, pos):
115 self.p.trunc(pos)
115 self.p.trunc(pos)
116
116
117 class lazymap(object):
117 class lazymap(object):
118 """a lazy version of the node map"""
118 """a lazy version of the node map"""
119 def __init__(self, parser):
119 def __init__(self, parser):
120 self.p = parser
120 self.p = parser
121 def load(self, key):
121 def load(self, key):
122 if self.p.all: return
122 if self.p.all: return
123 n = self.p.data.find(key)
123 n = self.p.data.find(key)
124 if n < 0:
124 if n < 0:
125 raise KeyError(key)
125 raise KeyError(key)
126 pos = n / self.p.s
126 pos = n / self.p.s
127 self.p.load(pos)
127 self.p.load(pos)
128 def __contains__(self, key):
128 def __contains__(self, key):
129 self.p.load()
129 self.p.load()
130 return key in self.p.map
130 return key in self.p.map
131 def __iter__(self):
131 def __iter__(self):
132 yield nullid
132 yield nullid
133 for i in xrange(self.p.l):
133 for i in xrange(self.p.l):
134 try:
134 try:
135 yield self.p.index[i][6]
135 yield self.p.index[i][6]
136 except:
136 except:
137 self.p.load(i)
137 self.p.load(i)
138 yield self.p.index[i][6]
138 yield self.p.index[i][6]
139 def __getitem__(self, key):
139 def __getitem__(self, key):
140 try:
140 try:
141 return self.p.map[key]
141 return self.p.map[key]
142 except KeyError:
142 except KeyError:
143 try:
143 try:
144 self.load(key)
144 self.load(key)
145 return self.p.map[key]
145 return self.p.map[key]
146 except KeyError:
146 except KeyError:
147 raise KeyError("node " + hex(key))
147 raise KeyError("node " + hex(key))
148 def __setitem__(self, key, val):
148 def __setitem__(self, key, val):
149 self.p.map[key] = val
149 self.p.map[key] = val
150 def __delitem__(self, key):
150 def __delitem__(self, key):
151 del self.p.map[key]
151 del self.p.map[key]
152
152
153 class RevlogError(Exception): pass
153 class RevlogError(Exception): pass
154
154
155 class revlog(object):
155 class revlog(object):
156 """
156 """
157 the underlying revision storage object
157 the underlying revision storage object
158
158
159 A revlog consists of two parts, an index and the revision data.
159 A revlog consists of two parts, an index and the revision data.
160
160
161 The index is a file with a fixed record size containing
161 The index is a file with a fixed record size containing
162 information on each revision, includings its nodeid (hash), the
162 information on each revision, includings its nodeid (hash), the
163 nodeids of its parents, the position and offset of its data within
163 nodeids of its parents, the position and offset of its data within
164 the data file, and the revision it's based on. Finally, each entry
164 the data file, and the revision it's based on. Finally, each entry
165 contains a linkrev entry that can serve as a pointer to external
165 contains a linkrev entry that can serve as a pointer to external
166 data.
166 data.
167
167
168 The revision data itself is a linear collection of data chunks.
168 The revision data itself is a linear collection of data chunks.
169 Each chunk represents a revision and is usually represented as a
169 Each chunk represents a revision and is usually represented as a
170 delta against the previous chunk. To bound lookup time, runs of
170 delta against the previous chunk. To bound lookup time, runs of
171 deltas are limited to about 2 times the length of the original
171 deltas are limited to about 2 times the length of the original
172 version data. This makes retrieval of a version proportional to
172 version data. This makes retrieval of a version proportional to
173 its size, or O(1) relative to the number of revisions.
173 its size, or O(1) relative to the number of revisions.
174
174
175 Both pieces of the revlog are written to in an append-only
175 Both pieces of the revlog are written to in an append-only
176 fashion, which means we never need to rewrite a file to insert or
176 fashion, which means we never need to rewrite a file to insert or
177 remove data, and can use some simple techniques to avoid the need
177 remove data, and can use some simple techniques to avoid the need
178 for locking while reading.
178 for locking while reading.
179 """
179 """
180 def __init__(self, opener, indexfile, datafile):
180 def __init__(self, opener, indexfile, datafile):
181 """
181 """
182 create a revlog object
182 create a revlog object
183
183
184 opener is a function that abstracts the file opening operation
184 opener is a function that abstracts the file opening operation
185 and can be used to implement COW semantics or the like.
185 and can be used to implement COW semantics or the like.
186 """
186 """
187 self.indexfile = indexfile
187 self.indexfile = indexfile
188 self.datafile = datafile
188 self.datafile = datafile
189 self.opener = opener
189 self.opener = opener
190 self.cache = None
190 self.cache = None
191 self.chunkcache = None
191
192
192 try:
193 try:
193 i = self.opener(self.indexfile).read()
194 i = self.opener(self.indexfile).read()
194 except IOError, inst:
195 except IOError, inst:
195 if inst.errno != errno.ENOENT:
196 if inst.errno != errno.ENOENT:
196 raise
197 raise
197 i = ""
198 i = ""
198
199
199 if len(i) > 10000:
200 if len(i) > 10000:
200 # big index, let's parse it on demand
201 # big index, let's parse it on demand
201 parser = lazyparser(i, self)
202 parser = lazyparser(i, self)
202 self.index = lazyindex(parser)
203 self.index = lazyindex(parser)
203 self.nodemap = lazymap(parser)
204 self.nodemap = lazymap(parser)
204 else:
205 else:
205 s = struct.calcsize(indexformat)
206 s = struct.calcsize(indexformat)
206 l = len(i) / s
207 l = len(i) / s
207 self.index = [None] * l
208 self.index = [None] * l
208 m = [None] * l
209 m = [None] * l
209
210
210 n = 0
211 n = 0
211 for f in xrange(0, len(i), s):
212 for f in xrange(0, len(i), s):
212 # offset, size, base, linkrev, p1, p2, nodeid
213 # offset, size, base, linkrev, p1, p2, nodeid
213 e = struct.unpack(indexformat, i[f:f + s])
214 e = struct.unpack(indexformat, i[f:f + s])
214 m[n] = (e[6], n)
215 m[n] = (e[6], n)
215 self.index[n] = e
216 self.index[n] = e
216 n += 1
217 n += 1
217
218
218 self.nodemap = dict(m)
219 self.nodemap = dict(m)
219 self.nodemap[nullid] = -1
220 self.nodemap[nullid] = -1
220
221
221 def tip(self): return self.node(len(self.index) - 1)
222 def tip(self): return self.node(len(self.index) - 1)
222 def count(self): return len(self.index)
223 def count(self): return len(self.index)
223 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
224 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
224 def rev(self, node):
225 def rev(self, node):
225 try:
226 try:
226 return self.nodemap[node]
227 return self.nodemap[node]
227 except KeyError:
228 except KeyError:
228 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
229 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
229 def linkrev(self, node): return self.index[self.rev(node)][3]
230 def linkrev(self, node): return self.index[self.rev(node)][3]
230 def parents(self, node):
231 def parents(self, node):
231 if node == nullid: return (nullid, nullid)
232 if node == nullid: return (nullid, nullid)
232 return self.index[self.rev(node)][4:6]
233 return self.index[self.rev(node)][4:6]
233
234
234 def start(self, rev): return self.index[rev][0]
235 def start(self, rev): return self.index[rev][0]
235 def length(self, rev): return self.index[rev][1]
236 def length(self, rev): return self.index[rev][1]
236 def end(self, rev): return self.start(rev) + self.length(rev)
237 def end(self, rev): return self.start(rev) + self.length(rev)
237 def base(self, rev): return self.index[rev][2]
238 def base(self, rev): return self.index[rev][2]
238
239
239 def reachable(self, rev, stop=None):
240 def reachable(self, rev, stop=None):
240 reachable = {}
241 reachable = {}
241 visit = [rev]
242 visit = [rev]
242 reachable[rev] = 1
243 reachable[rev] = 1
243 if stop:
244 if stop:
244 stopn = self.rev(stop)
245 stopn = self.rev(stop)
245 else:
246 else:
246 stopn = 0
247 stopn = 0
247 while visit:
248 while visit:
248 n = visit.pop(0)
249 n = visit.pop(0)
249 if n == stop:
250 if n == stop:
250 continue
251 continue
251 if n == nullid:
252 if n == nullid:
252 continue
253 continue
253 for p in self.parents(n):
254 for p in self.parents(n):
254 if self.rev(p) < stopn:
255 if self.rev(p) < stopn:
255 continue
256 continue
256 if p not in reachable:
257 if p not in reachable:
257 reachable[p] = 1
258 reachable[p] = 1
258 visit.append(p)
259 visit.append(p)
259 return reachable
260 return reachable
260
261
261 def nodesbetween(self, roots=None, heads=None):
262 def nodesbetween(self, roots=None, heads=None):
262 """Return a tuple containing three elements. Elements 1 and 2 contain
263 """Return a tuple containing three elements. Elements 1 and 2 contain
263 a final list bases and heads after all the unreachable ones have been
264 a final list bases and heads after all the unreachable ones have been
264 pruned. Element 0 contains a topologically sorted list of all
265 pruned. Element 0 contains a topologically sorted list of all
265
266
266 nodes that satisfy these constraints:
267 nodes that satisfy these constraints:
267 1. All nodes must be descended from a node in roots (the nodes on
268 1. All nodes must be descended from a node in roots (the nodes on
268 roots are considered descended from themselves).
269 roots are considered descended from themselves).
269 2. All nodes must also be ancestors of a node in heads (the nodes in
270 2. All nodes must also be ancestors of a node in heads (the nodes in
270 heads are considered to be their own ancestors).
271 heads are considered to be their own ancestors).
271
272
272 If roots is unspecified, nullid is assumed as the only root.
273 If roots is unspecified, nullid is assumed as the only root.
273 If heads is unspecified, it is taken to be the output of the
274 If heads is unspecified, it is taken to be the output of the
274 heads method (i.e. a list of all nodes in the repository that
275 heads method (i.e. a list of all nodes in the repository that
275 have no children)."""
276 have no children)."""
276 nonodes = ([], [], [])
277 nonodes = ([], [], [])
277 if roots is not None:
278 if roots is not None:
278 roots = list(roots)
279 roots = list(roots)
279 if not roots:
280 if not roots:
280 return nonodes
281 return nonodes
281 lowestrev = min([self.rev(n) for n in roots])
282 lowestrev = min([self.rev(n) for n in roots])
282 else:
283 else:
283 roots = [nullid] # Everybody's a descendent of nullid
284 roots = [nullid] # Everybody's a descendent of nullid
284 lowestrev = -1
285 lowestrev = -1
285 if (lowestrev == -1) and (heads is None):
286 if (lowestrev == -1) and (heads is None):
286 # We want _all_ the nodes!
287 # We want _all_ the nodes!
287 return ([self.node(r) for r in xrange(0, self.count())],
288 return ([self.node(r) for r in xrange(0, self.count())],
288 [nullid], list(self.heads()))
289 [nullid], list(self.heads()))
289 if heads is None:
290 if heads is None:
290 # All nodes are ancestors, so the latest ancestor is the last
291 # All nodes are ancestors, so the latest ancestor is the last
291 # node.
292 # node.
292 highestrev = self.count() - 1
293 highestrev = self.count() - 1
293 # Set ancestors to None to signal that every node is an ancestor.
294 # Set ancestors to None to signal that every node is an ancestor.
294 ancestors = None
295 ancestors = None
295 # Set heads to an empty dictionary for later discovery of heads
296 # Set heads to an empty dictionary for later discovery of heads
296 heads = {}
297 heads = {}
297 else:
298 else:
298 heads = list(heads)
299 heads = list(heads)
299 if not heads:
300 if not heads:
300 return nonodes
301 return nonodes
301 ancestors = {}
302 ancestors = {}
302 # Start at the top and keep marking parents until we're done.
303 # Start at the top and keep marking parents until we're done.
303 nodestotag = heads[:]
304 nodestotag = heads[:]
304 # Turn heads into a dictionary so we can remove 'fake' heads.
305 # Turn heads into a dictionary so we can remove 'fake' heads.
305 # Also, later we will be using it to filter out the heads we can't
306 # Also, later we will be using it to filter out the heads we can't
306 # find from roots.
307 # find from roots.
307 heads = dict.fromkeys(heads, 0)
308 heads = dict.fromkeys(heads, 0)
308 # Remember where the top was so we can use it as a limit later.
309 # Remember where the top was so we can use it as a limit later.
309 highestrev = max([self.rev(n) for n in nodestotag])
310 highestrev = max([self.rev(n) for n in nodestotag])
310 while nodestotag:
311 while nodestotag:
311 # grab a node to tag
312 # grab a node to tag
312 n = nodestotag.pop()
313 n = nodestotag.pop()
313 # Never tag nullid
314 # Never tag nullid
314 if n == nullid:
315 if n == nullid:
315 continue
316 continue
316 # A node's revision number represents its place in a
317 # A node's revision number represents its place in a
317 # topologically sorted list of nodes.
318 # topologically sorted list of nodes.
318 r = self.rev(n)
319 r = self.rev(n)
319 if r >= lowestrev:
320 if r >= lowestrev:
320 if n not in ancestors:
321 if n not in ancestors:
321 # If we are possibly a descendent of one of the roots
322 # If we are possibly a descendent of one of the roots
322 # and we haven't already been marked as an ancestor
323 # and we haven't already been marked as an ancestor
323 ancestors[n] = 1 # Mark as ancestor
324 ancestors[n] = 1 # Mark as ancestor
324 # Add non-nullid parents to list of nodes to tag.
325 # Add non-nullid parents to list of nodes to tag.
325 nodestotag.extend([p for p in self.parents(n) if
326 nodestotag.extend([p for p in self.parents(n) if
326 p != nullid])
327 p != nullid])
327 elif n in heads: # We've seen it before, is it a fake head?
328 elif n in heads: # We've seen it before, is it a fake head?
328 # So it is, real heads should not be the ancestors of
329 # So it is, real heads should not be the ancestors of
329 # any other heads.
330 # any other heads.
330 heads.pop(n)
331 heads.pop(n)
331 if not ancestors:
332 if not ancestors:
332 return nonodes
333 return nonodes
333 # Now that we have our set of ancestors, we want to remove any
334 # Now that we have our set of ancestors, we want to remove any
334 # roots that are not ancestors.
335 # roots that are not ancestors.
335
336
336 # If one of the roots was nullid, everything is included anyway.
337 # If one of the roots was nullid, everything is included anyway.
337 if lowestrev > -1:
338 if lowestrev > -1:
338 # But, since we weren't, let's recompute the lowest rev to not
339 # But, since we weren't, let's recompute the lowest rev to not
339 # include roots that aren't ancestors.
340 # include roots that aren't ancestors.
340
341
341 # Filter out roots that aren't ancestors of heads
342 # Filter out roots that aren't ancestors of heads
342 roots = [n for n in roots if n in ancestors]
343 roots = [n for n in roots if n in ancestors]
343 # Recompute the lowest revision
344 # Recompute the lowest revision
344 if roots:
345 if roots:
345 lowestrev = min([self.rev(n) for n in roots])
346 lowestrev = min([self.rev(n) for n in roots])
346 else:
347 else:
347 # No more roots? Return empty list
348 # No more roots? Return empty list
348 return nonodes
349 return nonodes
349 else:
350 else:
350 # We are descending from nullid, and don't need to care about
351 # We are descending from nullid, and don't need to care about
351 # any other roots.
352 # any other roots.
352 lowestrev = -1
353 lowestrev = -1
353 roots = [nullid]
354 roots = [nullid]
354 # Transform our roots list into a 'set' (i.e. a dictionary where the
355 # Transform our roots list into a 'set' (i.e. a dictionary where the
355 # values don't matter.
356 # values don't matter.
356 descendents = dict.fromkeys(roots, 1)
357 descendents = dict.fromkeys(roots, 1)
357 # Also, keep the original roots so we can filter out roots that aren't
358 # Also, keep the original roots so we can filter out roots that aren't
358 # 'real' roots (i.e. are descended from other roots).
359 # 'real' roots (i.e. are descended from other roots).
359 roots = descendents.copy()
360 roots = descendents.copy()
360 # Our topologically sorted list of output nodes.
361 # Our topologically sorted list of output nodes.
361 orderedout = []
362 orderedout = []
362 # Don't start at nullid since we don't want nullid in our output list,
363 # Don't start at nullid since we don't want nullid in our output list,
363 # and if nullid shows up in descedents, empty parents will look like
364 # and if nullid shows up in descedents, empty parents will look like
364 # they're descendents.
365 # they're descendents.
365 for r in xrange(max(lowestrev, 0), highestrev + 1):
366 for r in xrange(max(lowestrev, 0), highestrev + 1):
366 n = self.node(r)
367 n = self.node(r)
367 isdescendent = False
368 isdescendent = False
368 if lowestrev == -1: # Everybody is a descendent of nullid
369 if lowestrev == -1: # Everybody is a descendent of nullid
369 isdescendent = True
370 isdescendent = True
370 elif n in descendents:
371 elif n in descendents:
371 # n is already a descendent
372 # n is already a descendent
372 isdescendent = True
373 isdescendent = True
373 # This check only needs to be done here because all the roots
374 # This check only needs to be done here because all the roots
374 # will start being marked is descendents before the loop.
375 # will start being marked is descendents before the loop.
375 if n in roots:
376 if n in roots:
376 # If n was a root, check if it's a 'real' root.
377 # If n was a root, check if it's a 'real' root.
377 p = tuple(self.parents(n))
378 p = tuple(self.parents(n))
378 # If any of its parents are descendents, it's not a root.
379 # If any of its parents are descendents, it's not a root.
379 if (p[0] in descendents) or (p[1] in descendents):
380 if (p[0] in descendents) or (p[1] in descendents):
380 roots.pop(n)
381 roots.pop(n)
381 else:
382 else:
382 p = tuple(self.parents(n))
383 p = tuple(self.parents(n))
383 # A node is a descendent if either of its parents are
384 # A node is a descendent if either of its parents are
384 # descendents. (We seeded the dependents list with the roots
385 # descendents. (We seeded the dependents list with the roots
385 # up there, remember?)
386 # up there, remember?)
386 if (p[0] in descendents) or (p[1] in descendents):
387 if (p[0] in descendents) or (p[1] in descendents):
387 descendents[n] = 1
388 descendents[n] = 1
388 isdescendent = True
389 isdescendent = True
389 if isdescendent and ((ancestors is None) or (n in ancestors)):
390 if isdescendent and ((ancestors is None) or (n in ancestors)):
390 # Only include nodes that are both descendents and ancestors.
391 # Only include nodes that are both descendents and ancestors.
391 orderedout.append(n)
392 orderedout.append(n)
392 if (ancestors is not None) and (n in heads):
393 if (ancestors is not None) and (n in heads):
393 # We're trying to figure out which heads are reachable
394 # We're trying to figure out which heads are reachable
394 # from roots.
395 # from roots.
395 # Mark this head as having been reached
396 # Mark this head as having been reached
396 heads[n] = 1
397 heads[n] = 1
397 elif ancestors is None:
398 elif ancestors is None:
398 # Otherwise, we're trying to discover the heads.
399 # Otherwise, we're trying to discover the heads.
399 # Assume this is a head because if it isn't, the next step
400 # Assume this is a head because if it isn't, the next step
400 # will eventually remove it.
401 # will eventually remove it.
401 heads[n] = 1
402 heads[n] = 1
402 # But, obviously its parents aren't.
403 # But, obviously its parents aren't.
403 for p in self.parents(n):
404 for p in self.parents(n):
404 heads.pop(p, None)
405 heads.pop(p, None)
405 heads = [n for n in heads.iterkeys() if heads[n] != 0]
406 heads = [n for n in heads.iterkeys() if heads[n] != 0]
406 roots = roots.keys()
407 roots = roots.keys()
407 assert orderedout
408 assert orderedout
408 assert roots
409 assert roots
409 assert heads
410 assert heads
410 return (orderedout, roots, heads)
411 return (orderedout, roots, heads)
411
412
412 def heads(self, start=None):
413 def heads(self, start=None):
413 """return the list of all nodes that have no children
414 """return the list of all nodes that have no children
414
415
415 if start is specified, only heads that are descendants of
416 if start is specified, only heads that are descendants of
416 start will be returned
417 start will be returned
417
418
418 """
419 """
419 if start is None:
420 if start is None:
420 start = nullid
421 start = nullid
421 reachable = {start: 1}
422 reachable = {start: 1}
422 heads = {start: 1}
423 heads = {start: 1}
423 startrev = self.rev(start)
424 startrev = self.rev(start)
424
425
425 for r in xrange(startrev + 1, self.count()):
426 for r in xrange(startrev + 1, self.count()):
426 n = self.node(r)
427 n = self.node(r)
427 for pn in self.parents(n):
428 for pn in self.parents(n):
428 if pn in reachable:
429 if pn in reachable:
429 reachable[n] = 1
430 reachable[n] = 1
430 heads[n] = 1
431 heads[n] = 1
431 if pn in heads:
432 if pn in heads:
432 del heads[pn]
433 del heads[pn]
433 return heads.keys()
434 return heads.keys()
434
435
435 def children(self, node):
436 def children(self, node):
436 """find the children of a given node"""
437 """find the children of a given node"""
437 c = []
438 c = []
438 p = self.rev(node)
439 p = self.rev(node)
439 for r in range(p + 1, self.count()):
440 for r in range(p + 1, self.count()):
440 n = self.node(r)
441 n = self.node(r)
441 for pn in self.parents(n):
442 for pn in self.parents(n):
442 if pn == node:
443 if pn == node:
443 c.append(n)
444 c.append(n)
444 continue
445 continue
445 elif pn == nullid:
446 elif pn == nullid:
446 continue
447 continue
447 return c
448 return c
448
449
449 def lookup(self, id):
450 def lookup(self, id):
450 """locate a node based on revision number or subset of hex nodeid"""
451 """locate a node based on revision number or subset of hex nodeid"""
451 try:
452 try:
452 rev = int(id)
453 rev = int(id)
453 if str(rev) != id: raise ValueError
454 if str(rev) != id: raise ValueError
454 if rev < 0: rev = self.count() + rev
455 if rev < 0: rev = self.count() + rev
455 if rev < 0 or rev >= self.count(): raise ValueError
456 if rev < 0 or rev >= self.count(): raise ValueError
456 return self.node(rev)
457 return self.node(rev)
457 except (ValueError, OverflowError):
458 except (ValueError, OverflowError):
458 c = []
459 c = []
459 for n in self.nodemap:
460 for n in self.nodemap:
460 if hex(n).startswith(id):
461 if hex(n).startswith(id):
461 c.append(n)
462 c.append(n)
462 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
463 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
463 if len(c) < 1: raise RevlogError(_("No match found"))
464 if len(c) < 1: raise RevlogError(_("No match found"))
464 return c[0]
465 return c[0]
465
466
466 return None
467 return None
467
468
468 def diff(self, a, b):
469 def diff(self, a, b):
469 """return a delta between two revisions"""
470 """return a delta between two revisions"""
470 return mdiff.textdiff(a, b)
471 return mdiff.textdiff(a, b)
471
472
472 def patches(self, t, pl):
473 def patches(self, t, pl):
473 """apply a list of patches to a string"""
474 """apply a list of patches to a string"""
474 return mdiff.patches(t, pl)
475 return mdiff.patches(t, pl)
475
476
477 def chunk(self, rev):
478 start, length = self.start(rev), self.length(rev)
479 end = start + length
480
481 def loadcache():
482 cache_length = max(4096 * 1024, length) # 4Mo
483 df = self.opener(self.datafile)
484 df.seek(start)
485 self.chunkcache = (start, df.read(cache_length))
486
487 if not self.chunkcache:
488 loadcache()
489
490 cache_start = self.chunkcache[0]
491 cache_end = cache_start + len(self.chunkcache[1])
492 if start >= cache_start and end <= cache_end:
493 # it is cached
494 offset = start - cache_start
495 else:
496 loadcache()
497 offset = 0
498
499 #def checkchunk():
500 # df = self.opener(self.datafile)
501 # df.seek(start)
502 # return df.read(length)
503 #assert s == checkchunk()
504 return decompress(self.chunkcache[1][offset:offset + length])
505
476 def delta(self, node):
506 def delta(self, node):
477 """return or calculate a delta between a node and its predecessor"""
507 """return or calculate a delta between a node and its predecessor"""
478 r = self.rev(node)
508 r = self.rev(node)
479 b = self.base(r)
509 b = self.base(r)
480 if r == b:
510 if r == b:
481 return self.diff(self.revision(self.node(r - 1)),
511 return self.diff(self.revision(self.node(r - 1)),
482 self.revision(node))
512 self.revision(node))
483 else:
513 else:
484 f = self.opener(self.datafile)
514 return self.chunk(r)
485 f.seek(self.start(r))
486 data = f.read(self.length(r))
487 return decompress(data)
488
515
489 def revision(self, node):
516 def revision(self, node):
490 """return an uncompressed revision of a given"""
517 """return an uncompressed revision of a given"""
491 if node == nullid: return ""
518 if node == nullid: return ""
492 if self.cache and self.cache[0] == node: return self.cache[2]
519 if self.cache and self.cache[0] == node: return self.cache[2]
493
520
494 # look up what we need to read
521 # look up what we need to read
495 text = None
522 text = None
496 rev = self.rev(node)
523 rev = self.rev(node)
497 start, length, base, link, p1, p2, node = self.index[rev]
524 base = self.base(rev)
498 end = start + length
499 if base != rev: start = self.start(base)
500
525
501 # do we have useful data cached?
526 # do we have useful data cached?
502 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
527 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
503 base = self.cache[1]
528 base = self.cache[1]
504 start = self.start(base + 1)
505 text = self.cache[2]
529 text = self.cache[2]
506 last = 0
530 else:
507
531 text = self.chunk(base)
508 f = self.opener(self.datafile)
509 f.seek(start)
510 data = f.read(end - start)
511
512 if text is None:
513 last = self.length(base)
514 text = decompress(data[:last])
515
532
516 bins = []
533 bins = []
517 for r in xrange(base + 1, rev + 1):
534 for r in xrange(base + 1, rev + 1):
518 s = self.length(r)
535 bins.append(self.chunk(r))
519 bins.append(decompress(data[last:last + s]))
520 last = last + s
521
536
522 text = mdiff.patches(text, bins)
537 text = mdiff.patches(text, bins)
523
538
539 p1, p2 = self.parents(node)
524 if node != hash(text, p1, p2):
540 if node != hash(text, p1, p2):
525 raise RevlogError(_("integrity check failed on %s:%d")
541 raise RevlogError(_("integrity check failed on %s:%d")
526 % (self.datafile, rev))
542 % (self.datafile, rev))
527
543
528 self.cache = (node, rev, text)
544 self.cache = (node, rev, text)
529 return text
545 return text
530
546
531 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
547 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
532 """add a revision to the log
548 """add a revision to the log
533
549
534 text - the revision data to add
550 text - the revision data to add
535 transaction - the transaction object used for rollback
551 transaction - the transaction object used for rollback
536 link - the linkrev data to add
552 link - the linkrev data to add
537 p1, p2 - the parent nodeids of the revision
553 p1, p2 - the parent nodeids of the revision
538 d - an optional precomputed delta
554 d - an optional precomputed delta
539 """
555 """
540 if text is None: text = ""
556 if text is None: text = ""
541 if p1 is None: p1 = self.tip()
557 if p1 is None: p1 = self.tip()
542 if p2 is None: p2 = nullid
558 if p2 is None: p2 = nullid
543
559
544 node = hash(text, p1, p2)
560 node = hash(text, p1, p2)
545
561
546 if node in self.nodemap:
562 if node in self.nodemap:
547 return node
563 return node
548
564
549 n = self.count()
565 n = self.count()
550 t = n - 1
566 t = n - 1
551
567
552 if n:
568 if n:
553 base = self.base(t)
569 base = self.base(t)
554 start = self.start(base)
570 start = self.start(base)
555 end = self.end(t)
571 end = self.end(t)
556 if not d:
572 if not d:
557 prev = self.revision(self.tip())
573 prev = self.revision(self.tip())
558 d = self.diff(prev, str(text))
574 d = self.diff(prev, str(text))
559 data = compress(d)
575 data = compress(d)
560 l = len(data[1]) + len(data[0])
576 l = len(data[1]) + len(data[0])
561 dist = end - start + l
577 dist = end - start + l
562
578
563 # full versions are inserted when the needed deltas
579 # full versions are inserted when the needed deltas
564 # become comparable to the uncompressed text
580 # become comparable to the uncompressed text
565 if not n or dist > len(text) * 2:
581 if not n or dist > len(text) * 2:
566 data = compress(text)
582 data = compress(text)
567 l = len(data[1]) + len(data[0])
583 l = len(data[1]) + len(data[0])
568 base = n
584 base = n
569 else:
585 else:
570 base = self.base(t)
586 base = self.base(t)
571
587
572 offset = 0
588 offset = 0
573 if t >= 0:
589 if t >= 0:
574 offset = self.end(t)
590 offset = self.end(t)
575
591
576 e = (offset, l, base, link, p1, p2, node)
592 e = (offset, l, base, link, p1, p2, node)
577
593
578 self.index.append(e)
594 self.index.append(e)
579 self.nodemap[node] = n
595 self.nodemap[node] = n
580 entry = struct.pack(indexformat, *e)
596 entry = struct.pack(indexformat, *e)
581
597
582 transaction.add(self.datafile, e[0])
598 transaction.add(self.datafile, e[0])
583 f = self.opener(self.datafile, "a")
599 f = self.opener(self.datafile, "a")
584 if data[0]:
600 if data[0]:
585 f.write(data[0])
601 f.write(data[0])
586 f.write(data[1])
602 f.write(data[1])
587 transaction.add(self.indexfile, n * len(entry))
603 transaction.add(self.indexfile, n * len(entry))
588 self.opener(self.indexfile, "a").write(entry)
604 self.opener(self.indexfile, "a").write(entry)
589
605
590 self.cache = (node, n, text)
606 self.cache = (node, n, text)
591 return node
607 return node
592
608
593 def ancestor(self, a, b):
609 def ancestor(self, a, b):
594 """calculate the least common ancestor of nodes a and b"""
610 """calculate the least common ancestor of nodes a and b"""
595 # calculate the distance of every node from root
611 # calculate the distance of every node from root
596 dist = {nullid: 0}
612 dist = {nullid: 0}
597 for i in xrange(self.count()):
613 for i in xrange(self.count()):
598 n = self.node(i)
614 n = self.node(i)
599 p1, p2 = self.parents(n)
615 p1, p2 = self.parents(n)
600 dist[n] = max(dist[p1], dist[p2]) + 1
616 dist[n] = max(dist[p1], dist[p2]) + 1
601
617
602 # traverse ancestors in order of decreasing distance from root
618 # traverse ancestors in order of decreasing distance from root
603 def ancestors(node):
619 def ancestors(node):
604 # we store negative distances because heap returns smallest member
620 # we store negative distances because heap returns smallest member
605 h = [(-dist[node], node)]
621 h = [(-dist[node], node)]
606 seen = {}
622 seen = {}
607 earliest = self.count()
623 earliest = self.count()
608 while h:
624 while h:
609 d, n = heapq.heappop(h)
625 d, n = heapq.heappop(h)
610 if n not in seen:
626 if n not in seen:
611 seen[n] = 1
627 seen[n] = 1
612 r = self.rev(n)
628 r = self.rev(n)
613 yield (-d, n)
629 yield (-d, n)
614 for p in self.parents(n):
630 for p in self.parents(n):
615 heapq.heappush(h, (-dist[p], p))
631 heapq.heappush(h, (-dist[p], p))
616
632
617 def generations(node):
633 def generations(node):
618 sg, s = None, {}
634 sg, s = None, {}
619 for g,n in ancestors(node):
635 for g,n in ancestors(node):
620 if g != sg:
636 if g != sg:
621 if sg:
637 if sg:
622 yield sg, s
638 yield sg, s
623 sg, s = g, {n:1}
639 sg, s = g, {n:1}
624 else:
640 else:
625 s[n] = 1
641 s[n] = 1
626 yield sg, s
642 yield sg, s
627
643
628 x = generations(a)
644 x = generations(a)
629 y = generations(b)
645 y = generations(b)
630 gx = x.next()
646 gx = x.next()
631 gy = y.next()
647 gy = y.next()
632
648
633 # increment each ancestor list until it is closer to root than
649 # increment each ancestor list until it is closer to root than
634 # the other, or they match
650 # the other, or they match
635 while 1:
651 while 1:
636 #print "ancestor gen %s %s" % (gx[0], gy[0])
652 #print "ancestor gen %s %s" % (gx[0], gy[0])
637 if gx[0] == gy[0]:
653 if gx[0] == gy[0]:
638 # find the intersection
654 # find the intersection
639 i = [ n for n in gx[1] if n in gy[1] ]
655 i = [ n for n in gx[1] if n in gy[1] ]
640 if i:
656 if i:
641 return i[0]
657 return i[0]
642 else:
658 else:
643 #print "next"
659 #print "next"
644 gy = y.next()
660 gy = y.next()
645 gx = x.next()
661 gx = x.next()
646 elif gx[0] < gy[0]:
662 elif gx[0] < gy[0]:
647 #print "next y"
663 #print "next y"
648 gy = y.next()
664 gy = y.next()
649 else:
665 else:
650 #print "next x"
666 #print "next x"
651 gx = x.next()
667 gx = x.next()
652
668
653 def group(self, nodelist, lookup, infocollect = None):
669 def group(self, nodelist, lookup, infocollect=None):
654 """calculate a delta group
670 """calculate a delta group
655
671
656 Given a list of changeset revs, return a set of deltas and
672 Given a list of changeset revs, return a set of deltas and
657 metadata corresponding to nodes. the first delta is
673 metadata corresponding to nodes. the first delta is
658 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
674 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
659 have this parent as it has all history before these
675 have this parent as it has all history before these
660 changesets. parent is parent[0]
676 changesets. parent is parent[0]
661 """
677 """
662 revs = [self.rev(n) for n in nodelist]
678 revs = [self.rev(n) for n in nodelist]
663 needed = dict.fromkeys(revs, 1)
664
679
665 # if we don't have any revisions touched by these changesets, bail
680 # if we don't have any revisions touched by these changesets, bail
666 if not revs:
681 if not revs:
667 yield struct.pack(">l", 0)
682 yield struct.pack(">l", 0)
668 return
683 return
669
684
670 # add the parent of the first rev
685 # add the parent of the first rev
671 p = self.parents(self.node(revs[0]))[0]
686 p = self.parents(self.node(revs[0]))[0]
672 revs.insert(0, self.rev(p))
687 revs.insert(0, self.rev(p))
673
688
674 # for each delta that isn't contiguous in the log, we need to
675 # reconstruct the base, reconstruct the result, and then
676 # calculate the delta. We also need to do this where we've
677 # stored a full version and not a delta
678 for i in xrange(0, len(revs) - 1):
679 a, b = revs[i], revs[i + 1]
680 if a + 1 != b or self.base(b) == b:
681 for j in xrange(self.base(a), a + 1):
682 needed[j] = 1
683 for j in xrange(self.base(b), b + 1):
684 needed[j] = 1
685
686 # calculate spans to retrieve from datafile
687 needed = needed.keys()
688 needed.sort()
689 spans = []
690 oo = -1
691 ol = 0
692 for n in needed:
693 if n < 0: continue
694 o = self.start(n)
695 l = self.length(n)
696 if oo + ol == o: # can we merge with the previous?
697 nl = spans[-1][2]
698 nl.append((n, l))
699 ol += l
700 spans[-1] = (oo, ol, nl)
701 else:
702 oo = o
703 ol = l
704 spans.append((oo, ol, [(n, l)]))
705
706 # read spans in, divide up chunks
707 chunks = {}
708 for span in spans:
709 # we reopen the file for each span to make http happy for now
710 f = self.opener(self.datafile)
711 f.seek(span[0])
712 data = f.read(span[1])
713
714 # divide up the span
715 pos = 0
716 for r, l in span[2]:
717 chunks[r] = decompress(data[pos: pos + l])
718 pos += l
719
720 # helper to reconstruct intermediate versions
689 # helper to reconstruct intermediate versions
721 def construct(text, base, rev):
690 def construct(text, base, rev):
722 bins = [chunks[r] for r in xrange(base + 1, rev + 1)]
691 bins = [self.chunk(r) for r in xrange(base + 1, rev + 1)]
723 return mdiff.patches(text, bins)
692 return mdiff.patches(text, bins)
724
693
725 # build deltas
694 # build deltas
726 deltas = []
727 for d in xrange(0, len(revs) - 1):
695 for d in xrange(0, len(revs) - 1):
728 a, b = revs[d], revs[d + 1]
696 a, b = revs[d], revs[d + 1]
729 n = self.node(b)
697 na = self.node(a)
698 nb = self.node(b)
730
699
731 if infocollect is not None:
700 if infocollect is not None:
732 infocollect(n)
701 infocollect(nb)
733
702
734 # do we need to construct a new delta?
703 # do we need to construct a new delta?
735 if a + 1 != b or self.base(b) == b:
704 if a + 1 != b or self.base(b) == b:
736 if a >= 0:
705 ta = self.revision(na)
737 base = self.base(a)
706 tb = self.revision(nb)
738 ta = chunks[self.base(a)]
739 ta = construct(ta, base, a)
740 else:
741 ta = ""
742
743 base = self.base(b)
744 if a > base:
745 base = a
746 tb = ta
747 else:
748 tb = chunks[self.base(b)]
749 tb = construct(tb, base, b)
750 d = self.diff(ta, tb)
707 d = self.diff(ta, tb)
751 else:
708 else:
752 d = chunks[b]
709 d = self.chunk(b)
753
710
754 p = self.parents(n)
711 p = self.parents(nb)
755 meta = n + p[0] + p[1] + lookup(n)
712 meta = nb + p[0] + p[1] + lookup(nb)
756 l = struct.pack(">l", len(meta) + len(d) + 4)
713 l = struct.pack(">l", len(meta) + len(d) + 4)
757 yield l
714 yield l
758 yield meta
715 yield meta
759 yield d
716 yield d
760
717
761 yield struct.pack(">l", 0)
718 yield struct.pack(">l", 0)
762
719
763 def addgroup(self, revs, linkmapper, transaction, unique=0):
720 def addgroup(self, revs, linkmapper, transaction, unique=0):
764 """
721 """
765 add a delta group
722 add a delta group
766
723
767 given a set of deltas, add them to the revision log. the
724 given a set of deltas, add them to the revision log. the
768 first delta is against its parent, which should be in our
725 first delta is against its parent, which should be in our
769 log, the rest are against the previous delta.
726 log, the rest are against the previous delta.
770 """
727 """
771
728
772 #track the base of the current delta log
729 #track the base of the current delta log
773 r = self.count()
730 r = self.count()
774 t = r - 1
731 t = r - 1
775 node = nullid
732 node = nullid
776
733
777 base = prev = -1
734 base = prev = -1
778 start = end = measure = 0
735 start = end = measure = 0
779 if r:
736 if r:
780 start = self.start(self.base(t))
737 start = self.start(self.base(t))
781 end = self.end(t)
738 end = self.end(t)
782 measure = self.length(self.base(t))
739 measure = self.length(self.base(t))
783 base = self.base(t)
740 base = self.base(t)
784 prev = self.tip()
741 prev = self.tip()
785
742
786 transaction.add(self.datafile, end)
743 transaction.add(self.datafile, end)
787 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
744 transaction.add(self.indexfile, r * struct.calcsize(indexformat))
788 dfh = self.opener(self.datafile, "a")
745 dfh = self.opener(self.datafile, "a")
789 ifh = self.opener(self.indexfile, "a")
746 ifh = self.opener(self.indexfile, "a")
790
747
791 # loop through our set of deltas
748 # loop through our set of deltas
792 chain = None
749 chain = None
793 for chunk in revs:
750 for chunk in revs:
794 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
751 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
795 link = linkmapper(cs)
752 link = linkmapper(cs)
796 if node in self.nodemap:
753 if node in self.nodemap:
797 # this can happen if two branches make the same change
754 # this can happen if two branches make the same change
798 # if unique:
755 # if unique:
799 # raise RevlogError(_("already have %s") % hex(node[:4]))
756 # raise RevlogError(_("already have %s") % hex(node[:4]))
800 chain = node
757 chain = node
801 continue
758 continue
802 delta = chunk[80:]
759 delta = chunk[80:]
803
760
804 for p in (p1, p2):
761 for p in (p1, p2):
805 if not p in self.nodemap:
762 if not p in self.nodemap:
806 raise RevlogError(_("unknown parent %s") % short(p1))
763 raise RevlogError(_("unknown parent %s") % short(p1))
807
764
808 if not chain:
765 if not chain:
809 # retrieve the parent revision of the delta chain
766 # retrieve the parent revision of the delta chain
810 chain = p1
767 chain = p1
811 if not chain in self.nodemap:
768 if not chain in self.nodemap:
812 raise RevlogError(_("unknown base %s") % short(chain[:4]))
769 raise RevlogError(_("unknown base %s") % short(chain[:4]))
813
770
814 # full versions are inserted when the needed deltas become
771 # full versions are inserted when the needed deltas become
815 # comparable to the uncompressed text or when the previous
772 # comparable to the uncompressed text or when the previous
816 # version is not the one we have a delta against. We use
773 # version is not the one we have a delta against. We use
817 # the size of the previous full rev as a proxy for the
774 # the size of the previous full rev as a proxy for the
818 # current size.
775 # current size.
819
776
820 if chain == prev:
777 if chain == prev:
821 tempd = compress(delta)
778 tempd = compress(delta)
822 cdelta = tempd[0] + tempd[1]
779 cdelta = tempd[0] + tempd[1]
823
780
824 if chain != prev or (end - start + len(cdelta)) > measure * 2:
781 if chain != prev or (end - start + len(cdelta)) > measure * 2:
825 # flush our writes here so we can read it in revision
782 # flush our writes here so we can read it in revision
826 dfh.flush()
783 dfh.flush()
827 ifh.flush()
784 ifh.flush()
828 text = self.revision(chain)
785 text = self.revision(chain)
829 text = self.patches(text, [delta])
786 text = self.patches(text, [delta])
830 chk = self.addrevision(text, transaction, link, p1, p2)
787 chk = self.addrevision(text, transaction, link, p1, p2)
831 if chk != node:
788 if chk != node:
832 raise RevlogError(_("consistency error adding group"))
789 raise RevlogError(_("consistency error adding group"))
833 measure = len(text)
790 measure = len(text)
834 else:
791 else:
835 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
792 e = (end, len(cdelta), self.base(t), link, p1, p2, node)
836 self.index.append(e)
793 self.index.append(e)
837 self.nodemap[node] = r
794 self.nodemap[node] = r
838 dfh.write(cdelta)
795 dfh.write(cdelta)
839 ifh.write(struct.pack(indexformat, *e))
796 ifh.write(struct.pack(indexformat, *e))
840
797
841 t, r, chain, prev = r, r + 1, node, node
798 t, r, chain, prev = r, r + 1, node, node
842 start = self.start(self.base(t))
799 start = self.start(self.base(t))
843 end = self.end(t)
800 end = self.end(t)
844
801
845 dfh.close()
802 dfh.close()
846 ifh.close()
803 ifh.close()
847 return node
804 return node
848
805
849 def strip(self, rev, minlink):
806 def strip(self, rev, minlink):
850 if self.count() == 0 or rev >= self.count():
807 if self.count() == 0 or rev >= self.count():
851 return
808 return
852
809
853 # When stripping away a revision, we need to make sure it
810 # When stripping away a revision, we need to make sure it
854 # does not actually belong to an older changeset.
811 # does not actually belong to an older changeset.
855 # The minlink parameter defines the oldest revision
812 # The minlink parameter defines the oldest revision
856 # we're allowed to strip away.
813 # we're allowed to strip away.
857 while minlink > self.index[rev][3]:
814 while minlink > self.index[rev][3]:
858 rev += 1
815 rev += 1
859 if rev >= self.count():
816 if rev >= self.count():
860 return
817 return
861
818
862 # first truncate the files on disk
819 # first truncate the files on disk
863 end = self.start(rev)
820 end = self.start(rev)
864 self.opener(self.datafile, "a").truncate(end)
821 self.opener(self.datafile, "a").truncate(end)
865 end = rev * struct.calcsize(indexformat)
822 end = rev * struct.calcsize(indexformat)
866 self.opener(self.indexfile, "a").truncate(end)
823 self.opener(self.indexfile, "a").truncate(end)
867
824
868 # then reset internal state in memory to forget those revisions
825 # then reset internal state in memory to forget those revisions
869 self.cache = None
826 self.cache = None
870 for p in self.index[rev:]:
827 for p in self.index[rev:]:
871 del self.nodemap[p[6]]
828 del self.nodemap[p[6]]
872 del self.index[rev:]
829 del self.index[rev:]
873
830
874 # truncating the lazyindex also truncates the lazymap.
831 # truncating the lazyindex also truncates the lazymap.
875 if isinstance(self.index, lazyindex):
832 if isinstance(self.index, lazyindex):
876 self.index.trunc(end)
833 self.index.trunc(end)
877
834
878
835
879 def checksize(self):
836 def checksize(self):
880 expected = 0
837 expected = 0
881 if self.count():
838 if self.count():
882 expected = self.end(self.count() - 1)
839 expected = self.end(self.count() - 1)
883 try:
840 try:
884 f = self.opener(self.datafile)
841 f = self.opener(self.datafile)
885 f.seek(0, 2)
842 f.seek(0, 2)
886 actual = f.tell()
843 actual = f.tell()
887 return expected - actual
844 return expected - actual
888 except IOError, inst:
845 except IOError, inst:
889 if inst.errno == errno.ENOENT:
846 if inst.errno == errno.ENOENT:
890 return 0
847 return 0
891 raise
848 raise
892
849
893
850
@@ -1,43 +1,45 b''
1 # statichttprepo.py - simple http repository class for mercurial
1 # statichttprepo.py - simple http repository class for mercurial
2 #
2 #
3 # This provides read-only repo access to repositories exported via static http
3 # This provides read-only repo access to repositories exported via static http
4 #
4 #
5 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 # Copyright 2005 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms
7 # This software may be used and distributed according to the terms
8 # of the GNU General Public License, incorporated herein by reference.
8 # of the GNU General Public License, incorporated herein by reference.
9
9
10 from demandload import demandload
10 from demandload import demandload
11 demandload(globals(), "changelog filelog httprangereader")
11 demandload(globals(), "changelog filelog httprangereader")
12 demandload(globals(), "localrepo manifest os urllib urllib2")
12 demandload(globals(), "localrepo manifest os urllib urllib2")
13
13
14 class rangereader(httprangereader.httprangereader):
14 class rangereader(httprangereader.httprangereader):
15 def read(self, size=None):
15 def read(self, size=None):
16 try:
16 try:
17 return httprangereader.httprangereader.read(self, size)
17 return httprangereader.httprangereader.read(self, size)
18 except urllib2.URLError, inst:
18 except urllib2.URLError, inst:
19 raise IOError(None, str(inst))
19 raise IOError(None, str(inst))
20
20
21 def opener(base):
21 def opener(base):
22 """return a function that opens files over http"""
22 """return a function that opens files over http"""
23 p = base
23 p = base
24 def o(path, mode="r"):
24 def o(path, mode="r"):
25 f = os.path.join(p, urllib.quote(path))
25 f = os.path.join(p, urllib.quote(path))
26 return rangereader(f)
26 return rangereader(f)
27 return o
27 return o
28
28
29 class statichttprepository(localrepo.localrepository):
29 class statichttprepository(localrepo.localrepository):
30 def __init__(self, ui, path):
30 def __init__(self, ui, path):
31 self.path = (path + "/.hg")
31 self.path = (path + "/.hg")
32 self.ui = ui
32 self.ui = ui
33 self.opener = opener(self.path)
33 self.opener = opener(self.path)
34 self.manifest = manifest.manifest(self.opener)
34 self.manifest = manifest.manifest(self.opener)
35 self.changelog = changelog.changelog(self.opener)
35 self.changelog = changelog.changelog(self.opener)
36 self.tagscache = None
36 self.tagscache = None
37 self.nodetagscache = None
37 self.nodetagscache = None
38 self.encodepats = None
39 self.decodepats = None
38
40
39 def dev(self):
41 def dev(self):
40 return -1
42 return -1
41
43
42 def local(self):
44 def local(self):
43 return False
45 return False
@@ -1,17 +1,24 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init
3 hg init
4 echo a > a
4 echo a > a
5 hg add a
5 hg add a
6 hg commit -m "test" -d "0 0"
6 hg commit -m "test" -d "0 0"
7 hg history
7 hg history
8 hg tag -d "0 0" "bleah"
8 hg tag -d "0 0" "bleah"
9 hg history
9 hg history
10
10
11 echo foo >> .hgtags
11 echo foo >> .hgtags
12 hg tag -d "0 0" "bleah2" || echo "failed"
12 hg tag -d "0 0" "bleah2" || echo "failed"
13
13
14 hg revert .hgtags
15 hg tag -d "0 0" -r 0 "bleah0"
16 hg tag -l -d "0 0" "bleah1" 1
17
18 cat .hgtags
19 cat .hg/localtags
20
14 hg tag -l 'xx
21 hg tag -l 'xx
15 newline'
22 newline'
16 hg tag -l 'xx:xx'
23 hg tag -l 'xx:xx'
17 true
24 true
@@ -1,22 +1,25 b''
1 changeset: 0:acb14030fe0a
1 changeset: 0:acb14030fe0a
2 tag: tip
2 tag: tip
3 user: test
3 user: test
4 date: Thu Jan 1 00:00:00 1970 +0000
4 date: Thu Jan 1 00:00:00 1970 +0000
5 summary: test
5 summary: test
6
6
7 changeset: 1:863197ef0378
7 changeset: 1:863197ef0378
8 tag: tip
8 tag: tip
9 user: test
9 user: test
10 date: Thu Jan 1 00:00:00 1970 +0000
10 date: Thu Jan 1 00:00:00 1970 +0000
11 summary: Added tag bleah for changeset acb14030fe0a21b60322c440ad2d20cf7685a376
11 summary: Added tag bleah for changeset acb14030fe0a21b60322c440ad2d20cf7685a376
12
12
13 changeset: 0:acb14030fe0a
13 changeset: 0:acb14030fe0a
14 tag: bleah
14 tag: bleah
15 user: test
15 user: test
16 date: Thu Jan 1 00:00:00 1970 +0000
16 date: Thu Jan 1 00:00:00 1970 +0000
17 summary: test
17 summary: test
18
18
19 abort: working copy of .hgtags is changed (please commit .hgtags manually)
19 abort: working copy of .hgtags is changed (please commit .hgtags manually)
20 failed
20 failed
21 acb14030fe0a21b60322c440ad2d20cf7685a376 bleah
22 acb14030fe0a21b60322c440ad2d20cf7685a376 bleah0
23 863197ef03781c4fc00276d83eb66c4cb9cd91df bleah1
21 abort: '\n' cannot be used in a tag name
24 abort: '\n' cannot be used in a tag name
22 abort: ':' cannot be used in a tag name
25 abort: ':' cannot be used in a tag name
General Comments 0
You need to be logged in to leave comments. Login now