##// END OF EJS Templates
make incoming work via ssh (issue139); move chunk code into separate module....
Thomas Arendsen Hein -
r1981:736b6c96 default
parent child Browse files
Show More
@@ -0,0 +1,43 b''
1 """
2 changegroup.py - Mercurial changegroup manipulation functions
3
4 Copyright 2006 Matt Mackall <mpm@selenic.com>
5
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
8 """
import struct
from demandload import *
from i18n import gettext as _
demandload(globals(), "util")
12
def getchunk(source):
    """Read one length-prefixed chunk from a changegroup stream.

    A chunk is a big-endian 4-byte signed length (which counts the 4
    bytes of the length field itself) followed by the payload.

    Returns the payload, or "" at end of stream or when the
    terminating empty chunk (length <= 4) is reached.

    Raises util.Abort if the stream ends in the middle of a chunk.
    """
    d = source.read(4)
    if not d:
        # end of stream: no more chunks
        return ""
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        # a header-only chunk terminates the changegroup
        return ""
    d = source.read(l - 4)
    if len(d) < l - 4:
        # truncated stream; _ requires the i18n import at module top
        raise util.Abort(_("premature EOF reading chunk"
                           " (got %d bytes, expected %d)")
                         % (len(d), l - 4))
    return d
27
def chunkiter(source):
    """Yield each chunk in source until the terminating empty chunk."""
    chunk = getchunk(source)
    while chunk:
        yield chunk
        chunk = getchunk(source)
35
def genchunk(data):
    """Build a changegroup chunk: a >l length header (counting its own
    4 bytes) followed by the data itself."""
    return "%s%s" % (struct.pack(">l", len(data) + 4), data)
40
def closechunk():
    """Return the zero-length header that terminates a changegroup."""
    return struct.pack(">l", 0)
43
@@ -1,219 +1,201 b''
1 """
1 """
2 bundlerepo.py - repository class for viewing uncompressed bundles
2 bundlerepo.py - repository class for viewing uncompressed bundles
3
3
4 This provides a read-only repository interface to bundles as if
4 This provides a read-only repository interface to bundles as if
5 they were part of the actual repository.
5 they were part of the actual repository.
6
6
7 Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
7 Copyright 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "util os struct")
16 demandload(globals(), "changegroup util os struct")
17
17
18 import localrepo, changelog, manifest, filelog, revlog
18 import localrepo, changelog, manifest, filelog, revlog
19
19
20 def getchunk(source):
21 """get a chunk from a group"""
22 d = source.read(4)
23 if not d:
24 return ""
25 l = struct.unpack(">l", d)[0]
26 if l <= 4:
27 return ""
28 d = source.read(l - 4)
29 if len(d) < l - 4:
30 raise util.Abort(_("premature EOF reading chunk"
31 " (got %d bytes, expected %d)")
32 % (len(d), l - 4))
33 return d
34
35 class bundlerevlog(revlog.revlog):
20 class bundlerevlog(revlog.revlog):
36 def __init__(self, opener, indexfile, datafile, bundlefile,
21 def __init__(self, opener, indexfile, datafile, bundlefile,
37 linkmapper=None):
22 linkmapper=None):
38 # How it works:
23 # How it works:
39 # to retrieve a revision, we need to know the offset of
24 # to retrieve a revision, we need to know the offset of
40 # the revision in the bundlefile (an opened file).
25 # the revision in the bundlefile (an opened file).
41 #
26 #
42 # We store this offset in the index (start), to differentiate a
27 # We store this offset in the index (start), to differentiate a
43 # rev in the bundle and from a rev in the revlog, we check
28 # rev in the bundle and from a rev in the revlog, we check
44 # len(index[r]). If the tuple is bigger than 7, it is a bundle
29 # len(index[r]). If the tuple is bigger than 7, it is a bundle
45 # (it is bigger since we store the node to which the delta is)
30 # (it is bigger since we store the node to which the delta is)
46 #
31 #
47 revlog.revlog.__init__(self, opener, indexfile, datafile)
32 revlog.revlog.__init__(self, opener, indexfile, datafile)
48 self.bundlefile = bundlefile
33 self.bundlefile = bundlefile
49 def genchunk():
34 def chunkpositer():
50 while 1:
35 for chunk in changegroup.chunkiter(bundlefile):
51 pos = bundlefile.tell()
36 pos = bundlefile.tell()
52 chunk = getchunk(bundlefile)
37 yield chunk, pos - len(chunk)
53 if not chunk:
54 break
55 yield chunk, pos + 4 # XXX struct.calcsize(">l") == 4
56 n = self.count()
38 n = self.count()
57 prev = None
39 prev = None
58 for chunk, start in genchunk():
40 for chunk, start in chunkpositer():
59 size = len(chunk)
41 size = len(chunk)
60 if size < 80:
42 if size < 80:
61 raise util.Abort("invalid changegroup")
43 raise util.Abort("invalid changegroup")
62 start += 80
44 start += 80
63 size -= 80
45 size -= 80
64 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
46 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
65 if node in self.nodemap:
47 if node in self.nodemap:
66 prev = node
48 prev = node
67 continue
49 continue
68 for p in (p1, p2):
50 for p in (p1, p2):
69 if not p in self.nodemap:
51 if not p in self.nodemap:
70 raise RevlogError(_("unknown parent %s") % short(p1))
52 raise RevlogError(_("unknown parent %s") % short(p1))
71 if linkmapper is None:
53 if linkmapper is None:
72 link = n
54 link = n
73 else:
55 else:
74 link = linkmapper(cs)
56 link = linkmapper(cs)
75
57
76 if not prev:
58 if not prev:
77 prev = p1
59 prev = p1
78 # start, size, base is not used, link, p1, p2, delta ref
60 # start, size, base is not used, link, p1, p2, delta ref
79 e = (start, size, None, link, p1, p2, node, prev)
61 e = (start, size, None, link, p1, p2, node, prev)
80 self.index.append(e)
62 self.index.append(e)
81 self.nodemap[node] = n
63 self.nodemap[node] = n
82 prev = node
64 prev = node
83 n += 1
65 n += 1
84
66
85 def bundle(self, rev):
67 def bundle(self, rev):
86 """is rev from the bundle"""
68 """is rev from the bundle"""
87 if rev < 0:
69 if rev < 0:
88 return False
70 return False
89 return len(self.index[rev]) > 7
71 return len(self.index[rev]) > 7
90 def bundlebase(self, rev): return self.index[rev][7]
72 def bundlebase(self, rev): return self.index[rev][7]
91 def chunk(self, rev):
73 def chunk(self, rev):
92 # Warning: in case of bundle, the diff is against bundlebase,
74 # Warning: in case of bundle, the diff is against bundlebase,
93 # not against rev - 1
75 # not against rev - 1
94 # XXX: could use some caching
76 # XXX: could use some caching
95 if not self.bundle(rev):
77 if not self.bundle(rev):
96 return revlog.revlog.chunk(self, rev)
78 return revlog.revlog.chunk(self, rev)
97 self.bundlefile.seek(self.start(rev))
79 self.bundlefile.seek(self.start(rev))
98 return self.bundlefile.read(self.length(rev))
80 return self.bundlefile.read(self.length(rev))
99
81
100 def revdiff(self, rev1, rev2):
82 def revdiff(self, rev1, rev2):
101 """return or calculate a delta between two revisions"""
83 """return or calculate a delta between two revisions"""
102 if self.bundle(rev1) and self.bundle(rev2):
84 if self.bundle(rev1) and self.bundle(rev2):
103 # hot path for bundle
85 # hot path for bundle
104 revb = self.rev(self.bundlebase(rev2))
86 revb = self.rev(self.bundlebase(rev2))
105 if revb == rev1:
87 if revb == rev1:
106 return self.chunk(rev2)
88 return self.chunk(rev2)
107 elif not self.bundle(rev1) and not self.bundle(rev2):
89 elif not self.bundle(rev1) and not self.bundle(rev2):
108 return revlog.revlog.chunk(self, rev1, rev2)
90 return revlog.revlog.chunk(self, rev1, rev2)
109
91
110 return self.diff(self.revision(self.node(rev1)),
92 return self.diff(self.revision(self.node(rev1)),
111 self.revision(self.node(rev2)))
93 self.revision(self.node(rev2)))
112
94
113 def revision(self, node):
95 def revision(self, node):
114 """return an uncompressed revision of a given"""
96 """return an uncompressed revision of a given"""
115 if node == nullid: return ""
97 if node == nullid: return ""
116
98
117 text = None
99 text = None
118 chain = []
100 chain = []
119 iter_node = node
101 iter_node = node
120 rev = self.rev(iter_node)
102 rev = self.rev(iter_node)
121 # reconstruct the revision if it is from a changegroup
103 # reconstruct the revision if it is from a changegroup
122 while self.bundle(rev):
104 while self.bundle(rev):
123 if self.cache and self.cache[0] == iter_node:
105 if self.cache and self.cache[0] == iter_node:
124 text = self.cache[2]
106 text = self.cache[2]
125 break
107 break
126 chain.append(rev)
108 chain.append(rev)
127 iter_node = self.bundlebase(rev)
109 iter_node = self.bundlebase(rev)
128 rev = self.rev(iter_node)
110 rev = self.rev(iter_node)
129 if text is None:
111 if text is None:
130 text = revlog.revlog.revision(self, iter_node)
112 text = revlog.revlog.revision(self, iter_node)
131
113
132 while chain:
114 while chain:
133 delta = self.chunk(chain.pop())
115 delta = self.chunk(chain.pop())
134 text = self.patches(text, [delta])
116 text = self.patches(text, [delta])
135
117
136 p1, p2 = self.parents(node)
118 p1, p2 = self.parents(node)
137 if node != revlog.hash(text, p1, p2):
119 if node != revlog.hash(text, p1, p2):
138 raise RevlogError(_("integrity check failed on %s:%d")
120 raise RevlogError(_("integrity check failed on %s:%d")
139 % (self.datafile, self.rev(node)))
121 % (self.datafile, self.rev(node)))
140
122
141 self.cache = (node, rev, text)
123 self.cache = (node, rev, text)
142 return text
124 return text
143
125
144 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
126 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
145 raise NotImplementedError
127 raise NotImplementedError
146 def addgroup(self, revs, linkmapper, transaction, unique=0):
128 def addgroup(self, revs, linkmapper, transaction, unique=0):
147 raise NotImplementedError
129 raise NotImplementedError
148 def strip(self, rev, minlink):
130 def strip(self, rev, minlink):
149 raise NotImplementedError
131 raise NotImplementedError
150 def checksize(self):
132 def checksize(self):
151 raise NotImplementedError
133 raise NotImplementedError
152
134
153 class bundlechangelog(bundlerevlog, changelog.changelog):
135 class bundlechangelog(bundlerevlog, changelog.changelog):
154 def __init__(self, opener, bundlefile):
136 def __init__(self, opener, bundlefile):
155 changelog.changelog.__init__(self, opener)
137 changelog.changelog.__init__(self, opener)
156 bundlerevlog.__init__(self, opener, "00changelog.i", "00changelog.d",
138 bundlerevlog.__init__(self, opener, "00changelog.i", "00changelog.d",
157 bundlefile)
139 bundlefile)
158
140
159 class bundlemanifest(bundlerevlog, manifest.manifest):
141 class bundlemanifest(bundlerevlog, manifest.manifest):
160 def __init__(self, opener, bundlefile, linkmapper):
142 def __init__(self, opener, bundlefile, linkmapper):
161 manifest.manifest.__init__(self, opener)
143 manifest.manifest.__init__(self, opener)
162 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
144 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
163 bundlefile, linkmapper)
145 bundlefile, linkmapper)
164
146
165 class bundlefilelog(bundlerevlog, filelog.filelog):
147 class bundlefilelog(bundlerevlog, filelog.filelog):
166 def __init__(self, opener, path, bundlefile, linkmapper):
148 def __init__(self, opener, path, bundlefile, linkmapper):
167 filelog.filelog.__init__(self, opener, path)
149 filelog.filelog.__init__(self, opener, path)
168 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
150 bundlerevlog.__init__(self, opener, self.indexfile, self.datafile,
169 bundlefile, linkmapper)
151 bundlefile, linkmapper)
170
152
171 class bundlerepository(localrepo.localrepository):
153 class bundlerepository(localrepo.localrepository):
172 def __init__(self, ui, path, bundlename):
154 def __init__(self, ui, path, bundlename):
173 localrepo.localrepository.__init__(self, ui, path)
155 localrepo.localrepository.__init__(self, ui, path)
174 f = open(bundlename, "rb")
156 f = open(bundlename, "rb")
175 s = os.fstat(f.fileno())
157 s = os.fstat(f.fileno())
176 self.bundlefile = f
158 self.bundlefile = f
177 header = self.bundlefile.read(6)
159 header = self.bundlefile.read(6)
178 if not header.startswith("HG"):
160 if not header.startswith("HG"):
179 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
161 raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
180 elif not header.startswith("HG10"):
162 elif not header.startswith("HG10"):
181 raise util.Abort(_("%s: unknown bundle version") % bundlename)
163 raise util.Abort(_("%s: unknown bundle version") % bundlename)
182 elif header == "HG10BZ":
164 elif header == "HG10BZ":
183 raise util.Abort(_("%s: compressed bundle not supported")
165 raise util.Abort(_("%s: compressed bundle not supported")
184 % bundlename)
166 % bundlename)
185 elif header == "HG10UN":
167 elif header == "HG10UN":
186 # uncompressed bundle supported
168 # uncompressed bundle supported
187 pass
169 pass
188 else:
170 else:
189 raise util.Abort(_("%s: unknown bundle compression type")
171 raise util.Abort(_("%s: unknown bundle compression type")
190 % bundlename)
172 % bundlename)
191 self.changelog = bundlechangelog(self.opener, self.bundlefile)
173 self.changelog = bundlechangelog(self.opener, self.bundlefile)
192 self.manifest = bundlemanifest(self.opener, self.bundlefile,
174 self.manifest = bundlemanifest(self.opener, self.bundlefile,
193 self.changelog.rev)
175 self.changelog.rev)
194 # dict with the mapping 'filename' -> position in the bundle
176 # dict with the mapping 'filename' -> position in the bundle
195 self.bundlefilespos = {}
177 self.bundlefilespos = {}
196 while 1:
178 while 1:
197 f = getchunk(self.bundlefile)
179 f = changegroup.getchunk(self.bundlefile)
198 if not f:
180 if not f:
199 break
181 break
200 self.bundlefilespos[f] = self.bundlefile.tell()
182 self.bundlefilespos[f] = self.bundlefile.tell()
201 while getchunk(self.bundlefile):
183 for c in changegroup.chunkiter(self.bundlefile):
202 pass
184 pass
203
185
204 def dev(self):
186 def dev(self):
205 return -1
187 return -1
206
188
207 def file(self, f):
189 def file(self, f):
208 if f[0] == '/':
190 if f[0] == '/':
209 f = f[1:]
191 f = f[1:]
210 if f in self.bundlefilespos:
192 if f in self.bundlefilespos:
211 self.bundlefile.seek(self.bundlefilespos[f])
193 self.bundlefile.seek(self.bundlefilespos[f])
212 return bundlefilelog(self.opener, f, self.bundlefile,
194 return bundlefilelog(self.opener, f, self.bundlefile,
213 self.changelog.rev)
195 self.changelog.rev)
214 else:
196 else:
215 return filelog.filelog(self.opener, f)
197 return filelog.filelog(self.opener, f)
216
198
217 def close(self):
199 def close(self):
218 """Close assigned bundle file immediately."""
200 """Close assigned bundle file immediately."""
219 self.bundlefile.close()
201 self.bundlefile.close()
@@ -1,3298 +1,3305 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "changegroup")
15
16
16 class UnknownCommand(Exception):
17 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
18 """Exception raised if command is not in the command table."""
18 class AmbiguousCommand(Exception):
19 class AmbiguousCommand(Exception):
19 """Exception raised if command shortcut matches more than one command."""
20 """Exception raised if command shortcut matches more than one command."""
20
21
21 def filterfiles(filters, files):
22 def filterfiles(filters, files):
22 l = [x for x in files if x in filters]
23 l = [x for x in files if x in filters]
23
24
24 for t in filters:
25 for t in filters:
25 if t and t[-1] != "/":
26 if t and t[-1] != "/":
26 t += "/"
27 t += "/"
27 l += [x for x in files if x.startswith(t)]
28 l += [x for x in files if x.startswith(t)]
28 return l
29 return l
29
30
30 def relpath(repo, args):
31 def relpath(repo, args):
31 cwd = repo.getcwd()
32 cwd = repo.getcwd()
32 if cwd:
33 if cwd:
33 return [util.normpath(os.path.join(cwd, x)) for x in args]
34 return [util.normpath(os.path.join(cwd, x)) for x in args]
34 return args
35 return args
35
36
36 def matchpats(repo, pats=[], opts={}, head=''):
37 def matchpats(repo, pats=[], opts={}, head=''):
37 cwd = repo.getcwd()
38 cwd = repo.getcwd()
38 if not pats and cwd:
39 if not pats and cwd:
39 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
40 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
40 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
41 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
41 cwd = ''
42 cwd = ''
42 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
43 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
43 opts.get('exclude'), head)
44 opts.get('exclude'), head)
44
45
45 def makewalk(repo, pats, opts, node=None, head=''):
46 def makewalk(repo, pats, opts, node=None, head=''):
46 files, matchfn, anypats = matchpats(repo, pats, opts, head)
47 files, matchfn, anypats = matchpats(repo, pats, opts, head)
47 exact = dict(zip(files, files))
48 exact = dict(zip(files, files))
48 def walk():
49 def walk():
49 for src, fn in repo.walk(node=node, files=files, match=matchfn):
50 for src, fn in repo.walk(node=node, files=files, match=matchfn):
50 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
51 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
51 return files, matchfn, walk()
52 return files, matchfn, walk()
52
53
53 def walk(repo, pats, opts, node=None, head=''):
54 def walk(repo, pats, opts, node=None, head=''):
54 files, matchfn, results = makewalk(repo, pats, opts, node, head)
55 files, matchfn, results = makewalk(repo, pats, opts, node, head)
55 for r in results:
56 for r in results:
56 yield r
57 yield r
57
58
58 def walkchangerevs(ui, repo, pats, opts):
59 def walkchangerevs(ui, repo, pats, opts):
59 '''Iterate over files and the revs they changed in.
60 '''Iterate over files and the revs they changed in.
60
61
61 Callers most commonly need to iterate backwards over the history
62 Callers most commonly need to iterate backwards over the history
62 it is interested in. Doing so has awful (quadratic-looking)
63 it is interested in. Doing so has awful (quadratic-looking)
63 performance, so we use iterators in a "windowed" way.
64 performance, so we use iterators in a "windowed" way.
64
65
65 We walk a window of revisions in the desired order. Within the
66 We walk a window of revisions in the desired order. Within the
66 window, we first walk forwards to gather data, then in the desired
67 window, we first walk forwards to gather data, then in the desired
67 order (usually backwards) to display it.
68 order (usually backwards) to display it.
68
69
69 This function returns an (iterator, getchange, matchfn) tuple. The
70 This function returns an (iterator, getchange, matchfn) tuple. The
70 getchange function returns the changelog entry for a numeric
71 getchange function returns the changelog entry for a numeric
71 revision. The iterator yields 3-tuples. They will be of one of
72 revision. The iterator yields 3-tuples. They will be of one of
72 the following forms:
73 the following forms:
73
74
74 "window", incrementing, lastrev: stepping through a window,
75 "window", incrementing, lastrev: stepping through a window,
75 positive if walking forwards through revs, last rev in the
76 positive if walking forwards through revs, last rev in the
76 sequence iterated over - use to reset state for the current window
77 sequence iterated over - use to reset state for the current window
77
78
78 "add", rev, fns: out-of-order traversal of the given file names
79 "add", rev, fns: out-of-order traversal of the given file names
79 fns, which changed during revision rev - use to gather data for
80 fns, which changed during revision rev - use to gather data for
80 possible display
81 possible display
81
82
82 "iter", rev, None: in-order traversal of the revs earlier iterated
83 "iter", rev, None: in-order traversal of the revs earlier iterated
83 over with "add" - use to display data'''
84 over with "add" - use to display data'''
84
85
85 def increasing_windows(start, end, windowsize=8, sizelimit=512):
86 def increasing_windows(start, end, windowsize=8, sizelimit=512):
86 if start < end:
87 if start < end:
87 while start < end:
88 while start < end:
88 yield start, min(windowsize, end-start)
89 yield start, min(windowsize, end-start)
89 start += windowsize
90 start += windowsize
90 if windowsize < sizelimit:
91 if windowsize < sizelimit:
91 windowsize *= 2
92 windowsize *= 2
92 else:
93 else:
93 while start > end:
94 while start > end:
94 yield start, min(windowsize, start-end-1)
95 yield start, min(windowsize, start-end-1)
95 start -= windowsize
96 start -= windowsize
96 if windowsize < sizelimit:
97 if windowsize < sizelimit:
97 windowsize *= 2
98 windowsize *= 2
98
99
99
100
100 files, matchfn, anypats = matchpats(repo, pats, opts)
101 files, matchfn, anypats = matchpats(repo, pats, opts)
101
102
102 if repo.changelog.count() == 0:
103 if repo.changelog.count() == 0:
103 return [], False, matchfn
104 return [], False, matchfn
104
105
105 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
106 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
106 wanted = {}
107 wanted = {}
107 slowpath = anypats
108 slowpath = anypats
108 fncache = {}
109 fncache = {}
109
110
110 chcache = {}
111 chcache = {}
111 def getchange(rev):
112 def getchange(rev):
112 ch = chcache.get(rev)
113 ch = chcache.get(rev)
113 if ch is None:
114 if ch is None:
114 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
115 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
115 return ch
116 return ch
116
117
117 if not slowpath and not files:
118 if not slowpath and not files:
118 # No files, no patterns. Display all revs.
119 # No files, no patterns. Display all revs.
119 wanted = dict(zip(revs, revs))
120 wanted = dict(zip(revs, revs))
120 if not slowpath:
121 if not slowpath:
121 # Only files, no patterns. Check the history of each file.
122 # Only files, no patterns. Check the history of each file.
122 def filerevgen(filelog):
123 def filerevgen(filelog):
123 for i, window in increasing_windows(filelog.count()-1, -1):
124 for i, window in increasing_windows(filelog.count()-1, -1):
124 revs = []
125 revs = []
125 for j in xrange(i - window, i + 1):
126 for j in xrange(i - window, i + 1):
126 revs.append(filelog.linkrev(filelog.node(j)))
127 revs.append(filelog.linkrev(filelog.node(j)))
127 revs.reverse()
128 revs.reverse()
128 for rev in revs:
129 for rev in revs:
129 yield rev
130 yield rev
130
131
131 minrev, maxrev = min(revs), max(revs)
132 minrev, maxrev = min(revs), max(revs)
132 for file_ in files:
133 for file_ in files:
133 filelog = repo.file(file_)
134 filelog = repo.file(file_)
134 # A zero count may be a directory or deleted file, so
135 # A zero count may be a directory or deleted file, so
135 # try to find matching entries on the slow path.
136 # try to find matching entries on the slow path.
136 if filelog.count() == 0:
137 if filelog.count() == 0:
137 slowpath = True
138 slowpath = True
138 break
139 break
139 for rev in filerevgen(filelog):
140 for rev in filerevgen(filelog):
140 if rev <= maxrev:
141 if rev <= maxrev:
141 if rev < minrev:
142 if rev < minrev:
142 break
143 break
143 fncache.setdefault(rev, [])
144 fncache.setdefault(rev, [])
144 fncache[rev].append(file_)
145 fncache[rev].append(file_)
145 wanted[rev] = 1
146 wanted[rev] = 1
146 if slowpath:
147 if slowpath:
147 # The slow path checks files modified in every changeset.
148 # The slow path checks files modified in every changeset.
148 def changerevgen():
149 def changerevgen():
149 for i, window in increasing_windows(repo.changelog.count()-1, -1):
150 for i, window in increasing_windows(repo.changelog.count()-1, -1):
150 for j in xrange(i - window, i + 1):
151 for j in xrange(i - window, i + 1):
151 yield j, getchange(j)[3]
152 yield j, getchange(j)[3]
152
153
153 for rev, changefiles in changerevgen():
154 for rev, changefiles in changerevgen():
154 matches = filter(matchfn, changefiles)
155 matches = filter(matchfn, changefiles)
155 if matches:
156 if matches:
156 fncache[rev] = matches
157 fncache[rev] = matches
157 wanted[rev] = 1
158 wanted[rev] = 1
158
159
159 def iterate():
160 def iterate():
160 for i, window in increasing_windows(0, len(revs)):
161 for i, window in increasing_windows(0, len(revs)):
161 yield 'window', revs[0] < revs[-1], revs[-1]
162 yield 'window', revs[0] < revs[-1], revs[-1]
162 nrevs = [rev for rev in revs[i:i+window]
163 nrevs = [rev for rev in revs[i:i+window]
163 if rev in wanted]
164 if rev in wanted]
164 srevs = list(nrevs)
165 srevs = list(nrevs)
165 srevs.sort()
166 srevs.sort()
166 for rev in srevs:
167 for rev in srevs:
167 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
168 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
168 yield 'add', rev, fns
169 yield 'add', rev, fns
169 for rev in nrevs:
170 for rev in nrevs:
170 yield 'iter', rev, None
171 yield 'iter', rev, None
171 return iterate(), getchange, matchfn
172 return iterate(), getchange, matchfn
172
173
173 revrangesep = ':'
174 revrangesep = ':'
174
175
175 def revrange(ui, repo, revs, revlog=None):
176 def revrange(ui, repo, revs, revlog=None):
176 """Yield revision as strings from a list of revision specifications."""
177 """Yield revision as strings from a list of revision specifications."""
177 if revlog is None:
178 if revlog is None:
178 revlog = repo.changelog
179 revlog = repo.changelog
179 revcount = revlog.count()
180 revcount = revlog.count()
180 def fix(val, defval):
181 def fix(val, defval):
181 if not val:
182 if not val:
182 return defval
183 return defval
183 try:
184 try:
184 num = int(val)
185 num = int(val)
185 if str(num) != val:
186 if str(num) != val:
186 raise ValueError
187 raise ValueError
187 if num < 0:
188 if num < 0:
188 num += revcount
189 num += revcount
189 if num < 0:
190 if num < 0:
190 num = 0
191 num = 0
191 elif num >= revcount:
192 elif num >= revcount:
192 raise ValueError
193 raise ValueError
193 except ValueError:
194 except ValueError:
194 try:
195 try:
195 num = repo.changelog.rev(repo.lookup(val))
196 num = repo.changelog.rev(repo.lookup(val))
196 except KeyError:
197 except KeyError:
197 try:
198 try:
198 num = revlog.rev(revlog.lookup(val))
199 num = revlog.rev(revlog.lookup(val))
199 except KeyError:
200 except KeyError:
200 raise util.Abort(_('invalid revision identifier %s'), val)
201 raise util.Abort(_('invalid revision identifier %s'), val)
201 return num
202 return num
202 seen = {}
203 seen = {}
203 for spec in revs:
204 for spec in revs:
204 if spec.find(revrangesep) >= 0:
205 if spec.find(revrangesep) >= 0:
205 start, end = spec.split(revrangesep, 1)
206 start, end = spec.split(revrangesep, 1)
206 start = fix(start, 0)
207 start = fix(start, 0)
207 end = fix(end, revcount - 1)
208 end = fix(end, revcount - 1)
208 step = start > end and -1 or 1
209 step = start > end and -1 or 1
209 for rev in xrange(start, end+step, step):
210 for rev in xrange(start, end+step, step):
210 if rev in seen:
211 if rev in seen:
211 continue
212 continue
212 seen[rev] = 1
213 seen[rev] = 1
213 yield str(rev)
214 yield str(rev)
214 else:
215 else:
215 rev = fix(spec, None)
216 rev = fix(spec, None)
216 if rev in seen:
217 if rev in seen:
217 continue
218 continue
218 seen[rev] = 1
219 seen[rev] = 1
219 yield str(rev)
220 yield str(rev)
220
221
221 def make_filename(repo, r, pat, node=None,
222 def make_filename(repo, r, pat, node=None,
222 total=None, seqno=None, revwidth=None, pathname=None):
223 total=None, seqno=None, revwidth=None, pathname=None):
223 node_expander = {
224 node_expander = {
224 'H': lambda: hex(node),
225 'H': lambda: hex(node),
225 'R': lambda: str(r.rev(node)),
226 'R': lambda: str(r.rev(node)),
226 'h': lambda: short(node),
227 'h': lambda: short(node),
227 }
228 }
228 expander = {
229 expander = {
229 '%': lambda: '%',
230 '%': lambda: '%',
230 'b': lambda: os.path.basename(repo.root),
231 'b': lambda: os.path.basename(repo.root),
231 }
232 }
232
233
233 try:
234 try:
234 if node:
235 if node:
235 expander.update(node_expander)
236 expander.update(node_expander)
236 if node and revwidth is not None:
237 if node and revwidth is not None:
237 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
238 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
238 if total is not None:
239 if total is not None:
239 expander['N'] = lambda: str(total)
240 expander['N'] = lambda: str(total)
240 if seqno is not None:
241 if seqno is not None:
241 expander['n'] = lambda: str(seqno)
242 expander['n'] = lambda: str(seqno)
242 if total is not None and seqno is not None:
243 if total is not None and seqno is not None:
243 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
244 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
244 if pathname is not None:
245 if pathname is not None:
245 expander['s'] = lambda: os.path.basename(pathname)
246 expander['s'] = lambda: os.path.basename(pathname)
246 expander['d'] = lambda: os.path.dirname(pathname) or '.'
247 expander['d'] = lambda: os.path.dirname(pathname) or '.'
247 expander['p'] = lambda: pathname
248 expander['p'] = lambda: pathname
248
249
249 newname = []
250 newname = []
250 patlen = len(pat)
251 patlen = len(pat)
251 i = 0
252 i = 0
252 while i < patlen:
253 while i < patlen:
253 c = pat[i]
254 c = pat[i]
254 if c == '%':
255 if c == '%':
255 i += 1
256 i += 1
256 c = pat[i]
257 c = pat[i]
257 c = expander[c]()
258 c = expander[c]()
258 newname.append(c)
259 newname.append(c)
259 i += 1
260 i += 1
260 return ''.join(newname)
261 return ''.join(newname)
261 except KeyError, inst:
262 except KeyError, inst:
262 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
263 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
263 inst.args[0])
264 inst.args[0])
264
265
def make_file(repo, r, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return an open file (or passthrough stream) for the pattern pat.

    An empty pattern or '-' maps to stdout/stdin depending on mode; an
    object that already looks like an open file compatible with mode is
    returned unchanged; otherwise pat is expanded via make_filename and
    opened with the given mode.
    """
    if not pat or pat == '-':
        # no explicit file requested: fall back to the standard streams
        if 'w' in mode:
            return sys.stdout
        return sys.stdin
    # pass through objects that already behave like an open file
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    expanded = make_filename(repo, r, pat, node, total, seqno, revwidth,
                             pathname)
    return open(expanded, mode)
276
277
def write_bundle(cg, filename=None, compress=True):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """
    class nocompress(object):
        # drop-in stand-in for a bz2 compressor that leaves data untouched
        def compress(self, x):
            return x
        def flush(self):
            return ""

    fh = None
    cleanup = None
    try:
        if filename:
            if os.path.exists(filename):
                raise util.Abort(_("file '%s' already exists"), filename)
            fh = open(filename, "wb")
        else:
            fd, filename = tempfile.mkstemp(suffix=".hg", prefix="hg-bundle-")
            fh = os.fdopen(fd, "wb")
        # from here on, remove the (possibly partial) file on failure
        cleanup = filename

        if compress:
            fh.write("HG10")
            compressor = bz2.BZ2Compressor(9)
        else:
            fh.write("HG10UN")
            compressor = nocompress()

        # Parse the changegroup data chunk by chunk instead of copying
        # raw bytes; otherwise we would block in case of sshrepo because
        # we don't know the end of the stream.  An empty chunkiter marks
        # the end of the changegroup.
        done = False
        while not done:
            done = True
            for chunk in changegroup.chunkiter(cg):
                done = False
                fh.write(compressor.compress(changegroup.genchunk(chunk)))
        fh.write(compressor.compress(changegroup.closechunk()))
        fh.write(compressor.flush())
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            os.unlink(cleanup)
322
329
def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
           changes=None, text=False, opts=None):
    """Write a unified diff between two changesets to fp.

    node1 defaults to the first parent of the working directory.
    changes may be a precomputed (modified, added, removed, deleted,
    unknown) tuple; otherwise repo.changes() is consulted.  When node2
    is None, file contents are read from the working directory.
    """
    if opts is None:
        # fix: avoid the shared mutable default argument ({}); behavior
        # is unchanged for callers, opts is only ever read here
        opts = {}
    if not node1:
        node1 = repo.dirstate.parents()[0]
    # reading the data for node1 early allows it to play nicely
    # with repo.changes and the revlog cache.
    change = repo.changelog.read(node1)
    mmap = repo.manifest.read(change[0])
    date1 = util.datestr(change[2])

    if not changes:
        changes = repo.changes(node1, node2, files, match=match)
    modified, added, removed, deleted, unknown = changes
    if files:
        # restrict to the explicitly requested files
        modified, added, removed = map(lambda x: filterfiles(files, x),
                                       (modified, added, removed))

    if not modified and not added and not removed:
        return

    if node2:
        change = repo.changelog.read(node2)
        mmap2 = repo.manifest.read(change[0])
        date2 = util.datestr(change[2])
        def read(f):
            return repo.file(f).read(mmap2[f])
    else:
        # diff against the working directory
        date2 = util.datestr()
        def read(f):
            return repo.wread(f)

    if ui.quiet:
        r = None
    else:
        hexfunc = ui.verbose and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    diffopts = ui.diffopts()
    showfunc = opts.get('show_function') or diffopts['showfunc']
    ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
    for f in modified:
        to = None
        if f in mmap:
            to = repo.file(f).read(mmap[f])
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
    for f in added:
        to = None
        tn = read(f)
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
    for f in removed:
        to = repo.file(f).read(mmap[f])
        tn = None
        fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
                               showfunc=showfunc, ignorews=ignorews))
380
387
def trimuser(ui, name, rev, revcache):
    """trim the name of the user who committed a change"""
    # memoized per revision: look up first, compute and store on miss
    cached = revcache.get(rev)
    if cached is not None:
        return cached
    short_name = ui.shortuser(name)
    revcache[rev] = short_name
    return short_name
387
394
388 class changeset_templater(object):
395 class changeset_templater(object):
389 '''use templater module to format changeset information.'''
396 '''use templater module to format changeset information.'''
390
397
391 def __init__(self, ui, repo, mapfile):
398 def __init__(self, ui, repo, mapfile):
392 self.t = templater.templater(mapfile, templater.common_filters,
399 self.t = templater.templater(mapfile, templater.common_filters,
393 cache={'parent': '{rev}:{node|short} ',
400 cache={'parent': '{rev}:{node|short} ',
394 'manifest': '{rev}:{node|short}'})
401 'manifest': '{rev}:{node|short}'})
395 self.ui = ui
402 self.ui = ui
396 self.repo = repo
403 self.repo = repo
397
404
398 def use_template(self, t):
405 def use_template(self, t):
399 '''set template string to use'''
406 '''set template string to use'''
400 self.t.cache['changeset'] = t
407 self.t.cache['changeset'] = t
401
408
402 def write(self, thing):
409 def write(self, thing):
403 '''write expanded template.
410 '''write expanded template.
404 uses in-order recursive traverse of iterators.'''
411 uses in-order recursive traverse of iterators.'''
405 for t in thing:
412 for t in thing:
406 if hasattr(t, '__iter__'):
413 if hasattr(t, '__iter__'):
407 self.write(t)
414 self.write(t)
408 else:
415 else:
409 self.ui.write(t)
416 self.ui.write(t)
410
417
411 def show(self, rev=0, changenode=None, brinfo=None):
418 def show(self, rev=0, changenode=None, brinfo=None):
412 '''show a single changeset or file revision'''
419 '''show a single changeset or file revision'''
413 log = self.repo.changelog
420 log = self.repo.changelog
414 if changenode is None:
421 if changenode is None:
415 changenode = log.node(rev)
422 changenode = log.node(rev)
416 elif not rev:
423 elif not rev:
417 rev = log.rev(changenode)
424 rev = log.rev(changenode)
418
425
419 changes = log.read(changenode)
426 changes = log.read(changenode)
420
427
421 def showlist(name, values, plural=None, **args):
428 def showlist(name, values, plural=None, **args):
422 '''expand set of values.
429 '''expand set of values.
423 name is name of key in template map.
430 name is name of key in template map.
424 values is list of strings or dicts.
431 values is list of strings or dicts.
425 plural is plural of name, if not simply name + 's'.
432 plural is plural of name, if not simply name + 's'.
426
433
427 expansion works like this, given name 'foo'.
434 expansion works like this, given name 'foo'.
428
435
429 if values is empty, expand 'no_foos'.
436 if values is empty, expand 'no_foos'.
430
437
431 if 'foo' not in template map, return values as a string,
438 if 'foo' not in template map, return values as a string,
432 joined by space.
439 joined by space.
433
440
434 expand 'start_foos'.
441 expand 'start_foos'.
435
442
436 for each value, expand 'foo'. if 'last_foo' in template
443 for each value, expand 'foo'. if 'last_foo' in template
437 map, expand it instead of 'foo' for last key.
444 map, expand it instead of 'foo' for last key.
438
445
439 expand 'end_foos'.
446 expand 'end_foos'.
440 '''
447 '''
441 if plural: names = plural
448 if plural: names = plural
442 else: names = name + 's'
449 else: names = name + 's'
443 if not values:
450 if not values:
444 noname = 'no_' + names
451 noname = 'no_' + names
445 if noname in self.t:
452 if noname in self.t:
446 yield self.t(noname, **args)
453 yield self.t(noname, **args)
447 return
454 return
448 if name not in self.t:
455 if name not in self.t:
449 if isinstance(values[0], str):
456 if isinstance(values[0], str):
450 yield ' '.join(values)
457 yield ' '.join(values)
451 else:
458 else:
452 for v in values:
459 for v in values:
453 yield dict(v, **args)
460 yield dict(v, **args)
454 return
461 return
455 startname = 'start_' + names
462 startname = 'start_' + names
456 if startname in self.t:
463 if startname in self.t:
457 yield self.t(startname, **args)
464 yield self.t(startname, **args)
458 vargs = args.copy()
465 vargs = args.copy()
459 def one(v, tag=name):
466 def one(v, tag=name):
460 try:
467 try:
461 vargs.update(v)
468 vargs.update(v)
462 except (AttributeError, ValueError):
469 except (AttributeError, ValueError):
463 try:
470 try:
464 for a, b in v:
471 for a, b in v:
465 vargs[a] = b
472 vargs[a] = b
466 except ValueError:
473 except ValueError:
467 vargs[name] = v
474 vargs[name] = v
468 return self.t(tag, **vargs)
475 return self.t(tag, **vargs)
469 lastname = 'last_' + name
476 lastname = 'last_' + name
470 if lastname in self.t:
477 if lastname in self.t:
471 last = values.pop()
478 last = values.pop()
472 else:
479 else:
473 last = None
480 last = None
474 for v in values:
481 for v in values:
475 yield one(v)
482 yield one(v)
476 if last is not None:
483 if last is not None:
477 yield one(last, tag=lastname)
484 yield one(last, tag=lastname)
478 endname = 'end_' + names
485 endname = 'end_' + names
479 if endname in self.t:
486 if endname in self.t:
480 yield self.t(endname, **args)
487 yield self.t(endname, **args)
481
488
482 if brinfo:
489 if brinfo:
483 def showbranches(**args):
490 def showbranches(**args):
484 if changenode in brinfo:
491 if changenode in brinfo:
485 for x in showlist('branch', brinfo[changenode],
492 for x in showlist('branch', brinfo[changenode],
486 plural='branches', **args):
493 plural='branches', **args):
487 yield x
494 yield x
488 else:
495 else:
489 showbranches = ''
496 showbranches = ''
490
497
491 if self.ui.debugflag:
498 if self.ui.debugflag:
492 def showmanifest(**args):
499 def showmanifest(**args):
493 args = args.copy()
500 args = args.copy()
494 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
501 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
495 node=hex(changes[0])))
502 node=hex(changes[0])))
496 yield self.t('manifest', **args)
503 yield self.t('manifest', **args)
497 else:
504 else:
498 showmanifest = ''
505 showmanifest = ''
499
506
500 def showparents(**args):
507 def showparents(**args):
501 parents = [[('rev', log.rev(p)), ('node', hex(p))]
508 parents = [[('rev', log.rev(p)), ('node', hex(p))]
502 for p in log.parents(changenode)
509 for p in log.parents(changenode)
503 if self.ui.debugflag or p != nullid]
510 if self.ui.debugflag or p != nullid]
504 if (not self.ui.debugflag and len(parents) == 1 and
511 if (not self.ui.debugflag and len(parents) == 1 and
505 parents[0][0][1] == rev - 1):
512 parents[0][0][1] == rev - 1):
506 return
513 return
507 for x in showlist('parent', parents, **args):
514 for x in showlist('parent', parents, **args):
508 yield x
515 yield x
509
516
510 def showtags(**args):
517 def showtags(**args):
511 for x in showlist('tag', self.repo.nodetags(changenode), **args):
518 for x in showlist('tag', self.repo.nodetags(changenode), **args):
512 yield x
519 yield x
513
520
514 if self.ui.debugflag:
521 if self.ui.debugflag:
515 files = self.repo.changes(log.parents(changenode)[0], changenode)
522 files = self.repo.changes(log.parents(changenode)[0], changenode)
516 def showfiles(**args):
523 def showfiles(**args):
517 for x in showlist('file', files[0], **args): yield x
524 for x in showlist('file', files[0], **args): yield x
518 def showadds(**args):
525 def showadds(**args):
519 for x in showlist('file_add', files[1], **args): yield x
526 for x in showlist('file_add', files[1], **args): yield x
520 def showdels(**args):
527 def showdels(**args):
521 for x in showlist('file_del', files[2], **args): yield x
528 for x in showlist('file_del', files[2], **args): yield x
522 else:
529 else:
523 def showfiles(**args):
530 def showfiles(**args):
524 for x in showlist('file', changes[3], **args): yield x
531 for x in showlist('file', changes[3], **args): yield x
525 showadds = ''
532 showadds = ''
526 showdels = ''
533 showdels = ''
527
534
528 props = {
535 props = {
529 'author': changes[1],
536 'author': changes[1],
530 'branches': showbranches,
537 'branches': showbranches,
531 'date': changes[2],
538 'date': changes[2],
532 'desc': changes[4],
539 'desc': changes[4],
533 'file_adds': showadds,
540 'file_adds': showadds,
534 'file_dels': showdels,
541 'file_dels': showdels,
535 'files': showfiles,
542 'files': showfiles,
536 'manifest': showmanifest,
543 'manifest': showmanifest,
537 'node': hex(changenode),
544 'node': hex(changenode),
538 'parents': showparents,
545 'parents': showparents,
539 'rev': rev,
546 'rev': rev,
540 'tags': showtags,
547 'tags': showtags,
541 }
548 }
542
549
543 try:
550 try:
544 if self.ui.debugflag and 'changeset_debug' in self.t:
551 if self.ui.debugflag and 'changeset_debug' in self.t:
545 key = 'changeset_debug'
552 key = 'changeset_debug'
546 elif self.ui.quiet and 'changeset_quiet' in self.t:
553 elif self.ui.quiet and 'changeset_quiet' in self.t:
547 key = 'changeset_quiet'
554 key = 'changeset_quiet'
548 elif self.ui.verbose and 'changeset_verbose' in self.t:
555 elif self.ui.verbose and 'changeset_verbose' in self.t:
549 key = 'changeset_verbose'
556 key = 'changeset_verbose'
550 else:
557 else:
551 key = 'changeset'
558 key = 'changeset'
552 self.write(self.t(key, **props))
559 self.write(self.t(key, **props))
553 except KeyError, inst:
560 except KeyError, inst:
554 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
561 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
555 inst.args[0]))
562 inst.args[0]))
556 except SyntaxError, inst:
563 except SyntaxError, inst:
557 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
564 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
558
565
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def show(self, rev=0, changenode=None, brinfo=None):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            # terse form: just rev:shortnode
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])

        parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
                   for p in log.parents(changenode)
                   if self.ui.debugflag or p != nullid]
        # suppress the trivial single-parent (rev - 1) case
        if (not self.ui.debugflag and len(parents) == 1 and
            parents[0][0] == rev-1):
            parents = []

        if self.ui.verbose:
            self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
        else:
            self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))

        for tag in self.repo.nodetags(changenode):
            self.ui.status(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if brinfo and changenode in brinfo:
            br = brinfo[changenode]
            self.ui.write(_("branch: %s\n") % " ".join(br))

        self.ui.debug(_("manifest: %d:%s\n") %
                      (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.status(_("user: %s\n") % changes[1])
        self.ui.status(_("date: %s\n") % date)

        if self.ui.debugflag:
            # in debug mode, split the file list into kept/added/removed
            files = self.repo.changes(log.parents(changenode)[0], changenode)
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.note("%-12s %s\n" % (key, " ".join(value)))
        else:
            self.ui.note(_("files: %s\n") % " ".join(changes[3]))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.status(_("description:\n"))
                self.ui.status(description)
                self.ui.status("\n\n")
            else:
                self.ui.status(_("summary: %s\n") %
                               description.splitlines()[0])
        self.ui.status("\n")
626
633
627 def show_changeset(ui, repo, opts):
634 def show_changeset(ui, repo, opts):
628 '''show one changeset. uses template or regular display. caller
635 '''show one changeset. uses template or regular display. caller
629 can pass in 'style' and 'template' options in opts.'''
636 can pass in 'style' and 'template' options in opts.'''
630
637
631 tmpl = opts.get('template')
638 tmpl = opts.get('template')
632 if tmpl:
639 if tmpl:
633 tmpl = templater.parsestring(tmpl, quoted=False)
640 tmpl = templater.parsestring(tmpl, quoted=False)
634 else:
641 else:
635 tmpl = ui.config('ui', 'logtemplate')
642 tmpl = ui.config('ui', 'logtemplate')
636 if tmpl: tmpl = templater.parsestring(tmpl)
643 if tmpl: tmpl = templater.parsestring(tmpl)
637 mapfile = opts.get('style') or ui.config('ui', 'style')
644 mapfile = opts.get('style') or ui.config('ui', 'style')
638 if tmpl or mapfile:
645 if tmpl or mapfile:
639 if mapfile:
646 if mapfile:
640 if not os.path.isfile(mapfile):
647 if not os.path.isfile(mapfile):
641 mapname = templater.templatepath('map-cmdline.' + mapfile)
648 mapname = templater.templatepath('map-cmdline.' + mapfile)
642 if not mapname: mapname = templater.templatepath(mapfile)
649 if not mapname: mapname = templater.templatepath(mapfile)
643 if mapname: mapfile = mapname
650 if mapname: mapfile = mapname
644 try:
651 try:
645 t = changeset_templater(ui, repo, mapfile)
652 t = changeset_templater(ui, repo, mapfile)
646 except SyntaxError, inst:
653 except SyntaxError, inst:
647 raise util.Abort(inst.args[0])
654 raise util.Abort(inst.args[0])
648 if tmpl: t.use_template(tmpl)
655 if tmpl: t.use_template(tmpl)
649 return t
656 return t
650 return changeset_printer(ui, repo)
657 return changeset_printer(ui, repo)
651
658
def show_version(ui):
    """output version and copyright information"""
    # version line always shown; copyright only when not quiet
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
662
669
def help_(ui, cmd=None, with_version=False):
    """show help for a given command or all commands"""
    option_lists = []
    if cmd and cmd != 'shortlist':
        # help for one specific command
        if with_version:
            show_version(ui)
            ui.write('\n')
        aliases, i = find(cmd)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append(("options", i[1]))

    else:
        # program name
        if ui.verbose or with_version:
            show_version(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if cmd == "shortlist":
            ui.status(_('basic commands (use "hg help" '
                        'for the full list or option "-v" for details):\n\n'))
        elif ui.verbose:
            ui.status(_('list of commands:\n\n'))
        else:
            ui.status(_('list of commands (use "hg help -v" '
                        'to show aliases and global options):\n\n'))

        # gather one-line summaries, keyed by primary command name
        h = {}
        cmds = {}
        for c, e in table.items():
            f = c.split("|")[0]
            if cmd == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

    # global options
    if ui.verbose:
        option_lists.append(("global options", globalopts))

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s:\n" % title, None))
        for shortopt, longopt, default, desc in options:
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if opt_output:
        # align descriptions on the longest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
757
764
758 # Commands start here, listed alphabetically
765 # Commands start here, listed alphabetically
759
766
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # exact matches are always added; other walk results only when
        # the dirstate doesn't know the file yet
        if not (exact or repo.dirstate.state(abs) == '?'):
            continue
        # exact matches report only in verbose mode, pattern hits always
        if ui.verbose or not exact:
            ui.status(_('adding %s\n') % rel)
        names.append(abs)
    repo.add(names)
780
787
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.
    """
    # thin wrapper: the real work (and optional locking) lives in
    # addremove_lock
    return addremove_lock(ui, repo, pats, opts)
790
797
def addremove_lock(ui, repo, pats, opts, wlock=None):
    """Schedule unknown files for add and missing files for removal."""
    to_add, to_remove = [], []
    for src, abs, rel, exact in walk(repo, pats, opts):
        # report for pattern hits always, for exact matches only if verbose
        verbose = ui.verbose or not exact
        if src == 'f' and repo.dirstate.state(abs) == '?':
            # plain file not yet tracked: schedule for add
            to_add.append(abs)
            if verbose:
                ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
            # tracked but gone from the working dir: schedule for removal
            to_remove.append(abs)
            if verbose:
                ui.status(_('removing %s\n') % ((pats and rel) or abs))
    repo.add(to_add, wlock=wlock)
    repo.remove(to_remove, wlock=wlock)
804
811
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # Per-revision formatters for the optional output columns.
    def getnode(rev):
        return short(repo.changelog.node(rev))

    # ucache memoizes trimmed user names across lines of the same rev.
    ucache = {}
    def getname(rev):
        cl = repo.changelog.read(repo.changelog.node(rev))
        return trimuser(ui, cl[1], rev, ucache)

    # dcache memoizes formatted dates per revision.
    dcache = {}
    def getdate(rev):
        datestr = dcache.get(rev)
        if datestr is None:
            cl = repo.changelog.read(repo.changelog.node(rev))
            datestr = dcache[rev] = util.datestr(cl[2])
        return datestr

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # option name -> formatter, in output-column order
    opmap = [['user', getname], ['number', str], ['changeset', getnode],
             ['date', getdate]]
    # default to showing revision numbers when no column was requested
    if not opts['user'] and not opts['changeset'] and not opts['date']:
        opts['number'] = 1

    if opts['rev']:
        node = repo.changelog.lookup(opts['rev'])
    else:
        node = repo.dirstate.parents()[0]
    change = repo.changelog.read(node)
    mmap = repo.manifest.read(change[0])

    for src, abs, rel, exact in walk(repo, pats, opts, node=node):
        f = repo.file(abs)
        if not opts['text'] and util.binary(f.read(mmap[abs])):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = f.annotate(mmap[abs])
        pieces = []

        # NOTE: this loop rebinds `f` (the filelog above) to the
        # formatter function; the filelog is not used past this point.
        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    # right-align each column to its widest entry
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            # zip(*pieces) transposes columns back into per-line tuples
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
867
874
def bundle(ui, repo, fname, dest="default-push", **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting all changesets
    not found in the other repository.

    This file can then be transferred using conventional means and
    applied to another repository with the unbundle command. This is
    useful when native push and pull are not available or when
    exporting an entire repository is undesirable. The standard file
    extension is ".hg".

    Unlike import/export, this exactly preserves all changeset
    contents including permissions, rename data, and revision history.
    """
    # resolve the comparison repository and collect what it is missing
    target = ui.expandpath(dest)
    remote = hg.repository(ui, target)
    outgoing = repo.findoutgoing(remote, force=opts['force'])
    changes = repo.changegroup(outgoing, 'bundle')
    write_bundle(changes, fname)
888
895
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then the tip is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = repo.changelog.tip()
    # manifest of the requested changeset maps path -> file node
    change = repo.changelog.read(node)
    mf = repo.manifest.read(change[0])
    for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
        filelog = repo.file(abs)
        filenode = mf[abs]
        out = make_file(repo, filelog, opts['output'], node=filenode,
                        pathname=abs)
        out.write(filelog.read(filenode))
916
923
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem. Some filesystems,
    such as AFS, implement hardlinking incorrectly, but do not report
    errors. In these cases, use the --pull option to avoid
    hardlinking.

    See pull for valid source format details.
    """
    if dest is None:
        dest = os.path.basename(os.path.normpath(source))

    if os.path.exists(dest):
        raise util.Abort(_("destination '%s' already exists"), dest)

    dest = os.path.realpath(dest)

    # Removes the partially-created destination directory if the clone
    # fails before close() is called (relies on __del__ at unwind).
    class Dircleanup(object):
        def __init__(self, dir_):
            # bind rmtree locally: module globals may be gone during
            # interpreter shutdown when __del__ runs
            self.rmtree = shutil.rmtree
            self.dir_ = dir_
            os.mkdir(dir_)
        def close(self):
            # disarm the cleanup once the clone has succeeded
            self.dir_ = None
        def __del__(self):
            if self.dir_:
                self.rmtree(self.dir_, True)

    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    source = ui.expandpath(source)

    d = Dircleanup(dest)
    abspath = source
    other = hg.repository(ui, source)

    # hardlink/copy clone is only possible for a local source and when
    # neither --pull nor --rev was requested
    copy = False
    if other.dev() != -1:
        abspath = os.path.abspath(source)
        if not opts['pull'] and not opts['rev']:
            copy = True

    if copy:
        try:
            # we use a lock here because if we race with commit, we
            # can end up with extra data in the cloned revlogs that's
            # not pointed to by changesets, thus causing verify to
            # fail
            l1 = other.lock()
        except lock.LockException:
            # can't lock the source: fall back to a pull-based clone
            copy = False

    if copy:
        # we lock here to avoid premature writing to the target
        # (l1/l2 stay referenced until the function returns, keeping
        # both repositories locked for the duration of the copy)
        os.mkdir(os.path.join(dest, ".hg"))
        l2 = lock.lock(os.path.join(dest, ".hg", "lock"))

        files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
        for f in files.split():
            src = os.path.join(source, ".hg", f)
            dst = os.path.join(dest, ".hg", f)
            try:
                util.copyfiles(src, dst)
            except OSError, inst:
                # a missing file (e.g. no .d for a small revlog) is fine
                if inst.errno != errno.ENOENT:
                    raise

        repo = hg.repository(ui, dest)

    else:
        revs = None
        if opts['rev']:
            if not other.local():
                error = _("clone -r not supported yet for remote repositories.")
                raise util.Abort(error)
            else:
                revs = [other.lookup(rev) for rev in opts['rev']]
        repo = hg.repository(ui, dest, create=1)
        repo.pull(other, heads = revs)

    # record the source as the default pull path of the new repo
    f = repo.opener("hgrc", "w", text=True)
    f.write("[paths]\n")
    f.write("default = %s\n" % abspath)
    f.close()

    if not opts['noupdate']:
        update(repo.ui, repo)

    d.close()
1019
1026
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    The HGEDITOR or EDITOR environment variables are used to start an
    editor to add a commit comment.
    """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                # '-' means read the commit message from stdin
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))

    if opts['addremove']:
        addremove(ui, repo, *pats, **opts)
    fns, match, anypats = matchpats(repo, pats, opts)
    if pats:
        # restrict the commit to the files matched by the patterns;
        # NOTE(review): `deleted` and `unknown` are intentionally left
        # out of the committed file list
        modified, added, removed, deleted, unknown = (
            repo.changes(files=fns, match=match))
        files = modified + added + removed
    else:
        # empty list means "commit everything reported by status"
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match)
    except ValueError, inst:
        raise util.Abort(str(inst))
1060
1067
1061 def docopy(ui, repo, pats, opts, wlock):
1068 def docopy(ui, repo, pats, opts, wlock):
1062 # called with the repo lock held
1069 # called with the repo lock held
1063 cwd = repo.getcwd()
1070 cwd = repo.getcwd()
1064 errors = 0
1071 errors = 0
1065 copied = []
1072 copied = []
1066 targets = {}
1073 targets = {}
1067
1074
1068 def okaytocopy(abs, rel, exact):
1075 def okaytocopy(abs, rel, exact):
1069 reasons = {'?': _('is not managed'),
1076 reasons = {'?': _('is not managed'),
1070 'a': _('has been marked for add'),
1077 'a': _('has been marked for add'),
1071 'r': _('has been marked for remove')}
1078 'r': _('has been marked for remove')}
1072 state = repo.dirstate.state(abs)
1079 state = repo.dirstate.state(abs)
1073 reason = reasons.get(state)
1080 reason = reasons.get(state)
1074 if reason:
1081 if reason:
1075 if state == 'a':
1082 if state == 'a':
1076 origsrc = repo.dirstate.copied(abs)
1083 origsrc = repo.dirstate.copied(abs)
1077 if origsrc is not None:
1084 if origsrc is not None:
1078 return origsrc
1085 return origsrc
1079 if exact:
1086 if exact:
1080 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1087 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1081 else:
1088 else:
1082 return abs
1089 return abs
1083
1090
1084 def copy(origsrc, abssrc, relsrc, target, exact):
1091 def copy(origsrc, abssrc, relsrc, target, exact):
1085 abstarget = util.canonpath(repo.root, cwd, target)
1092 abstarget = util.canonpath(repo.root, cwd, target)
1086 reltarget = util.pathto(cwd, abstarget)
1093 reltarget = util.pathto(cwd, abstarget)
1087 prevsrc = targets.get(abstarget)
1094 prevsrc = targets.get(abstarget)
1088 if prevsrc is not None:
1095 if prevsrc is not None:
1089 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1096 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1090 (reltarget, abssrc, prevsrc))
1097 (reltarget, abssrc, prevsrc))
1091 return
1098 return
1092 if (not opts['after'] and os.path.exists(reltarget) or
1099 if (not opts['after'] and os.path.exists(reltarget) or
1093 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1100 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1094 if not opts['force']:
1101 if not opts['force']:
1095 ui.warn(_('%s: not overwriting - file exists\n') %
1102 ui.warn(_('%s: not overwriting - file exists\n') %
1096 reltarget)
1103 reltarget)
1097 return
1104 return
1098 if not opts['after']:
1105 if not opts['after']:
1099 os.unlink(reltarget)
1106 os.unlink(reltarget)
1100 if opts['after']:
1107 if opts['after']:
1101 if not os.path.exists(reltarget):
1108 if not os.path.exists(reltarget):
1102 return
1109 return
1103 else:
1110 else:
1104 targetdir = os.path.dirname(reltarget) or '.'
1111 targetdir = os.path.dirname(reltarget) or '.'
1105 if not os.path.isdir(targetdir):
1112 if not os.path.isdir(targetdir):
1106 os.makedirs(targetdir)
1113 os.makedirs(targetdir)
1107 try:
1114 try:
1108 restore = repo.dirstate.state(abstarget) == 'r'
1115 restore = repo.dirstate.state(abstarget) == 'r'
1109 if restore:
1116 if restore:
1110 repo.undelete([abstarget], wlock)
1117 repo.undelete([abstarget], wlock)
1111 try:
1118 try:
1112 shutil.copyfile(relsrc, reltarget)
1119 shutil.copyfile(relsrc, reltarget)
1113 shutil.copymode(relsrc, reltarget)
1120 shutil.copymode(relsrc, reltarget)
1114 restore = False
1121 restore = False
1115 finally:
1122 finally:
1116 if restore:
1123 if restore:
1117 repo.remove([abstarget], wlock)
1124 repo.remove([abstarget], wlock)
1118 except shutil.Error, inst:
1125 except shutil.Error, inst:
1119 raise util.Abort(str(inst))
1126 raise util.Abort(str(inst))
1120 except IOError, inst:
1127 except IOError, inst:
1121 if inst.errno == errno.ENOENT:
1128 if inst.errno == errno.ENOENT:
1122 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1129 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1123 else:
1130 else:
1124 ui.warn(_('%s: cannot copy - %s\n') %
1131 ui.warn(_('%s: cannot copy - %s\n') %
1125 (relsrc, inst.strerror))
1132 (relsrc, inst.strerror))
1126 errors += 1
1133 errors += 1
1127 return
1134 return
1128 if ui.verbose or not exact:
1135 if ui.verbose or not exact:
1129 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1136 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1130 targets[abstarget] = abssrc
1137 targets[abstarget] = abssrc
1131 if abstarget != origsrc:
1138 if abstarget != origsrc:
1132 repo.copy(origsrc, abstarget, wlock)
1139 repo.copy(origsrc, abstarget, wlock)
1133 copied.append((abssrc, relsrc, exact))
1140 copied.append((abssrc, relsrc, exact))
1134
1141
1135 def targetpathfn(pat, dest, srcs):
1142 def targetpathfn(pat, dest, srcs):
1136 if os.path.isdir(pat):
1143 if os.path.isdir(pat):
1137 abspfx = util.canonpath(repo.root, cwd, pat)
1144 abspfx = util.canonpath(repo.root, cwd, pat)
1138 if destdirexists:
1145 if destdirexists:
1139 striplen = len(os.path.split(abspfx)[0])
1146 striplen = len(os.path.split(abspfx)[0])
1140 else:
1147 else:
1141 striplen = len(abspfx)
1148 striplen = len(abspfx)
1142 if striplen:
1149 if striplen:
1143 striplen += len(os.sep)
1150 striplen += len(os.sep)
1144 res = lambda p: os.path.join(dest, p[striplen:])
1151 res = lambda p: os.path.join(dest, p[striplen:])
1145 elif destdirexists:
1152 elif destdirexists:
1146 res = lambda p: os.path.join(dest, os.path.basename(p))
1153 res = lambda p: os.path.join(dest, os.path.basename(p))
1147 else:
1154 else:
1148 res = lambda p: dest
1155 res = lambda p: dest
1149 return res
1156 return res
1150
1157
1151 def targetpathafterfn(pat, dest, srcs):
1158 def targetpathafterfn(pat, dest, srcs):
1152 if util.patkind(pat, None)[0]:
1159 if util.patkind(pat, None)[0]:
1153 # a mercurial pattern
1160 # a mercurial pattern
1154 res = lambda p: os.path.join(dest, os.path.basename(p))
1161 res = lambda p: os.path.join(dest, os.path.basename(p))
1155 else:
1162 else:
1156 abspfx = util.canonpath(repo.root, cwd, pat)
1163 abspfx = util.canonpath(repo.root, cwd, pat)
1157 if len(abspfx) < len(srcs[0][0]):
1164 if len(abspfx) < len(srcs[0][0]):
1158 # A directory. Either the target path contains the last
1165 # A directory. Either the target path contains the last
1159 # component of the source path or it does not.
1166 # component of the source path or it does not.
1160 def evalpath(striplen):
1167 def evalpath(striplen):
1161 score = 0
1168 score = 0
1162 for s in srcs:
1169 for s in srcs:
1163 t = os.path.join(dest, s[0][striplen:])
1170 t = os.path.join(dest, s[0][striplen:])
1164 if os.path.exists(t):
1171 if os.path.exists(t):
1165 score += 1
1172 score += 1
1166 return score
1173 return score
1167
1174
1168 striplen = len(abspfx)
1175 striplen = len(abspfx)
1169 if striplen:
1176 if striplen:
1170 striplen += len(os.sep)
1177 striplen += len(os.sep)
1171 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1178 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1172 score = evalpath(striplen)
1179 score = evalpath(striplen)
1173 striplen1 = len(os.path.split(abspfx)[0])
1180 striplen1 = len(os.path.split(abspfx)[0])
1174 if striplen1:
1181 if striplen1:
1175 striplen1 += len(os.sep)
1182 striplen1 += len(os.sep)
1176 if evalpath(striplen1) > score:
1183 if evalpath(striplen1) > score:
1177 striplen = striplen1
1184 striplen = striplen1
1178 res = lambda p: os.path.join(dest, p[striplen:])
1185 res = lambda p: os.path.join(dest, p[striplen:])
1179 else:
1186 else:
1180 # a file
1187 # a file
1181 if destdirexists:
1188 if destdirexists:
1182 res = lambda p: os.path.join(dest, os.path.basename(p))
1189 res = lambda p: os.path.join(dest, os.path.basename(p))
1183 else:
1190 else:
1184 res = lambda p: dest
1191 res = lambda p: dest
1185 return res
1192 return res
1186
1193
1187
1194
1188 pats = list(pats)
1195 pats = list(pats)
1189 if not pats:
1196 if not pats:
1190 raise util.Abort(_('no source or destination specified'))
1197 raise util.Abort(_('no source or destination specified'))
1191 if len(pats) == 1:
1198 if len(pats) == 1:
1192 raise util.Abort(_('no destination specified'))
1199 raise util.Abort(_('no destination specified'))
1193 dest = pats.pop()
1200 dest = pats.pop()
1194 destdirexists = os.path.isdir(dest)
1201 destdirexists = os.path.isdir(dest)
1195 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1202 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1196 raise util.Abort(_('with multiple sources, destination must be an '
1203 raise util.Abort(_('with multiple sources, destination must be an '
1197 'existing directory'))
1204 'existing directory'))
1198 if opts['after']:
1205 if opts['after']:
1199 tfn = targetpathafterfn
1206 tfn = targetpathafterfn
1200 else:
1207 else:
1201 tfn = targetpathfn
1208 tfn = targetpathfn
1202 copylist = []
1209 copylist = []
1203 for pat in pats:
1210 for pat in pats:
1204 srcs = []
1211 srcs = []
1205 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1212 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1206 origsrc = okaytocopy(abssrc, relsrc, exact)
1213 origsrc = okaytocopy(abssrc, relsrc, exact)
1207 if origsrc:
1214 if origsrc:
1208 srcs.append((origsrc, abssrc, relsrc, exact))
1215 srcs.append((origsrc, abssrc, relsrc, exact))
1209 if not srcs:
1216 if not srcs:
1210 continue
1217 continue
1211 copylist.append((tfn(pat, dest, srcs), srcs))
1218 copylist.append((tfn(pat, dest, srcs), srcs))
1212 if not copylist:
1219 if not copylist:
1213 raise util.Abort(_('no files to copy'))
1220 raise util.Abort(_('no files to copy'))
1214
1221
1215 for targetpath, srcs in copylist:
1222 for targetpath, srcs in copylist:
1216 for origsrc, abssrc, relsrc, exact in srcs:
1223 for origsrc, abssrc, relsrc, exact in srcs:
1217 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1224 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1218
1225
1219 if errors:
1226 if errors:
1220 ui.warn(_('(consider using --after)\n'))
1227 ui.warn(_('(consider using --after)\n'))
1221 return errors, copied
1228 return errors, copied
1222
1229
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.

    NOTE: This command should be treated as experimental. While it
    should properly record copied files, this information is not yet
    fully used by merge, nor fully reported by log.
    """
    try:
        # wlock(0): do not wait for the lock, fail immediately
        wlock = repo.wlock(0)
        errs, copied = docopy(ui, repo, pats, opts, wlock)
    except lock.LockHeld, inst:
        ui.warn(_("repository lock held by %s\n") % inst.args[0])
        errs = 1
    # NOTE(review): `copied` is unused here and `wlock` is released
    # only when its reference is dropped — presumably intentional;
    # confirm against docopy's contract.
    return errs
1247
1254
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    # open the revlog directly from its index file, bypassing any repo
    log = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "")
    ancestor_node = log.ancestor(log.lookup(rev1), log.lookup(rev2))
    ui.write("%d:%s\n" % (log.rev(ancestor_node), hex(ancestor_node)))
1253
1260
def debugcomplete(ui, cmd):
    """returns the completion list associated with the given command"""
    candidates = findpossible(cmd).keys()
    candidates.sort()
    ui.write("%s\n" % " ".join(candidates))
1259
1266
def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    if not rev:
        rev = repo.changelog.tip()
    else:
        rev = repo.lookup(rev)
    change = repo.changelog.read(rev)
    # change[0] is the manifest node of the changeset
    n = change[0]
    files = repo.manifest.readflags(n)
    # hold the working-dir lock while rewriting the dirstate; the lock
    # is released when `wlock` goes out of scope
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files.iteritems())
1271
1278
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    # manifests of both dirstate parents, to cross-check each entry's
    # state letter ('n'ormal, 'r'emoved, 'a'dded, 'm'erged) against
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    # direction 1: every dirstate entry must be consistent with the
    # parent manifests
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # direction 2: every file in the first parent's manifest must be
    # known to the dirstate
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
1304
1311
def debugconfig(ui, repo):
    """show combined config settings from all hgrc files"""
    # walkconfig yields (section, name, value) triples
    for entry in ui.walkconfig():
        ui.write('%s.%s=%s\n' % entry)
1309
1316
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # a missing second parent defaults to the null revision
    second = rev2 or hex(nullid)
    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(second))
1321
1328
1322 def debugstate(ui, repo):
1329 def debugstate(ui, repo):
1323 """show the contents of the current dirstate"""
1330 """show the contents of the current dirstate"""
1324 repo.dirstate.read()
1331 repo.dirstate.read()
1325 dc = repo.dirstate.map
1332 dc = repo.dirstate.map
1326 keys = dc.keys()
1333 keys = dc.keys()
1327 keys.sort()
1334 keys.sort()
1328 for file_ in keys:
1335 for file_ in keys:
1329 ui.write("%c %3o %10d %s %s\n"
1336 ui.write("%c %3o %10d %s %s\n"
1330 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1337 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1331 time.strftime("%x %X",
1338 time.strftime("%x %X",
1332 time.localtime(dc[file_][3])), file_))
1339 time.localtime(dc[file_][3])), file_))
1333 for f in repo.dirstate.copies:
1340 for f in repo.dirstate.copies:
1334 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1341 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1335
1342
1336 def debugdata(ui, file_, rev):
1343 def debugdata(ui, file_, rev):
1337 """dump the contents of an data file revision"""
1344 """dump the contents of an data file revision"""
1338 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1345 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1339 file_[:-2] + ".i", file_)
1346 file_[:-2] + ".i", file_)
1340 try:
1347 try:
1341 ui.write(r.revision(r.lookup(rev)))
1348 ui.write(r.revision(r.lookup(rev)))
1342 except KeyError:
1349 except KeyError:
1343 raise util.Abort(_('invalid revision identifier %s'), rev)
1350 raise util.Abort(_('invalid revision identifier %s'), rev)
1344
1351
1345 def debugindex(ui, file_):
1352 def debugindex(ui, file_):
1346 """dump the contents of an index file"""
1353 """dump the contents of an index file"""
1347 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1354 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1348 ui.write(" rev offset length base linkrev" +
1355 ui.write(" rev offset length base linkrev" +
1349 " nodeid p1 p2\n")
1356 " nodeid p1 p2\n")
1350 for i in range(r.count()):
1357 for i in range(r.count()):
1351 e = r.index[i]
1358 e = r.index[i]
1352 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1359 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1353 i, e[0], e[1], e[2], e[3],
1360 i, e[0], e[1], e[2], e[3],
1354 short(e[6]), short(e[4]), short(e[5])))
1361 short(e[6]), short(e[4]), short(e[5])))
1355
1362
1356 def debugindexdot(ui, file_):
1363 def debugindexdot(ui, file_):
1357 """dump an index DAG as a .dot file"""
1364 """dump an index DAG as a .dot file"""
1358 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1365 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "")
1359 ui.write("digraph G {\n")
1366 ui.write("digraph G {\n")
1360 for i in range(r.count()):
1367 for i in range(r.count()):
1361 e = r.index[i]
1368 e = r.index[i]
1362 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1369 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1363 if e[5] != nullid:
1370 if e[5] != nullid:
1364 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1371 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1365 ui.write("}\n")
1372 ui.write("}\n")
1366
1373
1367 def debugrename(ui, repo, file, rev=None):
1374 def debugrename(ui, repo, file, rev=None):
1368 """dump rename information"""
1375 """dump rename information"""
1369 r = repo.file(relpath(repo, [file])[0])
1376 r = repo.file(relpath(repo, [file])[0])
1370 if rev:
1377 if rev:
1371 try:
1378 try:
1372 # assume all revision numbers are for changesets
1379 # assume all revision numbers are for changesets
1373 n = repo.lookup(rev)
1380 n = repo.lookup(rev)
1374 change = repo.changelog.read(n)
1381 change = repo.changelog.read(n)
1375 m = repo.manifest.read(change[0])
1382 m = repo.manifest.read(change[0])
1376 n = m[relpath(repo, [file])[0]]
1383 n = m[relpath(repo, [file])[0]]
1377 except (hg.RepoError, KeyError):
1384 except (hg.RepoError, KeyError):
1378 n = r.lookup(rev)
1385 n = r.lookup(rev)
1379 else:
1386 else:
1380 n = r.tip()
1387 n = r.tip()
1381 m = r.renamed(n)
1388 m = r.renamed(n)
1382 if m:
1389 if m:
1383 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1390 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1384 else:
1391 else:
1385 ui.write(_("not renamed\n"))
1392 ui.write(_("not renamed\n"))
1386
1393
1387 def debugwalk(ui, repo, *pats, **opts):
1394 def debugwalk(ui, repo, *pats, **opts):
1388 """show how files match on given patterns"""
1395 """show how files match on given patterns"""
1389 items = list(walk(repo, pats, opts))
1396 items = list(walk(repo, pats, opts))
1390 if not items:
1397 if not items:
1391 return
1398 return
1392 fmt = '%%s %%-%ds %%-%ds %%s' % (
1399 fmt = '%%s %%-%ds %%-%ds %%s' % (
1393 max([len(abs) for (src, abs, rel, exact) in items]),
1400 max([len(abs) for (src, abs, rel, exact) in items]),
1394 max([len(rel) for (src, abs, rel, exact) in items]))
1401 max([len(rel) for (src, abs, rel, exact) in items]))
1395 for src, abs, rel, exact in items:
1402 for src, abs, rel, exact in items:
1396 line = fmt % (src, abs, rel, exact and 'exact' or '')
1403 line = fmt % (src, abs, rel, exact and 'exact' or '')
1397 ui.write("%s\n" % line.rstrip())
1404 ui.write("%s\n" % line.rstrip())
1398
1405
1399 def diff(ui, repo, *pats, **opts):
1406 def diff(ui, repo, *pats, **opts):
1400 """diff repository (or selected files)
1407 """diff repository (or selected files)
1401
1408
1402 Show differences between revisions for the specified files.
1409 Show differences between revisions for the specified files.
1403
1410
1404 Differences between files are shown using the unified diff format.
1411 Differences between files are shown using the unified diff format.
1405
1412
1406 When two revision arguments are given, then changes are shown
1413 When two revision arguments are given, then changes are shown
1407 between those revisions. If only one revision is specified then
1414 between those revisions. If only one revision is specified then
1408 that revision is compared to the working directory, and, when no
1415 that revision is compared to the working directory, and, when no
1409 revisions are specified, the working directory files are compared
1416 revisions are specified, the working directory files are compared
1410 to its parent.
1417 to its parent.
1411
1418
1412 Without the -a option, diff will avoid generating diffs of files
1419 Without the -a option, diff will avoid generating diffs of files
1413 it detects as binary. With -a, diff will generate a diff anyway,
1420 it detects as binary. With -a, diff will generate a diff anyway,
1414 probably with undesirable results.
1421 probably with undesirable results.
1415 """
1422 """
1416 node1, node2 = None, None
1423 node1, node2 = None, None
1417 revs = [repo.lookup(x) for x in opts['rev']]
1424 revs = [repo.lookup(x) for x in opts['rev']]
1418
1425
1419 if len(revs) > 0:
1426 if len(revs) > 0:
1420 node1 = revs[0]
1427 node1 = revs[0]
1421 if len(revs) > 1:
1428 if len(revs) > 1:
1422 node2 = revs[1]
1429 node2 = revs[1]
1423 if len(revs) > 2:
1430 if len(revs) > 2:
1424 raise util.Abort(_("too many revisions to diff"))
1431 raise util.Abort(_("too many revisions to diff"))
1425
1432
1426 fns, matchfn, anypats = matchpats(repo, pats, opts)
1433 fns, matchfn, anypats = matchpats(repo, pats, opts)
1427
1434
1428 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1435 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1429 text=opts['text'], opts=opts)
1436 text=opts['text'], opts=opts)
1430
1437
1431 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1438 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1432 node = repo.lookup(changeset)
1439 node = repo.lookup(changeset)
1433 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1440 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1434 if opts['switch_parent']:
1441 if opts['switch_parent']:
1435 parents.reverse()
1442 parents.reverse()
1436 prev = (parents and parents[0]) or nullid
1443 prev = (parents and parents[0]) or nullid
1437 change = repo.changelog.read(node)
1444 change = repo.changelog.read(node)
1438
1445
1439 fp = make_file(repo, repo.changelog, opts['output'],
1446 fp = make_file(repo, repo.changelog, opts['output'],
1440 node=node, total=total, seqno=seqno,
1447 node=node, total=total, seqno=seqno,
1441 revwidth=revwidth)
1448 revwidth=revwidth)
1442 if fp != sys.stdout:
1449 if fp != sys.stdout:
1443 ui.note("%s\n" % fp.name)
1450 ui.note("%s\n" % fp.name)
1444
1451
1445 fp.write("# HG changeset patch\n")
1452 fp.write("# HG changeset patch\n")
1446 fp.write("# User %s\n" % change[1])
1453 fp.write("# User %s\n" % change[1])
1447 fp.write("# Node ID %s\n" % hex(node))
1454 fp.write("# Node ID %s\n" % hex(node))
1448 fp.write("# Parent %s\n" % hex(prev))
1455 fp.write("# Parent %s\n" % hex(prev))
1449 if len(parents) > 1:
1456 if len(parents) > 1:
1450 fp.write("# Parent %s\n" % hex(parents[1]))
1457 fp.write("# Parent %s\n" % hex(parents[1]))
1451 fp.write(change[4].rstrip())
1458 fp.write(change[4].rstrip())
1452 fp.write("\n\n")
1459 fp.write("\n\n")
1453
1460
1454 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1461 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1455 if fp != sys.stdout:
1462 if fp != sys.stdout:
1456 fp.close()
1463 fp.close()
1457
1464
1458 def export(ui, repo, *changesets, **opts):
1465 def export(ui, repo, *changesets, **opts):
1459 """dump the header and diffs for one or more changesets
1466 """dump the header and diffs for one or more changesets
1460
1467
1461 Print the changeset header and diffs for one or more revisions.
1468 Print the changeset header and diffs for one or more revisions.
1462
1469
1463 The information shown in the changeset header is: author,
1470 The information shown in the changeset header is: author,
1464 changeset hash, parent and commit comment.
1471 changeset hash, parent and commit comment.
1465
1472
1466 Output may be to a file, in which case the name of the file is
1473 Output may be to a file, in which case the name of the file is
1467 given using a format string. The formatting rules are as follows:
1474 given using a format string. The formatting rules are as follows:
1468
1475
1469 %% literal "%" character
1476 %% literal "%" character
1470 %H changeset hash (40 bytes of hexadecimal)
1477 %H changeset hash (40 bytes of hexadecimal)
1471 %N number of patches being generated
1478 %N number of patches being generated
1472 %R changeset revision number
1479 %R changeset revision number
1473 %b basename of the exporting repository
1480 %b basename of the exporting repository
1474 %h short-form changeset hash (12 bytes of hexadecimal)
1481 %h short-form changeset hash (12 bytes of hexadecimal)
1475 %n zero-padded sequence number, starting at 1
1482 %n zero-padded sequence number, starting at 1
1476 %r zero-padded changeset revision number
1483 %r zero-padded changeset revision number
1477
1484
1478 Without the -a option, export will avoid generating diffs of files
1485 Without the -a option, export will avoid generating diffs of files
1479 it detects as binary. With -a, export will generate a diff anyway,
1486 it detects as binary. With -a, export will generate a diff anyway,
1480 probably with undesirable results.
1487 probably with undesirable results.
1481
1488
1482 With the --switch-parent option, the diff will be against the second
1489 With the --switch-parent option, the diff will be against the second
1483 parent. It can be useful to review a merge.
1490 parent. It can be useful to review a merge.
1484 """
1491 """
1485 if not changesets:
1492 if not changesets:
1486 raise util.Abort(_("export requires at least one changeset"))
1493 raise util.Abort(_("export requires at least one changeset"))
1487 seqno = 0
1494 seqno = 0
1488 revs = list(revrange(ui, repo, changesets))
1495 revs = list(revrange(ui, repo, changesets))
1489 total = len(revs)
1496 total = len(revs)
1490 revwidth = max(map(len, revs))
1497 revwidth = max(map(len, revs))
1491 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1498 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1492 ui.note(msg)
1499 ui.note(msg)
1493 for cset in revs:
1500 for cset in revs:
1494 seqno += 1
1501 seqno += 1
1495 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1502 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1496
1503
1497 def forget(ui, repo, *pats, **opts):
1504 def forget(ui, repo, *pats, **opts):
1498 """don't add the specified files on the next commit
1505 """don't add the specified files on the next commit
1499
1506
1500 Undo an 'hg add' scheduled for the next commit.
1507 Undo an 'hg add' scheduled for the next commit.
1501 """
1508 """
1502 forget = []
1509 forget = []
1503 for src, abs, rel, exact in walk(repo, pats, opts):
1510 for src, abs, rel, exact in walk(repo, pats, opts):
1504 if repo.dirstate.state(abs) == 'a':
1511 if repo.dirstate.state(abs) == 'a':
1505 forget.append(abs)
1512 forget.append(abs)
1506 if ui.verbose or not exact:
1513 if ui.verbose or not exact:
1507 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1514 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1508 repo.forget(forget)
1515 repo.forget(forget)
1509
1516
1510 def grep(ui, repo, pattern, *pats, **opts):
1517 def grep(ui, repo, pattern, *pats, **opts):
1511 """search for a pattern in specified files and revisions
1518 """search for a pattern in specified files and revisions
1512
1519
1513 Search revisions of files for a regular expression.
1520 Search revisions of files for a regular expression.
1514
1521
1515 This command behaves differently than Unix grep. It only accepts
1522 This command behaves differently than Unix grep. It only accepts
1516 Python/Perl regexps. It searches repository history, not the
1523 Python/Perl regexps. It searches repository history, not the
1517 working directory. It always prints the revision number in which
1524 working directory. It always prints the revision number in which
1518 a match appears.
1525 a match appears.
1519
1526
1520 By default, grep only prints output for the first revision of a
1527 By default, grep only prints output for the first revision of a
1521 file in which it finds a match. To get it to print every revision
1528 file in which it finds a match. To get it to print every revision
1522 that contains a change in match status ("-" for a match that
1529 that contains a change in match status ("-" for a match that
1523 becomes a non-match, or "+" for a non-match that becomes a match),
1530 becomes a non-match, or "+" for a non-match that becomes a match),
1524 use the --all flag.
1531 use the --all flag.
1525 """
1532 """
1526 reflags = 0
1533 reflags = 0
1527 if opts['ignore_case']:
1534 if opts['ignore_case']:
1528 reflags |= re.I
1535 reflags |= re.I
1529 regexp = re.compile(pattern, reflags)
1536 regexp = re.compile(pattern, reflags)
1530 sep, eol = ':', '\n'
1537 sep, eol = ':', '\n'
1531 if opts['print0']:
1538 if opts['print0']:
1532 sep = eol = '\0'
1539 sep = eol = '\0'
1533
1540
1534 fcache = {}
1541 fcache = {}
1535 def getfile(fn):
1542 def getfile(fn):
1536 if fn not in fcache:
1543 if fn not in fcache:
1537 fcache[fn] = repo.file(fn)
1544 fcache[fn] = repo.file(fn)
1538 return fcache[fn]
1545 return fcache[fn]
1539
1546
1540 def matchlines(body):
1547 def matchlines(body):
1541 begin = 0
1548 begin = 0
1542 linenum = 0
1549 linenum = 0
1543 while True:
1550 while True:
1544 match = regexp.search(body, begin)
1551 match = regexp.search(body, begin)
1545 if not match:
1552 if not match:
1546 break
1553 break
1547 mstart, mend = match.span()
1554 mstart, mend = match.span()
1548 linenum += body.count('\n', begin, mstart) + 1
1555 linenum += body.count('\n', begin, mstart) + 1
1549 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1556 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1550 lend = body.find('\n', mend)
1557 lend = body.find('\n', mend)
1551 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1558 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1552 begin = lend + 1
1559 begin = lend + 1
1553
1560
1554 class linestate(object):
1561 class linestate(object):
1555 def __init__(self, line, linenum, colstart, colend):
1562 def __init__(self, line, linenum, colstart, colend):
1556 self.line = line
1563 self.line = line
1557 self.linenum = linenum
1564 self.linenum = linenum
1558 self.colstart = colstart
1565 self.colstart = colstart
1559 self.colend = colend
1566 self.colend = colend
1560 def __eq__(self, other):
1567 def __eq__(self, other):
1561 return self.line == other.line
1568 return self.line == other.line
1562 def __hash__(self):
1569 def __hash__(self):
1563 return hash(self.line)
1570 return hash(self.line)
1564
1571
1565 matches = {}
1572 matches = {}
1566 def grepbody(fn, rev, body):
1573 def grepbody(fn, rev, body):
1567 matches[rev].setdefault(fn, {})
1574 matches[rev].setdefault(fn, {})
1568 m = matches[rev][fn]
1575 m = matches[rev][fn]
1569 for lnum, cstart, cend, line in matchlines(body):
1576 for lnum, cstart, cend, line in matchlines(body):
1570 s = linestate(line, lnum, cstart, cend)
1577 s = linestate(line, lnum, cstart, cend)
1571 m[s] = s
1578 m[s] = s
1572
1579
1573 # FIXME: prev isn't used, why ?
1580 # FIXME: prev isn't used, why ?
1574 prev = {}
1581 prev = {}
1575 ucache = {}
1582 ucache = {}
1576 def display(fn, rev, states, prevstates):
1583 def display(fn, rev, states, prevstates):
1577 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1584 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1578 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1585 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1579 counts = {'-': 0, '+': 0}
1586 counts = {'-': 0, '+': 0}
1580 filerevmatches = {}
1587 filerevmatches = {}
1581 for l in diff:
1588 for l in diff:
1582 if incrementing or not opts['all']:
1589 if incrementing or not opts['all']:
1583 change = ((l in prevstates) and '-') or '+'
1590 change = ((l in prevstates) and '-') or '+'
1584 r = rev
1591 r = rev
1585 else:
1592 else:
1586 change = ((l in states) and '-') or '+'
1593 change = ((l in states) and '-') or '+'
1587 r = prev[fn]
1594 r = prev[fn]
1588 cols = [fn, str(rev)]
1595 cols = [fn, str(rev)]
1589 if opts['line_number']:
1596 if opts['line_number']:
1590 cols.append(str(l.linenum))
1597 cols.append(str(l.linenum))
1591 if opts['all']:
1598 if opts['all']:
1592 cols.append(change)
1599 cols.append(change)
1593 if opts['user']:
1600 if opts['user']:
1594 cols.append(trimuser(ui, getchange(rev)[1], rev,
1601 cols.append(trimuser(ui, getchange(rev)[1], rev,
1595 ucache))
1602 ucache))
1596 if opts['files_with_matches']:
1603 if opts['files_with_matches']:
1597 c = (fn, rev)
1604 c = (fn, rev)
1598 if c in filerevmatches:
1605 if c in filerevmatches:
1599 continue
1606 continue
1600 filerevmatches[c] = 1
1607 filerevmatches[c] = 1
1601 else:
1608 else:
1602 cols.append(l.line)
1609 cols.append(l.line)
1603 ui.write(sep.join(cols), eol)
1610 ui.write(sep.join(cols), eol)
1604 counts[change] += 1
1611 counts[change] += 1
1605 return counts['+'], counts['-']
1612 return counts['+'], counts['-']
1606
1613
1607 fstate = {}
1614 fstate = {}
1608 skip = {}
1615 skip = {}
1609 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1616 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1610 count = 0
1617 count = 0
1611 incrementing = False
1618 incrementing = False
1612 for st, rev, fns in changeiter:
1619 for st, rev, fns in changeiter:
1613 if st == 'window':
1620 if st == 'window':
1614 incrementing = rev
1621 incrementing = rev
1615 matches.clear()
1622 matches.clear()
1616 elif st == 'add':
1623 elif st == 'add':
1617 change = repo.changelog.read(repo.lookup(str(rev)))
1624 change = repo.changelog.read(repo.lookup(str(rev)))
1618 mf = repo.manifest.read(change[0])
1625 mf = repo.manifest.read(change[0])
1619 matches[rev] = {}
1626 matches[rev] = {}
1620 for fn in fns:
1627 for fn in fns:
1621 if fn in skip:
1628 if fn in skip:
1622 continue
1629 continue
1623 fstate.setdefault(fn, {})
1630 fstate.setdefault(fn, {})
1624 try:
1631 try:
1625 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1632 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1626 except KeyError:
1633 except KeyError:
1627 pass
1634 pass
1628 elif st == 'iter':
1635 elif st == 'iter':
1629 states = matches[rev].items()
1636 states = matches[rev].items()
1630 states.sort()
1637 states.sort()
1631 for fn, m in states:
1638 for fn, m in states:
1632 if fn in skip:
1639 if fn in skip:
1633 continue
1640 continue
1634 if incrementing or not opts['all'] or fstate[fn]:
1641 if incrementing or not opts['all'] or fstate[fn]:
1635 pos, neg = display(fn, rev, m, fstate[fn])
1642 pos, neg = display(fn, rev, m, fstate[fn])
1636 count += pos + neg
1643 count += pos + neg
1637 if pos and not opts['all']:
1644 if pos and not opts['all']:
1638 skip[fn] = True
1645 skip[fn] = True
1639 fstate[fn] = m
1646 fstate[fn] = m
1640 prev[fn] = rev
1647 prev[fn] = rev
1641
1648
1642 if not incrementing:
1649 if not incrementing:
1643 fstate = fstate.items()
1650 fstate = fstate.items()
1644 fstate.sort()
1651 fstate.sort()
1645 for fn, state in fstate:
1652 for fn, state in fstate:
1646 if fn in skip:
1653 if fn in skip:
1647 continue
1654 continue
1648 display(fn, rev, {}, state)
1655 display(fn, rev, {}, state)
1649 return (count == 0 and 1) or 0
1656 return (count == 0 and 1) or 0
1650
1657
1651 def heads(ui, repo, **opts):
1658 def heads(ui, repo, **opts):
1652 """show current repository heads
1659 """show current repository heads
1653
1660
1654 Show all repository head changesets.
1661 Show all repository head changesets.
1655
1662
1656 Repository "heads" are changesets that don't have children
1663 Repository "heads" are changesets that don't have children
1657 changesets. They are where development generally takes place and
1664 changesets. They are where development generally takes place and
1658 are the usual targets for update and merge operations.
1665 are the usual targets for update and merge operations.
1659 """
1666 """
1660 if opts['rev']:
1667 if opts['rev']:
1661 heads = repo.heads(repo.lookup(opts['rev']))
1668 heads = repo.heads(repo.lookup(opts['rev']))
1662 else:
1669 else:
1663 heads = repo.heads()
1670 heads = repo.heads()
1664 br = None
1671 br = None
1665 if opts['branches']:
1672 if opts['branches']:
1666 br = repo.branchlookup(heads)
1673 br = repo.branchlookup(heads)
1667 displayer = show_changeset(ui, repo, opts)
1674 displayer = show_changeset(ui, repo, opts)
1668 for n in heads:
1675 for n in heads:
1669 displayer.show(changenode=n, brinfo=br)
1676 displayer.show(changenode=n, brinfo=br)
1670
1677
1671 def identify(ui, repo):
1678 def identify(ui, repo):
1672 """print information about the working copy
1679 """print information about the working copy
1673
1680
1674 Print a short summary of the current state of the repo.
1681 Print a short summary of the current state of the repo.
1675
1682
1676 This summary identifies the repository state using one or two parent
1683 This summary identifies the repository state using one or two parent
1677 hash identifiers, followed by a "+" if there are uncommitted changes
1684 hash identifiers, followed by a "+" if there are uncommitted changes
1678 in the working directory, followed by a list of tags for this revision.
1685 in the working directory, followed by a list of tags for this revision.
1679 """
1686 """
1680 parents = [p for p in repo.dirstate.parents() if p != nullid]
1687 parents = [p for p in repo.dirstate.parents() if p != nullid]
1681 if not parents:
1688 if not parents:
1682 ui.write(_("unknown\n"))
1689 ui.write(_("unknown\n"))
1683 return
1690 return
1684
1691
1685 hexfunc = ui.verbose and hex or short
1692 hexfunc = ui.verbose and hex or short
1686 modified, added, removed, deleted, unknown = repo.changes()
1693 modified, added, removed, deleted, unknown = repo.changes()
1687 output = ["%s%s" %
1694 output = ["%s%s" %
1688 ('+'.join([hexfunc(parent) for parent in parents]),
1695 ('+'.join([hexfunc(parent) for parent in parents]),
1689 (modified or added or removed or deleted) and "+" or "")]
1696 (modified or added or removed or deleted) and "+" or "")]
1690
1697
1691 if not ui.quiet:
1698 if not ui.quiet:
1692 # multiple tags for a single parent separated by '/'
1699 # multiple tags for a single parent separated by '/'
1693 parenttags = ['/'.join(tags)
1700 parenttags = ['/'.join(tags)
1694 for tags in map(repo.nodetags, parents) if tags]
1701 for tags in map(repo.nodetags, parents) if tags]
1695 # tags for multiple parents separated by ' + '
1702 # tags for multiple parents separated by ' + '
1696 if parenttags:
1703 if parenttags:
1697 output.append(' + '.join(parenttags))
1704 output.append(' + '.join(parenttags))
1698
1705
1699 ui.write("%s\n" % ' '.join(output))
1706 ui.write("%s\n" % ' '.join(output))
1700
1707
1701 def import_(ui, repo, patch1, *patches, **opts):
1708 def import_(ui, repo, patch1, *patches, **opts):
1702 """import an ordered set of patches
1709 """import an ordered set of patches
1703
1710
1704 Import a list of patches and commit them individually.
1711 Import a list of patches and commit them individually.
1705
1712
1706 If there are outstanding changes in the working directory, import
1713 If there are outstanding changes in the working directory, import
1707 will abort unless given the -f flag.
1714 will abort unless given the -f flag.
1708
1715
1709 If a patch looks like a mail message (its first line starts with
1716 If a patch looks like a mail message (its first line starts with
1710 "From " or looks like an RFC822 header), it will not be applied
1717 "From " or looks like an RFC822 header), it will not be applied
1711 unless the -f option is used. The importer neither parses nor
1718 unless the -f option is used. The importer neither parses nor
1712 discards mail headers, so use -f only to override the "mailness"
1719 discards mail headers, so use -f only to override the "mailness"
1713 safety check, not to import a real mail message.
1720 safety check, not to import a real mail message.
1714 """
1721 """
1715 patches = (patch1,) + patches
1722 patches = (patch1,) + patches
1716
1723
1717 if not opts['force']:
1724 if not opts['force']:
1718 modified, added, removed, deleted, unknown = repo.changes()
1725 modified, added, removed, deleted, unknown = repo.changes()
1719 if modified or added or removed or deleted:
1726 if modified or added or removed or deleted:
1720 raise util.Abort(_("outstanding uncommitted changes"))
1727 raise util.Abort(_("outstanding uncommitted changes"))
1721
1728
1722 d = opts["base"]
1729 d = opts["base"]
1723 strip = opts["strip"]
1730 strip = opts["strip"]
1724
1731
1725 mailre = re.compile(r'(?:From |[\w-]+:)')
1732 mailre = re.compile(r'(?:From |[\w-]+:)')
1726
1733
1727 # attempt to detect the start of a patch
1734 # attempt to detect the start of a patch
1728 # (this heuristic is borrowed from quilt)
1735 # (this heuristic is borrowed from quilt)
1729 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1736 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1730 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1737 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1731 '(---|\*\*\*)[ \t])')
1738 '(---|\*\*\*)[ \t])')
1732
1739
1733 for patch in patches:
1740 for patch in patches:
1734 ui.status(_("applying %s\n") % patch)
1741 ui.status(_("applying %s\n") % patch)
1735 pf = os.path.join(d, patch)
1742 pf = os.path.join(d, patch)
1736
1743
1737 message = []
1744 message = []
1738 user = None
1745 user = None
1739 hgpatch = False
1746 hgpatch = False
1740 for line in file(pf):
1747 for line in file(pf):
1741 line = line.rstrip()
1748 line = line.rstrip()
1742 if (not message and not hgpatch and
1749 if (not message and not hgpatch and
1743 mailre.match(line) and not opts['force']):
1750 mailre.match(line) and not opts['force']):
1744 if len(line) > 35:
1751 if len(line) > 35:
1745 line = line[:32] + '...'
1752 line = line[:32] + '...'
1746 raise util.Abort(_('first line looks like a '
1753 raise util.Abort(_('first line looks like a '
1747 'mail header: ') + line)
1754 'mail header: ') + line)
1748 if diffre.match(line):
1755 if diffre.match(line):
1749 break
1756 break
1750 elif hgpatch:
1757 elif hgpatch:
1751 # parse values when importing the result of an hg export
1758 # parse values when importing the result of an hg export
1752 if line.startswith("# User "):
1759 if line.startswith("# User "):
1753 user = line[7:]
1760 user = line[7:]
1754 ui.debug(_('User: %s\n') % user)
1761 ui.debug(_('User: %s\n') % user)
1755 elif not line.startswith("# ") and line:
1762 elif not line.startswith("# ") and line:
1756 message.append(line)
1763 message.append(line)
1757 hgpatch = False
1764 hgpatch = False
1758 elif line == '# HG changeset patch':
1765 elif line == '# HG changeset patch':
1759 hgpatch = True
1766 hgpatch = True
1760 message = [] # We may have collected garbage
1767 message = [] # We may have collected garbage
1761 else:
1768 else:
1762 message.append(line)
1769 message.append(line)
1763
1770
1764 # make sure message isn't empty
1771 # make sure message isn't empty
1765 if not message:
1772 if not message:
1766 message = _("imported patch %s\n") % patch
1773 message = _("imported patch %s\n") % patch
1767 else:
1774 else:
1768 message = "%s\n" % '\n'.join(message)
1775 message = "%s\n" % '\n'.join(message)
1769 ui.debug(_('message:\n%s\n') % message)
1776 ui.debug(_('message:\n%s\n') % message)
1770
1777
1771 files = util.patch(strip, pf, ui)
1778 files = util.patch(strip, pf, ui)
1772
1779
1773 if len(files) > 0:
1780 if len(files) > 0:
1774 addremove(ui, repo, *files)
1781 addremove(ui, repo, *files)
1775 repo.commit(files, message, user)
1782 repo.commit(files, message, user)
1776
1783
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show the changesets that exist in the given path/URL (or the
    default pull location) but not in the local repository; these are
    exactly the changesets a pull would fetch.

    For a remote repository, --bundle saves the downloaded changegroup
    to a file so a following pull does not transfer it a second time.

    See pull for valid source format details.
    """
    source = ui.expandpath(source)
    # honour the command-line overrides for the remote connection
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    fetchable = repo.findincoming(other, force=opts["force"])
    if not fetchable:
        return

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # write a bundle to disk (uncompressed when the source is
            # not local, so it can be reopened as a bundle repository)
            cg = other.changegroup(fetchable, "incoming")
            fname = cleanup = write_bundle(cg, fname, compress=other.local())
            if opts["bundle"]:
                # the user asked to keep the bundle file around
                cleanup = None
            if not other.local():
                # browse the uncompressed bundle we just wrote
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        nodes = other.changelog.nodesbetween(fetchable)[0]
        if opts['newest_first']:
            nodes.reverse()
        displayer = show_changeset(ui, other, opts)
        for node in nodes:
            realparents = [p for p in other.changelog.parents(node)
                           if p != nullid]
            if opts['no_merges'] and len(realparents) == 2:
                continue
            displayer.show(changenode=node)
            if opts['patch']:
                prev = realparents and realparents[0] or nullid
                dodiff(ui, ui, other, prev, node)
                ui.write("\n")
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            # remove the temporary bundle file
            os.unlink(cleanup)
1832
1839
def init(ui, dest="."):
    """create a new repository in the given directory

    Initialize a new repository in the given directory.  If the given
    directory does not exist, it is created (including any missing
    parent directories).

    If no directory is given, the current directory is used.
    """
    if not os.path.exists(dest):
        # makedirs instead of mkdir so that a nested destination such as
        # "a/b/c" works even when the intermediate directories are missing,
        # as the docstring promises the directory "is created"
        os.makedirs(dest)
    hg.repository(ui, dest, create=1)
1844
1851
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print the names of all files under Mercurial control that match
    the given patterns.

    This command searches the current directory and its
    subdirectories. To search an entire repository, move to the root
    of the repository.

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # NUL-terminate entries for xargs -0 when requested
    sep = opts['print0'] and '\0' or '\n'
    rev = opts['rev']
    if not rev:
        node = None
    else:
        node = repo.lookup(rev)

    for src, abs, rel, exact in walk(repo, pats, opts, node=node,
                                     head='(?:.*/|)'):
        if not node and repo.dirstate.state(abs) == '?':
            # working-dir search: skip files not under version control
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), sep)
        else:
            ui.write((pats and rel) or abs, sep)
1878
1885
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.
    """
    class dui(object):
        # Implement and delegate some ui protocol.  Save hunks of
        # output for later display in the desired order.
        def __init__(self, ui):
            self.ui = ui
            # rev -> list of write() arg tuples, flushed at 'iter' time
            self.hunk = {}
        def bump(self, rev):
            # start buffering output for a new revision
            self.rev = rev
            self.hunk[rev] = []
        def note(self, *args):
            if self.verbose:
                self.write(*args)
        def status(self, *args):
            if not self.quiet:
                self.write(*args)
        def write(self, *args):
            # buffer instead of emitting immediately
            self.hunk[self.rev].append(args)
        def debug(self, *args):
            if self.debugflag:
                self.write(*args)
        def __getattr__(self, key):
            # everything else (verbose/quiet/debugflag, ...) comes from
            # the real ui object
            return getattr(self.ui, key)

    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)

    # --limit: cap the number of displayed changesets
    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    displayer = show_changeset(ui, repo, opts)
    # changeiter yields a small state machine: 'window' opens a fresh
    # output buffer, 'add' renders one revision into it, 'iter' flushes
    # the buffered hunk for a revision in display order
    for st, rev, fns in changeiter:
        if st == 'window':
            du = dui(ui)
            displayer.ui = du
        elif st == 'add':
            du.bump(rev)
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parents(changenode)
                       if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if opts['keyword']:
                # case-insensitive match against the changelog entry;
                # assumed field layout: [1] user, [4] description,
                # [3] file list (first 20 names) — confirm against
                # changelog.read
                changes = getchange(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            br = None
            if opts['branches']:
                br = repo.branchlookup([repo.changelog.node(rev)])

            displayer.show(rev, brinfo=br)
            if opts['patch']:
                prev = (parents and parents[0]) or nullid
                dodiff(du, du, repo, prev, changenode, match=matchfn)
                du.write("\n\n")
        elif st == 'iter':
            if count == limit: break
            # an empty hunk means the revision was filtered out above;
            # it does not count against --limit
            if du.hunk[rev]:
                count += 1
                for args in du.hunk[rev]:
                    ui.write(*args)
1966
1973
def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print the list of files under version control in the given
    revision.  When no revision is given, the tip is used.
    """
    if not rev:
        n = repo.manifest.tip()
    else:
        try:
            # revision identifiers are assumed to name changesets first
            n = repo.lookup(rev)
            change = repo.changelog.read(n)
            n = change[0]
        except hg.RepoError:
            # not a changeset: treat rev as a manifest revision instead
            n = repo.manifest.lookup(rev)
    m = repo.manifest.read(n)
    mf = repo.manifest.readflags(n)
    files = m.keys()
    files.sort()

    for f in files:
        ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
1992
1999
def outgoing(ui, repo, dest="default-push", **opts):
    """show changesets not found in destination

    Show the changesets present locally but missing from the given
    destination repository (or the default push location); these are
    the changesets a push would transfer.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest)
    # honour the command-line overrides for the remote connection
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, dest)
    missing = repo.findoutgoing(other, force=opts['force'])
    nodes = repo.changelog.nodesbetween(missing)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = show_changeset(ui, repo, opts)
    for node in nodes:
        realparents = [p for p in repo.changelog.parents(node)
                       if p != nullid]
        if opts['no_merges'] and len(realparents) == 2:
            continue
        displayer.show(changenode=node)
        if opts['patch']:
            prev = realparents and realparents[0] or nullid
            dodiff(ui, ui, repo, prev, node)
            ui.write("\n")
2023
2030
def parents(ui, repo, rev=None, branches=None, **opts):
    """show the parents of the working dir or revision

    Print the parent revisions of the working directory (or of the
    given revision).
    """
    if not rev:
        p = repo.dirstate.parents()
    else:
        p = repo.changelog.parents(repo.lookup(rev))

    br = None
    if branches is not None:
        br = repo.branchlookup(p)
    displayer = show_changeset(ui, repo, opts)
    for n in p:
        # a null parent means "no parent"; don't display it
        if n == nullid:
            continue
        displayer.show(changenode=n, brinfo=br)
2041
2048
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show the definition of the symbolic path name NAME; with no
    argument, list every available name with its definition.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # no name given: dump the whole table
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
        return
    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % path)
            return
    ui.warn(_("not found!\n"))
    return 1
2061
2068
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path
      http://[user@]host[:port][/path]
      https://[user@]host[:port][/path]
      ssh://[user@]host[:port][/path]

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - /path is relative to the remote user's home directory by default.
      Use two slashes at the start of a path to specify an absolute path.
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/ssh_config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression off
        Host *
          Compression on
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    ui.status(_('pulling from %s\n') % (source))

    # honour the command-line overrides for the remote connection
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, source)
    revs = None
    if opts['rev']:
        if not other.local():
            # lookup of arbitrary revisions is only possible locally
            raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
        revs = [other.lookup(rev) for rev in opts['rev']]
    r = repo.pull(other, heads=revs, force=opts['force'])
    if not r:
        if opts['update']:
            return update(ui, repo)
        else:
            ui.status(_("(run 'hg update' to get a working copy)\n"))

    return r
2114
2121
def push(ui, repo, dest="default-push", **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path
      ssh://[user@]host[:port][/path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    dest = ui.expandpath(dest)
    # wrap the status message in _() so it is translatable, matching
    # the equivalent message in pull()
    ui.status(_('pushing to %s\n') % (dest))

    # honour the command-line overrides for the remote connection
    if opts['ssh']:
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts['remotecmd']:
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

    other = hg.repository(ui, dest)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r
2151
2158
2152 def rawcommit(ui, repo, *flist, **rc):
2159 def rawcommit(ui, repo, *flist, **rc):
2153 """raw commit interface (DEPRECATED)
2160 """raw commit interface (DEPRECATED)
2154
2161
2155 (DEPRECATED)
2162 (DEPRECATED)
2156 Lowlevel commit, for use in helper scripts.
2163 Lowlevel commit, for use in helper scripts.
2157
2164
2158 This command is not intended to be used by normal users, as it is
2165 This command is not intended to be used by normal users, as it is
2159 primarily useful for importing from other SCMs.
2166 primarily useful for importing from other SCMs.
2160
2167
2161 This command is now deprecated and will be removed in a future
2168 This command is now deprecated and will be removed in a future
2162 release, please use debugsetparents and commit instead.
2169 release, please use debugsetparents and commit instead.
2163 """
2170 """
2164
2171
2165 ui.warn(_("(the rawcommit command is deprecated)\n"))
2172 ui.warn(_("(the rawcommit command is deprecated)\n"))
2166
2173
2167 message = rc['message']
2174 message = rc['message']
2168 if not message and rc['logfile']:
2175 if not message and rc['logfile']:
2169 try:
2176 try:
2170 message = open(rc['logfile']).read()
2177 message = open(rc['logfile']).read()
2171 except IOError:
2178 except IOError:
2172 pass
2179 pass
2173 if not message and not rc['logfile']:
2180 if not message and not rc['logfile']:
2174 raise util.Abort(_("missing commit message"))
2181 raise util.Abort(_("missing commit message"))
2175
2182
2176 files = relpath(repo, list(flist))
2183 files = relpath(repo, list(flist))
2177 if rc['files']:
2184 if rc['files']:
2178 files += open(rc['files']).read().splitlines()
2185 files += open(rc['files']).read().splitlines()
2179
2186
2180 rc['parent'] = map(repo.lookup, rc['parent'])
2187 rc['parent'] = map(repo.lookup, rc['parent'])
2181
2188
2182 try:
2189 try:
2183 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2190 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2184 except ValueError, inst:
2191 except ValueError, inst:
2185 raise util.Abort(str(inst))
2192 raise util.Abort(str(inst))
2186
2193
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # verify the repository only if a transaction was actually rolled back
    if not repo.recover():
        return False
    return repo.verify()
2198
2205
def remove(ui, repo, pat, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it.
    """
    names = []
    def okaytoremove(abs, rel, exact):
        # refuse to remove modified (without --force), added, or
        # unmanaged files
        modified, added, removed, deleted, unknown = repo.changes(files=[abs])
        reason = None
        if modified and not opts['force']:
            reason = _('is modified')
        elif added:
            reason = _('has been marked for add')
        elif unknown:
            reason = _('is not managed')
        if not reason:
            return True
        if exact:
            # only complain about files the user named explicitly
            ui.warn(_('not removing %s: file %s\n') % (rel, reason))
    for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
        if not okaytoremove(abs, rel, exact):
            continue
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    repo.remove(names, unlink=True)
2230
2237
2231 def rename(ui, repo, *pats, **opts):
2238 def rename(ui, repo, *pats, **opts):
2232 """rename files; equivalent of copy + remove
2239 """rename files; equivalent of copy + remove
2233
2240
2234 Mark dest as copies of sources; mark sources for deletion. If
2241 Mark dest as copies of sources; mark sources for deletion. If
2235 dest is a directory, copies are put in that directory. If dest is
2242 dest is a directory, copies are put in that directory. If dest is
2236 a file, there can only be one source.
2243 a file, there can only be one source.
2237
2244
2238 By default, this command copies the contents of files as they
2245 By default, this command copies the contents of files as they
2239 stand in the working directory. If invoked with --after, the
2246 stand in the working directory. If invoked with --after, the
2240 operation is recorded, but no copying is performed.
2247 operation is recorded, but no copying is performed.
2241
2248
2242 This command takes effect in the next commit.
2249 This command takes effect in the next commit.
2243
2250
2244 NOTE: This command should be treated as experimental. While it
2251 NOTE: This command should be treated as experimental. While it
2245 should properly record rename files, this information is not yet
2252 should properly record rename files, this information is not yet
2246 fully used by merge, nor fully reported by log.
2253 fully used by merge, nor fully reported by log.
2247 """
2254 """
2248 try:
2255 try:
2249 wlock = repo.wlock(0)
2256 wlock = repo.wlock(0)
2250 errs, copied = docopy(ui, repo, pats, opts, wlock)
2257 errs, copied = docopy(ui, repo, pats, opts, wlock)
2251 names = []
2258 names = []
2252 for abs, rel, exact in copied:
2259 for abs, rel, exact in copied:
2253 if ui.verbose or not exact:
2260 if ui.verbose or not exact:
2254 ui.status(_('removing %s\n') % rel)
2261 ui.status(_('removing %s\n') % rel)
2255 names.append(abs)
2262 names.append(abs)
2256 repo.remove(names, True, wlock)
2263 repo.remove(names, True, wlock)
2257 except lock.LockHeld, inst:
2264 except lock.LockHeld, inst:
2258 ui.warn(_("repository lock held by %s\n") % inst.args[0])
2265 ui.warn(_("repository lock held by %s\n") % inst.args[0])
2259 errs = 1
2266 errs = 1
2260 return errs
2267 return errs
2261
2268
def revert(ui, repo, *pats, **opts):
    """revert modified files or dirs back to their unmodified states

    In its default mode, it reverts any uncommitted modifications made
    to the named files or directories. This restores the contents of
    the affected files to an unmodified state.

    Using the -r option, it reverts the given files or directories to
    their state as of an earlier revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the current working directory.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, all files in the repository are reverted.
    """
    if opts['rev']:
        node = repo.lookup(opts['rev'])
    else:
        # default target: the first parent of the working directory
        node = repo.dirstate.parents()[0]

    files, choose, anypats = matchpats(repo, pats, opts)
    modified, added, removed, deleted, unknown = repo.changes(match=choose)
    # un-add added files and resurrect removed ones before updating
    repo.forget(added)
    repo.undelete(removed)

    return repo.update(node, False, True, choose, False)
2292
2299
2293 def root(ui, repo):
2300 def root(ui, repo):
2294 """print the root (top) of the current working dir
2301 """print the root (top) of the current working dir
2295
2302
2296 Print the root directory of the current repository.
2303 Print the root directory of the current repository.
2297 """
2304 """
2298 ui.write(repo.root + "\n")
2305 ui.write(repo.root + "\n")
2299
2306
2300 def serve(ui, repo, **opts):
2307 def serve(ui, repo, **opts):
2301 """export the repository via HTTP
2308 """export the repository via HTTP
2302
2309
2303 Start a local HTTP repository browser and pull server.
2310 Start a local HTTP repository browser and pull server.
2304
2311
2305 By default, the server logs accesses to stdout and errors to
2312 By default, the server logs accesses to stdout and errors to
2306 stderr. Use the "-A" and "-E" options to log to files.
2313 stderr. Use the "-A" and "-E" options to log to files.
2307 """
2314 """
2308
2315
2309 if opts["stdio"]:
2316 if opts["stdio"]:
2310 fin, fout = sys.stdin, sys.stdout
2317 fin, fout = sys.stdin, sys.stdout
2311 sys.stdout = sys.stderr
2318 sys.stdout = sys.stderr
2312
2319
2313 # Prevent insertion/deletion of CRs
2320 # Prevent insertion/deletion of CRs
2314 util.set_binary(fin)
2321 util.set_binary(fin)
2315 util.set_binary(fout)
2322 util.set_binary(fout)
2316
2323
2317 def getarg():
2324 def getarg():
2318 argline = fin.readline()[:-1]
2325 argline = fin.readline()[:-1]
2319 arg, l = argline.split()
2326 arg, l = argline.split()
2320 val = fin.read(int(l))
2327 val = fin.read(int(l))
2321 return arg, val
2328 return arg, val
2322 def respond(v):
2329 def respond(v):
2323 fout.write("%d\n" % len(v))
2330 fout.write("%d\n" % len(v))
2324 fout.write(v)
2331 fout.write(v)
2325 fout.flush()
2332 fout.flush()
2326
2333
2327 lock = None
2334 lock = None
2328
2335
2329 while 1:
2336 while 1:
2330 cmd = fin.readline()[:-1]
2337 cmd = fin.readline()[:-1]
2331 if cmd == '':
2338 if cmd == '':
2332 return
2339 return
2333 if cmd == "heads":
2340 if cmd == "heads":
2334 h = repo.heads()
2341 h = repo.heads()
2335 respond(" ".join(map(hex, h)) + "\n")
2342 respond(" ".join(map(hex, h)) + "\n")
2336 if cmd == "lock":
2343 if cmd == "lock":
2337 lock = repo.lock()
2344 lock = repo.lock()
2338 respond("")
2345 respond("")
2339 if cmd == "unlock":
2346 if cmd == "unlock":
2340 if lock:
2347 if lock:
2341 lock.release()
2348 lock.release()
2342 lock = None
2349 lock = None
2343 respond("")
2350 respond("")
2344 elif cmd == "branches":
2351 elif cmd == "branches":
2345 arg, nodes = getarg()
2352 arg, nodes = getarg()
2346 nodes = map(bin, nodes.split(" "))
2353 nodes = map(bin, nodes.split(" "))
2347 r = []
2354 r = []
2348 for b in repo.branches(nodes):
2355 for b in repo.branches(nodes):
2349 r.append(" ".join(map(hex, b)) + "\n")
2356 r.append(" ".join(map(hex, b)) + "\n")
2350 respond("".join(r))
2357 respond("".join(r))
2351 elif cmd == "between":
2358 elif cmd == "between":
2352 arg, pairs = getarg()
2359 arg, pairs = getarg()
2353 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2360 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2354 r = []
2361 r = []
2355 for b in repo.between(pairs):
2362 for b in repo.between(pairs):
2356 r.append(" ".join(map(hex, b)) + "\n")
2363 r.append(" ".join(map(hex, b)) + "\n")
2357 respond("".join(r))
2364 respond("".join(r))
2358 elif cmd == "changegroup":
2365 elif cmd == "changegroup":
2359 nodes = []
2366 nodes = []
2360 arg, roots = getarg()
2367 arg, roots = getarg()
2361 nodes = map(bin, roots.split(" "))
2368 nodes = map(bin, roots.split(" "))
2362
2369
2363 cg = repo.changegroup(nodes, 'serve')
2370 cg = repo.changegroup(nodes, 'serve')
2364 while 1:
2371 while 1:
2365 d = cg.read(4096)
2372 d = cg.read(4096)
2366 if not d:
2373 if not d:
2367 break
2374 break
2368 fout.write(d)
2375 fout.write(d)
2369
2376
2370 fout.flush()
2377 fout.flush()
2371
2378
2372 elif cmd == "addchangegroup":
2379 elif cmd == "addchangegroup":
2373 if not lock:
2380 if not lock:
2374 respond("not locked")
2381 respond("not locked")
2375 continue
2382 continue
2376 respond("")
2383 respond("")
2377
2384
2378 r = repo.addchangegroup(fin)
2385 r = repo.addchangegroup(fin)
2379 respond("")
2386 respond("")
2380
2387
2381 optlist = "name templates style address port ipv6 accesslog errorlog"
2388 optlist = "name templates style address port ipv6 accesslog errorlog"
2382 for o in optlist.split():
2389 for o in optlist.split():
2383 if opts[o]:
2390 if opts[o]:
2384 ui.setconfig("web", o, opts[o])
2391 ui.setconfig("web", o, opts[o])
2385
2392
2386 if opts['daemon'] and not opts['daemon_pipefds']:
2393 if opts['daemon'] and not opts['daemon_pipefds']:
2387 rfd, wfd = os.pipe()
2394 rfd, wfd = os.pipe()
2388 args = sys.argv[:]
2395 args = sys.argv[:]
2389 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2396 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2390 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2397 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2391 args[0], args)
2398 args[0], args)
2392 os.close(wfd)
2399 os.close(wfd)
2393 os.read(rfd, 1)
2400 os.read(rfd, 1)
2394 os._exit(0)
2401 os._exit(0)
2395
2402
2396 try:
2403 try:
2397 httpd = hgweb.create_server(repo)
2404 httpd = hgweb.create_server(repo)
2398 except socket.error, inst:
2405 except socket.error, inst:
2399 raise util.Abort(_('cannot start server: ') + inst.args[1])
2406 raise util.Abort(_('cannot start server: ') + inst.args[1])
2400
2407
2401 if ui.verbose:
2408 if ui.verbose:
2402 addr, port = httpd.socket.getsockname()
2409 addr, port = httpd.socket.getsockname()
2403 if addr == '0.0.0.0':
2410 if addr == '0.0.0.0':
2404 addr = socket.gethostname()
2411 addr = socket.gethostname()
2405 else:
2412 else:
2406 try:
2413 try:
2407 addr = socket.gethostbyaddr(addr)[0]
2414 addr = socket.gethostbyaddr(addr)[0]
2408 except socket.error:
2415 except socket.error:
2409 pass
2416 pass
2410 if port != 80:
2417 if port != 80:
2411 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2418 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2412 else:
2419 else:
2413 ui.status(_('listening at http://%s/\n') % addr)
2420 ui.status(_('listening at http://%s/\n') % addr)
2414
2421
2415 if opts['pid_file']:
2422 if opts['pid_file']:
2416 fp = open(opts['pid_file'], 'w')
2423 fp = open(opts['pid_file'], 'w')
2417 fp.write(str(os.getpid()))
2424 fp.write(str(os.getpid()))
2418 fp.close()
2425 fp.close()
2419
2426
2420 if opts['daemon_pipefds']:
2427 if opts['daemon_pipefds']:
2421 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2428 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2422 os.close(rfd)
2429 os.close(rfd)
2423 os.write(wfd, 'y')
2430 os.write(wfd, 'y')
2424 os.close(wfd)
2431 os.close(wfd)
2425 sys.stdout.flush()
2432 sys.stdout.flush()
2426 sys.stderr.flush()
2433 sys.stderr.flush()
2427 fd = os.open(util.nulldev, os.O_RDWR)
2434 fd = os.open(util.nulldev, os.O_RDWR)
2428 if fd != 0: os.dup2(fd, 0)
2435 if fd != 0: os.dup2(fd, 0)
2429 if fd != 1: os.dup2(fd, 1)
2436 if fd != 1: os.dup2(fd, 1)
2430 if fd != 2: os.dup2(fd, 2)
2437 if fd != 2: os.dup2(fd, 2)
2431 if fd not in (0, 1, 2): os.close(fd)
2438 if fd not in (0, 1, 2): os.close(fd)
2432
2439
2433 httpd.serve_forever()
2440 httpd.serve_forever()
2434
2441
2435 def status(ui, repo, *pats, **opts):
2442 def status(ui, repo, *pats, **opts):
2436 """show changed files in the working directory
2443 """show changed files in the working directory
2437
2444
2438 Show changed files in the repository. If names are
2445 Show changed files in the repository. If names are
2439 given, only files that match are shown.
2446 given, only files that match are shown.
2440
2447
2441 The codes used to show the status of files are:
2448 The codes used to show the status of files are:
2442 M = modified
2449 M = modified
2443 A = added
2450 A = added
2444 R = removed
2451 R = removed
2445 ! = deleted, but still tracked
2452 ! = deleted, but still tracked
2446 ? = not tracked
2453 ? = not tracked
2447 """
2454 """
2448
2455
2449 files, matchfn, anypats = matchpats(repo, pats, opts)
2456 files, matchfn, anypats = matchpats(repo, pats, opts)
2450 cwd = (pats and repo.getcwd()) or ''
2457 cwd = (pats and repo.getcwd()) or ''
2451 modified, added, removed, deleted, unknown = [
2458 modified, added, removed, deleted, unknown = [
2452 [util.pathto(cwd, x) for x in n]
2459 [util.pathto(cwd, x) for x in n]
2453 for n in repo.changes(files=files, match=matchfn)]
2460 for n in repo.changes(files=files, match=matchfn)]
2454
2461
2455 changetypes = [('modified', 'M', modified),
2462 changetypes = [('modified', 'M', modified),
2456 ('added', 'A', added),
2463 ('added', 'A', added),
2457 ('removed', 'R', removed),
2464 ('removed', 'R', removed),
2458 ('deleted', '!', deleted),
2465 ('deleted', '!', deleted),
2459 ('unknown', '?', unknown)]
2466 ('unknown', '?', unknown)]
2460
2467
2461 end = opts['print0'] and '\0' or '\n'
2468 end = opts['print0'] and '\0' or '\n'
2462
2469
2463 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2470 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2464 or changetypes):
2471 or changetypes):
2465 if opts['no_status']:
2472 if opts['no_status']:
2466 format = "%%s%s" % end
2473 format = "%%s%s" % end
2467 else:
2474 else:
2468 format = "%s %%s%s" % (char, end)
2475 format = "%s %%s%s" % (char, end)
2469
2476
2470 for f in changes:
2477 for f in changes:
2471 ui.write(format % f)
2478 ui.write(format % f)
2472
2479
2473 def tag(ui, repo, name, rev_=None, **opts):
2480 def tag(ui, repo, name, rev_=None, **opts):
2474 """add a tag for the current tip or a given revision
2481 """add a tag for the current tip or a given revision
2475
2482
2476 Name a particular revision using <name>.
2483 Name a particular revision using <name>.
2477
2484
2478 Tags are used to name particular revisions of the repository and are
2485 Tags are used to name particular revisions of the repository and are
2479 very useful to compare different revision, to go back to significant
2486 very useful to compare different revision, to go back to significant
2480 earlier versions or to mark branch points as releases, etc.
2487 earlier versions or to mark branch points as releases, etc.
2481
2488
2482 If no revision is given, the tip is used.
2489 If no revision is given, the tip is used.
2483
2490
2484 To facilitate version control, distribution, and merging of tags,
2491 To facilitate version control, distribution, and merging of tags,
2485 they are stored as a file named ".hgtags" which is managed
2492 they are stored as a file named ".hgtags" which is managed
2486 similarly to other project files and can be hand-edited if
2493 similarly to other project files and can be hand-edited if
2487 necessary. The file '.hg/localtags' is used for local tags (not
2494 necessary. The file '.hg/localtags' is used for local tags (not
2488 shared among repositories).
2495 shared among repositories).
2489 """
2496 """
2490 if name == "tip":
2497 if name == "tip":
2491 raise util.Abort(_("the name 'tip' is reserved"))
2498 raise util.Abort(_("the name 'tip' is reserved"))
2492 if rev_ is not None:
2499 if rev_ is not None:
2493 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2500 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2494 "please use 'hg tag [-r REV] NAME' instead\n"))
2501 "please use 'hg tag [-r REV] NAME' instead\n"))
2495 if opts['rev']:
2502 if opts['rev']:
2496 raise util.Abort(_("use only one form to specify the revision"))
2503 raise util.Abort(_("use only one form to specify the revision"))
2497 if opts['rev']:
2504 if opts['rev']:
2498 rev_ = opts['rev']
2505 rev_ = opts['rev']
2499 if rev_:
2506 if rev_:
2500 r = hex(repo.lookup(rev_))
2507 r = hex(repo.lookup(rev_))
2501 else:
2508 else:
2502 r = hex(repo.changelog.tip())
2509 r = hex(repo.changelog.tip())
2503
2510
2504 disallowed = (revrangesep, '\r', '\n')
2511 disallowed = (revrangesep, '\r', '\n')
2505 for c in disallowed:
2512 for c in disallowed:
2506 if name.find(c) >= 0:
2513 if name.find(c) >= 0:
2507 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2514 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2508
2515
2509 repo.hook('pretag', throw=True, node=r, tag=name,
2516 repo.hook('pretag', throw=True, node=r, tag=name,
2510 local=int(not not opts['local']))
2517 local=int(not not opts['local']))
2511
2518
2512 if opts['local']:
2519 if opts['local']:
2513 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2520 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2514 repo.hook('tag', node=r, tag=name, local=1)
2521 repo.hook('tag', node=r, tag=name, local=1)
2515 return
2522 return
2516
2523
2517 for x in repo.changes():
2524 for x in repo.changes():
2518 if ".hgtags" in x:
2525 if ".hgtags" in x:
2519 raise util.Abort(_("working copy of .hgtags is changed "
2526 raise util.Abort(_("working copy of .hgtags is changed "
2520 "(please commit .hgtags manually)"))
2527 "(please commit .hgtags manually)"))
2521
2528
2522 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2529 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2523 if repo.dirstate.state(".hgtags") == '?':
2530 if repo.dirstate.state(".hgtags") == '?':
2524 repo.add([".hgtags"])
2531 repo.add([".hgtags"])
2525
2532
2526 message = (opts['message'] or
2533 message = (opts['message'] or
2527 _("Added tag %s for changeset %s") % (name, r))
2534 _("Added tag %s for changeset %s") % (name, r))
2528 try:
2535 try:
2529 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2536 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2530 repo.hook('tag', node=r, tag=name, local=0)
2537 repo.hook('tag', node=r, tag=name, local=0)
2531 except ValueError, inst:
2538 except ValueError, inst:
2532 raise util.Abort(str(inst))
2539 raise util.Abort(str(inst))
2533
2540
2534 def tags(ui, repo):
2541 def tags(ui, repo):
2535 """list repository tags
2542 """list repository tags
2536
2543
2537 List the repository tags.
2544 List the repository tags.
2538
2545
2539 This lists both regular and local tags.
2546 This lists both regular and local tags.
2540 """
2547 """
2541
2548
2542 l = repo.tagslist()
2549 l = repo.tagslist()
2543 l.reverse()
2550 l.reverse()
2544 for t, n in l:
2551 for t, n in l:
2545 try:
2552 try:
2546 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2553 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2547 except KeyError:
2554 except KeyError:
2548 r = " ?:?"
2555 r = " ?:?"
2549 ui.write("%-30s %s\n" % (t, r))
2556 ui.write("%-30s %s\n" % (t, r))
2550
2557
2551 def tip(ui, repo, **opts):
2558 def tip(ui, repo, **opts):
2552 """show the tip revision
2559 """show the tip revision
2553
2560
2554 Show the tip revision.
2561 Show the tip revision.
2555 """
2562 """
2556 n = repo.changelog.tip()
2563 n = repo.changelog.tip()
2557 br = None
2564 br = None
2558 if opts['branches']:
2565 if opts['branches']:
2559 br = repo.branchlookup([n])
2566 br = repo.branchlookup([n])
2560 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2567 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2561 if opts['patch']:
2568 if opts['patch']:
2562 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2569 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2563
2570
2564 def unbundle(ui, repo, fname, **opts):
2571 def unbundle(ui, repo, fname, **opts):
2565 """apply a changegroup file
2572 """apply a changegroup file
2566
2573
2567 Apply a compressed changegroup file generated by the bundle
2574 Apply a compressed changegroup file generated by the bundle
2568 command.
2575 command.
2569 """
2576 """
2570 f = urllib.urlopen(fname)
2577 f = urllib.urlopen(fname)
2571
2578
2572 header = f.read(6)
2579 header = f.read(6)
2573 if not header.startswith("HG"):
2580 if not header.startswith("HG"):
2574 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2581 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2575 elif not header.startswith("HG10"):
2582 elif not header.startswith("HG10"):
2576 raise util.Abort(_("%s: unknown bundle version") % fname)
2583 raise util.Abort(_("%s: unknown bundle version") % fname)
2577 elif header == "HG10BZ":
2584 elif header == "HG10BZ":
2578 def generator(f):
2585 def generator(f):
2579 zd = bz2.BZ2Decompressor()
2586 zd = bz2.BZ2Decompressor()
2580 zd.decompress("BZ")
2587 zd.decompress("BZ")
2581 for chunk in f:
2588 for chunk in f:
2582 yield zd.decompress(chunk)
2589 yield zd.decompress(chunk)
2583 elif header == "HG10UN":
2590 elif header == "HG10UN":
2584 def generator(f):
2591 def generator(f):
2585 for chunk in f:
2592 for chunk in f:
2586 yield chunk
2593 yield chunk
2587 else:
2594 else:
2588 raise util.Abort(_("%s: unknown bundle compression type")
2595 raise util.Abort(_("%s: unknown bundle compression type")
2589 % fname)
2596 % fname)
2590 gen = generator(util.filechunkiter(f, 4096))
2597 gen = generator(util.filechunkiter(f, 4096))
2591 if repo.addchangegroup(util.chunkbuffer(gen)):
2598 if repo.addchangegroup(util.chunkbuffer(gen)):
2592 return 1
2599 return 1
2593
2600
2594 if opts['update']:
2601 if opts['update']:
2595 return update(ui, repo)
2602 return update(ui, repo)
2596 else:
2603 else:
2597 ui.status(_("(run 'hg update' to get a working copy)\n"))
2604 ui.status(_("(run 'hg update' to get a working copy)\n"))
2598
2605
2599 def undo(ui, repo):
2606 def undo(ui, repo):
2600 """undo the last commit or pull
2607 """undo the last commit or pull
2601
2608
2602 Roll back the last pull or commit transaction on the
2609 Roll back the last pull or commit transaction on the
2603 repository, restoring the project to its earlier state.
2610 repository, restoring the project to its earlier state.
2604
2611
2605 This command should be used with care. There is only one level of
2612 This command should be used with care. There is only one level of
2606 undo and there is no redo.
2613 undo and there is no redo.
2607
2614
2608 This command is not intended for use on public repositories. Once
2615 This command is not intended for use on public repositories. Once
2609 a change is visible for pull by other users, undoing it locally is
2616 a change is visible for pull by other users, undoing it locally is
2610 ineffective.
2617 ineffective.
2611 """
2618 """
2612 repo.undo()
2619 repo.undo()
2613
2620
2614 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2621 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2615 branch=None, **opts):
2622 branch=None, **opts):
2616 """update or merge working directory
2623 """update or merge working directory
2617
2624
2618 Update the working directory to the specified revision.
2625 Update the working directory to the specified revision.
2619
2626
2620 If there are no outstanding changes in the working directory and
2627 If there are no outstanding changes in the working directory and
2621 there is a linear relationship between the current version and the
2628 there is a linear relationship between the current version and the
2622 requested version, the result is the requested version.
2629 requested version, the result is the requested version.
2623
2630
2624 Otherwise the result is a merge between the contents of the
2631 Otherwise the result is a merge between the contents of the
2625 current working directory and the requested version. Files that
2632 current working directory and the requested version. Files that
2626 changed between either parent are marked as changed for the next
2633 changed between either parent are marked as changed for the next
2627 commit and a commit must be performed before any further updates
2634 commit and a commit must be performed before any further updates
2628 are allowed.
2635 are allowed.
2629
2636
2630 By default, update will refuse to run if doing so would require
2637 By default, update will refuse to run if doing so would require
2631 merging or discarding local changes.
2638 merging or discarding local changes.
2632 """
2639 """
2633 if branch:
2640 if branch:
2634 br = repo.branchlookup(branch=branch)
2641 br = repo.branchlookup(branch=branch)
2635 found = []
2642 found = []
2636 for x in br:
2643 for x in br:
2637 if branch in br[x]:
2644 if branch in br[x]:
2638 found.append(x)
2645 found.append(x)
2639 if len(found) > 1:
2646 if len(found) > 1:
2640 ui.warn(_("Found multiple heads for %s\n") % branch)
2647 ui.warn(_("Found multiple heads for %s\n") % branch)
2641 for x in found:
2648 for x in found:
2642 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2649 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2643 return 1
2650 return 1
2644 if len(found) == 1:
2651 if len(found) == 1:
2645 node = found[0]
2652 node = found[0]
2646 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2653 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2647 else:
2654 else:
2648 ui.warn(_("branch %s not found\n") % (branch))
2655 ui.warn(_("branch %s not found\n") % (branch))
2649 return 1
2656 return 1
2650 else:
2657 else:
2651 node = node and repo.lookup(node) or repo.changelog.tip()
2658 node = node and repo.lookup(node) or repo.changelog.tip()
2652 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2659 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2653
2660
2654 def verify(ui, repo):
2661 def verify(ui, repo):
2655 """verify the integrity of the repository
2662 """verify the integrity of the repository
2656
2663
2657 Verify the integrity of the current repository.
2664 Verify the integrity of the current repository.
2658
2665
2659 This will perform an extensive check of the repository's
2666 This will perform an extensive check of the repository's
2660 integrity, validating the hashes and checksums of each entry in
2667 integrity, validating the hashes and checksums of each entry in
2661 the changelog, manifest, and tracked files, as well as the
2668 the changelog, manifest, and tracked files, as well as the
2662 integrity of their crosslinks and indices.
2669 integrity of their crosslinks and indices.
2663 """
2670 """
2664 return repo.verify()
2671 return repo.verify()
2665
2672
2666 # Command options and aliases are listed here, alphabetically
2673 # Command options and aliases are listed here, alphabetically
2667
2674
2668 table = {
2675 table = {
2669 "^add":
2676 "^add":
2670 (add,
2677 (add,
2671 [('I', 'include', [], _('include names matching the given patterns')),
2678 [('I', 'include', [], _('include names matching the given patterns')),
2672 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2679 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2673 _('hg add [OPTION]... [FILE]...')),
2680 _('hg add [OPTION]... [FILE]...')),
2674 "addremove":
2681 "addremove":
2675 (addremove,
2682 (addremove,
2676 [('I', 'include', [], _('include names matching the given patterns')),
2683 [('I', 'include', [], _('include names matching the given patterns')),
2677 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2684 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2678 _('hg addremove [OPTION]... [FILE]...')),
2685 _('hg addremove [OPTION]... [FILE]...')),
2679 "^annotate":
2686 "^annotate":
2680 (annotate,
2687 (annotate,
2681 [('r', 'rev', '', _('annotate the specified revision')),
2688 [('r', 'rev', '', _('annotate the specified revision')),
2682 ('a', 'text', None, _('treat all files as text')),
2689 ('a', 'text', None, _('treat all files as text')),
2683 ('u', 'user', None, _('list the author')),
2690 ('u', 'user', None, _('list the author')),
2684 ('d', 'date', None, _('list the date')),
2691 ('d', 'date', None, _('list the date')),
2685 ('n', 'number', None, _('list the revision number (default)')),
2692 ('n', 'number', None, _('list the revision number (default)')),
2686 ('c', 'changeset', None, _('list the changeset')),
2693 ('c', 'changeset', None, _('list the changeset')),
2687 ('I', 'include', [], _('include names matching the given patterns')),
2694 ('I', 'include', [], _('include names matching the given patterns')),
2688 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2695 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2689 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2696 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2690 "bundle":
2697 "bundle":
2691 (bundle,
2698 (bundle,
2692 [('f', 'force', None,
2699 [('f', 'force', None,
2693 _('run even when remote repository is unrelated'))],
2700 _('run even when remote repository is unrelated'))],
2694 _('hg bundle FILE DEST')),
2701 _('hg bundle FILE DEST')),
2695 "cat":
2702 "cat":
2696 (cat,
2703 (cat,
2697 [('o', 'output', '', _('print output to file with formatted name')),
2704 [('o', 'output', '', _('print output to file with formatted name')),
2698 ('r', 'rev', '', _('print the given revision')),
2705 ('r', 'rev', '', _('print the given revision')),
2699 ('I', 'include', [], _('include names matching the given patterns')),
2706 ('I', 'include', [], _('include names matching the given patterns')),
2700 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2707 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2701 _('hg cat [OPTION]... FILE...')),
2708 _('hg cat [OPTION]... FILE...')),
2702 "^clone":
2709 "^clone":
2703 (clone,
2710 (clone,
2704 [('U', 'noupdate', None, _('do not update the new working directory')),
2711 [('U', 'noupdate', None, _('do not update the new working directory')),
2705 ('r', 'rev', [],
2712 ('r', 'rev', [],
2706 _('a changeset you would like to have after cloning')),
2713 _('a changeset you would like to have after cloning')),
2707 ('', 'pull', None, _('use pull protocol to copy metadata')),
2714 ('', 'pull', None, _('use pull protocol to copy metadata')),
2708 ('e', 'ssh', '', _('specify ssh command to use')),
2715 ('e', 'ssh', '', _('specify ssh command to use')),
2709 ('', 'remotecmd', '',
2716 ('', 'remotecmd', '',
2710 _('specify hg command to run on the remote side'))],
2717 _('specify hg command to run on the remote side'))],
2711 _('hg clone [OPTION]... SOURCE [DEST]')),
2718 _('hg clone [OPTION]... SOURCE [DEST]')),
2712 "^commit|ci":
2719 "^commit|ci":
2713 (commit,
2720 (commit,
2714 [('A', 'addremove', None, _('run addremove during commit')),
2721 [('A', 'addremove', None, _('run addremove during commit')),
2715 ('m', 'message', '', _('use <text> as commit message')),
2722 ('m', 'message', '', _('use <text> as commit message')),
2716 ('l', 'logfile', '', _('read the commit message from <file>')),
2723 ('l', 'logfile', '', _('read the commit message from <file>')),
2717 ('d', 'date', '', _('record datecode as commit date')),
2724 ('d', 'date', '', _('record datecode as commit date')),
2718 ('u', 'user', '', _('record user as commiter')),
2725 ('u', 'user', '', _('record user as commiter')),
2719 ('I', 'include', [], _('include names matching the given patterns')),
2726 ('I', 'include', [], _('include names matching the given patterns')),
2720 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2727 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2721 _('hg commit [OPTION]... [FILE]...')),
2728 _('hg commit [OPTION]... [FILE]...')),
2722 "copy|cp":
2729 "copy|cp":
2723 (copy,
2730 (copy,
2724 [('A', 'after', None, _('record a copy that has already occurred')),
2731 [('A', 'after', None, _('record a copy that has already occurred')),
2725 ('f', 'force', None,
2732 ('f', 'force', None,
2726 _('forcibly copy over an existing managed file')),
2733 _('forcibly copy over an existing managed file')),
2727 ('I', 'include', [], _('include names matching the given patterns')),
2734 ('I', 'include', [], _('include names matching the given patterns')),
2728 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2735 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2729 _('hg copy [OPTION]... [SOURCE]... DEST')),
2736 _('hg copy [OPTION]... [SOURCE]... DEST')),
2730 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2737 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2731 "debugcomplete": (debugcomplete, [], _('debugcomplete CMD')),
2738 "debugcomplete": (debugcomplete, [], _('debugcomplete CMD')),
2732 "debugrebuildstate":
2739 "debugrebuildstate":
2733 (debugrebuildstate,
2740 (debugrebuildstate,
2734 [('r', 'rev', '', _('revision to rebuild to'))],
2741 [('r', 'rev', '', _('revision to rebuild to'))],
2735 _('debugrebuildstate [-r REV] [REV]')),
2742 _('debugrebuildstate [-r REV] [REV]')),
2736 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2743 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2737 "debugconfig": (debugconfig, [], _('debugconfig')),
2744 "debugconfig": (debugconfig, [], _('debugconfig')),
2738 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2745 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2739 "debugstate": (debugstate, [], _('debugstate')),
2746 "debugstate": (debugstate, [], _('debugstate')),
2740 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2747 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2741 "debugindex": (debugindex, [], _('debugindex FILE')),
2748 "debugindex": (debugindex, [], _('debugindex FILE')),
2742 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2749 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2743 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2750 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2744 "debugwalk":
2751 "debugwalk":
2745 (debugwalk,
2752 (debugwalk,
2746 [('I', 'include', [], _('include names matching the given patterns')),
2753 [('I', 'include', [], _('include names matching the given patterns')),
2747 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2754 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2748 _('debugwalk [OPTION]... [FILE]...')),
2755 _('debugwalk [OPTION]... [FILE]...')),
2749 "^diff":
2756 "^diff":
2750 (diff,
2757 (diff,
2751 [('r', 'rev', [], _('revision')),
2758 [('r', 'rev', [], _('revision')),
2752 ('a', 'text', None, _('treat all files as text')),
2759 ('a', 'text', None, _('treat all files as text')),
2753 ('p', 'show-function', None,
2760 ('p', 'show-function', None,
2754 _('show which function each change is in')),
2761 _('show which function each change is in')),
2755 ('w', 'ignore-all-space', None,
2762 ('w', 'ignore-all-space', None,
2756 _('ignore white space when comparing lines')),
2763 _('ignore white space when comparing lines')),
2757 ('I', 'include', [], _('include names matching the given patterns')),
2764 ('I', 'include', [], _('include names matching the given patterns')),
2758 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2765 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2759 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2766 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2760 "^export":
2767 "^export":
2761 (export,
2768 (export,
2762 [('o', 'output', '', _('print output to file with formatted name')),
2769 [('o', 'output', '', _('print output to file with formatted name')),
2763 ('a', 'text', None, _('treat all files as text')),
2770 ('a', 'text', None, _('treat all files as text')),
2764 ('', 'switch-parent', None, _('diff against the second parent'))],
2771 ('', 'switch-parent', None, _('diff against the second parent'))],
2765 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2772 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2766 "forget":
2773 "forget":
2767 (forget,
2774 (forget,
2768 [('I', 'include', [], _('include names matching the given patterns')),
2775 [('I', 'include', [], _('include names matching the given patterns')),
2769 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2776 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2770 _('hg forget [OPTION]... FILE...')),
2777 _('hg forget [OPTION]... FILE...')),
2771 "grep":
2778 "grep":
2772 (grep,
2779 (grep,
2773 [('0', 'print0', None, _('end fields with NUL')),
2780 [('0', 'print0', None, _('end fields with NUL')),
2774 ('', 'all', None, _('print all revisions that match')),
2781 ('', 'all', None, _('print all revisions that match')),
2775 ('i', 'ignore-case', None, _('ignore case when matching')),
2782 ('i', 'ignore-case', None, _('ignore case when matching')),
2776 ('l', 'files-with-matches', None,
2783 ('l', 'files-with-matches', None,
2777 _('print only filenames and revs that match')),
2784 _('print only filenames and revs that match')),
2778 ('n', 'line-number', None, _('print matching line numbers')),
2785 ('n', 'line-number', None, _('print matching line numbers')),
2779 ('r', 'rev', [], _('search in given revision range')),
2786 ('r', 'rev', [], _('search in given revision range')),
2780 ('u', 'user', None, _('print user who committed change')),
2787 ('u', 'user', None, _('print user who committed change')),
2781 ('I', 'include', [], _('include names matching the given patterns')),
2788 ('I', 'include', [], _('include names matching the given patterns')),
2782 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2789 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2783 _('hg grep [OPTION]... PATTERN [FILE]...')),
2790 _('hg grep [OPTION]... PATTERN [FILE]...')),
2784 "heads":
2791 "heads":
2785 (heads,
2792 (heads,
2786 [('b', 'branches', None, _('show branches')),
2793 [('b', 'branches', None, _('show branches')),
2787 ('', 'style', '', _('display using template map file')),
2794 ('', 'style', '', _('display using template map file')),
2788 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2795 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2789 ('', 'template', '', _('display with template'))],
2796 ('', 'template', '', _('display with template'))],
2790 _('hg heads [-b] [-r <rev>]')),
2797 _('hg heads [-b] [-r <rev>]')),
2791 "help": (help_, [], _('hg help [COMMAND]')),
2798 "help": (help_, [], _('hg help [COMMAND]')),
2792 "identify|id": (identify, [], _('hg identify')),
2799 "identify|id": (identify, [], _('hg identify')),
2793 "import|patch":
2800 "import|patch":
2794 (import_,
2801 (import_,
2795 [('p', 'strip', 1,
2802 [('p', 'strip', 1,
2796 _('directory strip option for patch. This has the same\n') +
2803 _('directory strip option for patch. This has the same\n') +
2797 _('meaning as the corresponding patch option')),
2804 _('meaning as the corresponding patch option')),
2798 ('b', 'base', '', _('base path')),
2805 ('b', 'base', '', _('base path')),
2799 ('f', 'force', None,
2806 ('f', 'force', None,
2800 _('skip check for outstanding uncommitted changes'))],
2807 _('skip check for outstanding uncommitted changes'))],
2801 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
2808 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
2802 "incoming|in": (incoming,
2809 "incoming|in": (incoming,
2803 [('M', 'no-merges', None, _('do not show merges')),
2810 [('M', 'no-merges', None, _('do not show merges')),
2804 ('f', 'force', None,
2811 ('f', 'force', None,
2805 _('run even when remote repository is unrelated')),
2812 _('run even when remote repository is unrelated')),
2806 ('', 'style', '', _('display using template map file')),
2813 ('', 'style', '', _('display using template map file')),
2807 ('n', 'newest-first', None, _('show newest record first')),
2814 ('n', 'newest-first', None, _('show newest record first')),
2808 ('', 'bundle', '', _('file to store the bundles into')),
2815 ('', 'bundle', '', _('file to store the bundles into')),
2809 ('p', 'patch', None, _('show patch')),
2816 ('p', 'patch', None, _('show patch')),
2810 ('', 'template', '', _('display with template')),
2817 ('', 'template', '', _('display with template')),
2811 ('e', 'ssh', '', _('specify ssh command to use')),
2818 ('e', 'ssh', '', _('specify ssh command to use')),
2812 ('', 'remotecmd', '',
2819 ('', 'remotecmd', '',
2813 _('specify hg command to run on the remote side'))],
2820 _('specify hg command to run on the remote side'))],
2814 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
2821 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
2815 "^init": (init, [], _('hg init [DEST]')),
2822 "^init": (init, [], _('hg init [DEST]')),
2816 "locate":
2823 "locate":
2817 (locate,
2824 (locate,
2818 [('r', 'rev', '', _('search the repository as it stood at rev')),
2825 [('r', 'rev', '', _('search the repository as it stood at rev')),
2819 ('0', 'print0', None,
2826 ('0', 'print0', None,
2820 _('end filenames with NUL, for use with xargs')),
2827 _('end filenames with NUL, for use with xargs')),
2821 ('f', 'fullpath', None,
2828 ('f', 'fullpath', None,
2822 _('print complete paths from the filesystem root')),
2829 _('print complete paths from the filesystem root')),
2823 ('I', 'include', [], _('include names matching the given patterns')),
2830 ('I', 'include', [], _('include names matching the given patterns')),
2824 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2831 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2825 _('hg locate [OPTION]... [PATTERN]...')),
2832 _('hg locate [OPTION]... [PATTERN]...')),
2826 "^log|history":
2833 "^log|history":
2827 (log,
2834 (log,
2828 [('b', 'branches', None, _('show branches')),
2835 [('b', 'branches', None, _('show branches')),
2829 ('k', 'keyword', [], _('search for a keyword')),
2836 ('k', 'keyword', [], _('search for a keyword')),
2830 ('l', 'limit', '', _('limit number of changes displayed')),
2837 ('l', 'limit', '', _('limit number of changes displayed')),
2831 ('r', 'rev', [], _('show the specified revision or range')),
2838 ('r', 'rev', [], _('show the specified revision or range')),
2832 ('M', 'no-merges', None, _('do not show merges')),
2839 ('M', 'no-merges', None, _('do not show merges')),
2833 ('', 'style', '', _('display using template map file')),
2840 ('', 'style', '', _('display using template map file')),
2834 ('m', 'only-merges', None, _('show only merges')),
2841 ('m', 'only-merges', None, _('show only merges')),
2835 ('p', 'patch', None, _('show patch')),
2842 ('p', 'patch', None, _('show patch')),
2836 ('', 'template', '', _('display with template')),
2843 ('', 'template', '', _('display with template')),
2837 ('I', 'include', [], _('include names matching the given patterns')),
2844 ('I', 'include', [], _('include names matching the given patterns')),
2838 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2845 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2839 _('hg log [OPTION]... [FILE]')),
2846 _('hg log [OPTION]... [FILE]')),
2840 "manifest": (manifest, [], _('hg manifest [REV]')),
2847 "manifest": (manifest, [], _('hg manifest [REV]')),
2841 "outgoing|out": (outgoing,
2848 "outgoing|out": (outgoing,
2842 [('M', 'no-merges', None, _('do not show merges')),
2849 [('M', 'no-merges', None, _('do not show merges')),
2843 ('f', 'force', None,
2850 ('f', 'force', None,
2844 _('run even when remote repository is unrelated')),
2851 _('run even when remote repository is unrelated')),
2845 ('p', 'patch', None, _('show patch')),
2852 ('p', 'patch', None, _('show patch')),
2846 ('', 'style', '', _('display using template map file')),
2853 ('', 'style', '', _('display using template map file')),
2847 ('n', 'newest-first', None, _('show newest record first')),
2854 ('n', 'newest-first', None, _('show newest record first')),
2848 ('', 'template', '', _('display with template')),
2855 ('', 'template', '', _('display with template')),
2849 ('e', 'ssh', '', _('specify ssh command to use')),
2856 ('e', 'ssh', '', _('specify ssh command to use')),
2850 ('', 'remotecmd', '',
2857 ('', 'remotecmd', '',
2851 _('specify hg command to run on the remote side'))],
2858 _('specify hg command to run on the remote side'))],
2852 _('hg outgoing [-M] [-p] [-n] [DEST]')),
2859 _('hg outgoing [-M] [-p] [-n] [DEST]')),
2853 "^parents":
2860 "^parents":
2854 (parents,
2861 (parents,
2855 [('b', 'branches', None, _('show branches')),
2862 [('b', 'branches', None, _('show branches')),
2856 ('', 'style', '', _('display using template map file')),
2863 ('', 'style', '', _('display using template map file')),
2857 ('', 'template', '', _('display with template'))],
2864 ('', 'template', '', _('display with template'))],
2858 _('hg parents [-b] [REV]')),
2865 _('hg parents [-b] [REV]')),
2859 "paths": (paths, [], _('hg paths [NAME]')),
2866 "paths": (paths, [], _('hg paths [NAME]')),
2860 "^pull":
2867 "^pull":
2861 (pull,
2868 (pull,
2862 [('u', 'update', None,
2869 [('u', 'update', None,
2863 _('update the working directory to tip after pull')),
2870 _('update the working directory to tip after pull')),
2864 ('e', 'ssh', '', _('specify ssh command to use')),
2871 ('e', 'ssh', '', _('specify ssh command to use')),
2865 ('f', 'force', None,
2872 ('f', 'force', None,
2866 _('run even when remote repository is unrelated')),
2873 _('run even when remote repository is unrelated')),
2867 ('r', 'rev', [], _('a specific revision you would like to pull')),
2874 ('r', 'rev', [], _('a specific revision you would like to pull')),
2868 ('', 'remotecmd', '',
2875 ('', 'remotecmd', '',
2869 _('specify hg command to run on the remote side'))],
2876 _('specify hg command to run on the remote side'))],
2870 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
2877 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
2871 "^push":
2878 "^push":
2872 (push,
2879 (push,
2873 [('f', 'force', None, _('force push')),
2880 [('f', 'force', None, _('force push')),
2874 ('e', 'ssh', '', _('specify ssh command to use')),
2881 ('e', 'ssh', '', _('specify ssh command to use')),
2875 ('r', 'rev', [], _('a specific revision you would like to push')),
2882 ('r', 'rev', [], _('a specific revision you would like to push')),
2876 ('', 'remotecmd', '',
2883 ('', 'remotecmd', '',
2877 _('specify hg command to run on the remote side'))],
2884 _('specify hg command to run on the remote side'))],
2878 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
2885 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
2879 "debugrawcommit|rawcommit":
2886 "debugrawcommit|rawcommit":
2880 (rawcommit,
2887 (rawcommit,
2881 [('p', 'parent', [], _('parent')),
2888 [('p', 'parent', [], _('parent')),
2882 ('d', 'date', '', _('date code')),
2889 ('d', 'date', '', _('date code')),
2883 ('u', 'user', '', _('user')),
2890 ('u', 'user', '', _('user')),
2884 ('F', 'files', '', _('file list')),
2891 ('F', 'files', '', _('file list')),
2885 ('m', 'message', '', _('commit message')),
2892 ('m', 'message', '', _('commit message')),
2886 ('l', 'logfile', '', _('commit message file'))],
2893 ('l', 'logfile', '', _('commit message file'))],
2887 _('hg debugrawcommit [OPTION]... [FILE]...')),
2894 _('hg debugrawcommit [OPTION]... [FILE]...')),
2888 "recover": (recover, [], _('hg recover')),
2895 "recover": (recover, [], _('hg recover')),
2889 "^remove|rm":
2896 "^remove|rm":
2890 (remove,
2897 (remove,
2891 [('f', 'force', None, _('remove file even if modified')),
2898 [('f', 'force', None, _('remove file even if modified')),
2892 ('I', 'include', [], _('include names matching the given patterns')),
2899 ('I', 'include', [], _('include names matching the given patterns')),
2893 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2900 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2894 _('hg remove [OPTION]... FILE...')),
2901 _('hg remove [OPTION]... FILE...')),
2895 "rename|mv":
2902 "rename|mv":
2896 (rename,
2903 (rename,
2897 [('A', 'after', None, _('record a rename that has already occurred')),
2904 [('A', 'after', None, _('record a rename that has already occurred')),
2898 ('f', 'force', None,
2905 ('f', 'force', None,
2899 _('forcibly copy over an existing managed file')),
2906 _('forcibly copy over an existing managed file')),
2900 ('I', 'include', [], _('include names matching the given patterns')),
2907 ('I', 'include', [], _('include names matching the given patterns')),
2901 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2908 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2902 _('hg rename [OPTION]... SOURCE... DEST')),
2909 _('hg rename [OPTION]... SOURCE... DEST')),
2903 "^revert":
2910 "^revert":
2904 (revert,
2911 (revert,
2905 [('r', 'rev', '', _('revision to revert to')),
2912 [('r', 'rev', '', _('revision to revert to')),
2906 ('I', 'include', [], _('include names matching the given patterns')),
2913 ('I', 'include', [], _('include names matching the given patterns')),
2907 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2914 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2908 _('hg revert [-r REV] [NAME]...')),
2915 _('hg revert [-r REV] [NAME]...')),
2909 "root": (root, [], _('hg root')),
2916 "root": (root, [], _('hg root')),
2910 "^serve":
2917 "^serve":
2911 (serve,
2918 (serve,
2912 [('A', 'accesslog', '', _('name of access log file to write to')),
2919 [('A', 'accesslog', '', _('name of access log file to write to')),
2913 ('d', 'daemon', None, _('run server in background')),
2920 ('d', 'daemon', None, _('run server in background')),
2914 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2921 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2915 ('E', 'errorlog', '', _('name of error log file to write to')),
2922 ('E', 'errorlog', '', _('name of error log file to write to')),
2916 ('p', 'port', 0, _('port to use (default: 8000)')),
2923 ('p', 'port', 0, _('port to use (default: 8000)')),
2917 ('a', 'address', '', _('address to use')),
2924 ('a', 'address', '', _('address to use')),
2918 ('n', 'name', '',
2925 ('n', 'name', '',
2919 _('name to show in web pages (default: working dir)')),
2926 _('name to show in web pages (default: working dir)')),
2920 ('', 'pid-file', '', _('name of file to write process ID to')),
2927 ('', 'pid-file', '', _('name of file to write process ID to')),
2921 ('', 'stdio', None, _('for remote clients')),
2928 ('', 'stdio', None, _('for remote clients')),
2922 ('t', 'templates', '', _('web templates to use')),
2929 ('t', 'templates', '', _('web templates to use')),
2923 ('', 'style', '', _('template style to use')),
2930 ('', 'style', '', _('template style to use')),
2924 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2931 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2925 _('hg serve [OPTION]...')),
2932 _('hg serve [OPTION]...')),
2926 "^status|st":
2933 "^status|st":
2927 (status,
2934 (status,
2928 [('m', 'modified', None, _('show only modified files')),
2935 [('m', 'modified', None, _('show only modified files')),
2929 ('a', 'added', None, _('show only added files')),
2936 ('a', 'added', None, _('show only added files')),
2930 ('r', 'removed', None, _('show only removed files')),
2937 ('r', 'removed', None, _('show only removed files')),
2931 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2938 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2932 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2939 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2933 ('n', 'no-status', None, _('hide status prefix')),
2940 ('n', 'no-status', None, _('hide status prefix')),
2934 ('0', 'print0', None,
2941 ('0', 'print0', None,
2935 _('end filenames with NUL, for use with xargs')),
2942 _('end filenames with NUL, for use with xargs')),
2936 ('I', 'include', [], _('include names matching the given patterns')),
2943 ('I', 'include', [], _('include names matching the given patterns')),
2937 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2944 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2938 _('hg status [OPTION]... [FILE]...')),
2945 _('hg status [OPTION]... [FILE]...')),
2939 "tag":
2946 "tag":
2940 (tag,
2947 (tag,
2941 [('l', 'local', None, _('make the tag local')),
2948 [('l', 'local', None, _('make the tag local')),
2942 ('m', 'message', '', _('message for tag commit log entry')),
2949 ('m', 'message', '', _('message for tag commit log entry')),
2943 ('d', 'date', '', _('record datecode as commit date')),
2950 ('d', 'date', '', _('record datecode as commit date')),
2944 ('u', 'user', '', _('record user as commiter')),
2951 ('u', 'user', '', _('record user as commiter')),
2945 ('r', 'rev', '', _('revision to tag'))],
2952 ('r', 'rev', '', _('revision to tag'))],
2946 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2953 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2947 "tags": (tags, [], _('hg tags')),
2954 "tags": (tags, [], _('hg tags')),
2948 "tip":
2955 "tip":
2949 (tip,
2956 (tip,
2950 [('b', 'branches', None, _('show branches')),
2957 [('b', 'branches', None, _('show branches')),
2951 ('', 'style', '', _('display using template map file')),
2958 ('', 'style', '', _('display using template map file')),
2952 ('p', 'patch', None, _('show patch')),
2959 ('p', 'patch', None, _('show patch')),
2953 ('', 'template', '', _('display with template'))],
2960 ('', 'template', '', _('display with template'))],
2954 _('hg tip [-b] [-p]')),
2961 _('hg tip [-b] [-p]')),
2955 "unbundle":
2962 "unbundle":
2956 (unbundle,
2963 (unbundle,
2957 [('u', 'update', None,
2964 [('u', 'update', None,
2958 _('update the working directory to tip after unbundle'))],
2965 _('update the working directory to tip after unbundle'))],
2959 _('hg unbundle [-u] FILE')),
2966 _('hg unbundle [-u] FILE')),
2960 "undo": (undo, [], _('hg undo')),
2967 "undo": (undo, [], _('hg undo')),
2961 "^update|up|checkout|co":
2968 "^update|up|checkout|co":
2962 (update,
2969 (update,
2963 [('b', 'branch', '', _('checkout the head of a specific branch')),
2970 [('b', 'branch', '', _('checkout the head of a specific branch')),
2964 ('', 'style', '', _('display using template map file')),
2971 ('', 'style', '', _('display using template map file')),
2965 ('m', 'merge', None, _('allow merging of branches')),
2972 ('m', 'merge', None, _('allow merging of branches')),
2966 ('C', 'clean', None, _('overwrite locally modified files')),
2973 ('C', 'clean', None, _('overwrite locally modified files')),
2967 ('f', 'force', None, _('force a merge with outstanding changes')),
2974 ('f', 'force', None, _('force a merge with outstanding changes')),
2968 ('', 'template', '', _('display with template'))],
2975 ('', 'template', '', _('display with template'))],
2969 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2976 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
2970 "verify": (verify, [], _('hg verify')),
2977 "verify": (verify, [], _('hg verify')),
2971 "version": (show_version, [], _('hg version')),
2978 "version": (show_version, [], _('hg version')),
2972 }
2979 }
2973
2980
2974 globalopts = [
2981 globalopts = [
2975 ('R', 'repository', '',
2982 ('R', 'repository', '',
2976 _('repository root directory or symbolic path name')),
2983 _('repository root directory or symbolic path name')),
2977 ('', 'cwd', '', _('change working directory')),
2984 ('', 'cwd', '', _('change working directory')),
2978 ('y', 'noninteractive', None,
2985 ('y', 'noninteractive', None,
2979 _('do not prompt, assume \'yes\' for any required answers')),
2986 _('do not prompt, assume \'yes\' for any required answers')),
2980 ('q', 'quiet', None, _('suppress output')),
2987 ('q', 'quiet', None, _('suppress output')),
2981 ('v', 'verbose', None, _('enable additional output')),
2988 ('v', 'verbose', None, _('enable additional output')),
2982 ('', 'debug', None, _('enable debugging output')),
2989 ('', 'debug', None, _('enable debugging output')),
2983 ('', 'debugger', None, _('start debugger')),
2990 ('', 'debugger', None, _('start debugger')),
2984 ('', 'traceback', None, _('print traceback on exception')),
2991 ('', 'traceback', None, _('print traceback on exception')),
2985 ('', 'time', None, _('time how long the command takes')),
2992 ('', 'time', None, _('time how long the command takes')),
2986 ('', 'profile', None, _('print command execution profile')),
2993 ('', 'profile', None, _('print command execution profile')),
2987 ('', 'version', None, _('output version information and exit')),
2994 ('', 'version', None, _('output version information and exit')),
2988 ('h', 'help', None, _('display help and exit')),
2995 ('h', 'help', None, _('display help and exit')),
2989 ]
2996 ]
2990
2997
2991 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2998 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2992 " debugindex debugindexdot")
2999 " debugindex debugindexdot")
2993 optionalrepo = ("paths debugconfig")
3000 optionalrepo = ("paths debugconfig")
2994
3001
2995 def findpossible(cmd):
3002 def findpossible(cmd):
2996 """
3003 """
2997 Return cmd -> (aliases, command table entry)
3004 Return cmd -> (aliases, command table entry)
2998 for each matching command
3005 for each matching command
2999 """
3006 """
3000 choice = {}
3007 choice = {}
3001 debugchoice = {}
3008 debugchoice = {}
3002 for e in table.keys():
3009 for e in table.keys():
3003 aliases = e.lstrip("^").split("|")
3010 aliases = e.lstrip("^").split("|")
3004 if cmd in aliases:
3011 if cmd in aliases:
3005 choice[cmd] = (aliases, table[e])
3012 choice[cmd] = (aliases, table[e])
3006 continue
3013 continue
3007 for a in aliases:
3014 for a in aliases:
3008 if a.startswith(cmd):
3015 if a.startswith(cmd):
3009 if aliases[0].startswith("debug"):
3016 if aliases[0].startswith("debug"):
3010 debugchoice[a] = (aliases, table[e])
3017 debugchoice[a] = (aliases, table[e])
3011 else:
3018 else:
3012 choice[a] = (aliases, table[e])
3019 choice[a] = (aliases, table[e])
3013 break
3020 break
3014
3021
3015 if not choice and debugchoice:
3022 if not choice and debugchoice:
3016 choice = debugchoice
3023 choice = debugchoice
3017
3024
3018 return choice
3025 return choice
3019
3026
3020 def find(cmd):
3027 def find(cmd):
3021 """Return (aliases, command table entry) for command string."""
3028 """Return (aliases, command table entry) for command string."""
3022 choice = findpossible(cmd)
3029 choice = findpossible(cmd)
3023
3030
3024 if choice.has_key(cmd):
3031 if choice.has_key(cmd):
3025 return choice[cmd]
3032 return choice[cmd]
3026
3033
3027 if len(choice) > 1:
3034 if len(choice) > 1:
3028 clist = choice.keys()
3035 clist = choice.keys()
3029 clist.sort()
3036 clist.sort()
3030 raise AmbiguousCommand(cmd, clist)
3037 raise AmbiguousCommand(cmd, clist)
3031
3038
3032 if choice:
3039 if choice:
3033 return choice.values()[0]
3040 return choice.values()[0]
3034
3041
3035 raise UnknownCommand(cmd)
3042 raise UnknownCommand(cmd)
3036
3043
3037 class SignalInterrupt(Exception):
3044 class SignalInterrupt(Exception):
3038 """Exception raised on SIGTERM and SIGHUP."""
3045 """Exception raised on SIGTERM and SIGHUP."""
3039
3046
3040 def catchterm(*args):
3047 def catchterm(*args):
3041 raise SignalInterrupt
3048 raise SignalInterrupt
3042
3049
3043 def run():
3050 def run():
3044 sys.exit(dispatch(sys.argv[1:]))
3051 sys.exit(dispatch(sys.argv[1:]))
3045
3052
3046 class ParseError(Exception):
3053 class ParseError(Exception):
3047 """Exception raised on errors in parsing the command line."""
3054 """Exception raised on errors in parsing the command line."""
3048
3055
3049 def parse(ui, args):
3056 def parse(ui, args):
3050 options = {}
3057 options = {}
3051 cmdoptions = {}
3058 cmdoptions = {}
3052
3059
3053 try:
3060 try:
3054 args = fancyopts.fancyopts(args, globalopts, options)
3061 args = fancyopts.fancyopts(args, globalopts, options)
3055 except fancyopts.getopt.GetoptError, inst:
3062 except fancyopts.getopt.GetoptError, inst:
3056 raise ParseError(None, inst)
3063 raise ParseError(None, inst)
3057
3064
3058 if args:
3065 if args:
3059 cmd, args = args[0], args[1:]
3066 cmd, args = args[0], args[1:]
3060 aliases, i = find(cmd)
3067 aliases, i = find(cmd)
3061 cmd = aliases[0]
3068 cmd = aliases[0]
3062 defaults = ui.config("defaults", cmd)
3069 defaults = ui.config("defaults", cmd)
3063 if defaults:
3070 if defaults:
3064 args = defaults.split() + args
3071 args = defaults.split() + args
3065 c = list(i[1])
3072 c = list(i[1])
3066 else:
3073 else:
3067 cmd = None
3074 cmd = None
3068 c = []
3075 c = []
3069
3076
3070 # combine global options into local
3077 # combine global options into local
3071 for o in globalopts:
3078 for o in globalopts:
3072 c.append((o[0], o[1], options[o[1]], o[3]))
3079 c.append((o[0], o[1], options[o[1]], o[3]))
3073
3080
3074 try:
3081 try:
3075 args = fancyopts.fancyopts(args, c, cmdoptions)
3082 args = fancyopts.fancyopts(args, c, cmdoptions)
3076 except fancyopts.getopt.GetoptError, inst:
3083 except fancyopts.getopt.GetoptError, inst:
3077 raise ParseError(cmd, inst)
3084 raise ParseError(cmd, inst)
3078
3085
3079 # separate global options back out
3086 # separate global options back out
3080 for o in globalopts:
3087 for o in globalopts:
3081 n = o[1]
3088 n = o[1]
3082 options[n] = cmdoptions[n]
3089 options[n] = cmdoptions[n]
3083 del cmdoptions[n]
3090 del cmdoptions[n]
3084
3091
3085 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3092 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3086
3093
3087 def dispatch(args):
3094 def dispatch(args):
3088 signal.signal(signal.SIGTERM, catchterm)
3095 signal.signal(signal.SIGTERM, catchterm)
3089 try:
3096 try:
3090 signal.signal(signal.SIGHUP, catchterm)
3097 signal.signal(signal.SIGHUP, catchterm)
3091 except AttributeError:
3098 except AttributeError:
3092 pass
3099 pass
3093
3100
3094 try:
3101 try:
3095 u = ui.ui()
3102 u = ui.ui()
3096 except util.Abort, inst:
3103 except util.Abort, inst:
3097 sys.stderr.write(_("abort: %s\n") % inst)
3104 sys.stderr.write(_("abort: %s\n") % inst)
3098 sys.exit(1)
3105 sys.exit(1)
3099
3106
3100 external = []
3107 external = []
3101 for x in u.extensions():
3108 for x in u.extensions():
3102 def on_exception(exc, inst):
3109 def on_exception(exc, inst):
3103 u.warn(_("*** failed to import extension %s\n") % x[1])
3110 u.warn(_("*** failed to import extension %s\n") % x[1])
3104 u.warn("%s\n" % inst)
3111 u.warn("%s\n" % inst)
3105 if "--traceback" in sys.argv[1:]:
3112 if "--traceback" in sys.argv[1:]:
3106 traceback.print_exc()
3113 traceback.print_exc()
3107 if x[1]:
3114 if x[1]:
3108 try:
3115 try:
3109 mod = imp.load_source(x[0], x[1])
3116 mod = imp.load_source(x[0], x[1])
3110 except Exception, inst:
3117 except Exception, inst:
3111 on_exception(Exception, inst)
3118 on_exception(Exception, inst)
3112 continue
3119 continue
3113 else:
3120 else:
3114 def importh(name):
3121 def importh(name):
3115 mod = __import__(name)
3122 mod = __import__(name)
3116 components = name.split('.')
3123 components = name.split('.')
3117 for comp in components[1:]:
3124 for comp in components[1:]:
3118 mod = getattr(mod, comp)
3125 mod = getattr(mod, comp)
3119 return mod
3126 return mod
3120 try:
3127 try:
3121 try:
3128 try:
3122 mod = importh("hgext." + x[0])
3129 mod = importh("hgext." + x[0])
3123 except ImportError:
3130 except ImportError:
3124 mod = importh(x[0])
3131 mod = importh(x[0])
3125 except Exception, inst:
3132 except Exception, inst:
3126 on_exception(Exception, inst)
3133 on_exception(Exception, inst)
3127 continue
3134 continue
3128
3135
3129 external.append(mod)
3136 external.append(mod)
3130 for x in external:
3137 for x in external:
3131 cmdtable = getattr(x, 'cmdtable', {})
3138 cmdtable = getattr(x, 'cmdtable', {})
3132 for t in cmdtable:
3139 for t in cmdtable:
3133 if t in table:
3140 if t in table:
3134 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3141 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3135 table.update(cmdtable)
3142 table.update(cmdtable)
3136
3143
3137 try:
3144 try:
3138 cmd, func, args, options, cmdoptions = parse(u, args)
3145 cmd, func, args, options, cmdoptions = parse(u, args)
3139 if options["time"]:
3146 if options["time"]:
3140 def get_times():
3147 def get_times():
3141 t = os.times()
3148 t = os.times()
3142 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3149 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3143 t = (t[0], t[1], t[2], t[3], time.clock())
3150 t = (t[0], t[1], t[2], t[3], time.clock())
3144 return t
3151 return t
3145 s = get_times()
3152 s = get_times()
3146 def print_time():
3153 def print_time():
3147 t = get_times()
3154 t = get_times()
3148 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3155 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3149 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3156 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3150 atexit.register(print_time)
3157 atexit.register(print_time)
3151
3158
3152 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3159 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3153 not options["noninteractive"])
3160 not options["noninteractive"])
3154
3161
3155 # enter the debugger before command execution
3162 # enter the debugger before command execution
3156 if options['debugger']:
3163 if options['debugger']:
3157 pdb.set_trace()
3164 pdb.set_trace()
3158
3165
3159 try:
3166 try:
3160 if options['cwd']:
3167 if options['cwd']:
3161 try:
3168 try:
3162 os.chdir(options['cwd'])
3169 os.chdir(options['cwd'])
3163 except OSError, inst:
3170 except OSError, inst:
3164 raise util.Abort('%s: %s' %
3171 raise util.Abort('%s: %s' %
3165 (options['cwd'], inst.strerror))
3172 (options['cwd'], inst.strerror))
3166
3173
3167 path = u.expandpath(options["repository"]) or ""
3174 path = u.expandpath(options["repository"]) or ""
3168 repo = path and hg.repository(u, path=path) or None
3175 repo = path and hg.repository(u, path=path) or None
3169
3176
3170 if options['help']:
3177 if options['help']:
3171 help_(u, cmd, options['version'])
3178 help_(u, cmd, options['version'])
3172 sys.exit(0)
3179 sys.exit(0)
3173 elif options['version']:
3180 elif options['version']:
3174 show_version(u)
3181 show_version(u)
3175 sys.exit(0)
3182 sys.exit(0)
3176 elif not cmd:
3183 elif not cmd:
3177 help_(u, 'shortlist')
3184 help_(u, 'shortlist')
3178 sys.exit(0)
3185 sys.exit(0)
3179
3186
3180 if cmd not in norepo.split():
3187 if cmd not in norepo.split():
3181 try:
3188 try:
3182 if not repo:
3189 if not repo:
3183 repo = hg.repository(u, path=path)
3190 repo = hg.repository(u, path=path)
3184 u = repo.ui
3191 u = repo.ui
3185 for x in external:
3192 for x in external:
3186 if hasattr(x, 'reposetup'):
3193 if hasattr(x, 'reposetup'):
3187 x.reposetup(u, repo)
3194 x.reposetup(u, repo)
3188 except hg.RepoError:
3195 except hg.RepoError:
3189 if cmd not in optionalrepo.split():
3196 if cmd not in optionalrepo.split():
3190 raise
3197 raise
3191 d = lambda: func(u, repo, *args, **cmdoptions)
3198 d = lambda: func(u, repo, *args, **cmdoptions)
3192 else:
3199 else:
3193 d = lambda: func(u, *args, **cmdoptions)
3200 d = lambda: func(u, *args, **cmdoptions)
3194
3201
3195 try:
3202 try:
3196 if options['profile']:
3203 if options['profile']:
3197 import hotshot, hotshot.stats
3204 import hotshot, hotshot.stats
3198 prof = hotshot.Profile("hg.prof")
3205 prof = hotshot.Profile("hg.prof")
3199 try:
3206 try:
3200 try:
3207 try:
3201 return prof.runcall(d)
3208 return prof.runcall(d)
3202 except:
3209 except:
3203 try:
3210 try:
3204 u.warn(_('exception raised - generating '
3211 u.warn(_('exception raised - generating '
3205 'profile anyway\n'))
3212 'profile anyway\n'))
3206 except:
3213 except:
3207 pass
3214 pass
3208 raise
3215 raise
3209 finally:
3216 finally:
3210 prof.close()
3217 prof.close()
3211 stats = hotshot.stats.load("hg.prof")
3218 stats = hotshot.stats.load("hg.prof")
3212 stats.strip_dirs()
3219 stats.strip_dirs()
3213 stats.sort_stats('time', 'calls')
3220 stats.sort_stats('time', 'calls')
3214 stats.print_stats(40)
3221 stats.print_stats(40)
3215 else:
3222 else:
3216 return d()
3223 return d()
3217 finally:
3224 finally:
3218 u.flush()
3225 u.flush()
3219 except:
3226 except:
3220 # enter the debugger when we hit an exception
3227 # enter the debugger when we hit an exception
3221 if options['debugger']:
3228 if options['debugger']:
3222 pdb.post_mortem(sys.exc_info()[2])
3229 pdb.post_mortem(sys.exc_info()[2])
3223 if options['traceback']:
3230 if options['traceback']:
3224 traceback.print_exc()
3231 traceback.print_exc()
3225 raise
3232 raise
3226 except ParseError, inst:
3233 except ParseError, inst:
3227 if inst.args[0]:
3234 if inst.args[0]:
3228 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3235 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3229 help_(u, inst.args[0])
3236 help_(u, inst.args[0])
3230 else:
3237 else:
3231 u.warn(_("hg: %s\n") % inst.args[1])
3238 u.warn(_("hg: %s\n") % inst.args[1])
3232 help_(u, 'shortlist')
3239 help_(u, 'shortlist')
3233 sys.exit(-1)
3240 sys.exit(-1)
3234 except AmbiguousCommand, inst:
3241 except AmbiguousCommand, inst:
3235 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3242 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3236 (inst.args[0], " ".join(inst.args[1])))
3243 (inst.args[0], " ".join(inst.args[1])))
3237 sys.exit(1)
3244 sys.exit(1)
3238 except UnknownCommand, inst:
3245 except UnknownCommand, inst:
3239 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3246 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3240 help_(u, 'shortlist')
3247 help_(u, 'shortlist')
3241 sys.exit(1)
3248 sys.exit(1)
3242 except hg.RepoError, inst:
3249 except hg.RepoError, inst:
3243 u.warn(_("abort: "), inst, "!\n")
3250 u.warn(_("abort: "), inst, "!\n")
3244 except revlog.RevlogError, inst:
3251 except revlog.RevlogError, inst:
3245 u.warn(_("abort: "), inst, "!\n")
3252 u.warn(_("abort: "), inst, "!\n")
3246 except SignalInterrupt:
3253 except SignalInterrupt:
3247 u.warn(_("killed!\n"))
3254 u.warn(_("killed!\n"))
3248 except KeyboardInterrupt:
3255 except KeyboardInterrupt:
3249 try:
3256 try:
3250 u.warn(_("interrupted!\n"))
3257 u.warn(_("interrupted!\n"))
3251 except IOError, inst:
3258 except IOError, inst:
3252 if inst.errno == errno.EPIPE:
3259 if inst.errno == errno.EPIPE:
3253 if u.debugflag:
3260 if u.debugflag:
3254 u.warn(_("\nbroken pipe\n"))
3261 u.warn(_("\nbroken pipe\n"))
3255 else:
3262 else:
3256 raise
3263 raise
3257 except IOError, inst:
3264 except IOError, inst:
3258 if hasattr(inst, "code"):
3265 if hasattr(inst, "code"):
3259 u.warn(_("abort: %s\n") % inst)
3266 u.warn(_("abort: %s\n") % inst)
3260 elif hasattr(inst, "reason"):
3267 elif hasattr(inst, "reason"):
3261 u.warn(_("abort: error: %s\n") % inst.reason[1])
3268 u.warn(_("abort: error: %s\n") % inst.reason[1])
3262 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3269 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3263 if u.debugflag:
3270 if u.debugflag:
3264 u.warn(_("broken pipe\n"))
3271 u.warn(_("broken pipe\n"))
3265 elif getattr(inst, "strerror", None):
3272 elif getattr(inst, "strerror", None):
3266 if getattr(inst, "filename", None):
3273 if getattr(inst, "filename", None):
3267 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3274 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3268 else:
3275 else:
3269 u.warn(_("abort: %s\n") % inst.strerror)
3276 u.warn(_("abort: %s\n") % inst.strerror)
3270 else:
3277 else:
3271 raise
3278 raise
3272 except OSError, inst:
3279 except OSError, inst:
3273 if hasattr(inst, "filename"):
3280 if hasattr(inst, "filename"):
3274 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3281 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3275 else:
3282 else:
3276 u.warn(_("abort: %s\n") % inst.strerror)
3283 u.warn(_("abort: %s\n") % inst.strerror)
3277 except util.Abort, inst:
3284 except util.Abort, inst:
3278 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3285 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3279 sys.exit(1)
3286 sys.exit(1)
3280 except TypeError, inst:
3287 except TypeError, inst:
3281 # was this an argument error?
3288 # was this an argument error?
3282 tb = traceback.extract_tb(sys.exc_info()[2])
3289 tb = traceback.extract_tb(sys.exc_info()[2])
3283 if len(tb) > 2: # no
3290 if len(tb) > 2: # no
3284 raise
3291 raise
3285 u.debug(inst, "\n")
3292 u.debug(inst, "\n")
3286 u.warn(_("%s: invalid arguments\n") % cmd)
3293 u.warn(_("%s: invalid arguments\n") % cmd)
3287 help_(u, cmd)
3294 help_(u, cmd)
3288 except SystemExit:
3295 except SystemExit:
3289 # don't catch this in the catch-all below
3296 # don't catch this in the catch-all below
3290 raise
3297 raise
3291 except:
3298 except:
3292 u.warn(_("** unknown exception encountered, details follow\n"))
3299 u.warn(_("** unknown exception encountered, details follow\n"))
3293 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3300 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3294 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3301 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3295 % version.get_version())
3302 % version.get_version())
3296 raise
3303 raise
3297
3304
3298 sys.exit(-1)
3305 sys.exit(-1)
@@ -1,1911 +1,1894 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import struct, os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
13 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "changegroup")
14
15
15 class localrepository(object):
16 class localrepository(object):
16 def __del__(self):
17 def __del__(self):
17 self.transhandle = None
18 self.transhandle = None
18 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
19 if not path:
20 if not path:
20 p = os.getcwd()
21 p = os.getcwd()
21 while not os.path.isdir(os.path.join(p, ".hg")):
22 while not os.path.isdir(os.path.join(p, ".hg")):
22 oldp = p
23 oldp = p
23 p = os.path.dirname(p)
24 p = os.path.dirname(p)
24 if p == oldp:
25 if p == oldp:
25 raise repo.RepoError(_("no repo found"))
26 raise repo.RepoError(_("no repo found"))
26 path = p
27 path = p
27 self.path = os.path.join(path, ".hg")
28 self.path = os.path.join(path, ".hg")
28
29
29 if not create and not os.path.isdir(self.path):
30 if not create and not os.path.isdir(self.path):
30 raise repo.RepoError(_("repository %s not found") % path)
31 raise repo.RepoError(_("repository %s not found") % path)
31
32
32 self.root = os.path.abspath(path)
33 self.root = os.path.abspath(path)
33 self.ui = ui.ui(parentui=parentui)
34 self.ui = ui.ui(parentui=parentui)
34 self.opener = util.opener(self.path)
35 self.opener = util.opener(self.path)
35 self.wopener = util.opener(self.root)
36 self.wopener = util.opener(self.root)
36 self.manifest = manifest.manifest(self.opener)
37 self.manifest = manifest.manifest(self.opener)
37 self.changelog = changelog.changelog(self.opener)
38 self.changelog = changelog.changelog(self.opener)
38 self.tagscache = None
39 self.tagscache = None
39 self.nodetagscache = None
40 self.nodetagscache = None
40 self.encodepats = None
41 self.encodepats = None
41 self.decodepats = None
42 self.decodepats = None
42 self.transhandle = None
43 self.transhandle = None
43
44
44 if create:
45 if create:
45 os.mkdir(self.path)
46 os.mkdir(self.path)
46 os.mkdir(self.join("data"))
47 os.mkdir(self.join("data"))
47
48
48 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
49 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
49 try:
50 try:
50 self.ui.readconfig(self.join("hgrc"), self.root)
51 self.ui.readconfig(self.join("hgrc"), self.root)
51 except IOError:
52 except IOError:
52 pass
53 pass
53
54
54 def hook(self, name, throw=False, **args):
55 def hook(self, name, throw=False, **args):
55 def runhook(name, cmd):
56 def runhook(name, cmd):
56 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
57 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
57 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
58 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
58 [(k.upper(), v) for k, v in args.iteritems()])
59 [(k.upper(), v) for k, v in args.iteritems()])
59 r = util.system(cmd, environ=env, cwd=self.root)
60 r = util.system(cmd, environ=env, cwd=self.root)
60 if r:
61 if r:
61 desc, r = util.explain_exit(r)
62 desc, r = util.explain_exit(r)
62 if throw:
63 if throw:
63 raise util.Abort(_('%s hook %s') % (name, desc))
64 raise util.Abort(_('%s hook %s') % (name, desc))
64 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
65 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
65 return False
66 return False
66 return True
67 return True
67
68
68 r = True
69 r = True
69 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
70 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
70 if hname.split(".", 1)[0] == name and cmd]
71 if hname.split(".", 1)[0] == name and cmd]
71 hooks.sort()
72 hooks.sort()
72 for hname, cmd in hooks:
73 for hname, cmd in hooks:
73 r = runhook(hname, cmd) and r
74 r = runhook(hname, cmd) and r
74 return r
75 return r
75
76
76 def tags(self):
77 def tags(self):
77 '''return a mapping of tag to node'''
78 '''return a mapping of tag to node'''
78 if not self.tagscache:
79 if not self.tagscache:
79 self.tagscache = {}
80 self.tagscache = {}
80 def addtag(self, k, n):
81 def addtag(self, k, n):
81 try:
82 try:
82 bin_n = bin(n)
83 bin_n = bin(n)
83 except TypeError:
84 except TypeError:
84 bin_n = ''
85 bin_n = ''
85 self.tagscache[k.strip()] = bin_n
86 self.tagscache[k.strip()] = bin_n
86
87
87 try:
88 try:
88 # read each head of the tags file, ending with the tip
89 # read each head of the tags file, ending with the tip
89 # and add each tag found to the map, with "newer" ones
90 # and add each tag found to the map, with "newer" ones
90 # taking precedence
91 # taking precedence
91 fl = self.file(".hgtags")
92 fl = self.file(".hgtags")
92 h = fl.heads()
93 h = fl.heads()
93 h.reverse()
94 h.reverse()
94 for r in h:
95 for r in h:
95 for l in fl.read(r).splitlines():
96 for l in fl.read(r).splitlines():
96 if l:
97 if l:
97 n, k = l.split(" ", 1)
98 n, k = l.split(" ", 1)
98 addtag(self, k, n)
99 addtag(self, k, n)
99 except KeyError:
100 except KeyError:
100 pass
101 pass
101
102
102 try:
103 try:
103 f = self.opener("localtags")
104 f = self.opener("localtags")
104 for l in f:
105 for l in f:
105 n, k = l.split(" ", 1)
106 n, k = l.split(" ", 1)
106 addtag(self, k, n)
107 addtag(self, k, n)
107 except IOError:
108 except IOError:
108 pass
109 pass
109
110
110 self.tagscache['tip'] = self.changelog.tip()
111 self.tagscache['tip'] = self.changelog.tip()
111
112
112 return self.tagscache
113 return self.tagscache
113
114
114 def tagslist(self):
115 def tagslist(self):
115 '''return a list of tags ordered by revision'''
116 '''return a list of tags ordered by revision'''
116 l = []
117 l = []
117 for t, n in self.tags().items():
118 for t, n in self.tags().items():
118 try:
119 try:
119 r = self.changelog.rev(n)
120 r = self.changelog.rev(n)
120 except:
121 except:
121 r = -2 # sort to the beginning of the list if unknown
122 r = -2 # sort to the beginning of the list if unknown
122 l.append((r, t, n))
123 l.append((r, t, n))
123 l.sort()
124 l.sort()
124 return [(t, n) for r, t, n in l]
125 return [(t, n) for r, t, n in l]
125
126
126 def nodetags(self, node):
127 def nodetags(self, node):
127 '''return the tags associated with a node'''
128 '''return the tags associated with a node'''
128 if not self.nodetagscache:
129 if not self.nodetagscache:
129 self.nodetagscache = {}
130 self.nodetagscache = {}
130 for t, n in self.tags().items():
131 for t, n in self.tags().items():
131 self.nodetagscache.setdefault(n, []).append(t)
132 self.nodetagscache.setdefault(n, []).append(t)
132 return self.nodetagscache.get(node, [])
133 return self.nodetagscache.get(node, [])
133
134
134 def lookup(self, key):
135 def lookup(self, key):
135 try:
136 try:
136 return self.tags()[key]
137 return self.tags()[key]
137 except KeyError:
138 except KeyError:
138 try:
139 try:
139 return self.changelog.lookup(key)
140 return self.changelog.lookup(key)
140 except:
141 except:
141 raise repo.RepoError(_("unknown revision '%s'") % key)
142 raise repo.RepoError(_("unknown revision '%s'") % key)
142
143
143 def dev(self):
144 def dev(self):
144 return os.stat(self.path).st_dev
145 return os.stat(self.path).st_dev
145
146
146 def local(self):
147 def local(self):
147 return True
148 return True
148
149
149 def join(self, f):
150 def join(self, f):
150 return os.path.join(self.path, f)
151 return os.path.join(self.path, f)
151
152
152 def wjoin(self, f):
153 def wjoin(self, f):
153 return os.path.join(self.root, f)
154 return os.path.join(self.root, f)
154
155
155 def file(self, f):
156 def file(self, f):
156 if f[0] == '/':
157 if f[0] == '/':
157 f = f[1:]
158 f = f[1:]
158 return filelog.filelog(self.opener, f)
159 return filelog.filelog(self.opener, f)
159
160
160 def getcwd(self):
161 def getcwd(self):
161 return self.dirstate.getcwd()
162 return self.dirstate.getcwd()
162
163
163 def wfile(self, f, mode='r'):
164 def wfile(self, f, mode='r'):
164 return self.wopener(f, mode)
165 return self.wopener(f, mode)
165
166
166 def wread(self, filename):
167 def wread(self, filename):
167 if self.encodepats == None:
168 if self.encodepats == None:
168 l = []
169 l = []
169 for pat, cmd in self.ui.configitems("encode"):
170 for pat, cmd in self.ui.configitems("encode"):
170 mf = util.matcher(self.root, "", [pat], [], [])[1]
171 mf = util.matcher(self.root, "", [pat], [], [])[1]
171 l.append((mf, cmd))
172 l.append((mf, cmd))
172 self.encodepats = l
173 self.encodepats = l
173
174
174 data = self.wopener(filename, 'r').read()
175 data = self.wopener(filename, 'r').read()
175
176
176 for mf, cmd in self.encodepats:
177 for mf, cmd in self.encodepats:
177 if mf(filename):
178 if mf(filename):
178 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
179 data = util.filter(data, cmd)
180 data = util.filter(data, cmd)
180 break
181 break
181
182
182 return data
183 return data
183
184
184 def wwrite(self, filename, data, fd=None):
185 def wwrite(self, filename, data, fd=None):
185 if self.decodepats == None:
186 if self.decodepats == None:
186 l = []
187 l = []
187 for pat, cmd in self.ui.configitems("decode"):
188 for pat, cmd in self.ui.configitems("decode"):
188 mf = util.matcher(self.root, "", [pat], [], [])[1]
189 mf = util.matcher(self.root, "", [pat], [], [])[1]
189 l.append((mf, cmd))
190 l.append((mf, cmd))
190 self.decodepats = l
191 self.decodepats = l
191
192
192 for mf, cmd in self.decodepats:
193 for mf, cmd in self.decodepats:
193 if mf(filename):
194 if mf(filename):
194 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
195 data = util.filter(data, cmd)
196 data = util.filter(data, cmd)
196 break
197 break
197
198
198 if fd:
199 if fd:
199 return fd.write(data)
200 return fd.write(data)
200 return self.wopener(filename, 'w').write(data)
201 return self.wopener(filename, 'w').write(data)
201
202
202 def transaction(self):
203 def transaction(self):
203 tr = self.transhandle
204 tr = self.transhandle
204 if tr != None and tr.running():
205 if tr != None and tr.running():
205 return tr.nest()
206 return tr.nest()
206
207
207 # save dirstate for undo
208 # save dirstate for undo
208 try:
209 try:
209 ds = self.opener("dirstate").read()
210 ds = self.opener("dirstate").read()
210 except IOError:
211 except IOError:
211 ds = ""
212 ds = ""
212 self.opener("journal.dirstate", "w").write(ds)
213 self.opener("journal.dirstate", "w").write(ds)
213
214
214 tr = transaction.transaction(self.ui.warn, self.opener,
215 tr = transaction.transaction(self.ui.warn, self.opener,
215 self.join("journal"),
216 self.join("journal"),
216 aftertrans(self.path))
217 aftertrans(self.path))
217 self.transhandle = tr
218 self.transhandle = tr
218 return tr
219 return tr
219
220
220 def recover(self):
221 def recover(self):
221 l = self.lock()
222 l = self.lock()
222 if os.path.exists(self.join("journal")):
223 if os.path.exists(self.join("journal")):
223 self.ui.status(_("rolling back interrupted transaction\n"))
224 self.ui.status(_("rolling back interrupted transaction\n"))
224 transaction.rollback(self.opener, self.join("journal"))
225 transaction.rollback(self.opener, self.join("journal"))
225 self.reload()
226 self.reload()
226 return True
227 return True
227 else:
228 else:
228 self.ui.warn(_("no interrupted transaction available\n"))
229 self.ui.warn(_("no interrupted transaction available\n"))
229 return False
230 return False
230
231
231 def undo(self, wlock=None):
232 def undo(self, wlock=None):
232 if not wlock:
233 if not wlock:
233 wlock = self.wlock()
234 wlock = self.wlock()
234 l = self.lock()
235 l = self.lock()
235 if os.path.exists(self.join("undo")):
236 if os.path.exists(self.join("undo")):
236 self.ui.status(_("rolling back last transaction\n"))
237 self.ui.status(_("rolling back last transaction\n"))
237 transaction.rollback(self.opener, self.join("undo"))
238 transaction.rollback(self.opener, self.join("undo"))
238 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
239 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
239 self.reload()
240 self.reload()
240 self.wreload()
241 self.wreload()
241 else:
242 else:
242 self.ui.warn(_("no undo information available\n"))
243 self.ui.warn(_("no undo information available\n"))
243
244
244 def wreload(self):
245 def wreload(self):
245 self.dirstate.read()
246 self.dirstate.read()
246
247
247 def reload(self):
248 def reload(self):
248 self.changelog.load()
249 self.changelog.load()
249 self.manifest.load()
250 self.manifest.load()
250 self.tagscache = None
251 self.tagscache = None
251 self.nodetagscache = None
252 self.nodetagscache = None
252
253
253 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None):
254 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None):
254 try:
255 try:
255 l = lock.lock(self.join(lockname), 0, releasefn)
256 l = lock.lock(self.join(lockname), 0, releasefn)
256 except lock.LockHeld, inst:
257 except lock.LockHeld, inst:
257 if not wait:
258 if not wait:
258 raise inst
259 raise inst
259 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
260 self.ui.warn(_("waiting for lock held by %s\n") % inst.args[0])
260 try:
261 try:
261 # default to 600 seconds timeout
262 # default to 600 seconds timeout
262 l = lock.lock(self.join(lockname),
263 l = lock.lock(self.join(lockname),
263 int(self.ui.config("ui", "timeout") or 600),
264 int(self.ui.config("ui", "timeout") or 600),
264 releasefn)
265 releasefn)
265 except lock.LockHeld, inst:
266 except lock.LockHeld, inst:
266 raise util.Abort(_("timeout while waiting for "
267 raise util.Abort(_("timeout while waiting for "
267 "lock held by %s") % inst.args[0])
268 "lock held by %s") % inst.args[0])
268 if acquirefn:
269 if acquirefn:
269 acquirefn()
270 acquirefn()
270 return l
271 return l
271
272
272 def lock(self, wait=1):
273 def lock(self, wait=1):
273 return self.do_lock("lock", wait, acquirefn=self.reload)
274 return self.do_lock("lock", wait, acquirefn=self.reload)
274
275
275 def wlock(self, wait=1):
276 def wlock(self, wait=1):
276 return self.do_lock("wlock", wait,
277 return self.do_lock("wlock", wait,
277 self.dirstate.write,
278 self.dirstate.write,
278 self.wreload)
279 self.wreload)
279
280
280 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
281 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
281 "determine whether a new filenode is needed"
282 "determine whether a new filenode is needed"
282 fp1 = manifest1.get(filename, nullid)
283 fp1 = manifest1.get(filename, nullid)
283 fp2 = manifest2.get(filename, nullid)
284 fp2 = manifest2.get(filename, nullid)
284
285
285 if fp2 != nullid:
286 if fp2 != nullid:
286 # is one parent an ancestor of the other?
287 # is one parent an ancestor of the other?
287 fpa = filelog.ancestor(fp1, fp2)
288 fpa = filelog.ancestor(fp1, fp2)
288 if fpa == fp1:
289 if fpa == fp1:
289 fp1, fp2 = fp2, nullid
290 fp1, fp2 = fp2, nullid
290 elif fpa == fp2:
291 elif fpa == fp2:
291 fp2 = nullid
292 fp2 = nullid
292
293
293 # is the file unmodified from the parent? report existing entry
294 # is the file unmodified from the parent? report existing entry
294 if fp2 == nullid and text == filelog.read(fp1):
295 if fp2 == nullid and text == filelog.read(fp1):
295 return (fp1, None, None)
296 return (fp1, None, None)
296
297
297 return (None, fp1, fp2)
298 return (None, fp1, fp2)
298
299
299 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
300 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
300 orig_parent = self.dirstate.parents()[0] or nullid
301 orig_parent = self.dirstate.parents()[0] or nullid
301 p1 = p1 or self.dirstate.parents()[0] or nullid
302 p1 = p1 or self.dirstate.parents()[0] or nullid
302 p2 = p2 or self.dirstate.parents()[1] or nullid
303 p2 = p2 or self.dirstate.parents()[1] or nullid
303 c1 = self.changelog.read(p1)
304 c1 = self.changelog.read(p1)
304 c2 = self.changelog.read(p2)
305 c2 = self.changelog.read(p2)
305 m1 = self.manifest.read(c1[0])
306 m1 = self.manifest.read(c1[0])
306 mf1 = self.manifest.readflags(c1[0])
307 mf1 = self.manifest.readflags(c1[0])
307 m2 = self.manifest.read(c2[0])
308 m2 = self.manifest.read(c2[0])
308 changed = []
309 changed = []
309
310
310 if orig_parent == p1:
311 if orig_parent == p1:
311 update_dirstate = 1
312 update_dirstate = 1
312 else:
313 else:
313 update_dirstate = 0
314 update_dirstate = 0
314
315
315 if not wlock:
316 if not wlock:
316 wlock = self.wlock()
317 wlock = self.wlock()
317 l = self.lock()
318 l = self.lock()
318 tr = self.transaction()
319 tr = self.transaction()
319 mm = m1.copy()
320 mm = m1.copy()
320 mfm = mf1.copy()
321 mfm = mf1.copy()
321 linkrev = self.changelog.count()
322 linkrev = self.changelog.count()
322 for f in files:
323 for f in files:
323 try:
324 try:
324 t = self.wread(f)
325 t = self.wread(f)
325 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
326 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
326 r = self.file(f)
327 r = self.file(f)
327 mfm[f] = tm
328 mfm[f] = tm
328
329
329 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
330 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
330 if entry:
331 if entry:
331 mm[f] = entry
332 mm[f] = entry
332 continue
333 continue
333
334
334 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
335 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
335 changed.append(f)
336 changed.append(f)
336 if update_dirstate:
337 if update_dirstate:
337 self.dirstate.update([f], "n")
338 self.dirstate.update([f], "n")
338 except IOError:
339 except IOError:
339 try:
340 try:
340 del mm[f]
341 del mm[f]
341 del mfm[f]
342 del mfm[f]
342 if update_dirstate:
343 if update_dirstate:
343 self.dirstate.forget([f])
344 self.dirstate.forget([f])
344 except:
345 except:
345 # deleted from p2?
346 # deleted from p2?
346 pass
347 pass
347
348
348 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
349 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
349 user = user or self.ui.username()
350 user = user or self.ui.username()
350 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
351 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
351 tr.close()
352 tr.close()
352 if update_dirstate:
353 if update_dirstate:
353 self.dirstate.setparents(n, nullid)
354 self.dirstate.setparents(n, nullid)
354
355
355 def commit(self, files=None, text="", user=None, date=None,
356 def commit(self, files=None, text="", user=None, date=None,
356 match=util.always, force=False, lock=None, wlock=None):
357 match=util.always, force=False, lock=None, wlock=None):
357 commit = []
358 commit = []
358 remove = []
359 remove = []
359 changed = []
360 changed = []
360
361
361 if files:
362 if files:
362 for f in files:
363 for f in files:
363 s = self.dirstate.state(f)
364 s = self.dirstate.state(f)
364 if s in 'nmai':
365 if s in 'nmai':
365 commit.append(f)
366 commit.append(f)
366 elif s == 'r':
367 elif s == 'r':
367 remove.append(f)
368 remove.append(f)
368 else:
369 else:
369 self.ui.warn(_("%s not tracked!\n") % f)
370 self.ui.warn(_("%s not tracked!\n") % f)
370 else:
371 else:
371 modified, added, removed, deleted, unknown = self.changes(match=match)
372 modified, added, removed, deleted, unknown = self.changes(match=match)
372 commit = modified + added
373 commit = modified + added
373 remove = removed
374 remove = removed
374
375
375 p1, p2 = self.dirstate.parents()
376 p1, p2 = self.dirstate.parents()
376 c1 = self.changelog.read(p1)
377 c1 = self.changelog.read(p1)
377 c2 = self.changelog.read(p2)
378 c2 = self.changelog.read(p2)
378 m1 = self.manifest.read(c1[0])
379 m1 = self.manifest.read(c1[0])
379 mf1 = self.manifest.readflags(c1[0])
380 mf1 = self.manifest.readflags(c1[0])
380 m2 = self.manifest.read(c2[0])
381 m2 = self.manifest.read(c2[0])
381
382
382 if not commit and not remove and not force and p2 == nullid:
383 if not commit and not remove and not force and p2 == nullid:
383 self.ui.status(_("nothing changed\n"))
384 self.ui.status(_("nothing changed\n"))
384 return None
385 return None
385
386
386 xp1 = hex(p1)
387 xp1 = hex(p1)
387 if p2 == nullid: xp2 = ''
388 if p2 == nullid: xp2 = ''
388 else: xp2 = hex(p2)
389 else: xp2 = hex(p2)
389
390
390 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
391 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
391
392
392 if not wlock:
393 if not wlock:
393 wlock = self.wlock()
394 wlock = self.wlock()
394 if not lock:
395 if not lock:
395 lock = self.lock()
396 lock = self.lock()
396 tr = self.transaction()
397 tr = self.transaction()
397
398
398 # check in files
399 # check in files
399 new = {}
400 new = {}
400 linkrev = self.changelog.count()
401 linkrev = self.changelog.count()
401 commit.sort()
402 commit.sort()
402 for f in commit:
403 for f in commit:
403 self.ui.note(f + "\n")
404 self.ui.note(f + "\n")
404 try:
405 try:
405 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
406 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
406 t = self.wread(f)
407 t = self.wread(f)
407 except IOError:
408 except IOError:
408 self.ui.warn(_("trouble committing %s!\n") % f)
409 self.ui.warn(_("trouble committing %s!\n") % f)
409 raise
410 raise
410
411
411 r = self.file(f)
412 r = self.file(f)
412
413
413 meta = {}
414 meta = {}
414 cp = self.dirstate.copied(f)
415 cp = self.dirstate.copied(f)
415 if cp:
416 if cp:
416 meta["copy"] = cp
417 meta["copy"] = cp
417 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
418 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
418 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
419 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
419 fp1, fp2 = nullid, nullid
420 fp1, fp2 = nullid, nullid
420 else:
421 else:
421 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
422 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
422 if entry:
423 if entry:
423 new[f] = entry
424 new[f] = entry
424 continue
425 continue
425
426
426 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
427 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
427 # remember what we've added so that we can later calculate
428 # remember what we've added so that we can later calculate
428 # the files to pull from a set of changesets
429 # the files to pull from a set of changesets
429 changed.append(f)
430 changed.append(f)
430
431
431 # update manifest
432 # update manifest
432 m1 = m1.copy()
433 m1 = m1.copy()
433 m1.update(new)
434 m1.update(new)
434 for f in remove:
435 for f in remove:
435 if f in m1:
436 if f in m1:
436 del m1[f]
437 del m1[f]
437 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
438 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
438 (new, remove))
439 (new, remove))
439
440
440 # add changeset
441 # add changeset
441 new = new.keys()
442 new = new.keys()
442 new.sort()
443 new.sort()
443
444
444 if not text:
445 if not text:
445 edittext = [""]
446 edittext = [""]
446 if p2 != nullid:
447 if p2 != nullid:
447 edittext.append("HG: branch merge")
448 edittext.append("HG: branch merge")
448 edittext.extend(["HG: changed %s" % f for f in changed])
449 edittext.extend(["HG: changed %s" % f for f in changed])
449 edittext.extend(["HG: removed %s" % f for f in remove])
450 edittext.extend(["HG: removed %s" % f for f in remove])
450 if not changed and not remove:
451 if not changed and not remove:
451 edittext.append("HG: no files changed")
452 edittext.append("HG: no files changed")
452 edittext.append("")
453 edittext.append("")
453 # run editor in the repository root
454 # run editor in the repository root
454 olddir = os.getcwd()
455 olddir = os.getcwd()
455 os.chdir(self.root)
456 os.chdir(self.root)
456 edittext = self.ui.edit("\n".join(edittext))
457 edittext = self.ui.edit("\n".join(edittext))
457 os.chdir(olddir)
458 os.chdir(olddir)
458 if not edittext.rstrip():
459 if not edittext.rstrip():
459 return None
460 return None
460 text = edittext
461 text = edittext
461
462
462 user = user or self.ui.username()
463 user = user or self.ui.username()
463 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
464 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
464 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
465 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
465 parent2=xp2)
466 parent2=xp2)
466 tr.close()
467 tr.close()
467
468
468 self.dirstate.setparents(n)
469 self.dirstate.setparents(n)
469 self.dirstate.update(new, "n")
470 self.dirstate.update(new, "n")
470 self.dirstate.forget(remove)
471 self.dirstate.forget(remove)
471
472
472 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
473 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
473 return n
474 return n
474
475
475 def walk(self, node=None, files=[], match=util.always):
476 def walk(self, node=None, files=[], match=util.always):
476 if node:
477 if node:
477 fdict = dict.fromkeys(files)
478 fdict = dict.fromkeys(files)
478 for fn in self.manifest.read(self.changelog.read(node)[0]):
479 for fn in self.manifest.read(self.changelog.read(node)[0]):
479 fdict.pop(fn, None)
480 fdict.pop(fn, None)
480 if match(fn):
481 if match(fn):
481 yield 'm', fn
482 yield 'm', fn
482 for fn in fdict:
483 for fn in fdict:
483 self.ui.warn(_('%s: No such file in rev %s\n') % (
484 self.ui.warn(_('%s: No such file in rev %s\n') % (
484 util.pathto(self.getcwd(), fn), short(node)))
485 util.pathto(self.getcwd(), fn), short(node)))
485 else:
486 else:
486 for src, fn in self.dirstate.walk(files, match):
487 for src, fn in self.dirstate.walk(files, match):
487 yield src, fn
488 yield src, fn
488
489
489 def changes(self, node1=None, node2=None, files=[], match=util.always,
490 def changes(self, node1=None, node2=None, files=[], match=util.always,
490 wlock=None):
491 wlock=None):
491 """return changes between two nodes or node and working directory
492 """return changes between two nodes or node and working directory
492
493
493 If node1 is None, use the first dirstate parent instead.
494 If node1 is None, use the first dirstate parent instead.
494 If node2 is None, compare node1 with working directory.
495 If node2 is None, compare node1 with working directory.
495 """
496 """
496
497
497 def fcmp(fn, mf):
498 def fcmp(fn, mf):
498 t1 = self.wread(fn)
499 t1 = self.wread(fn)
499 t2 = self.file(fn).read(mf.get(fn, nullid))
500 t2 = self.file(fn).read(mf.get(fn, nullid))
500 return cmp(t1, t2)
501 return cmp(t1, t2)
501
502
502 def mfmatches(node):
503 def mfmatches(node):
503 change = self.changelog.read(node)
504 change = self.changelog.read(node)
504 mf = dict(self.manifest.read(change[0]))
505 mf = dict(self.manifest.read(change[0]))
505 for fn in mf.keys():
506 for fn in mf.keys():
506 if not match(fn):
507 if not match(fn):
507 del mf[fn]
508 del mf[fn]
508 return mf
509 return mf
509
510
510 if node1:
511 if node1:
511 # read the manifest from node1 before the manifest from node2,
512 # read the manifest from node1 before the manifest from node2,
512 # so that we'll hit the manifest cache if we're going through
513 # so that we'll hit the manifest cache if we're going through
513 # all the revisions in parent->child order.
514 # all the revisions in parent->child order.
514 mf1 = mfmatches(node1)
515 mf1 = mfmatches(node1)
515
516
516 # are we comparing the working directory?
517 # are we comparing the working directory?
517 if not node2:
518 if not node2:
518 if not wlock:
519 if not wlock:
519 try:
520 try:
520 wlock = self.wlock(wait=0)
521 wlock = self.wlock(wait=0)
521 except lock.LockException:
522 except lock.LockException:
522 wlock = None
523 wlock = None
523 lookup, modified, added, removed, deleted, unknown = (
524 lookup, modified, added, removed, deleted, unknown = (
524 self.dirstate.changes(files, match))
525 self.dirstate.changes(files, match))
525
526
526 # are we comparing working dir against its parent?
527 # are we comparing working dir against its parent?
527 if not node1:
528 if not node1:
528 if lookup:
529 if lookup:
529 # do a full compare of any files that might have changed
530 # do a full compare of any files that might have changed
530 mf2 = mfmatches(self.dirstate.parents()[0])
531 mf2 = mfmatches(self.dirstate.parents()[0])
531 for f in lookup:
532 for f in lookup:
532 if fcmp(f, mf2):
533 if fcmp(f, mf2):
533 modified.append(f)
534 modified.append(f)
534 elif wlock is not None:
535 elif wlock is not None:
535 self.dirstate.update([f], "n")
536 self.dirstate.update([f], "n")
536 else:
537 else:
537 # we are comparing working dir against non-parent
538 # we are comparing working dir against non-parent
538 # generate a pseudo-manifest for the working dir
539 # generate a pseudo-manifest for the working dir
539 mf2 = mfmatches(self.dirstate.parents()[0])
540 mf2 = mfmatches(self.dirstate.parents()[0])
540 for f in lookup + modified + added:
541 for f in lookup + modified + added:
541 mf2[f] = ""
542 mf2[f] = ""
542 for f in removed:
543 for f in removed:
543 if f in mf2:
544 if f in mf2:
544 del mf2[f]
545 del mf2[f]
545 else:
546 else:
546 # we are comparing two revisions
547 # we are comparing two revisions
547 deleted, unknown = [], []
548 deleted, unknown = [], []
548 mf2 = mfmatches(node2)
549 mf2 = mfmatches(node2)
549
550
550 if node1:
551 if node1:
551 # flush lists from dirstate before comparing manifests
552 # flush lists from dirstate before comparing manifests
552 modified, added = [], []
553 modified, added = [], []
553
554
554 for fn in mf2:
555 for fn in mf2:
555 if mf1.has_key(fn):
556 if mf1.has_key(fn):
556 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
557 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
557 modified.append(fn)
558 modified.append(fn)
558 del mf1[fn]
559 del mf1[fn]
559 else:
560 else:
560 added.append(fn)
561 added.append(fn)
561
562
562 removed = mf1.keys()
563 removed = mf1.keys()
563
564
564 # sort and return results:
565 # sort and return results:
565 for l in modified, added, removed, deleted, unknown:
566 for l in modified, added, removed, deleted, unknown:
566 l.sort()
567 l.sort()
567 return (modified, added, removed, deleted, unknown)
568 return (modified, added, removed, deleted, unknown)
568
569
569 def add(self, list, wlock=None):
570 def add(self, list, wlock=None):
570 if not wlock:
571 if not wlock:
571 wlock = self.wlock()
572 wlock = self.wlock()
572 for f in list:
573 for f in list:
573 p = self.wjoin(f)
574 p = self.wjoin(f)
574 if not os.path.exists(p):
575 if not os.path.exists(p):
575 self.ui.warn(_("%s does not exist!\n") % f)
576 self.ui.warn(_("%s does not exist!\n") % f)
576 elif not os.path.isfile(p):
577 elif not os.path.isfile(p):
577 self.ui.warn(_("%s not added: only files supported currently\n")
578 self.ui.warn(_("%s not added: only files supported currently\n")
578 % f)
579 % f)
579 elif self.dirstate.state(f) in 'an':
580 elif self.dirstate.state(f) in 'an':
580 self.ui.warn(_("%s already tracked!\n") % f)
581 self.ui.warn(_("%s already tracked!\n") % f)
581 else:
582 else:
582 self.dirstate.update([f], "a")
583 self.dirstate.update([f], "a")
583
584
584 def forget(self, list, wlock=None):
585 def forget(self, list, wlock=None):
585 if not wlock:
586 if not wlock:
586 wlock = self.wlock()
587 wlock = self.wlock()
587 for f in list:
588 for f in list:
588 if self.dirstate.state(f) not in 'ai':
589 if self.dirstate.state(f) not in 'ai':
589 self.ui.warn(_("%s not added!\n") % f)
590 self.ui.warn(_("%s not added!\n") % f)
590 else:
591 else:
591 self.dirstate.forget([f])
592 self.dirstate.forget([f])
592
593
593 def remove(self, list, unlink=False, wlock=None):
594 def remove(self, list, unlink=False, wlock=None):
594 if unlink:
595 if unlink:
595 for f in list:
596 for f in list:
596 try:
597 try:
597 util.unlink(self.wjoin(f))
598 util.unlink(self.wjoin(f))
598 except OSError, inst:
599 except OSError, inst:
599 if inst.errno != errno.ENOENT:
600 if inst.errno != errno.ENOENT:
600 raise
601 raise
601 if not wlock:
602 if not wlock:
602 wlock = self.wlock()
603 wlock = self.wlock()
603 for f in list:
604 for f in list:
604 p = self.wjoin(f)
605 p = self.wjoin(f)
605 if os.path.exists(p):
606 if os.path.exists(p):
606 self.ui.warn(_("%s still exists!\n") % f)
607 self.ui.warn(_("%s still exists!\n") % f)
607 elif self.dirstate.state(f) == 'a':
608 elif self.dirstate.state(f) == 'a':
608 self.dirstate.forget([f])
609 self.dirstate.forget([f])
609 elif f not in self.dirstate:
610 elif f not in self.dirstate:
610 self.ui.warn(_("%s not tracked!\n") % f)
611 self.ui.warn(_("%s not tracked!\n") % f)
611 else:
612 else:
612 self.dirstate.update([f], "r")
613 self.dirstate.update([f], "r")
613
614
614 def undelete(self, list, wlock=None):
615 def undelete(self, list, wlock=None):
615 p = self.dirstate.parents()[0]
616 p = self.dirstate.parents()[0]
616 mn = self.changelog.read(p)[0]
617 mn = self.changelog.read(p)[0]
617 mf = self.manifest.readflags(mn)
618 mf = self.manifest.readflags(mn)
618 m = self.manifest.read(mn)
619 m = self.manifest.read(mn)
619 if not wlock:
620 if not wlock:
620 wlock = self.wlock()
621 wlock = self.wlock()
621 for f in list:
622 for f in list:
622 if self.dirstate.state(f) not in "r":
623 if self.dirstate.state(f) not in "r":
623 self.ui.warn("%s not removed!\n" % f)
624 self.ui.warn("%s not removed!\n" % f)
624 else:
625 else:
625 t = self.file(f).read(m[f])
626 t = self.file(f).read(m[f])
626 self.wwrite(f, t)
627 self.wwrite(f, t)
627 util.set_exec(self.wjoin(f), mf[f])
628 util.set_exec(self.wjoin(f), mf[f])
628 self.dirstate.update([f], "n")
629 self.dirstate.update([f], "n")
629
630
630 def copy(self, source, dest, wlock=None):
631 def copy(self, source, dest, wlock=None):
631 p = self.wjoin(dest)
632 p = self.wjoin(dest)
632 if not os.path.exists(p):
633 if not os.path.exists(p):
633 self.ui.warn(_("%s does not exist!\n") % dest)
634 self.ui.warn(_("%s does not exist!\n") % dest)
634 elif not os.path.isfile(p):
635 elif not os.path.isfile(p):
635 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
636 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
636 else:
637 else:
637 if not wlock:
638 if not wlock:
638 wlock = self.wlock()
639 wlock = self.wlock()
639 if self.dirstate.state(dest) == '?':
640 if self.dirstate.state(dest) == '?':
640 self.dirstate.update([dest], "a")
641 self.dirstate.update([dest], "a")
641 self.dirstate.copy(source, dest)
642 self.dirstate.copy(source, dest)
642
643
643 def heads(self, start=None):
644 def heads(self, start=None):
644 heads = self.changelog.heads(start)
645 heads = self.changelog.heads(start)
645 # sort the output in rev descending order
646 # sort the output in rev descending order
646 heads = [(-self.changelog.rev(h), h) for h in heads]
647 heads = [(-self.changelog.rev(h), h) for h in heads]
647 heads.sort()
648 heads.sort()
648 return [n for (r, n) in heads]
649 return [n for (r, n) in heads]
649
650
650 # branchlookup returns a dict giving a list of branches for
651 # branchlookup returns a dict giving a list of branches for
651 # each head. A branch is defined as the tag of a node or
652 # each head. A branch is defined as the tag of a node or
652 # the branch of the node's parents. If a node has multiple
653 # the branch of the node's parents. If a node has multiple
653 # branch tags, tags are eliminated if they are visible from other
654 # branch tags, tags are eliminated if they are visible from other
654 # branch tags.
655 # branch tags.
655 #
656 #
656 # So, for this graph: a->b->c->d->e
657 # So, for this graph: a->b->c->d->e
657 # \ /
658 # \ /
658 # aa -----/
659 # aa -----/
659 # a has tag 2.6.12
660 # a has tag 2.6.12
660 # d has tag 2.6.13
661 # d has tag 2.6.13
661 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
662 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
662 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
663 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
663 # from the list.
664 # from the list.
664 #
665 #
665 # It is possible that more than one head will have the same branch tag.
666 # It is possible that more than one head will have the same branch tag.
666 # callers need to check the result for multiple heads under the same
667 # callers need to check the result for multiple heads under the same
667 # branch tag if that is a problem for them (ie checkout of a specific
668 # branch tag if that is a problem for them (ie checkout of a specific
668 # branch).
669 # branch).
669 #
670 #
670 # passing in a specific branch will limit the depth of the search
671 # passing in a specific branch will limit the depth of the search
671 # through the parents. It won't limit the branches returned in the
672 # through the parents. It won't limit the branches returned in the
672 # result though.
673 # result though.
673 def branchlookup(self, heads=None, branch=None):
674 def branchlookup(self, heads=None, branch=None):
674 if not heads:
675 if not heads:
675 heads = self.heads()
676 heads = self.heads()
676 headt = [ h for h in heads ]
677 headt = [ h for h in heads ]
677 chlog = self.changelog
678 chlog = self.changelog
678 branches = {}
679 branches = {}
679 merges = []
680 merges = []
680 seenmerge = {}
681 seenmerge = {}
681
682
682 # traverse the tree once for each head, recording in the branches
683 # traverse the tree once for each head, recording in the branches
683 # dict which tags are visible from this head. The branches
684 # dict which tags are visible from this head. The branches
684 # dict also records which tags are visible from each tag
685 # dict also records which tags are visible from each tag
685 # while we traverse.
686 # while we traverse.
686 while headt or merges:
687 while headt or merges:
687 if merges:
688 if merges:
688 n, found = merges.pop()
689 n, found = merges.pop()
689 visit = [n]
690 visit = [n]
690 else:
691 else:
691 h = headt.pop()
692 h = headt.pop()
692 visit = [h]
693 visit = [h]
693 found = [h]
694 found = [h]
694 seen = {}
695 seen = {}
695 while visit:
696 while visit:
696 n = visit.pop()
697 n = visit.pop()
697 if n in seen:
698 if n in seen:
698 continue
699 continue
699 pp = chlog.parents(n)
700 pp = chlog.parents(n)
700 tags = self.nodetags(n)
701 tags = self.nodetags(n)
701 if tags:
702 if tags:
702 for x in tags:
703 for x in tags:
703 if x == 'tip':
704 if x == 'tip':
704 continue
705 continue
705 for f in found:
706 for f in found:
706 branches.setdefault(f, {})[n] = 1
707 branches.setdefault(f, {})[n] = 1
707 branches.setdefault(n, {})[n] = 1
708 branches.setdefault(n, {})[n] = 1
708 break
709 break
709 if n not in found:
710 if n not in found:
710 found.append(n)
711 found.append(n)
711 if branch in tags:
712 if branch in tags:
712 continue
713 continue
713 seen[n] = 1
714 seen[n] = 1
714 if pp[1] != nullid and n not in seenmerge:
715 if pp[1] != nullid and n not in seenmerge:
715 merges.append((pp[1], [x for x in found]))
716 merges.append((pp[1], [x for x in found]))
716 seenmerge[n] = 1
717 seenmerge[n] = 1
717 if pp[0] != nullid:
718 if pp[0] != nullid:
718 visit.append(pp[0])
719 visit.append(pp[0])
719 # traverse the branches dict, eliminating branch tags from each
720 # traverse the branches dict, eliminating branch tags from each
720 # head that are visible from another branch tag for that head.
721 # head that are visible from another branch tag for that head.
721 out = {}
722 out = {}
722 viscache = {}
723 viscache = {}
723 for h in heads:
724 for h in heads:
724 def visible(node):
725 def visible(node):
725 if node in viscache:
726 if node in viscache:
726 return viscache[node]
727 return viscache[node]
727 ret = {}
728 ret = {}
728 visit = [node]
729 visit = [node]
729 while visit:
730 while visit:
730 x = visit.pop()
731 x = visit.pop()
731 if x in viscache:
732 if x in viscache:
732 ret.update(viscache[x])
733 ret.update(viscache[x])
733 elif x not in ret:
734 elif x not in ret:
734 ret[x] = 1
735 ret[x] = 1
735 if x in branches:
736 if x in branches:
736 visit[len(visit):] = branches[x].keys()
737 visit[len(visit):] = branches[x].keys()
737 viscache[node] = ret
738 viscache[node] = ret
738 return ret
739 return ret
739 if h not in branches:
740 if h not in branches:
740 continue
741 continue
741 # O(n^2), but somewhat limited. This only searches the
742 # O(n^2), but somewhat limited. This only searches the
742 # tags visible from a specific head, not all the tags in the
743 # tags visible from a specific head, not all the tags in the
743 # whole repo.
744 # whole repo.
744 for b in branches[h]:
745 for b in branches[h]:
745 vis = False
746 vis = False
746 for bb in branches[h].keys():
747 for bb in branches[h].keys():
747 if b != bb:
748 if b != bb:
748 if b in visible(bb):
749 if b in visible(bb):
749 vis = True
750 vis = True
750 break
751 break
751 if not vis:
752 if not vis:
752 l = out.setdefault(h, [])
753 l = out.setdefault(h, [])
753 l[len(l):] = self.nodetags(b)
754 l[len(l):] = self.nodetags(b)
754 return out
755 return out
755
756
756 def branches(self, nodes):
757 def branches(self, nodes):
757 if not nodes:
758 if not nodes:
758 nodes = [self.changelog.tip()]
759 nodes = [self.changelog.tip()]
759 b = []
760 b = []
760 for n in nodes:
761 for n in nodes:
761 t = n
762 t = n
762 while n:
763 while n:
763 p = self.changelog.parents(n)
764 p = self.changelog.parents(n)
764 if p[1] != nullid or p[0] == nullid:
765 if p[1] != nullid or p[0] == nullid:
765 b.append((t, n, p[0], p[1]))
766 b.append((t, n, p[0], p[1]))
766 break
767 break
767 n = p[0]
768 n = p[0]
768 return b
769 return b
769
770
770 def between(self, pairs):
771 def between(self, pairs):
771 r = []
772 r = []
772
773
773 for top, bottom in pairs:
774 for top, bottom in pairs:
774 n, l, i = top, [], 0
775 n, l, i = top, [], 0
775 f = 1
776 f = 1
776
777
777 while n != bottom:
778 while n != bottom:
778 p = self.changelog.parents(n)[0]
779 p = self.changelog.parents(n)[0]
779 if i == f:
780 if i == f:
780 l.append(n)
781 l.append(n)
781 f = f * 2
782 f = f * 2
782 n = p
783 n = p
783 i += 1
784 i += 1
784
785
785 r.append(l)
786 r.append(l)
786
787
787 return r
788 return r
788
789
789 def findincoming(self, remote, base=None, heads=None, force=False):
790 def findincoming(self, remote, base=None, heads=None, force=False):
790 m = self.changelog.nodemap
791 m = self.changelog.nodemap
791 search = []
792 search = []
792 fetch = {}
793 fetch = {}
793 seen = {}
794 seen = {}
794 seenbranch = {}
795 seenbranch = {}
795 if base == None:
796 if base == None:
796 base = {}
797 base = {}
797
798
798 # assume we're closer to the tip than the root
799 # assume we're closer to the tip than the root
799 # and start by examining the heads
800 # and start by examining the heads
800 self.ui.status(_("searching for changes\n"))
801 self.ui.status(_("searching for changes\n"))
801
802
802 if not heads:
803 if not heads:
803 heads = remote.heads()
804 heads = remote.heads()
804
805
805 unknown = []
806 unknown = []
806 for h in heads:
807 for h in heads:
807 if h not in m:
808 if h not in m:
808 unknown.append(h)
809 unknown.append(h)
809 else:
810 else:
810 base[h] = 1
811 base[h] = 1
811
812
812 if not unknown:
813 if not unknown:
813 return []
814 return []
814
815
815 rep = {}
816 rep = {}
816 reqcnt = 0
817 reqcnt = 0
817
818
818 # search through remote branches
819 # search through remote branches
819 # a 'branch' here is a linear segment of history, with four parts:
820 # a 'branch' here is a linear segment of history, with four parts:
820 # head, root, first parent, second parent
821 # head, root, first parent, second parent
821 # (a branch always has two parents (or none) by definition)
822 # (a branch always has two parents (or none) by definition)
822 unknown = remote.branches(unknown)
823 unknown = remote.branches(unknown)
823 while unknown:
824 while unknown:
824 r = []
825 r = []
825 while unknown:
826 while unknown:
826 n = unknown.pop(0)
827 n = unknown.pop(0)
827 if n[0] in seen:
828 if n[0] in seen:
828 continue
829 continue
829
830
830 self.ui.debug(_("examining %s:%s\n")
831 self.ui.debug(_("examining %s:%s\n")
831 % (short(n[0]), short(n[1])))
832 % (short(n[0]), short(n[1])))
832 if n[0] == nullid:
833 if n[0] == nullid:
833 break
834 break
834 if n in seenbranch:
835 if n in seenbranch:
835 self.ui.debug(_("branch already found\n"))
836 self.ui.debug(_("branch already found\n"))
836 continue
837 continue
837 if n[1] and n[1] in m: # do we know the base?
838 if n[1] and n[1] in m: # do we know the base?
838 self.ui.debug(_("found incomplete branch %s:%s\n")
839 self.ui.debug(_("found incomplete branch %s:%s\n")
839 % (short(n[0]), short(n[1])))
840 % (short(n[0]), short(n[1])))
840 search.append(n) # schedule branch range for scanning
841 search.append(n) # schedule branch range for scanning
841 seenbranch[n] = 1
842 seenbranch[n] = 1
842 else:
843 else:
843 if n[1] not in seen and n[1] not in fetch:
844 if n[1] not in seen and n[1] not in fetch:
844 if n[2] in m and n[3] in m:
845 if n[2] in m and n[3] in m:
845 self.ui.debug(_("found new changeset %s\n") %
846 self.ui.debug(_("found new changeset %s\n") %
846 short(n[1]))
847 short(n[1]))
847 fetch[n[1]] = 1 # earliest unknown
848 fetch[n[1]] = 1 # earliest unknown
848 base[n[2]] = 1 # latest known
849 base[n[2]] = 1 # latest known
849 continue
850 continue
850
851
851 for a in n[2:4]:
852 for a in n[2:4]:
852 if a not in rep:
853 if a not in rep:
853 r.append(a)
854 r.append(a)
854 rep[a] = 1
855 rep[a] = 1
855
856
856 seen[n[0]] = 1
857 seen[n[0]] = 1
857
858
858 if r:
859 if r:
859 reqcnt += 1
860 reqcnt += 1
860 self.ui.debug(_("request %d: %s\n") %
861 self.ui.debug(_("request %d: %s\n") %
861 (reqcnt, " ".join(map(short, r))))
862 (reqcnt, " ".join(map(short, r))))
862 for p in range(0, len(r), 10):
863 for p in range(0, len(r), 10):
863 for b in remote.branches(r[p:p+10]):
864 for b in remote.branches(r[p:p+10]):
864 self.ui.debug(_("received %s:%s\n") %
865 self.ui.debug(_("received %s:%s\n") %
865 (short(b[0]), short(b[1])))
866 (short(b[0]), short(b[1])))
866 if b[0] in m:
867 if b[0] in m:
867 self.ui.debug(_("found base node %s\n")
868 self.ui.debug(_("found base node %s\n")
868 % short(b[0]))
869 % short(b[0]))
869 base[b[0]] = 1
870 base[b[0]] = 1
870 elif b[0] not in seen:
871 elif b[0] not in seen:
871 unknown.append(b)
872 unknown.append(b)
872
873
873 # do binary search on the branches we found
874 # do binary search on the branches we found
874 while search:
875 while search:
875 n = search.pop(0)
876 n = search.pop(0)
876 reqcnt += 1
877 reqcnt += 1
877 l = remote.between([(n[0], n[1])])[0]
878 l = remote.between([(n[0], n[1])])[0]
878 l.append(n[1])
879 l.append(n[1])
879 p = n[0]
880 p = n[0]
880 f = 1
881 f = 1
881 for i in l:
882 for i in l:
882 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
883 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
883 if i in m:
884 if i in m:
884 if f <= 2:
885 if f <= 2:
885 self.ui.debug(_("found new branch changeset %s\n") %
886 self.ui.debug(_("found new branch changeset %s\n") %
886 short(p))
887 short(p))
887 fetch[p] = 1
888 fetch[p] = 1
888 base[i] = 1
889 base[i] = 1
889 else:
890 else:
890 self.ui.debug(_("narrowed branch search to %s:%s\n")
891 self.ui.debug(_("narrowed branch search to %s:%s\n")
891 % (short(p), short(i)))
892 % (short(p), short(i)))
892 search.append((p, i))
893 search.append((p, i))
893 break
894 break
894 p, f = i, f * 2
895 p, f = i, f * 2
895
896
896 # sanity check our fetch list
897 # sanity check our fetch list
897 for f in fetch.keys():
898 for f in fetch.keys():
898 if f in m:
899 if f in m:
899 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
900 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
900
901
901 if base.keys() == [nullid]:
902 if base.keys() == [nullid]:
902 if force:
903 if force:
903 self.ui.warn(_("warning: repository is unrelated\n"))
904 self.ui.warn(_("warning: repository is unrelated\n"))
904 else:
905 else:
905 raise util.Abort(_("repository is unrelated"))
906 raise util.Abort(_("repository is unrelated"))
906
907
907 self.ui.note(_("found new changesets starting at ") +
908 self.ui.note(_("found new changesets starting at ") +
908 " ".join([short(f) for f in fetch]) + "\n")
909 " ".join([short(f) for f in fetch]) + "\n")
909
910
910 self.ui.debug(_("%d total queries\n") % reqcnt)
911 self.ui.debug(_("%d total queries\n") % reqcnt)
911
912
912 return fetch.keys()
913 return fetch.keys()
913
914
914 def findoutgoing(self, remote, base=None, heads=None, force=False):
915 def findoutgoing(self, remote, base=None, heads=None, force=False):
915 if base == None:
916 if base == None:
916 base = {}
917 base = {}
917 self.findincoming(remote, base, heads, force=force)
918 self.findincoming(remote, base, heads, force=force)
918
919
919 self.ui.debug(_("common changesets up to ")
920 self.ui.debug(_("common changesets up to ")
920 + " ".join(map(short, base.keys())) + "\n")
921 + " ".join(map(short, base.keys())) + "\n")
921
922
922 remain = dict.fromkeys(self.changelog.nodemap)
923 remain = dict.fromkeys(self.changelog.nodemap)
923
924
924 # prune everything remote has from the tree
925 # prune everything remote has from the tree
925 del remain[nullid]
926 del remain[nullid]
926 remove = base.keys()
927 remove = base.keys()
927 while remove:
928 while remove:
928 n = remove.pop(0)
929 n = remove.pop(0)
929 if n in remain:
930 if n in remain:
930 del remain[n]
931 del remain[n]
931 for p in self.changelog.parents(n):
932 for p in self.changelog.parents(n):
932 remove.append(p)
933 remove.append(p)
933
934
934 # find every node whose parents have been pruned
935 # find every node whose parents have been pruned
935 subset = []
936 subset = []
936 for n in remain:
937 for n in remain:
937 p1, p2 = self.changelog.parents(n)
938 p1, p2 = self.changelog.parents(n)
938 if p1 not in remain and p2 not in remain:
939 if p1 not in remain and p2 not in remain:
939 subset.append(n)
940 subset.append(n)
940
941
941 # this is the set of all roots we have to push
942 # this is the set of all roots we have to push
942 return subset
943 return subset
943
944
944 def pull(self, remote, heads=None, force=False):
945 def pull(self, remote, heads=None, force=False):
945 l = self.lock()
946 l = self.lock()
946
947
947 # if we have an empty repo, fetch everything
948 # if we have an empty repo, fetch everything
948 if self.changelog.tip() == nullid:
949 if self.changelog.tip() == nullid:
949 self.ui.status(_("requesting all changes\n"))
950 self.ui.status(_("requesting all changes\n"))
950 fetch = [nullid]
951 fetch = [nullid]
951 else:
952 else:
952 fetch = self.findincoming(remote, force=force)
953 fetch = self.findincoming(remote, force=force)
953
954
954 if not fetch:
955 if not fetch:
955 self.ui.status(_("no changes found\n"))
956 self.ui.status(_("no changes found\n"))
956 return 1
957 return 1
957
958
958 if heads is None:
959 if heads is None:
959 cg = remote.changegroup(fetch, 'pull')
960 cg = remote.changegroup(fetch, 'pull')
960 else:
961 else:
961 cg = remote.changegroupsubset(fetch, heads, 'pull')
962 cg = remote.changegroupsubset(fetch, heads, 'pull')
962 return self.addchangegroup(cg)
963 return self.addchangegroup(cg)
963
964
964 def push(self, remote, force=False, revs=None):
965 def push(self, remote, force=False, revs=None):
965 lock = remote.lock()
966 lock = remote.lock()
966
967
967 base = {}
968 base = {}
968 heads = remote.heads()
969 heads = remote.heads()
969 inc = self.findincoming(remote, base, heads, force=force)
970 inc = self.findincoming(remote, base, heads, force=force)
970 if not force and inc:
971 if not force and inc:
971 self.ui.warn(_("abort: unsynced remote changes!\n"))
972 self.ui.warn(_("abort: unsynced remote changes!\n"))
972 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
973 self.ui.status(_("(did you forget to sync? use push -f to force)\n"))
973 return 1
974 return 1
974
975
975 update = self.findoutgoing(remote, base)
976 update = self.findoutgoing(remote, base)
976 if revs is not None:
977 if revs is not None:
977 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
978 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
978 else:
979 else:
979 bases, heads = update, self.changelog.heads()
980 bases, heads = update, self.changelog.heads()
980
981
981 if not bases:
982 if not bases:
982 self.ui.status(_("no changes found\n"))
983 self.ui.status(_("no changes found\n"))
983 return 1
984 return 1
984 elif not force:
985 elif not force:
985 if len(bases) < len(heads):
986 if len(bases) < len(heads):
986 self.ui.warn(_("abort: push creates new remote branches!\n"))
987 self.ui.warn(_("abort: push creates new remote branches!\n"))
987 self.ui.status(_("(did you forget to merge?"
988 self.ui.status(_("(did you forget to merge?"
988 " use push -f to force)\n"))
989 " use push -f to force)\n"))
989 return 1
990 return 1
990
991
991 if revs is None:
992 if revs is None:
992 cg = self.changegroup(update, 'push')
993 cg = self.changegroup(update, 'push')
993 else:
994 else:
994 cg = self.changegroupsubset(update, revs, 'push')
995 cg = self.changegroupsubset(update, revs, 'push')
995 return remote.addchangegroup(cg)
996 return remote.addchangegroup(cg)
996
997
997 def changegroupsubset(self, bases, heads, source):
998 def changegroupsubset(self, bases, heads, source):
998 """This function generates a changegroup consisting of all the nodes
999 """This function generates a changegroup consisting of all the nodes
999 that are descendents of any of the bases, and ancestors of any of
1000 that are descendents of any of the bases, and ancestors of any of
1000 the heads.
1001 the heads.
1001
1002
1002 It is fairly complex as determining which filenodes and which
1003 It is fairly complex as determining which filenodes and which
1003 manifest nodes need to be included for the changeset to be complete
1004 manifest nodes need to be included for the changeset to be complete
1004 is non-trivial.
1005 is non-trivial.
1005
1006
1006 Another wrinkle is doing the reverse, figuring out which changeset in
1007 Another wrinkle is doing the reverse, figuring out which changeset in
1007 the changegroup a particular filenode or manifestnode belongs to."""
1008 the changegroup a particular filenode or manifestnode belongs to."""
1008
1009
1009 self.hook('preoutgoing', throw=True, source=source)
1010 self.hook('preoutgoing', throw=True, source=source)
1010
1011
1011 # Set up some initial variables
1012 # Set up some initial variables
1012 # Make it easy to refer to self.changelog
1013 # Make it easy to refer to self.changelog
1013 cl = self.changelog
1014 cl = self.changelog
1014 # msng is short for missing - compute the list of changesets in this
1015 # msng is short for missing - compute the list of changesets in this
1015 # changegroup.
1016 # changegroup.
1016 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1017 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1017 # Some bases may turn out to be superfluous, and some heads may be
1018 # Some bases may turn out to be superfluous, and some heads may be
1018 # too. nodesbetween will return the minimal set of bases and heads
1019 # too. nodesbetween will return the minimal set of bases and heads
1019 # necessary to re-create the changegroup.
1020 # necessary to re-create the changegroup.
1020
1021
1021 # Known heads are the list of heads that it is assumed the recipient
1022 # Known heads are the list of heads that it is assumed the recipient
1022 # of this changegroup will know about.
1023 # of this changegroup will know about.
1023 knownheads = {}
1024 knownheads = {}
1024 # We assume that all parents of bases are known heads.
1025 # We assume that all parents of bases are known heads.
1025 for n in bases:
1026 for n in bases:
1026 for p in cl.parents(n):
1027 for p in cl.parents(n):
1027 if p != nullid:
1028 if p != nullid:
1028 knownheads[p] = 1
1029 knownheads[p] = 1
1029 knownheads = knownheads.keys()
1030 knownheads = knownheads.keys()
1030 if knownheads:
1031 if knownheads:
1031 # Now that we know what heads are known, we can compute which
1032 # Now that we know what heads are known, we can compute which
1032 # changesets are known. The recipient must know about all
1033 # changesets are known. The recipient must know about all
1033 # changesets required to reach the known heads from the null
1034 # changesets required to reach the known heads from the null
1034 # changeset.
1035 # changeset.
1035 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1036 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1036 junk = None
1037 junk = None
1037 # Transform the list into an ersatz set.
1038 # Transform the list into an ersatz set.
1038 has_cl_set = dict.fromkeys(has_cl_set)
1039 has_cl_set = dict.fromkeys(has_cl_set)
1039 else:
1040 else:
1040 # If there were no known heads, the recipient cannot be assumed to
1041 # If there were no known heads, the recipient cannot be assumed to
1041 # know about any changesets.
1042 # know about any changesets.
1042 has_cl_set = {}
1043 has_cl_set = {}
1043
1044
1044 # Make it easy to refer to self.manifest
1045 # Make it easy to refer to self.manifest
1045 mnfst = self.manifest
1046 mnfst = self.manifest
1046 # We don't know which manifests are missing yet
1047 # We don't know which manifests are missing yet
1047 msng_mnfst_set = {}
1048 msng_mnfst_set = {}
1048 # Nor do we know which filenodes are missing.
1049 # Nor do we know which filenodes are missing.
1049 msng_filenode_set = {}
1050 msng_filenode_set = {}
1050
1051
1051 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1052 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1052 junk = None
1053 junk = None
1053
1054
1054 # A changeset always belongs to itself, so the changenode lookup
1055 # A changeset always belongs to itself, so the changenode lookup
1055 # function for a changenode is identity.
1056 # function for a changenode is identity.
1056 def identity(x):
1057 def identity(x):
1057 return x
1058 return x
1058
1059
1059 # A function generating function. Sets up an environment for the
1060 # A function generating function. Sets up an environment for the
1060 # inner function.
1061 # inner function.
1061 def cmp_by_rev_func(revlog):
1062 def cmp_by_rev_func(revlog):
1062 # Compare two nodes by their revision number in the environment's
1063 # Compare two nodes by their revision number in the environment's
1063 # revision history. Since the revision number both represents the
1064 # revision history. Since the revision number both represents the
1064 # most efficient order to read the nodes in, and represents a
1065 # most efficient order to read the nodes in, and represents a
1065 # topological sorting of the nodes, this function is often useful.
1066 # topological sorting of the nodes, this function is often useful.
1066 def cmp_by_rev(a, b):
1067 def cmp_by_rev(a, b):
1067 return cmp(revlog.rev(a), revlog.rev(b))
1068 return cmp(revlog.rev(a), revlog.rev(b))
1068 return cmp_by_rev
1069 return cmp_by_rev
1069
1070
1070 # If we determine that a particular file or manifest node must be a
1071 # If we determine that a particular file or manifest node must be a
1071 # node that the recipient of the changegroup will already have, we can
1072 # node that the recipient of the changegroup will already have, we can
1072 # also assume the recipient will have all the parents. This function
1073 # also assume the recipient will have all the parents. This function
1073 # prunes them from the set of missing nodes.
1074 # prunes them from the set of missing nodes.
1074 def prune_parents(revlog, hasset, msngset):
1075 def prune_parents(revlog, hasset, msngset):
1075 haslst = hasset.keys()
1076 haslst = hasset.keys()
1076 haslst.sort(cmp_by_rev_func(revlog))
1077 haslst.sort(cmp_by_rev_func(revlog))
1077 for node in haslst:
1078 for node in haslst:
1078 parentlst = [p for p in revlog.parents(node) if p != nullid]
1079 parentlst = [p for p in revlog.parents(node) if p != nullid]
1079 while parentlst:
1080 while parentlst:
1080 n = parentlst.pop()
1081 n = parentlst.pop()
1081 if n not in hasset:
1082 if n not in hasset:
1082 hasset[n] = 1
1083 hasset[n] = 1
1083 p = [p for p in revlog.parents(n) if p != nullid]
1084 p = [p for p in revlog.parents(n) if p != nullid]
1084 parentlst.extend(p)
1085 parentlst.extend(p)
1085 for n in hasset:
1086 for n in hasset:
1086 msngset.pop(n, None)
1087 msngset.pop(n, None)
1087
1088
1088 # This is a function generating function used to set up an environment
1089 # This is a function generating function used to set up an environment
1089 # for the inner function to execute in.
1090 # for the inner function to execute in.
1090 def manifest_and_file_collector(changedfileset):
1091 def manifest_and_file_collector(changedfileset):
1091 # This is an information gathering function that gathers
1092 # This is an information gathering function that gathers
1092 # information from each changeset node that goes out as part of
1093 # information from each changeset node that goes out as part of
1093 # the changegroup. The information gathered is a list of which
1094 # the changegroup. The information gathered is a list of which
1094 # manifest nodes are potentially required (the recipient may
1095 # manifest nodes are potentially required (the recipient may
1095 # already have them) and total list of all files which were
1096 # already have them) and total list of all files which were
1096 # changed in any changeset in the changegroup.
1097 # changed in any changeset in the changegroup.
1097 #
1098 #
1098 # We also remember the first changenode we saw any manifest
1099 # We also remember the first changenode we saw any manifest
1099 # referenced by so we can later determine which changenode 'owns'
1100 # referenced by so we can later determine which changenode 'owns'
1100 # the manifest.
1101 # the manifest.
1101 def collect_manifests_and_files(clnode):
1102 def collect_manifests_and_files(clnode):
1102 c = cl.read(clnode)
1103 c = cl.read(clnode)
1103 for f in c[3]:
1104 for f in c[3]:
1104 # This is to make sure we only have one instance of each
1105 # This is to make sure we only have one instance of each
1105 # filename string for each filename.
1106 # filename string for each filename.
1106 changedfileset.setdefault(f, f)
1107 changedfileset.setdefault(f, f)
1107 msng_mnfst_set.setdefault(c[0], clnode)
1108 msng_mnfst_set.setdefault(c[0], clnode)
1108 return collect_manifests_and_files
1109 return collect_manifests_and_files
1109
1110
1110 # Figure out which manifest nodes (of the ones we think might be part
1111 # Figure out which manifest nodes (of the ones we think might be part
1111 # of the changegroup) the recipient must know about and remove them
1112 # of the changegroup) the recipient must know about and remove them
1112 # from the changegroup.
1113 # from the changegroup.
1113 def prune_manifests():
1114 def prune_manifests():
1114 has_mnfst_set = {}
1115 has_mnfst_set = {}
1115 for n in msng_mnfst_set:
1116 for n in msng_mnfst_set:
1116 # If a 'missing' manifest thinks it belongs to a changenode
1117 # If a 'missing' manifest thinks it belongs to a changenode
1117 # the recipient is assumed to have, obviously the recipient
1118 # the recipient is assumed to have, obviously the recipient
1118 # must have that manifest.
1119 # must have that manifest.
1119 linknode = cl.node(mnfst.linkrev(n))
1120 linknode = cl.node(mnfst.linkrev(n))
1120 if linknode in has_cl_set:
1121 if linknode in has_cl_set:
1121 has_mnfst_set[n] = 1
1122 has_mnfst_set[n] = 1
1122 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1123 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1123
1124
1124 # Use the information collected in collect_manifests_and_files to say
1125 # Use the information collected in collect_manifests_and_files to say
1125 # which changenode any manifestnode belongs to.
1126 # which changenode any manifestnode belongs to.
1126 def lookup_manifest_link(mnfstnode):
1127 def lookup_manifest_link(mnfstnode):
1127 return msng_mnfst_set[mnfstnode]
1128 return msng_mnfst_set[mnfstnode]
1128
1129
1129 # A function generating function that sets up the initial environment
1130 # A function generating function that sets up the initial environment
1130 # the inner function.
1131 # the inner function.
1131 def filenode_collector(changedfiles):
1132 def filenode_collector(changedfiles):
1132 next_rev = [0]
1133 next_rev = [0]
1133 # This gathers information from each manifestnode included in the
1134 # This gathers information from each manifestnode included in the
1134 # changegroup about which filenodes the manifest node references
1135 # changegroup about which filenodes the manifest node references
1135 # so we can include those in the changegroup too.
1136 # so we can include those in the changegroup too.
1136 #
1137 #
1137 # It also remembers which changenode each filenode belongs to. It
1138 # It also remembers which changenode each filenode belongs to. It
1138 # does this by assuming the a filenode belongs to the changenode
1139 # does this by assuming the a filenode belongs to the changenode
1139 # the first manifest that references it belongs to.
1140 # the first manifest that references it belongs to.
1140 def collect_msng_filenodes(mnfstnode):
1141 def collect_msng_filenodes(mnfstnode):
1141 r = mnfst.rev(mnfstnode)
1142 r = mnfst.rev(mnfstnode)
1142 if r == next_rev[0]:
1143 if r == next_rev[0]:
1143 # If the last rev we looked at was the one just previous,
1144 # If the last rev we looked at was the one just previous,
1144 # we only need to see a diff.
1145 # we only need to see a diff.
1145 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1146 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1146 # For each line in the delta
1147 # For each line in the delta
1147 for dline in delta.splitlines():
1148 for dline in delta.splitlines():
1148 # get the filename and filenode for that line
1149 # get the filename and filenode for that line
1149 f, fnode = dline.split('\0')
1150 f, fnode = dline.split('\0')
1150 fnode = bin(fnode[:40])
1151 fnode = bin(fnode[:40])
1151 f = changedfiles.get(f, None)
1152 f = changedfiles.get(f, None)
1152 # And if the file is in the list of files we care
1153 # And if the file is in the list of files we care
1153 # about.
1154 # about.
1154 if f is not None:
1155 if f is not None:
1155 # Get the changenode this manifest belongs to
1156 # Get the changenode this manifest belongs to
1156 clnode = msng_mnfst_set[mnfstnode]
1157 clnode = msng_mnfst_set[mnfstnode]
1157 # Create the set of filenodes for the file if
1158 # Create the set of filenodes for the file if
1158 # there isn't one already.
1159 # there isn't one already.
1159 ndset = msng_filenode_set.setdefault(f, {})
1160 ndset = msng_filenode_set.setdefault(f, {})
1160 # And set the filenode's changelog node to the
1161 # And set the filenode's changelog node to the
1161 # manifest's if it hasn't been set already.
1162 # manifest's if it hasn't been set already.
1162 ndset.setdefault(fnode, clnode)
1163 ndset.setdefault(fnode, clnode)
1163 else:
1164 else:
1164 # Otherwise we need a full manifest.
1165 # Otherwise we need a full manifest.
1165 m = mnfst.read(mnfstnode)
1166 m = mnfst.read(mnfstnode)
1166 # For every file in we care about.
1167 # For every file in we care about.
1167 for f in changedfiles:
1168 for f in changedfiles:
1168 fnode = m.get(f, None)
1169 fnode = m.get(f, None)
1169 # If it's in the manifest
1170 # If it's in the manifest
1170 if fnode is not None:
1171 if fnode is not None:
1171 # See comments above.
1172 # See comments above.
1172 clnode = msng_mnfst_set[mnfstnode]
1173 clnode = msng_mnfst_set[mnfstnode]
1173 ndset = msng_filenode_set.setdefault(f, {})
1174 ndset = msng_filenode_set.setdefault(f, {})
1174 ndset.setdefault(fnode, clnode)
1175 ndset.setdefault(fnode, clnode)
1175 # Remember the revision we hope to see next.
1176 # Remember the revision we hope to see next.
1176 next_rev[0] = r + 1
1177 next_rev[0] = r + 1
1177 return collect_msng_filenodes
1178 return collect_msng_filenodes
1178
1179
1179 # We have a list of filenodes we think we need for a file, lets remove
1180 # We have a list of filenodes we think we need for a file, lets remove
1180 # all those we now the recipient must have.
1181 # all those we now the recipient must have.
1181 def prune_filenodes(f, filerevlog):
1182 def prune_filenodes(f, filerevlog):
1182 msngset = msng_filenode_set[f]
1183 msngset = msng_filenode_set[f]
1183 hasset = {}
1184 hasset = {}
1184 # If a 'missing' filenode thinks it belongs to a changenode we
1185 # If a 'missing' filenode thinks it belongs to a changenode we
1185 # assume the recipient must have, then the recipient must have
1186 # assume the recipient must have, then the recipient must have
1186 # that filenode.
1187 # that filenode.
1187 for n in msngset:
1188 for n in msngset:
1188 clnode = cl.node(filerevlog.linkrev(n))
1189 clnode = cl.node(filerevlog.linkrev(n))
1189 if clnode in has_cl_set:
1190 if clnode in has_cl_set:
1190 hasset[n] = 1
1191 hasset[n] = 1
1191 prune_parents(filerevlog, hasset, msngset)
1192 prune_parents(filerevlog, hasset, msngset)
1192
1193
1193 # A function generator function that sets up the a context for the
1194 # A function generator function that sets up the a context for the
1194 # inner function.
1195 # inner function.
1195 def lookup_filenode_link_func(fname):
1196 def lookup_filenode_link_func(fname):
1196 msngset = msng_filenode_set[fname]
1197 msngset = msng_filenode_set[fname]
1197 # Lookup the changenode the filenode belongs to.
1198 # Lookup the changenode the filenode belongs to.
1198 def lookup_filenode_link(fnode):
1199 def lookup_filenode_link(fnode):
1199 return msngset[fnode]
1200 return msngset[fnode]
1200 return lookup_filenode_link
1201 return lookup_filenode_link
1201
1202
1202 # Now that we have all theses utility functions to help out and
1203 # Now that we have all theses utility functions to help out and
1203 # logically divide up the task, generate the group.
1204 # logically divide up the task, generate the group.
1204 def gengroup():
1205 def gengroup():
1205 # The set of changed files starts empty.
1206 # The set of changed files starts empty.
1206 changedfiles = {}
1207 changedfiles = {}
1207 # Create a changenode group generator that will call our functions
1208 # Create a changenode group generator that will call our functions
1208 # back to lookup the owning changenode and collect information.
1209 # back to lookup the owning changenode and collect information.
1209 group = cl.group(msng_cl_lst, identity,
1210 group = cl.group(msng_cl_lst, identity,
1210 manifest_and_file_collector(changedfiles))
1211 manifest_and_file_collector(changedfiles))
1211 for chnk in group:
1212 for chnk in group:
1212 yield chnk
1213 yield chnk
1213
1214
1214 # The list of manifests has been collected by the generator
1215 # The list of manifests has been collected by the generator
1215 # calling our functions back.
1216 # calling our functions back.
1216 prune_manifests()
1217 prune_manifests()
1217 msng_mnfst_lst = msng_mnfst_set.keys()
1218 msng_mnfst_lst = msng_mnfst_set.keys()
1218 # Sort the manifestnodes by revision number.
1219 # Sort the manifestnodes by revision number.
1219 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1220 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1220 # Create a generator for the manifestnodes that calls our lookup
1221 # Create a generator for the manifestnodes that calls our lookup
1221 # and data collection functions back.
1222 # and data collection functions back.
1222 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1223 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1223 filenode_collector(changedfiles))
1224 filenode_collector(changedfiles))
1224 for chnk in group:
1225 for chnk in group:
1225 yield chnk
1226 yield chnk
1226
1227
1227 # These are no longer needed, dereference and toss the memory for
1228 # These are no longer needed, dereference and toss the memory for
1228 # them.
1229 # them.
1229 msng_mnfst_lst = None
1230 msng_mnfst_lst = None
1230 msng_mnfst_set.clear()
1231 msng_mnfst_set.clear()
1231
1232
1232 changedfiles = changedfiles.keys()
1233 changedfiles = changedfiles.keys()
1233 changedfiles.sort()
1234 changedfiles.sort()
1234 # Go through all our files in order sorted by name.
1235 # Go through all our files in order sorted by name.
1235 for fname in changedfiles:
1236 for fname in changedfiles:
1236 filerevlog = self.file(fname)
1237 filerevlog = self.file(fname)
1237 # Toss out the filenodes that the recipient isn't really
1238 # Toss out the filenodes that the recipient isn't really
1238 # missing.
1239 # missing.
1239 if msng_filenode_set.has_key(fname):
1240 if msng_filenode_set.has_key(fname):
1240 prune_filenodes(fname, filerevlog)
1241 prune_filenodes(fname, filerevlog)
1241 msng_filenode_lst = msng_filenode_set[fname].keys()
1242 msng_filenode_lst = msng_filenode_set[fname].keys()
1242 else:
1243 else:
1243 msng_filenode_lst = []
1244 msng_filenode_lst = []
1244 # If any filenodes are left, generate the group for them,
1245 # If any filenodes are left, generate the group for them,
1245 # otherwise don't bother.
1246 # otherwise don't bother.
1246 if len(msng_filenode_lst) > 0:
1247 if len(msng_filenode_lst) > 0:
1247 yield struct.pack(">l", len(fname) + 4) + fname
1248 yield changegroup.genchunk(fname)
1248 # Sort the filenodes by their revision #
1249 # Sort the filenodes by their revision #
1249 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1250 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1250 # Create a group generator and only pass in a changenode
1251 # Create a group generator and only pass in a changenode
1251 # lookup function as we need to collect no information
1252 # lookup function as we need to collect no information
1252 # from filenodes.
1253 # from filenodes.
1253 group = filerevlog.group(msng_filenode_lst,
1254 group = filerevlog.group(msng_filenode_lst,
1254 lookup_filenode_link_func(fname))
1255 lookup_filenode_link_func(fname))
1255 for chnk in group:
1256 for chnk in group:
1256 yield chnk
1257 yield chnk
1257 if msng_filenode_set.has_key(fname):
1258 if msng_filenode_set.has_key(fname):
1258 # Don't need this anymore, toss it to free memory.
1259 # Don't need this anymore, toss it to free memory.
1259 del msng_filenode_set[fname]
1260 del msng_filenode_set[fname]
1260 # Signal that no more groups are left.
1261 # Signal that no more groups are left.
1261 yield struct.pack(">l", 0)
1262 yield changegroup.closechunk()
1262
1263
1263 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1264 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1264
1265
1265 return util.chunkbuffer(gengroup())
1266 return util.chunkbuffer(gengroup())
1266
1267
1267 def changegroup(self, basenodes, source):
1268 def changegroup(self, basenodes, source):
1268 """Generate a changegroup of all nodes that we have that a recipient
1269 """Generate a changegroup of all nodes that we have that a recipient
1269 doesn't.
1270 doesn't.
1270
1271
1271 This is much easier than the previous function as we can assume that
1272 This is much easier than the previous function as we can assume that
1272 the recipient has any changenode we aren't sending them."""
1273 the recipient has any changenode we aren't sending them."""
1273
1274
1274 self.hook('preoutgoing', throw=True, source=source)
1275 self.hook('preoutgoing', throw=True, source=source)
1275
1276
1276 cl = self.changelog
1277 cl = self.changelog
1277 nodes = cl.nodesbetween(basenodes, None)[0]
1278 nodes = cl.nodesbetween(basenodes, None)[0]
1278 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1279 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1279
1280
1280 def identity(x):
1281 def identity(x):
1281 return x
1282 return x
1282
1283
1283 def gennodelst(revlog):
1284 def gennodelst(revlog):
1284 for r in xrange(0, revlog.count()):
1285 for r in xrange(0, revlog.count()):
1285 n = revlog.node(r)
1286 n = revlog.node(r)
1286 if revlog.linkrev(n) in revset:
1287 if revlog.linkrev(n) in revset:
1287 yield n
1288 yield n
1288
1289
1289 def changed_file_collector(changedfileset):
1290 def changed_file_collector(changedfileset):
1290 def collect_changed_files(clnode):
1291 def collect_changed_files(clnode):
1291 c = cl.read(clnode)
1292 c = cl.read(clnode)
1292 for fname in c[3]:
1293 for fname in c[3]:
1293 changedfileset[fname] = 1
1294 changedfileset[fname] = 1
1294 return collect_changed_files
1295 return collect_changed_files
1295
1296
1296 def lookuprevlink_func(revlog):
1297 def lookuprevlink_func(revlog):
1297 def lookuprevlink(n):
1298 def lookuprevlink(n):
1298 return cl.node(revlog.linkrev(n))
1299 return cl.node(revlog.linkrev(n))
1299 return lookuprevlink
1300 return lookuprevlink
1300
1301
1301 def gengroup():
1302 def gengroup():
1302 # construct a list of all changed files
1303 # construct a list of all changed files
1303 changedfiles = {}
1304 changedfiles = {}
1304
1305
1305 for chnk in cl.group(nodes, identity,
1306 for chnk in cl.group(nodes, identity,
1306 changed_file_collector(changedfiles)):
1307 changed_file_collector(changedfiles)):
1307 yield chnk
1308 yield chnk
1308 changedfiles = changedfiles.keys()
1309 changedfiles = changedfiles.keys()
1309 changedfiles.sort()
1310 changedfiles.sort()
1310
1311
1311 mnfst = self.manifest
1312 mnfst = self.manifest
1312 nodeiter = gennodelst(mnfst)
1313 nodeiter = gennodelst(mnfst)
1313 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1314 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1314 yield chnk
1315 yield chnk
1315
1316
1316 for fname in changedfiles:
1317 for fname in changedfiles:
1317 filerevlog = self.file(fname)
1318 filerevlog = self.file(fname)
1318 nodeiter = gennodelst(filerevlog)
1319 nodeiter = gennodelst(filerevlog)
1319 nodeiter = list(nodeiter)
1320 nodeiter = list(nodeiter)
1320 if nodeiter:
1321 if nodeiter:
1321 yield struct.pack(">l", len(fname) + 4) + fname
1322 yield changegroup.genchunk(fname)
1322 lookup = lookuprevlink_func(filerevlog)
1323 lookup = lookuprevlink_func(filerevlog)
1323 for chnk in filerevlog.group(nodeiter, lookup):
1324 for chnk in filerevlog.group(nodeiter, lookup):
1324 yield chnk
1325 yield chnk
1325
1326
1326 yield struct.pack(">l", 0)
1327 yield changegroup.closechunk()
1327 self.hook('outgoing', node=hex(nodes[0]), source=source)
1328 self.hook('outgoing', node=hex(nodes[0]), source=source)
1328
1329
1329 return util.chunkbuffer(gengroup())
1330 return util.chunkbuffer(gengroup())
1330
1331
1331 def addchangegroup(self, source):
1332 def addchangegroup(self, source):
1332
1333
1333 def getchunk():
1334 d = source.read(4)
1335 if not d:
1336 return ""
1337 l = struct.unpack(">l", d)[0]
1338 if l <= 4:
1339 return ""
1340 d = source.read(l - 4)
1341 if len(d) < l - 4:
1342 raise repo.RepoError(_("premature EOF reading chunk"
1343 " (got %d bytes, expected %d)")
1344 % (len(d), l - 4))
1345 return d
1346
1347 def getgroup():
1348 while 1:
1349 c = getchunk()
1350 if not c:
1351 break
1352 yield c
1353
1354 def csmap(x):
1334 def csmap(x):
1355 self.ui.debug(_("add changeset %s\n") % short(x))
1335 self.ui.debug(_("add changeset %s\n") % short(x))
1356 return self.changelog.count()
1336 return self.changelog.count()
1357
1337
1358 def revmap(x):
1338 def revmap(x):
1359 return self.changelog.rev(x)
1339 return self.changelog.rev(x)
1360
1340
1361 if not source:
1341 if not source:
1362 return
1342 return
1363
1343
1364 self.hook('prechangegroup', throw=True)
1344 self.hook('prechangegroup', throw=True)
1365
1345
1366 changesets = files = revisions = 0
1346 changesets = files = revisions = 0
1367
1347
1368 tr = self.transaction()
1348 tr = self.transaction()
1369
1349
1370 oldheads = len(self.changelog.heads())
1350 oldheads = len(self.changelog.heads())
1371
1351
1372 # pull off the changeset group
1352 # pull off the changeset group
1373 self.ui.status(_("adding changesets\n"))
1353 self.ui.status(_("adding changesets\n"))
1374 co = self.changelog.tip()
1354 co = self.changelog.tip()
1375 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1355 chunkiter = changegroup.chunkiter(source)
1356 cn = self.changelog.addgroup(chunkiter, csmap, tr, 1) # unique
1376 cnr, cor = map(self.changelog.rev, (cn, co))
1357 cnr, cor = map(self.changelog.rev, (cn, co))
1377 if cn == nullid:
1358 if cn == nullid:
1378 cnr = cor
1359 cnr = cor
1379 changesets = cnr - cor
1360 changesets = cnr - cor
1380
1361
1381 # pull off the manifest group
1362 # pull off the manifest group
1382 self.ui.status(_("adding manifests\n"))
1363 self.ui.status(_("adding manifests\n"))
1383 mm = self.manifest.tip()
1364 mm = self.manifest.tip()
1384 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1365 chunkiter = changegroup.chunkiter(source)
1366 mo = self.manifest.addgroup(chunkiter, revmap, tr)
1385
1367
1386 # process the files
1368 # process the files
1387 self.ui.status(_("adding file changes\n"))
1369 self.ui.status(_("adding file changes\n"))
1388 while 1:
1370 while 1:
1389 f = getchunk()
1371 f = changegroup.getchunk(source)
1390 if not f:
1372 if not f:
1391 break
1373 break
1392 self.ui.debug(_("adding %s revisions\n") % f)
1374 self.ui.debug(_("adding %s revisions\n") % f)
1393 fl = self.file(f)
1375 fl = self.file(f)
1394 o = fl.count()
1376 o = fl.count()
1395 n = fl.addgroup(getgroup(), revmap, tr)
1377 chunkiter = changegroup.chunkiter(source)
1378 n = fl.addgroup(chunkiter, revmap, tr)
1396 revisions += fl.count() - o
1379 revisions += fl.count() - o
1397 files += 1
1380 files += 1
1398
1381
1399 newheads = len(self.changelog.heads())
1382 newheads = len(self.changelog.heads())
1400 heads = ""
1383 heads = ""
1401 if oldheads and newheads > oldheads:
1384 if oldheads and newheads > oldheads:
1402 heads = _(" (+%d heads)") % (newheads - oldheads)
1385 heads = _(" (+%d heads)") % (newheads - oldheads)
1403
1386
1404 self.ui.status(_("added %d changesets"
1387 self.ui.status(_("added %d changesets"
1405 " with %d changes to %d files%s\n")
1388 " with %d changes to %d files%s\n")
1406 % (changesets, revisions, files, heads))
1389 % (changesets, revisions, files, heads))
1407
1390
1408 self.hook('pretxnchangegroup', throw=True,
1391 self.hook('pretxnchangegroup', throw=True,
1409 node=hex(self.changelog.node(cor+1)))
1392 node=hex(self.changelog.node(cor+1)))
1410
1393
1411 tr.close()
1394 tr.close()
1412
1395
1413 if changesets > 0:
1396 if changesets > 0:
1414 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1397 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1415
1398
1416 for i in range(cor + 1, cnr + 1):
1399 for i in range(cor + 1, cnr + 1):
1417 self.hook("incoming", node=hex(self.changelog.node(i)))
1400 self.hook("incoming", node=hex(self.changelog.node(i)))
1418
1401
1419 def update(self, node, allow=False, force=False, choose=None,
1402 def update(self, node, allow=False, force=False, choose=None,
1420 moddirstate=True, forcemerge=False, wlock=None):
1403 moddirstate=True, forcemerge=False, wlock=None):
1421 pl = self.dirstate.parents()
1404 pl = self.dirstate.parents()
1422 if not force and pl[1] != nullid:
1405 if not force and pl[1] != nullid:
1423 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1406 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1424 return 1
1407 return 1
1425
1408
1426 err = False
1409 err = False
1427
1410
1428 p1, p2 = pl[0], node
1411 p1, p2 = pl[0], node
1429 pa = self.changelog.ancestor(p1, p2)
1412 pa = self.changelog.ancestor(p1, p2)
1430 m1n = self.changelog.read(p1)[0]
1413 m1n = self.changelog.read(p1)[0]
1431 m2n = self.changelog.read(p2)[0]
1414 m2n = self.changelog.read(p2)[0]
1432 man = self.manifest.ancestor(m1n, m2n)
1415 man = self.manifest.ancestor(m1n, m2n)
1433 m1 = self.manifest.read(m1n)
1416 m1 = self.manifest.read(m1n)
1434 mf1 = self.manifest.readflags(m1n)
1417 mf1 = self.manifest.readflags(m1n)
1435 m2 = self.manifest.read(m2n).copy()
1418 m2 = self.manifest.read(m2n).copy()
1436 mf2 = self.manifest.readflags(m2n)
1419 mf2 = self.manifest.readflags(m2n)
1437 ma = self.manifest.read(man)
1420 ma = self.manifest.read(man)
1438 mfa = self.manifest.readflags(man)
1421 mfa = self.manifest.readflags(man)
1439
1422
1440 modified, added, removed, deleted, unknown = self.changes()
1423 modified, added, removed, deleted, unknown = self.changes()
1441
1424
1442 # is this a jump, or a merge? i.e. is there a linear path
1425 # is this a jump, or a merge? i.e. is there a linear path
1443 # from p1 to p2?
1426 # from p1 to p2?
1444 linear_path = (pa == p1 or pa == p2)
1427 linear_path = (pa == p1 or pa == p2)
1445
1428
1446 if allow and linear_path:
1429 if allow and linear_path:
1447 raise util.Abort(_("there is nothing to merge, "
1430 raise util.Abort(_("there is nothing to merge, "
1448 "just use 'hg update'"))
1431 "just use 'hg update'"))
1449 if allow and not forcemerge:
1432 if allow and not forcemerge:
1450 if modified or added or removed:
1433 if modified or added or removed:
1451 raise util.Abort(_("outstanding uncommited changes"))
1434 raise util.Abort(_("outstanding uncommited changes"))
1452 if not forcemerge and not force:
1435 if not forcemerge and not force:
1453 for f in unknown:
1436 for f in unknown:
1454 if f in m2:
1437 if f in m2:
1455 t1 = self.wread(f)
1438 t1 = self.wread(f)
1456 t2 = self.file(f).read(m2[f])
1439 t2 = self.file(f).read(m2[f])
1457 if cmp(t1, t2) != 0:
1440 if cmp(t1, t2) != 0:
1458 raise util.Abort(_("'%s' already exists in the working"
1441 raise util.Abort(_("'%s' already exists in the working"
1459 " dir and differs from remote") % f)
1442 " dir and differs from remote") % f)
1460
1443
1461 # resolve the manifest to determine which files
1444 # resolve the manifest to determine which files
1462 # we care about merging
1445 # we care about merging
1463 self.ui.note(_("resolving manifests\n"))
1446 self.ui.note(_("resolving manifests\n"))
1464 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1447 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1465 (force, allow, moddirstate, linear_path))
1448 (force, allow, moddirstate, linear_path))
1466 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1449 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1467 (short(man), short(m1n), short(m2n)))
1450 (short(man), short(m1n), short(m2n)))
1468
1451
1469 merge = {}
1452 merge = {}
1470 get = {}
1453 get = {}
1471 remove = []
1454 remove = []
1472
1455
1473 # construct a working dir manifest
1456 # construct a working dir manifest
1474 mw = m1.copy()
1457 mw = m1.copy()
1475 mfw = mf1.copy()
1458 mfw = mf1.copy()
1476 umap = dict.fromkeys(unknown)
1459 umap = dict.fromkeys(unknown)
1477
1460
1478 for f in added + modified + unknown:
1461 for f in added + modified + unknown:
1479 mw[f] = ""
1462 mw[f] = ""
1480 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1463 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1481
1464
1482 if moddirstate and not wlock:
1465 if moddirstate and not wlock:
1483 wlock = self.wlock()
1466 wlock = self.wlock()
1484
1467
1485 for f in deleted + removed:
1468 for f in deleted + removed:
1486 if f in mw:
1469 if f in mw:
1487 del mw[f]
1470 del mw[f]
1488
1471
1489 # If we're jumping between revisions (as opposed to merging),
1472 # If we're jumping between revisions (as opposed to merging),
1490 # and if neither the working directory nor the target rev has
1473 # and if neither the working directory nor the target rev has
1491 # the file, then we need to remove it from the dirstate, to
1474 # the file, then we need to remove it from the dirstate, to
1492 # prevent the dirstate from listing the file when it is no
1475 # prevent the dirstate from listing the file when it is no
1493 # longer in the manifest.
1476 # longer in the manifest.
1494 if moddirstate and linear_path and f not in m2:
1477 if moddirstate and linear_path and f not in m2:
1495 self.dirstate.forget((f,))
1478 self.dirstate.forget((f,))
1496
1479
1497 # Compare manifests
1480 # Compare manifests
1498 for f, n in mw.iteritems():
1481 for f, n in mw.iteritems():
1499 if choose and not choose(f):
1482 if choose and not choose(f):
1500 continue
1483 continue
1501 if f in m2:
1484 if f in m2:
1502 s = 0
1485 s = 0
1503
1486
1504 # is the wfile new since m1, and match m2?
1487 # is the wfile new since m1, and match m2?
1505 if f not in m1:
1488 if f not in m1:
1506 t1 = self.wread(f)
1489 t1 = self.wread(f)
1507 t2 = self.file(f).read(m2[f])
1490 t2 = self.file(f).read(m2[f])
1508 if cmp(t1, t2) == 0:
1491 if cmp(t1, t2) == 0:
1509 n = m2[f]
1492 n = m2[f]
1510 del t1, t2
1493 del t1, t2
1511
1494
1512 # are files different?
1495 # are files different?
1513 if n != m2[f]:
1496 if n != m2[f]:
1514 a = ma.get(f, nullid)
1497 a = ma.get(f, nullid)
1515 # are both different from the ancestor?
1498 # are both different from the ancestor?
1516 if n != a and m2[f] != a:
1499 if n != a and m2[f] != a:
1517 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1500 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1518 # merge executable bits
1501 # merge executable bits
1519 # "if we changed or they changed, change in merge"
1502 # "if we changed or they changed, change in merge"
1520 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1503 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1521 mode = ((a^b) | (a^c)) ^ a
1504 mode = ((a^b) | (a^c)) ^ a
1522 merge[f] = (m1.get(f, nullid), m2[f], mode)
1505 merge[f] = (m1.get(f, nullid), m2[f], mode)
1523 s = 1
1506 s = 1
1524 # are we clobbering?
1507 # are we clobbering?
1525 # is remote's version newer?
1508 # is remote's version newer?
1526 # or are we going back in time?
1509 # or are we going back in time?
1527 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1510 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1528 self.ui.debug(_(" remote %s is newer, get\n") % f)
1511 self.ui.debug(_(" remote %s is newer, get\n") % f)
1529 get[f] = m2[f]
1512 get[f] = m2[f]
1530 s = 1
1513 s = 1
1531 elif f in umap:
1514 elif f in umap:
1532 # this unknown file is the same as the checkout
1515 # this unknown file is the same as the checkout
1533 get[f] = m2[f]
1516 get[f] = m2[f]
1534
1517
1535 if not s and mfw[f] != mf2[f]:
1518 if not s and mfw[f] != mf2[f]:
1536 if force:
1519 if force:
1537 self.ui.debug(_(" updating permissions for %s\n") % f)
1520 self.ui.debug(_(" updating permissions for %s\n") % f)
1538 util.set_exec(self.wjoin(f), mf2[f])
1521 util.set_exec(self.wjoin(f), mf2[f])
1539 else:
1522 else:
1540 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1523 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1541 mode = ((a^b) | (a^c)) ^ a
1524 mode = ((a^b) | (a^c)) ^ a
1542 if mode != b:
1525 if mode != b:
1543 self.ui.debug(_(" updating permissions for %s\n")
1526 self.ui.debug(_(" updating permissions for %s\n")
1544 % f)
1527 % f)
1545 util.set_exec(self.wjoin(f), mode)
1528 util.set_exec(self.wjoin(f), mode)
1546 del m2[f]
1529 del m2[f]
1547 elif f in ma:
1530 elif f in ma:
1548 if n != ma[f]:
1531 if n != ma[f]:
1549 r = _("d")
1532 r = _("d")
1550 if not force and (linear_path or allow):
1533 if not force and (linear_path or allow):
1551 r = self.ui.prompt(
1534 r = self.ui.prompt(
1552 (_(" local changed %s which remote deleted\n") % f) +
1535 (_(" local changed %s which remote deleted\n") % f) +
1553 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1536 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1554 if r == _("d"):
1537 if r == _("d"):
1555 remove.append(f)
1538 remove.append(f)
1556 else:
1539 else:
1557 self.ui.debug(_("other deleted %s\n") % f)
1540 self.ui.debug(_("other deleted %s\n") % f)
1558 remove.append(f) # other deleted it
1541 remove.append(f) # other deleted it
1559 else:
1542 else:
1560 # file is created on branch or in working directory
1543 # file is created on branch or in working directory
1561 if force and f not in umap:
1544 if force and f not in umap:
1562 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1545 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1563 remove.append(f)
1546 remove.append(f)
1564 elif n == m1.get(f, nullid): # same as parent
1547 elif n == m1.get(f, nullid): # same as parent
1565 if p2 == pa: # going backwards?
1548 if p2 == pa: # going backwards?
1566 self.ui.debug(_("remote deleted %s\n") % f)
1549 self.ui.debug(_("remote deleted %s\n") % f)
1567 remove.append(f)
1550 remove.append(f)
1568 else:
1551 else:
1569 self.ui.debug(_("local modified %s, keeping\n") % f)
1552 self.ui.debug(_("local modified %s, keeping\n") % f)
1570 else:
1553 else:
1571 self.ui.debug(_("working dir created %s, keeping\n") % f)
1554 self.ui.debug(_("working dir created %s, keeping\n") % f)
1572
1555
1573 for f, n in m2.iteritems():
1556 for f, n in m2.iteritems():
1574 if choose and not choose(f):
1557 if choose and not choose(f):
1575 continue
1558 continue
1576 if f[0] == "/":
1559 if f[0] == "/":
1577 continue
1560 continue
1578 if f in ma and n != ma[f]:
1561 if f in ma and n != ma[f]:
1579 r = _("k")
1562 r = _("k")
1580 if not force and (linear_path or allow):
1563 if not force and (linear_path or allow):
1581 r = self.ui.prompt(
1564 r = self.ui.prompt(
1582 (_("remote changed %s which local deleted\n") % f) +
1565 (_("remote changed %s which local deleted\n") % f) +
1583 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1566 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1584 if r == _("k"):
1567 if r == _("k"):
1585 get[f] = n
1568 get[f] = n
1586 elif f not in ma:
1569 elif f not in ma:
1587 self.ui.debug(_("remote created %s\n") % f)
1570 self.ui.debug(_("remote created %s\n") % f)
1588 get[f] = n
1571 get[f] = n
1589 else:
1572 else:
1590 if force or p2 == pa: # going backwards?
1573 if force or p2 == pa: # going backwards?
1591 self.ui.debug(_("local deleted %s, recreating\n") % f)
1574 self.ui.debug(_("local deleted %s, recreating\n") % f)
1592 get[f] = n
1575 get[f] = n
1593 else:
1576 else:
1594 self.ui.debug(_("local deleted %s\n") % f)
1577 self.ui.debug(_("local deleted %s\n") % f)
1595
1578
1596 del mw, m1, m2, ma
1579 del mw, m1, m2, ma
1597
1580
1598 if force:
1581 if force:
1599 for f in merge:
1582 for f in merge:
1600 get[f] = merge[f][1]
1583 get[f] = merge[f][1]
1601 merge = {}
1584 merge = {}
1602
1585
1603 if linear_path or force:
1586 if linear_path or force:
1604 # we don't need to do any magic, just jump to the new rev
1587 # we don't need to do any magic, just jump to the new rev
1605 branch_merge = False
1588 branch_merge = False
1606 p1, p2 = p2, nullid
1589 p1, p2 = p2, nullid
1607 else:
1590 else:
1608 if not allow:
1591 if not allow:
1609 self.ui.status(_("this update spans a branch"
1592 self.ui.status(_("this update spans a branch"
1610 " affecting the following files:\n"))
1593 " affecting the following files:\n"))
1611 fl = merge.keys() + get.keys()
1594 fl = merge.keys() + get.keys()
1612 fl.sort()
1595 fl.sort()
1613 for f in fl:
1596 for f in fl:
1614 cf = ""
1597 cf = ""
1615 if f in merge:
1598 if f in merge:
1616 cf = _(" (resolve)")
1599 cf = _(" (resolve)")
1617 self.ui.status(" %s%s\n" % (f, cf))
1600 self.ui.status(" %s%s\n" % (f, cf))
1618 self.ui.warn(_("aborting update spanning branches!\n"))
1601 self.ui.warn(_("aborting update spanning branches!\n"))
1619 self.ui.status(_("(use update -m to merge across branches"
1602 self.ui.status(_("(use update -m to merge across branches"
1620 " or -C to lose changes)\n"))
1603 " or -C to lose changes)\n"))
1621 return 1
1604 return 1
1622 branch_merge = True
1605 branch_merge = True
1623
1606
1624 # get the files we don't need to change
1607 # get the files we don't need to change
1625 files = get.keys()
1608 files = get.keys()
1626 files.sort()
1609 files.sort()
1627 for f in files:
1610 for f in files:
1628 if f[0] == "/":
1611 if f[0] == "/":
1629 continue
1612 continue
1630 self.ui.note(_("getting %s\n") % f)
1613 self.ui.note(_("getting %s\n") % f)
1631 t = self.file(f).read(get[f])
1614 t = self.file(f).read(get[f])
1632 self.wwrite(f, t)
1615 self.wwrite(f, t)
1633 util.set_exec(self.wjoin(f), mf2[f])
1616 util.set_exec(self.wjoin(f), mf2[f])
1634 if moddirstate:
1617 if moddirstate:
1635 if branch_merge:
1618 if branch_merge:
1636 self.dirstate.update([f], 'n', st_mtime=-1)
1619 self.dirstate.update([f], 'n', st_mtime=-1)
1637 else:
1620 else:
1638 self.dirstate.update([f], 'n')
1621 self.dirstate.update([f], 'n')
1639
1622
1640 # merge the tricky bits
1623 # merge the tricky bits
1641 failedmerge = []
1624 failedmerge = []
1642 files = merge.keys()
1625 files = merge.keys()
1643 files.sort()
1626 files.sort()
1644 xp1 = hex(p1)
1627 xp1 = hex(p1)
1645 xp2 = hex(p2)
1628 xp2 = hex(p2)
1646 for f in files:
1629 for f in files:
1647 self.ui.status(_("merging %s\n") % f)
1630 self.ui.status(_("merging %s\n") % f)
1648 my, other, flag = merge[f]
1631 my, other, flag = merge[f]
1649 ret = self.merge3(f, my, other, xp1, xp2)
1632 ret = self.merge3(f, my, other, xp1, xp2)
1650 if ret:
1633 if ret:
1651 err = True
1634 err = True
1652 failedmerge.append(f)
1635 failedmerge.append(f)
1653 util.set_exec(self.wjoin(f), flag)
1636 util.set_exec(self.wjoin(f), flag)
1654 if moddirstate:
1637 if moddirstate:
1655 if branch_merge:
1638 if branch_merge:
1656 # We've done a branch merge, mark this file as merged
1639 # We've done a branch merge, mark this file as merged
1657 # so that we properly record the merger later
1640 # so that we properly record the merger later
1658 self.dirstate.update([f], 'm')
1641 self.dirstate.update([f], 'm')
1659 else:
1642 else:
1660 # We've update-merged a locally modified file, so
1643 # We've update-merged a locally modified file, so
1661 # we set the dirstate to emulate a normal checkout
1644 # we set the dirstate to emulate a normal checkout
1662 # of that file some time in the past. Thus our
1645 # of that file some time in the past. Thus our
1663 # merge will appear as a normal local file
1646 # merge will appear as a normal local file
1664 # modification.
1647 # modification.
1665 f_len = len(self.file(f).read(other))
1648 f_len = len(self.file(f).read(other))
1666 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1649 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1667
1650
1668 remove.sort()
1651 remove.sort()
1669 for f in remove:
1652 for f in remove:
1670 self.ui.note(_("removing %s\n") % f)
1653 self.ui.note(_("removing %s\n") % f)
1671 util.audit_path(f)
1654 util.audit_path(f)
1672 try:
1655 try:
1673 util.unlink(self.wjoin(f))
1656 util.unlink(self.wjoin(f))
1674 except OSError, inst:
1657 except OSError, inst:
1675 if inst.errno != errno.ENOENT:
1658 if inst.errno != errno.ENOENT:
1676 self.ui.warn(_("update failed to remove %s: %s!\n") %
1659 self.ui.warn(_("update failed to remove %s: %s!\n") %
1677 (f, inst.strerror))
1660 (f, inst.strerror))
1678 if moddirstate:
1661 if moddirstate:
1679 if branch_merge:
1662 if branch_merge:
1680 self.dirstate.update(remove, 'r')
1663 self.dirstate.update(remove, 'r')
1681 else:
1664 else:
1682 self.dirstate.forget(remove)
1665 self.dirstate.forget(remove)
1683
1666
1684 if moddirstate:
1667 if moddirstate:
1685 self.dirstate.setparents(p1, p2)
1668 self.dirstate.setparents(p1, p2)
1686
1669
1687 stat = ((len(get), _("updated")),
1670 stat = ((len(get), _("updated")),
1688 (len(merge) - len(failedmerge), _("merged")),
1671 (len(merge) - len(failedmerge), _("merged")),
1689 (len(remove), _("removed")),
1672 (len(remove), _("removed")),
1690 (len(failedmerge), _("unresolved")))
1673 (len(failedmerge), _("unresolved")))
1691 note = ", ".join([_("%d files %s") % s for s in stat])
1674 note = ", ".join([_("%d files %s") % s for s in stat])
1692 self.ui.note("%s\n" % note)
1675 self.ui.note("%s\n" % note)
1693 if moddirstate and branch_merge:
1676 if moddirstate and branch_merge:
1694 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1677 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1695
1678
1696 return err
1679 return err
1697
1680
1698 def merge3(self, fn, my, other, p1, p2):
1681 def merge3(self, fn, my, other, p1, p2):
1699 """perform a 3-way merge in the working directory"""
1682 """perform a 3-way merge in the working directory"""
1700
1683
1701 def temp(prefix, node):
1684 def temp(prefix, node):
1702 pre = "%s~%s." % (os.path.basename(fn), prefix)
1685 pre = "%s~%s." % (os.path.basename(fn), prefix)
1703 (fd, name) = tempfile.mkstemp("", pre)
1686 (fd, name) = tempfile.mkstemp("", pre)
1704 f = os.fdopen(fd, "wb")
1687 f = os.fdopen(fd, "wb")
1705 self.wwrite(fn, fl.read(node), f)
1688 self.wwrite(fn, fl.read(node), f)
1706 f.close()
1689 f.close()
1707 return name
1690 return name
1708
1691
1709 fl = self.file(fn)
1692 fl = self.file(fn)
1710 base = fl.ancestor(my, other)
1693 base = fl.ancestor(my, other)
1711 a = self.wjoin(fn)
1694 a = self.wjoin(fn)
1712 b = temp("base", base)
1695 b = temp("base", base)
1713 c = temp("other", other)
1696 c = temp("other", other)
1714
1697
1715 self.ui.note(_("resolving %s\n") % fn)
1698 self.ui.note(_("resolving %s\n") % fn)
1716 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1699 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1717 (fn, short(my), short(other), short(base)))
1700 (fn, short(my), short(other), short(base)))
1718
1701
1719 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1702 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1720 or "hgmerge")
1703 or "hgmerge")
1721 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1704 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1722 environ={'HG_FILE': fn,
1705 environ={'HG_FILE': fn,
1723 'HG_MY_NODE': p1,
1706 'HG_MY_NODE': p1,
1724 'HG_OTHER_NODE': p2,
1707 'HG_OTHER_NODE': p2,
1725 'HG_FILE_MY_NODE': hex(my),
1708 'HG_FILE_MY_NODE': hex(my),
1726 'HG_FILE_OTHER_NODE': hex(other),
1709 'HG_FILE_OTHER_NODE': hex(other),
1727 'HG_FILE_BASE_NODE': hex(base)})
1710 'HG_FILE_BASE_NODE': hex(base)})
1728 if r:
1711 if r:
1729 self.ui.warn(_("merging %s failed!\n") % fn)
1712 self.ui.warn(_("merging %s failed!\n") % fn)
1730
1713
1731 os.unlink(b)
1714 os.unlink(b)
1732 os.unlink(c)
1715 os.unlink(c)
1733 return r
1716 return r
1734
1717
1735 def verify(self):
1718 def verify(self):
1736 filelinkrevs = {}
1719 filelinkrevs = {}
1737 filenodes = {}
1720 filenodes = {}
1738 changesets = revisions = files = 0
1721 changesets = revisions = files = 0
1739 errors = [0]
1722 errors = [0]
1740 neededmanifests = {}
1723 neededmanifests = {}
1741
1724
1742 def err(msg):
1725 def err(msg):
1743 self.ui.warn(msg + "\n")
1726 self.ui.warn(msg + "\n")
1744 errors[0] += 1
1727 errors[0] += 1
1745
1728
1746 def checksize(obj, name):
1729 def checksize(obj, name):
1747 d = obj.checksize()
1730 d = obj.checksize()
1748 if d[0]:
1731 if d[0]:
1749 err(_("%s data length off by %d bytes") % (name, d[0]))
1732 err(_("%s data length off by %d bytes") % (name, d[0]))
1750 if d[1]:
1733 if d[1]:
1751 err(_("%s index contains %d extra bytes") % (name, d[1]))
1734 err(_("%s index contains %d extra bytes") % (name, d[1]))
1752
1735
1753 seen = {}
1736 seen = {}
1754 self.ui.status(_("checking changesets\n"))
1737 self.ui.status(_("checking changesets\n"))
1755 checksize(self.changelog, "changelog")
1738 checksize(self.changelog, "changelog")
1756
1739
1757 for i in range(self.changelog.count()):
1740 for i in range(self.changelog.count()):
1758 changesets += 1
1741 changesets += 1
1759 n = self.changelog.node(i)
1742 n = self.changelog.node(i)
1760 l = self.changelog.linkrev(n)
1743 l = self.changelog.linkrev(n)
1761 if l != i:
1744 if l != i:
1762 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1745 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1763 if n in seen:
1746 if n in seen:
1764 err(_("duplicate changeset at revision %d") % i)
1747 err(_("duplicate changeset at revision %d") % i)
1765 seen[n] = 1
1748 seen[n] = 1
1766
1749
1767 for p in self.changelog.parents(n):
1750 for p in self.changelog.parents(n):
1768 if p not in self.changelog.nodemap:
1751 if p not in self.changelog.nodemap:
1769 err(_("changeset %s has unknown parent %s") %
1752 err(_("changeset %s has unknown parent %s") %
1770 (short(n), short(p)))
1753 (short(n), short(p)))
1771 try:
1754 try:
1772 changes = self.changelog.read(n)
1755 changes = self.changelog.read(n)
1773 except KeyboardInterrupt:
1756 except KeyboardInterrupt:
1774 self.ui.warn(_("interrupted"))
1757 self.ui.warn(_("interrupted"))
1775 raise
1758 raise
1776 except Exception, inst:
1759 except Exception, inst:
1777 err(_("unpacking changeset %s: %s") % (short(n), inst))
1760 err(_("unpacking changeset %s: %s") % (short(n), inst))
1778 continue
1761 continue
1779
1762
1780 neededmanifests[changes[0]] = n
1763 neededmanifests[changes[0]] = n
1781
1764
1782 for f in changes[3]:
1765 for f in changes[3]:
1783 filelinkrevs.setdefault(f, []).append(i)
1766 filelinkrevs.setdefault(f, []).append(i)
1784
1767
1785 seen = {}
1768 seen = {}
1786 self.ui.status(_("checking manifests\n"))
1769 self.ui.status(_("checking manifests\n"))
1787 checksize(self.manifest, "manifest")
1770 checksize(self.manifest, "manifest")
1788
1771
1789 for i in range(self.manifest.count()):
1772 for i in range(self.manifest.count()):
1790 n = self.manifest.node(i)
1773 n = self.manifest.node(i)
1791 l = self.manifest.linkrev(n)
1774 l = self.manifest.linkrev(n)
1792
1775
1793 if l < 0 or l >= self.changelog.count():
1776 if l < 0 or l >= self.changelog.count():
1794 err(_("bad manifest link (%d) at revision %d") % (l, i))
1777 err(_("bad manifest link (%d) at revision %d") % (l, i))
1795
1778
1796 if n in neededmanifests:
1779 if n in neededmanifests:
1797 del neededmanifests[n]
1780 del neededmanifests[n]
1798
1781
1799 if n in seen:
1782 if n in seen:
1800 err(_("duplicate manifest at revision %d") % i)
1783 err(_("duplicate manifest at revision %d") % i)
1801
1784
1802 seen[n] = 1
1785 seen[n] = 1
1803
1786
1804 for p in self.manifest.parents(n):
1787 for p in self.manifest.parents(n):
1805 if p not in self.manifest.nodemap:
1788 if p not in self.manifest.nodemap:
1806 err(_("manifest %s has unknown parent %s") %
1789 err(_("manifest %s has unknown parent %s") %
1807 (short(n), short(p)))
1790 (short(n), short(p)))
1808
1791
1809 try:
1792 try:
1810 delta = mdiff.patchtext(self.manifest.delta(n))
1793 delta = mdiff.patchtext(self.manifest.delta(n))
1811 except KeyboardInterrupt:
1794 except KeyboardInterrupt:
1812 self.ui.warn(_("interrupted"))
1795 self.ui.warn(_("interrupted"))
1813 raise
1796 raise
1814 except Exception, inst:
1797 except Exception, inst:
1815 err(_("unpacking manifest %s: %s") % (short(n), inst))
1798 err(_("unpacking manifest %s: %s") % (short(n), inst))
1816 continue
1799 continue
1817
1800
1818 try:
1801 try:
1819 ff = [ l.split('\0') for l in delta.splitlines() ]
1802 ff = [ l.split('\0') for l in delta.splitlines() ]
1820 for f, fn in ff:
1803 for f, fn in ff:
1821 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1804 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1822 except (ValueError, TypeError), inst:
1805 except (ValueError, TypeError), inst:
1823 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1806 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1824
1807
1825 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1808 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1826
1809
1827 for m, c in neededmanifests.items():
1810 for m, c in neededmanifests.items():
1828 err(_("Changeset %s refers to unknown manifest %s") %
1811 err(_("Changeset %s refers to unknown manifest %s") %
1829 (short(m), short(c)))
1812 (short(m), short(c)))
1830 del neededmanifests
1813 del neededmanifests
1831
1814
1832 for f in filenodes:
1815 for f in filenodes:
1833 if f not in filelinkrevs:
1816 if f not in filelinkrevs:
1834 err(_("file %s in manifest but not in changesets") % f)
1817 err(_("file %s in manifest but not in changesets") % f)
1835
1818
1836 for f in filelinkrevs:
1819 for f in filelinkrevs:
1837 if f not in filenodes:
1820 if f not in filenodes:
1838 err(_("file %s in changeset but not in manifest") % f)
1821 err(_("file %s in changeset but not in manifest") % f)
1839
1822
1840 self.ui.status(_("checking files\n"))
1823 self.ui.status(_("checking files\n"))
1841 ff = filenodes.keys()
1824 ff = filenodes.keys()
1842 ff.sort()
1825 ff.sort()
1843 for f in ff:
1826 for f in ff:
1844 if f == "/dev/null":
1827 if f == "/dev/null":
1845 continue
1828 continue
1846 files += 1
1829 files += 1
1847 if not f:
1830 if not f:
1848 err(_("file without name in manifest %s") % short(n))
1831 err(_("file without name in manifest %s") % short(n))
1849 continue
1832 continue
1850 fl = self.file(f)
1833 fl = self.file(f)
1851 checksize(fl, f)
1834 checksize(fl, f)
1852
1835
1853 nodes = {nullid: 1}
1836 nodes = {nullid: 1}
1854 seen = {}
1837 seen = {}
1855 for i in range(fl.count()):
1838 for i in range(fl.count()):
1856 revisions += 1
1839 revisions += 1
1857 n = fl.node(i)
1840 n = fl.node(i)
1858
1841
1859 if n in seen:
1842 if n in seen:
1860 err(_("%s: duplicate revision %d") % (f, i))
1843 err(_("%s: duplicate revision %d") % (f, i))
1861 if n not in filenodes[f]:
1844 if n not in filenodes[f]:
1862 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1845 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1863 else:
1846 else:
1864 del filenodes[f][n]
1847 del filenodes[f][n]
1865
1848
1866 flr = fl.linkrev(n)
1849 flr = fl.linkrev(n)
1867 if flr not in filelinkrevs.get(f, []):
1850 if flr not in filelinkrevs.get(f, []):
1868 err(_("%s:%s points to unexpected changeset %d")
1851 err(_("%s:%s points to unexpected changeset %d")
1869 % (f, short(n), flr))
1852 % (f, short(n), flr))
1870 else:
1853 else:
1871 filelinkrevs[f].remove(flr)
1854 filelinkrevs[f].remove(flr)
1872
1855
1873 # verify contents
1856 # verify contents
1874 try:
1857 try:
1875 t = fl.read(n)
1858 t = fl.read(n)
1876 except KeyboardInterrupt:
1859 except KeyboardInterrupt:
1877 self.ui.warn(_("interrupted"))
1860 self.ui.warn(_("interrupted"))
1878 raise
1861 raise
1879 except Exception, inst:
1862 except Exception, inst:
1880 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1863 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1881
1864
1882 # verify parents
1865 # verify parents
1883 (p1, p2) = fl.parents(n)
1866 (p1, p2) = fl.parents(n)
1884 if p1 not in nodes:
1867 if p1 not in nodes:
1885 err(_("file %s:%s unknown parent 1 %s") %
1868 err(_("file %s:%s unknown parent 1 %s") %
1886 (f, short(n), short(p1)))
1869 (f, short(n), short(p1)))
1887 if p2 not in nodes:
1870 if p2 not in nodes:
1888 err(_("file %s:%s unknown parent 2 %s") %
1871 err(_("file %s:%s unknown parent 2 %s") %
1889 (f, short(n), short(p1)))
1872 (f, short(n), short(p1)))
1890 nodes[n] = 1
1873 nodes[n] = 1
1891
1874
1892 # cross-check
1875 # cross-check
1893 for node in filenodes[f]:
1876 for node in filenodes[f]:
1894 err(_("node %s in manifests not in %s") % (hex(node), f))
1877 err(_("node %s in manifests not in %s") % (hex(node), f))
1895
1878
1896 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1879 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1897 (files, changesets, revisions))
1880 (files, changesets, revisions))
1898
1881
1899 if errors[0]:
1882 if errors[0]:
1900 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1883 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1901 return 1
1884 return 1
1902
1885
1903 # used to avoid circular references so destructors work
1886 # used to avoid circular references so destructors work
1904 def aftertrans(base):
1887 def aftertrans(base):
1905 p = base
1888 p = base
1906 def a():
1889 def a():
1907 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1890 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1908 util.rename(os.path.join(p, "journal.dirstate"),
1891 util.rename(os.path.join(p, "journal.dirstate"),
1909 os.path.join(p, "undo.dirstate"))
1892 os.path.join(p, "undo.dirstate"))
1910 return a
1893 return a
1911
1894
@@ -1,883 +1,881 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
7 Copyright 2005 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii errno heapq mdiff os sha struct zlib")
16 demandload(globals(), "binascii changegroup errno heapq mdiff os")
17 demandload(globals(), "sha struct zlib")
17
18
18 def hash(text, p1, p2):
19 def hash(text, p1, p2):
19 """generate a hash from the given text and its parent hashes
20 """generate a hash from the given text and its parent hashes
20
21
21 This hash combines both the current file contents and its history
22 This hash combines both the current file contents and its history
22 in a manner that makes it easy to distinguish nodes with the same
23 in a manner that makes it easy to distinguish nodes with the same
23 content in the revision graph.
24 content in the revision graph.
24 """
25 """
25 l = [p1, p2]
26 l = [p1, p2]
26 l.sort()
27 l.sort()
27 s = sha.new(l[0])
28 s = sha.new(l[0])
28 s.update(l[1])
29 s.update(l[1])
29 s.update(text)
30 s.update(text)
30 return s.digest()
31 return s.digest()
31
32
32 def compress(text):
33 def compress(text):
33 """ generate a possibly-compressed representation of text """
34 """ generate a possibly-compressed representation of text """
34 if not text: return ("", text)
35 if not text: return ("", text)
35 if len(text) < 44:
36 if len(text) < 44:
36 if text[0] == '\0': return ("", text)
37 if text[0] == '\0': return ("", text)
37 return ('u', text)
38 return ('u', text)
38 bin = zlib.compress(text)
39 bin = zlib.compress(text)
39 if len(bin) > len(text):
40 if len(bin) > len(text):
40 if text[0] == '\0': return ("", text)
41 if text[0] == '\0': return ("", text)
41 return ('u', text)
42 return ('u', text)
42 return ("", bin)
43 return ("", bin)
43
44
44 def decompress(bin):
45 def decompress(bin):
45 """ decompress the given input """
46 """ decompress the given input """
46 if not bin: return bin
47 if not bin: return bin
47 t = bin[0]
48 t = bin[0]
48 if t == '\0': return bin
49 if t == '\0': return bin
49 if t == 'x': return zlib.decompress(bin)
50 if t == 'x': return zlib.decompress(bin)
50 if t == 'u': return bin[1:]
51 if t == 'u': return bin[1:]
51 raise RevlogError(_("unknown compression type %r") % t)
52 raise RevlogError(_("unknown compression type %r") % t)
52
53
53 indexformat = ">4l20s20s20s"
54 indexformat = ">4l20s20s20s"
54
55
55 class lazyparser(object):
56 class lazyparser(object):
56 """
57 """
57 this class avoids the need to parse the entirety of large indices
58 this class avoids the need to parse the entirety of large indices
58
59
59 By default we parse and load 1000 entries at a time.
60 By default we parse and load 1000 entries at a time.
60
61
61 If no position is specified, we load the whole index, and replace
62 If no position is specified, we load the whole index, and replace
62 the lazy objects in revlog with the underlying objects for
63 the lazy objects in revlog with the underlying objects for
63 efficiency in cases where we look at most of the nodes.
64 efficiency in cases where we look at most of the nodes.
64 """
65 """
65 def __init__(self, data, revlog):
66 def __init__(self, data, revlog):
66 self.data = data
67 self.data = data
67 self.s = struct.calcsize(indexformat)
68 self.s = struct.calcsize(indexformat)
68 self.l = len(data)/self.s
69 self.l = len(data)/self.s
69 self.index = [None] * self.l
70 self.index = [None] * self.l
70 self.map = {nullid: -1}
71 self.map = {nullid: -1}
71 self.all = 0
72 self.all = 0
72 self.revlog = revlog
73 self.revlog = revlog
73
74
74 def trunc(self, pos):
75 def trunc(self, pos):
75 self.l = pos/self.s
76 self.l = pos/self.s
76
77
77 def load(self, pos=None):
78 def load(self, pos=None):
78 if self.all: return
79 if self.all: return
79 if pos is not None:
80 if pos is not None:
80 block = pos / 1000
81 block = pos / 1000
81 i = block * 1000
82 i = block * 1000
82 end = min(self.l, i + 1000)
83 end = min(self.l, i + 1000)
83 else:
84 else:
84 self.all = 1
85 self.all = 1
85 i = 0
86 i = 0
86 end = self.l
87 end = self.l
87 self.revlog.index = self.index
88 self.revlog.index = self.index
88 self.revlog.nodemap = self.map
89 self.revlog.nodemap = self.map
89
90
90 while i < end:
91 while i < end:
91 d = self.data[i * self.s: (i + 1) * self.s]
92 d = self.data[i * self.s: (i + 1) * self.s]
92 e = struct.unpack(indexformat, d)
93 e = struct.unpack(indexformat, d)
93 self.index[i] = e
94 self.index[i] = e
94 self.map[e[6]] = i
95 self.map[e[6]] = i
95 i += 1
96 i += 1
96
97
97 class lazyindex(object):
98 class lazyindex(object):
98 """a lazy version of the index array"""
99 """a lazy version of the index array"""
99 def __init__(self, parser):
100 def __init__(self, parser):
100 self.p = parser
101 self.p = parser
101 def __len__(self):
102 def __len__(self):
102 return len(self.p.index)
103 return len(self.p.index)
103 def load(self, pos):
104 def load(self, pos):
104 if pos < 0:
105 if pos < 0:
105 pos += len(self.p.index)
106 pos += len(self.p.index)
106 self.p.load(pos)
107 self.p.load(pos)
107 return self.p.index[pos]
108 return self.p.index[pos]
108 def __getitem__(self, pos):
109 def __getitem__(self, pos):
109 return self.p.index[pos] or self.load(pos)
110 return self.p.index[pos] or self.load(pos)
110 def __delitem__(self, pos):
111 def __delitem__(self, pos):
111 del self.p.index[pos]
112 del self.p.index[pos]
112 def append(self, e):
113 def append(self, e):
113 self.p.index.append(e)
114 self.p.index.append(e)
114 def trunc(self, pos):
115 def trunc(self, pos):
115 self.p.trunc(pos)
116 self.p.trunc(pos)
116
117
class lazymap(object):
    """a lazy version of the node map (nodeid -> revision number)"""
    def __init__(self, parser):
        # parser is a lazyparser; its data/index/map state is shared with us
        self.p = parser
    def load(self, key):
        """locate key in the raw index data and parse the entry holding it"""
        if self.p.all: return
        # scan the raw index bytes for the nodeid itself
        n = self.p.data.find(key)
        if n < 0:
            raise KeyError(key)
        # index records appear to be fixed-size (self.p.s bytes), so the
        # byte offset maps directly to a revision number
        pos = n / self.p.s
        self.p.load(pos)
    def __contains__(self, key):
        # membership needs the complete map, so force a full parse
        self.p.load()
        return key in self.p.map
    def __iter__(self):
        yield nullid
        for i in xrange(self.p.l):
            try:
                yield self.p.index[i][6]
            except:
                # entry not parsed yet: parse it and retry
                self.p.load(i)
                yield self.p.index[i][6]
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            # not in the parsed portion: try to locate it on demand
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val
    def __delitem__(self, key):
        del self.p.map[key]
152
153
class RevlogError(Exception):
    """Raised for revlog integrity and lookup failures."""
154
155
155 class revlog(object):
156 class revlog(object):
156 """
157 """
157 the underlying revision storage object
158 the underlying revision storage object
158
159
159 A revlog consists of two parts, an index and the revision data.
160 A revlog consists of two parts, an index and the revision data.
160
161
161 The index is a file with a fixed record size containing
162 The index is a file with a fixed record size containing
162 information on each revision, includings its nodeid (hash), the
163 information on each revision, includings its nodeid (hash), the
163 nodeids of its parents, the position and offset of its data within
164 nodeids of its parents, the position and offset of its data within
164 the data file, and the revision it's based on. Finally, each entry
165 the data file, and the revision it's based on. Finally, each entry
165 contains a linkrev entry that can serve as a pointer to external
166 contains a linkrev entry that can serve as a pointer to external
166 data.
167 data.
167
168
168 The revision data itself is a linear collection of data chunks.
169 The revision data itself is a linear collection of data chunks.
169 Each chunk represents a revision and is usually represented as a
170 Each chunk represents a revision and is usually represented as a
170 delta against the previous chunk. To bound lookup time, runs of
171 delta against the previous chunk. To bound lookup time, runs of
171 deltas are limited to about 2 times the length of the original
172 deltas are limited to about 2 times the length of the original
172 version data. This makes retrieval of a version proportional to
173 version data. This makes retrieval of a version proportional to
173 its size, or O(1) relative to the number of revisions.
174 its size, or O(1) relative to the number of revisions.
174
175
175 Both pieces of the revlog are written to in an append-only
176 Both pieces of the revlog are written to in an append-only
176 fashion, which means we never need to rewrite a file to insert or
177 fashion, which means we never need to rewrite a file to insert or
177 remove data, and can use some simple techniques to avoid the need
178 remove data, and can use some simple techniques to avoid the need
178 for locking while reading.
179 for locking while reading.
179 """
180 """
    def __init__(self, opener, indexfile, datafile):
        """
        create a revlog object

        opener is a function that abstracts the file opening operation
        and can be used to implement COW semantics or the like.
        """
        self.indexfile = indexfile
        self.datafile = datafile
        self.opener = opener

        self.indexstat = None   # stat of index file at last load (reload check)
        self.cache = None       # (node, rev, text) of last reconstructed revision
        self.chunkcache = None  # (offset, raw bytes) window into the data file
        self.load()
195
196
    def load(self):
        """(re)read the index file and rebuild self.index and self.nodemap

        The stat of the index file is remembered so an unchanged file can
        be detected and the reload skipped.
        """
        try:
            f = self.opener(self.indexfile)
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            # missing index file: treat as an empty revlog
            i = ""
        else:
            try:
                st = os.fstat(f.fileno())
            except AttributeError, inst:
                # opener may hand back a file-like object without fileno()
                st = None
            else:
                oldst = self.indexstat
                # same device/inode and timestamps: nothing changed on disk,
                # keep the index/nodemap we already have
                if (oldst and st.st_dev == oldst.st_dev
                    and st.st_ino == oldst.st_ino
                    and st.st_mtime == oldst.st_mtime
                    and st.st_ctime == oldst.st_ctime):
                    return
                self.indexstat = st
            i = f.read()

        # a compatible index always begins with four zero bytes; anything
        # else is rejected as a foreign/incompatible format
        if i and i[:4] != "\0\0\0\0":
            raise RevlogError(_("incompatible revlog signature on %s") %
                              self.indexfile)

        if len(i) > 10000:
            # big index, let's parse it on demand
            parser = lazyparser(i, self)
            self.index = lazyindex(parser)
            self.nodemap = lazymap(parser)
        else:
            s = struct.calcsize(indexformat)
            l = len(i) / s
            self.index = [None] * l
            m = [None] * l

            n = 0
            for f in xrange(0, l * s, s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                m[n] = (e[6], n)
                self.index[n] = e
                n += 1

            self.nodemap = dict(m)
            self.nodemap[nullid] = -1
243
244
244 def tip(self): return self.node(len(self.index) - 1)
245 def tip(self): return self.node(len(self.index) - 1)
245 def count(self): return len(self.index)
246 def count(self): return len(self.index)
246 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
247 def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
247 def rev(self, node):
248 def rev(self, node):
248 try:
249 try:
249 return self.nodemap[node]
250 return self.nodemap[node]
250 except KeyError:
251 except KeyError:
251 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
252 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
252 def linkrev(self, node): return self.index[self.rev(node)][3]
253 def linkrev(self, node): return self.index[self.rev(node)][3]
253 def parents(self, node):
254 def parents(self, node):
254 if node == nullid: return (nullid, nullid)
255 if node == nullid: return (nullid, nullid)
255 return self.index[self.rev(node)][4:6]
256 return self.index[self.rev(node)][4:6]
256
257
257 def start(self, rev): return (rev < 0) and -1 or self.index[rev][0]
258 def start(self, rev): return (rev < 0) and -1 or self.index[rev][0]
258 def length(self, rev):
259 def length(self, rev):
259 if rev < 0:
260 if rev < 0:
260 return 0
261 return 0
261 else:
262 else:
262 return self.index[rev][1]
263 return self.index[rev][1]
263 def end(self, rev): return self.start(rev) + self.length(rev)
264 def end(self, rev): return self.start(rev) + self.length(rev)
264 def base(self, rev): return (rev < 0) and rev or self.index[rev][2]
265 def base(self, rev): return (rev < 0) and rev or self.index[rev][2]
265
266
266 def reachable(self, rev, stop=None):
267 def reachable(self, rev, stop=None):
267 reachable = {}
268 reachable = {}
268 visit = [rev]
269 visit = [rev]
269 reachable[rev] = 1
270 reachable[rev] = 1
270 if stop:
271 if stop:
271 stopn = self.rev(stop)
272 stopn = self.rev(stop)
272 else:
273 else:
273 stopn = 0
274 stopn = 0
274 while visit:
275 while visit:
275 n = visit.pop(0)
276 n = visit.pop(0)
276 if n == stop:
277 if n == stop:
277 continue
278 continue
278 if n == nullid:
279 if n == nullid:
279 continue
280 continue
280 for p in self.parents(n):
281 for p in self.parents(n):
281 if self.rev(p) < stopn:
282 if self.rev(p) < stopn:
282 continue
283 continue
283 if p not in reachable:
284 if p not in reachable:
284 reachable[p] = 1
285 reachable[p] = 1
285 visit.append(p)
286 visit.append(p)
286 return reachable
287 return reachable
287
288
    def nodesbetween(self, roots=None, heads=None):
        """Return a tuple containing three elements. Elements 1 and 2 contain
        a final list bases and heads after all the unreachable ones have been
        pruned.  Element 0 contains a topologically sorted list of all

        nodes that satisfy these constraints:
        1. All nodes must be descended from a node in roots (the nodes on
        roots are considered descended from themselves).
        2. All nodes must also be ancestors of a node in heads (the nodes in
        heads are considered to be their own ancestors).

        If roots is unspecified, nullid is assumed as the only root.
        If heads is unspecified, it is taken to be the output of the
        heads method (i.e. a list of all nodes in the repository that
        have no children)."""
        nonodes = ([], [], [])
        if roots is not None:
            roots = list(roots)
            if not roots:
                return nonodes
            lowestrev = min([self.rev(n) for n in roots])
        else:
            roots = [nullid] # Everybody's a descendent of nullid
            lowestrev = -1
        if (lowestrev == -1) and (heads is None):
            # We want _all_ the nodes!
            return ([self.node(r) for r in xrange(0, self.count())],
                    [nullid], list(self.heads()))
        if heads is None:
            # All nodes are ancestors, so the latest ancestor is the last
            # node.
            highestrev = self.count() - 1
            # Set ancestors to None to signal that every node is an ancestor.
            ancestors = None
            # Set heads to an empty dictionary for later discovery of heads
            heads = {}
        else:
            heads = list(heads)
            if not heads:
                return nonodes
            ancestors = {}
            # Start at the top and keep marking parents until we're done.
            nodestotag = heads[:]
            # Turn heads into a dictionary so we can remove 'fake' heads.
            # Also, later we will be using it to filter out the heads we can't
            # find from roots.
            heads = dict.fromkeys(heads, 0)
            # Remember where the top was so we can use it as a limit later.
            highestrev = max([self.rev(n) for n in nodestotag])
            while nodestotag:
                # grab a node to tag
                n = nodestotag.pop()
                # Never tag nullid
                if n == nullid:
                    continue
                # A node's revision number represents its place in a
                # topologically sorted list of nodes.
                r = self.rev(n)
                if r >= lowestrev:
                    if n not in ancestors:
                        # If we are possibly a descendent of one of the roots
                        # and we haven't already been marked as an ancestor
                        ancestors[n] = 1 # Mark as ancestor
                        # Add non-nullid parents to list of nodes to tag.
                        nodestotag.extend([p for p in self.parents(n) if
                                           p != nullid])
                    elif n in heads: # We've seen it before, is it a fake head?
                        # So it is, real heads should not be the ancestors of
                        # any other heads.
                        heads.pop(n)
            if not ancestors:
                return nonodes
            # Now that we have our set of ancestors, we want to remove any
            # roots that are not ancestors.

            # If one of the roots was nullid, everything is included anyway.
            if lowestrev > -1:
                # But, since we weren't, let's recompute the lowest rev to not
                # include roots that aren't ancestors.

                # Filter out roots that aren't ancestors of heads
                roots = [n for n in roots if n in ancestors]
                # Recompute the lowest revision
                if roots:
                    lowestrev = min([self.rev(n) for n in roots])
                else:
                    # No more roots?  Return empty list
                    return nonodes
            else:
                # We are descending from nullid, and don't need to care about
                # any other roots.
                lowestrev = -1
                roots = [nullid]
        # Transform our roots list into a 'set' (i.e. a dictionary where the
        # values don't matter).
        descendents = dict.fromkeys(roots, 1)
        # Also, keep the original roots so we can filter out roots that aren't
        # 'real' roots (i.e. are descended from other roots).
        roots = descendents.copy()
        # Our topologically sorted list of output nodes.
        orderedout = []
        # Don't start at nullid since we don't want nullid in our output list,
        # and if nullid shows up in descendents, empty parents will look like
        # they're descendents.
        for r in xrange(max(lowestrev, 0), highestrev + 1):
            n = self.node(r)
            isdescendent = False
            if lowestrev == -1:  # Everybody is a descendent of nullid
                isdescendent = True
            elif n in descendents:
                # n is already a descendent
                isdescendent = True
                # This check only needs to be done here because all the roots
                # will start being marked as descendents before the loop.
                if n in roots:
                    # If n was a root, check if it's a 'real' root.
                    p = tuple(self.parents(n))
                    # If any of its parents are descendents, it's not a root.
                    if (p[0] in descendents) or (p[1] in descendents):
                        roots.pop(n)
            else:
                p = tuple(self.parents(n))
                # A node is a descendent if either of its parents are
                # descendents.  (We seeded the dependents list with the roots
                # up there, remember?)
                if (p[0] in descendents) or (p[1] in descendents):
                    descendents[n] = 1
                    isdescendent = True
            if isdescendent and ((ancestors is None) or (n in ancestors)):
                # Only include nodes that are both descendents and ancestors.
                orderedout.append(n)
                if (ancestors is not None) and (n in heads):
                    # We're trying to figure out which heads are reachable
                    # from roots.
                    # Mark this head as having been reached
                    heads[n] = 1
                elif ancestors is None:
                    # Otherwise, we're trying to discover the heads.
                    # Assume this is a head because if it isn't, the next step
                    # will eventually remove it.
                    heads[n] = 1
                    # But, obviously its parents aren't.
                    for p in self.parents(n):
                        heads.pop(p, None)
        # keep only heads that were actually reached from the roots
        heads = [n for n in heads.iterkeys() if heads[n] != 0]
        roots = roots.keys()
        assert orderedout
        assert roots
        assert heads
        return (orderedout, roots, heads)
438
439
439 def heads(self, start=None):
440 def heads(self, start=None):
440 """return the list of all nodes that have no children
441 """return the list of all nodes that have no children
441
442
442 if start is specified, only heads that are descendants of
443 if start is specified, only heads that are descendants of
443 start will be returned
444 start will be returned
444
445
445 """
446 """
446 if start is None:
447 if start is None:
447 start = nullid
448 start = nullid
448 reachable = {start: 1}
449 reachable = {start: 1}
449 heads = {start: 1}
450 heads = {start: 1}
450 startrev = self.rev(start)
451 startrev = self.rev(start)
451
452
452 for r in xrange(startrev + 1, self.count()):
453 for r in xrange(startrev + 1, self.count()):
453 n = self.node(r)
454 n = self.node(r)
454 for pn in self.parents(n):
455 for pn in self.parents(n):
455 if pn in reachable:
456 if pn in reachable:
456 reachable[n] = 1
457 reachable[n] = 1
457 heads[n] = 1
458 heads[n] = 1
458 if pn in heads:
459 if pn in heads:
459 del heads[pn]
460 del heads[pn]
460 return heads.keys()
461 return heads.keys()
461
462
462 def children(self, node):
463 def children(self, node):
463 """find the children of a given node"""
464 """find the children of a given node"""
464 c = []
465 c = []
465 p = self.rev(node)
466 p = self.rev(node)
466 for r in range(p + 1, self.count()):
467 for r in range(p + 1, self.count()):
467 n = self.node(r)
468 n = self.node(r)
468 for pn in self.parents(n):
469 for pn in self.parents(n):
469 if pn == node:
470 if pn == node:
470 c.append(n)
471 c.append(n)
471 continue
472 continue
472 elif pn == nullid:
473 elif pn == nullid:
473 continue
474 continue
474 return c
475 return c
475
476
476 def lookup(self, id):
477 def lookup(self, id):
477 """locate a node based on revision number or subset of hex nodeid"""
478 """locate a node based on revision number or subset of hex nodeid"""
478 try:
479 try:
479 rev = int(id)
480 rev = int(id)
480 if str(rev) != id: raise ValueError
481 if str(rev) != id: raise ValueError
481 if rev < 0: rev = self.count() + rev
482 if rev < 0: rev = self.count() + rev
482 if rev < 0 or rev >= self.count(): raise ValueError
483 if rev < 0 or rev >= self.count(): raise ValueError
483 return self.node(rev)
484 return self.node(rev)
484 except (ValueError, OverflowError):
485 except (ValueError, OverflowError):
485 c = []
486 c = []
486 for n in self.nodemap:
487 for n in self.nodemap:
487 if hex(n).startswith(id):
488 if hex(n).startswith(id):
488 c.append(n)
489 c.append(n)
489 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
490 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
490 if len(c) < 1: raise RevlogError(_("No match found"))
491 if len(c) < 1: raise RevlogError(_("No match found"))
491 return c[0]
492 return c[0]
492
493
493 return None
494 return None
494
495
495 def diff(self, a, b):
496 def diff(self, a, b):
496 """return a delta between two revisions"""
497 """return a delta between two revisions"""
497 return mdiff.textdiff(a, b)
498 return mdiff.textdiff(a, b)
498
499
499 def patches(self, t, pl):
500 def patches(self, t, pl):
500 """apply a list of patches to a string"""
501 """apply a list of patches to a string"""
501 return mdiff.patches(t, pl)
502 return mdiff.patches(t, pl)
502
503
    def chunk(self, rev):
        """return the decompressed data chunk for revision rev

        Reads are served from self.chunkcache, a (start offset, raw bytes)
        window into the data file that is refilled on a cache miss.
        """
        start, length = self.start(rev), self.length(rev)
        end = start + length

        def loadcache():
            # read at least 4 MB so sequential chunk accesses hit the cache
            cache_length = max(4096 * 1024, length) # 4Mo
            df = self.opener(self.datafile)
            df.seek(start)
            self.chunkcache = (start, df.read(cache_length))

        if not self.chunkcache:
            loadcache()

        cache_start = self.chunkcache[0]
        cache_end = cache_start + len(self.chunkcache[1])
        if start >= cache_start and end <= cache_end:
            # it is cached
            offset = start - cache_start
        else:
            # miss: refill the window starting at this chunk
            loadcache()
            offset = 0

        # debugging aid: re-read the chunk straight from disk and compare
        #def checkchunk():
        #    df = self.opener(self.datafile)
        #    df.seek(start)
        #    return df.read(length)
        #assert s == checkchunk()
        return decompress(self.chunkcache[1][offset:offset + length])
531
532
532 def delta(self, node):
533 def delta(self, node):
533 """return or calculate a delta between a node and its predecessor"""
534 """return or calculate a delta between a node and its predecessor"""
534 r = self.rev(node)
535 r = self.rev(node)
535 return self.revdiff(r - 1, r)
536 return self.revdiff(r - 1, r)
536
537
537 def revdiff(self, rev1, rev2):
538 def revdiff(self, rev1, rev2):
538 """return or calculate a delta between two revisions"""
539 """return or calculate a delta between two revisions"""
539 b1 = self.base(rev1)
540 b1 = self.base(rev1)
540 b2 = self.base(rev2)
541 b2 = self.base(rev2)
541 if b1 == b2 and rev1 + 1 == rev2:
542 if b1 == b2 and rev1 + 1 == rev2:
542 return self.chunk(rev2)
543 return self.chunk(rev2)
543 else:
544 else:
544 return self.diff(self.revision(self.node(rev1)),
545 return self.diff(self.revision(self.node(rev1)),
545 self.revision(self.node(rev2)))
546 self.revision(self.node(rev2)))
546
547
    def revision(self, node):
        """return the uncompressed text of a given node"""
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        # look up what we need to read
        text = None
        rev = self.rev(node)
        base = self.base(rev)

        # do we have useful data cached?
        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            # the cached text lies on our delta chain: patch forward from it
            # instead of starting at the chain base
            base = self.cache[1]
            text = self.cache[2]
        else:
            text = self.chunk(base)

        # collect the deltas from base+1 through rev and apply them in one pass
        bins = []
        for r in xrange(base + 1, rev + 1):
            bins.append(self.chunk(r))

        text = self.patches(text, bins)

        # the nodeid doubles as a checksum of the text and its parents
        p1, p2 = self.parents(node)
        if node != hash(text, p1, p2):
            raise RevlogError(_("integrity check failed on %s:%d")
                              % (self.datafile, rev))

        self.cache = (node, rev, text)
        return text
577
578
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        """add a revision to the log

        text - the revision data to add
        transaction - the transaction object used for rollback
        link - the linkrev data to add
        p1, p2 - the parent nodeids of the revision
        d - an optional precomputed delta
        """
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        # adding a revision that already exists is a no-op
        if node in self.nodemap:
            return node

        n = self.count()
        t = n - 1  # revision number of the current tip

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            if not d:
                prev = self.revision(self.tip())
                d = self.diff(prev, str(text))
            data = compress(d)
            l = len(data[1]) + len(data[0])
            # total on-disk size of the delta chain if we append this delta
            dist = end - start + l

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            l = len(data[1]) + len(data[0])
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        # offset, size, base, linkrev, p1, p2, nodeid
        e = (offset, l, base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        # NOTE(review): transaction.add appears to record each file's
        # pre-write size so a rollback can truncate it — confirm; data is
        # written before the index entry so a crash never leaves a
        # dangling index record
        transaction.add(self.datafile, e[0])
        f = self.opener(self.datafile, "a")
        if data[0]:
            f.write(data[0])
        f.write(data[1])
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node
639
640
    def ancestor(self, a, b):
        """calculate the least common ancestor of nodes a and b"""
        # calculate the distance of every node from root
        dist = {nullid: 0}
        for i in xrange(self.count()):
            n = self.node(i)
            p1, p2 = self.parents(n)
            dist[n] = max(dist[p1], dist[p2]) + 1

        # traverse ancestors in order of decreasing distance from root
        def ancestors(node):
            """yield (distance, ancestor) pairs, farthest from root first"""
            # we store negative distances because heap returns smallest member
            h = [(-dist[node], node)]
            seen = {}
            while h:
                d, n = heapq.heappop(h)
                if n not in seen:
                    seen[n] = 1
                    yield (-d, n)
                    for p in self.parents(n):
                        heapq.heappush(h, (-dist[p], p))

        def generations(node):
            """group node's ancestors into (distance, {node: 1}) batches"""
            sg, s = None, {}
            for g,n in ancestors(node):
                if g != sg:
                    if sg:
                        yield sg, s
                    sg, s = g, {n:1}
                else:
                    s[n] = 1
            yield sg, s

        x = generations(a)
        y = generations(b)
        gx = x.next()
        gy = y.next()

        # increment each ancestor list until it is closer to root than
        # the other, or they match
        while 1:
            #print "ancestor gen %s %s" % (gx[0], gy[0])
            if gx[0] == gy[0]:
                # find the intersection
                i = [ n for n in gx[1] if n in gy[1] ]
                if i:
                    return i[0]
                else:
                    #print "next"
                    gy = y.next()
                    gx = x.next()
            elif gx[0] < gy[0]:
                #print "next y"
                gy = y.next()
            else:
                #print "next x"
                gx = x.next()
697
698
698 def group(self, nodelist, lookup, infocollect=None):
def group(self, nodelist, lookup, infocollect=None):
    """calculate a delta group

    Given a list of changeset revs, return a set of deltas and
    metadata corresponding to nodes. the first delta is
    parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
    have this parent as it has all history before these
    changesets. parent is parent[0]
    """
    revs = [self.rev(n) for n in nodelist]

    # nothing touched by these changesets: emit only the terminator
    if not revs:
        yield changegroup.closechunk()
        return

    # prepend the parent of the first rev as the delta base
    parent = self.parents(self.node(revs[0]))[0]
    revs.insert(0, self.rev(parent))

    # emit one chunk per consecutive (base, rev) pair
    for i in xrange(len(revs) - 1):
        a, b = revs[i], revs[i + 1]
        nb = self.node(b)

        if infocollect is not None:
            infocollect(nb)

        delta = self.revdiff(a, b)
        p = self.parents(nb)
        # chunk payload: node, both parents, linkrev lookup, then delta
        meta = nb + p[0] + p[1] + lookup(nb)
        yield changegroup.genchunk(meta + delta)

    yield changegroup.closechunk()
def addgroup(self, revs, linkmapper, transaction, unique=0):
    """
    add a delta group

    given a set of deltas, add them to the revision log. the
    first delta is against its parent, which should be in our
    log, the rest are against the previous delta.
    """

    # track the base of the current delta log
    r = self.count()
    t = r - 1
    node = nullid

    base = prev = -1
    start = end = measure = 0
    if r:
        base = self.base(t)
        start = self.start(base)
        end = self.end(t)
        measure = self.length(base)
        prev = self.tip()

    transaction.add(self.datafile, end)
    transaction.add(self.indexfile, r * struct.calcsize(indexformat))
    dfh = self.opener(self.datafile, "a")
    ifh = self.opener(self.indexfile, "a")

    # loop through our set of deltas
    chain = None
    for chunk in revs:
        # each chunk starts with node, p1, p2 and the changeset link
        node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
        link = linkmapper(cs)
        if node in self.nodemap:
            # this can happen if two branches make the same change
            # if unique:
            #    raise RevlogError(_("already have %s") % hex(node[:4]))
            chain = node
            continue
        delta = chunk[80:]

        for p in (p1, p2):
            if not p in self.nodemap:
                raise RevlogError(_("unknown parent %s") % short(p1))

        if not chain:
            # retrieve the parent revision of the delta chain
            chain = p1
            if not chain in self.nodemap:
                raise RevlogError(_("unknown base %s") % short(chain[:4]))

        # full versions are inserted when the needed deltas become
        # comparable to the uncompressed text or when the previous
        # version is not the one we have a delta against. We use
        # the size of the previous full rev as a proxy for the
        # current size.

        if chain == prev:
            tempd = compress(delta)
            cdelta = tempd[0] + tempd[1]

        if chain != prev or (end - start + len(cdelta)) > measure * 2:
            # flush our writes here so we can read it in revision
            dfh.flush()
            ifh.flush()
            text = self.revision(chain)
            text = self.patches(text, [delta])
            chk = self.addrevision(text, transaction, link, p1, p2)
            if chk != node:
                raise RevlogError(_("consistency error adding group"))
            measure = len(text)
        else:
            # store the compressed delta directly
            e = (end, len(cdelta), base, link, p1, p2, node)
            self.index.append(e)
            self.nodemap[node] = r
            dfh.write(cdelta)
            ifh.write(struct.pack(indexformat, *e))

        t, r, chain, prev = r, r + 1, node, node
        base = self.base(t)
        start = self.start(base)
        end = self.end(t)

    dfh.close()
    ifh.close()
    return node
def strip(self, rev, minlink):
    """Remove revision rev and everything after it from the revlog."""
    if self.count() == 0 or rev >= self.count():
        return

    # When stripping away a revision, we need to make sure it
    # does not actually belong to an older changeset.
    # The minlink parameter defines the oldest revision
    # we're allowed to strip away.
    while minlink > self.index[rev][3]:
        rev += 1
        if rev >= self.count():
            return

    # first truncate the files on disk
    end = self.start(rev)
    self.opener(self.datafile, "a").truncate(end)
    end = rev * struct.calcsize(indexformat)
    self.opener(self.indexfile, "a").truncate(end)

    # then reset internal state in memory to forget those revisions
    self.cache = None
    self.chunkcache = None
    for p in self.index[rev:]:
        del self.nodemap[p[6]]
    del self.index[rev:]

    # truncating the lazyindex also truncates the lazymap.
    if isinstance(self.index, lazyindex):
        self.index.trunc(end)
854 def checksize(self):
852 def checksize(self):
855 expected = 0
853 expected = 0
856 if self.count():
854 if self.count():
857 expected = self.end(self.count() - 1)
855 expected = self.end(self.count() - 1)
858
856
859 try:
857 try:
860 f = self.opener(self.datafile)
858 f = self.opener(self.datafile)
861 f.seek(0, 2)
859 f.seek(0, 2)
862 actual = f.tell()
860 actual = f.tell()
863 dd = actual - expected
861 dd = actual - expected
864 except IOError, inst:
862 except IOError, inst:
865 if inst.errno != errno.ENOENT:
863 if inst.errno != errno.ENOENT:
866 raise
864 raise
867 dd = 0
865 dd = 0
868
866
869 try:
867 try:
870 f = self.opener(self.indexfile)
868 f = self.opener(self.indexfile)
871 f.seek(0, 2)
869 f.seek(0, 2)
872 actual = f.tell()
870 actual = f.tell()
873 s = struct.calcsize(indexformat)
871 s = struct.calcsize(indexformat)
874 i = actual / s
872 i = actual / s
875 di = actual - (i * s)
873 di = actual - (i * s)
876 except IOError, inst:
874 except IOError, inst:
877 if inst.errno != errno.ENOENT:
875 if inst.errno != errno.ENOENT:
878 raise
876 raise
879 di = 0
877 di = 0
880
878
881 return (dd, di)
879 return (dd, di)
882
880
883
881
General Comments 0
You need to be logged in to leave comments. Login now